1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
31 #include "fold-const.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
47 #include "insn-codes.h"
53 #include "cfgcleanup.h"
54 #include "diagnostic-core.h"
58 #include "langhooks.h"
61 #include "stor-layout.h"
64 /* This file should be included last. */
65 #include "target-def.h"
67 /* Forward function declarations. */
68 static bool prologue_saved_reg_p (unsigned);
69 static void nios2_load_pic_register (void);
/* Record that custom-instruction code N (0-255) is claimed by the given
   category/index; re-use of a code is flagged and later diagnosed by
   nios2_custom_check_insns via custom_code_conflict.  */
70 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code
, int);
/* Map an UNSPEC_* number to its relocation name string, or NULL if the
   unspec has no relocation (see nios2_large_offset_p's assertion).  */
71 static const char *nios2_unspec_reloc_name (int);
72 static void nios2_register_builtin_fndecl (unsigned, tree
);
74 /* Threshold for data being put into the small data/bss area, instead
75 of the normal data area (references to the small data/bss area take
76 1 instruction, and use the global pointer, references to the normal
77 data area takes 2 instructions). */
/* Initialized to the build-time default here; nios2_option_override
   replaces it with the -G command-line value (g_switch_value) when one
   was given.  */
78 unsigned HOST_WIDE_INT nios2_section_threshold
= NIOS2_DEFAULT_GVALUE
;
80 struct GTY (()) machine_function
82 /* Current frame information, to be filled in by nios2_compute_frame_layout
83 with register save masks, and offsets for the current function. */
85 /* Mask of registers to save. */
86 unsigned int save_mask
;
87 /* Number of bytes that the entire frame takes up. */
89 /* Number of bytes that variables take up. */
91 /* Number of bytes that outgoing arguments take up. */
93 /* Number of bytes needed to store registers in frame. */
95 /* Offset from new stack pointer to store registers. */
97 /* Offset from save_regs_offset to store frame pointer register. */
99 /* != 0 if frame layout already calculated. */
103 /* State to track the assignment of custom codes to FPU/custom builtins. */
/* For each of the 256 possible custom-instruction codes: which category
   (FPU option, target attribute, __builtin_custom_*) currently claims it.  */
104 static enum nios2_ccs_code custom_code_status
[256];
/* For each claimed code, the index within its category (e.g. the
   n2fpu_code value for CCS_FPU entries).  */
105 static int custom_code_index
[256];
106 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
/* Checked in nios2_custom_check_insns, which turns it into a fatal error.  */
107 static bool custom_code_conflict
= false;
110 /* Definition of builtin function types for nios2. */
114 N2_FTYPE(1, (VOID)) \
115 N2_FTYPE(2, (DF, DF)) \
116 N2_FTYPE(3, (DF, DF, DF)) \
117 N2_FTYPE(2, (DF, SF)) \
118 N2_FTYPE(2, (DF, SI)) \
119 N2_FTYPE(2, (DF, UI)) \
120 N2_FTYPE(2, (SF, DF)) \
121 N2_FTYPE(2, (SF, SF)) \
122 N2_FTYPE(3, (SF, SF, SF)) \
123 N2_FTYPE(2, (SF, SI)) \
124 N2_FTYPE(2, (SF, UI)) \
125 N2_FTYPE(2, (SI, CVPTR)) \
126 N2_FTYPE(2, (SI, DF)) \
127 N2_FTYPE(3, (SI, DF, DF)) \
128 N2_FTYPE(2, (SI, SF)) \
129 N2_FTYPE(3, (SI, SF, SF)) \
130 N2_FTYPE(2, (SI, SI)) \
131 N2_FTYPE(2, (UI, CVPTR)) \
132 N2_FTYPE(2, (UI, DF)) \
133 N2_FTYPE(2, (UI, SF)) \
134 N2_FTYPE(2, (VOID, DF)) \
135 N2_FTYPE(2, (VOID, SF)) \
136 N2_FTYPE(3, (VOID, SI, SI)) \
137 N2_FTYPE(3, (VOID, VPTR, SI))
/* Token-pasting helpers that turn an (RET[, ARG1[, ARG2]]) tuple into the
   corresponding N2_FTYPE_<RET>_<ARGS> enumerator name; a 1-operand entry
   is a nullary function, hence the _VOID suffix.  */
139 #define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
140 #define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
141 #define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
143 /* Expand ftcode enumeration. */
145 #define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
151 /* Return the tree function type, based on the ftcode. */
153 nios2_ftype (enum nios2_ftcode ftcode
)
155 static tree types
[(int) N2_FTYPE_MAX
];
157 tree N2_TYPE_SF
= float_type_node
;
158 tree N2_TYPE_DF
= double_type_node
;
159 tree N2_TYPE_SI
= integer_type_node
;
160 tree N2_TYPE_UI
= unsigned_type_node
;
161 tree N2_TYPE_VOID
= void_type_node
;
163 static const_tree N2_TYPE_CVPTR
, N2_TYPE_VPTR
;
166 /* const volatile void *. */
168 = build_pointer_type (build_qualified_type (void_type_node
,
170 | TYPE_QUAL_VOLATILE
)));
171 /* volatile void *. */
173 = build_pointer_type (build_qualified_type (void_type_node
,
174 TYPE_QUAL_VOLATILE
));
176 if (types
[(int) ftcode
] == NULL_TREE
)
179 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
180 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
181 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
182 #define N2_FTYPE(N,ARGS) \
183 case N2_FTYPE_OP ## N ARGS: \
184 types[(int) ftcode] \
185 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
189 default: gcc_unreachable ();
191 return types
[(int) ftcode
];
195 /* Definition of FPU instruction descriptions. */
197 struct nios2_fpu_insn_info
200 int num_operands
, *optvar
;
/* Bit flags stored in nios2_fpu_insn_info.flags and tested by the
   N2FPU_*_P accessor macros below.
   NOTE(review): N2F_DF (presumably 0x1, marking double-precision insns)
   is referenced by N2FPU_INSN_DF / N2FPU_CMP_DF / N2FPU_DOUBLE_P but its
   #define was lost in this extraction -- confirm against the original.  */
203 #define N2F_DFREQ 0x2
204 #define N2F_UNSAFE 0x4
205 #define N2F_FINITE 0x8
206 #define N2F_NO_ERRNO 0x10
208 enum insn_code icode
;
209 enum nios2_ftcode ftcode
;
212 /* Base macro for defining FPU instructions. */
213 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
214 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
215 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
216 N2_FTYPE_OP ## nop args }
/* The brace initializer above must track the field order of
   struct nios2_fpu_insn_info: name string, operand count, pointer to the
   -mcustom-<insn>= option variable, the enable/disable option indices,
   flags, insn code, and builtin function type.
   NOTE(review): the struct definition is partially elided in this copy --
   confirm the field order against the original file.  */
218 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
219 #define N2FPU_OP2(mode) (mode, mode)
220 #define N2FPU_OP3(mode) (mode, mode, mode)
221 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
222 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
223 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
/* SF/DF wrappers: lowercase m is the mode letter pasted into names
   (s/d), uppercase M the mode prefix for the ftype (S/D); the DF
   variant additionally ORs in the N2F_DF flag.  */
224 #define N2FPU_INSN_SF(code, nop, flags) \
225 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
226 #define N2FPU_INSN_DF(code, nop, flags) \
227 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
229 /* Compare instructions, 3 operand FP operation with a SI result. */
230 #define N2FPU_CMP_DEF(code, flags, m, M) \
231 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
232 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
233 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
234 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
236 /* The order of definition needs to be maintained consistent with
237 enum n2fpu_code in nios2-opts.h. */
238 struct nios2_fpu_insn_info nios2_fpu_insn
[] =
240 /* Single precision instructions. */
241 N2FPU_INSN_SF (add
, 3, 0),
242 N2FPU_INSN_SF (sub
, 3, 0),
243 N2FPU_INSN_SF (mul
, 3, 0),
244 N2FPU_INSN_SF (div
, 3, 0),
245 /* Due to textual difference between min/max and smin/smax. */
246 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, s
, S
),
247 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, s
, S
),
248 N2FPU_INSN_SF (neg
, 2, 0),
249 N2FPU_INSN_SF (abs
, 2, 0),
250 N2FPU_INSN_SF (sqrt
, 2, 0),
251 N2FPU_INSN_SF (sin
, 2, N2F_UNSAFE
),
252 N2FPU_INSN_SF (cos
, 2, N2F_UNSAFE
),
253 N2FPU_INSN_SF (tan
, 2, N2F_UNSAFE
),
254 N2FPU_INSN_SF (atan
, 2, N2F_UNSAFE
),
255 N2FPU_INSN_SF (exp
, 2, N2F_UNSAFE
),
256 N2FPU_INSN_SF (log
, 2, N2F_UNSAFE
),
257 /* Single precision compares. */
258 N2FPU_CMP_SF (eq
), N2FPU_CMP_SF (ne
),
259 N2FPU_CMP_SF (lt
), N2FPU_CMP_SF (le
),
260 N2FPU_CMP_SF (gt
), N2FPU_CMP_SF (ge
),
262 /* Double precision instructions. */
263 N2FPU_INSN_DF (add
, 3, 0),
264 N2FPU_INSN_DF (sub
, 3, 0),
265 N2FPU_INSN_DF (mul
, 3, 0),
266 N2FPU_INSN_DF (div
, 3, 0),
267 /* Due to textual difference between min/max and smin/smax. */
268 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, d
, D
),
269 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, d
, D
),
270 N2FPU_INSN_DF (neg
, 2, 0),
271 N2FPU_INSN_DF (abs
, 2, 0),
272 N2FPU_INSN_DF (sqrt
, 2, 0),
273 N2FPU_INSN_DF (sin
, 2, N2F_UNSAFE
),
274 N2FPU_INSN_DF (cos
, 2, N2F_UNSAFE
),
275 N2FPU_INSN_DF (tan
, 2, N2F_UNSAFE
),
276 N2FPU_INSN_DF (atan
, 2, N2F_UNSAFE
),
277 N2FPU_INSN_DF (exp
, 2, N2F_UNSAFE
),
278 N2FPU_INSN_DF (log
, 2, N2F_UNSAFE
),
279 /* Double precision compares. */
280 N2FPU_CMP_DF (eq
), N2FPU_CMP_DF (ne
),
281 N2FPU_CMP_DF (lt
), N2FPU_CMP_DF (le
),
282 N2FPU_CMP_DF (gt
), N2FPU_CMP_DF (ge
),
284 /* Conversion instructions. */
285 N2FPU_INSN_DEF_BASE (floatis
, 2, 0, floatsisf2
, (SF
, SI
)),
286 N2FPU_INSN_DEF_BASE (floatus
, 2, 0, floatunssisf2
, (SF
, UI
)),
287 N2FPU_INSN_DEF_BASE (floatid
, 2, 0, floatsidf2
, (DF
, SI
)),
288 N2FPU_INSN_DEF_BASE (floatud
, 2, 0, floatunssidf2
, (DF
, UI
)),
289 N2FPU_INSN_DEF_BASE (round
, 2, N2F_NO_ERRNO
, lroundsfsi2
, (SI
, SF
)),
290 N2FPU_INSN_DEF_BASE (fixsi
, 2, 0, fix_truncsfsi2
, (SI
, SF
)),
291 N2FPU_INSN_DEF_BASE (fixsu
, 2, 0, fixuns_truncsfsi2
, (UI
, SF
)),
292 N2FPU_INSN_DEF_BASE (fixdi
, 2, 0, fix_truncdfsi2
, (SI
, DF
)),
293 N2FPU_INSN_DEF_BASE (fixdu
, 2, 0, fixuns_truncdfsi2
, (UI
, DF
)),
294 N2FPU_INSN_DEF_BASE (fextsd
, 2, 0, extendsfdf2
, (DF
, SF
)),
295 N2FPU_INSN_DEF_BASE (ftruncds
, 2, 0, truncdfsf2
, (SF
, DF
)),
297 /* X, Y access instructions. */
298 N2FPU_INSN_DEF_BASE (fwrx
, 2, N2F_DFREQ
, nios2_fwrx
, (VOID
, DF
)),
299 N2FPU_INSN_DEF_BASE (fwry
, 2, N2F_DFREQ
, nios2_fwry
, (VOID
, SF
)),
300 N2FPU_INSN_DEF_BASE (frdxlo
, 1, N2F_DFREQ
, nios2_frdxlo
, (SF
)),
301 N2FPU_INSN_DEF_BASE (frdxhi
, 1, N2F_DFREQ
, nios2_frdxhi
, (SF
)),
302 N2FPU_INSN_DEF_BASE (frdy
, 1, N2F_DFREQ
, nios2_frdy
, (SF
))
305 /* Some macros for ease of access. */
306 #define N2FPU(code) nios2_fpu_insn[(int) code]
/* N2FPU_N reads the -mcustom-<insn>= option variable through the table's
   optvar pointer.  Valid custom codes are 0-255 and -1 means "disabled"
   (see nios2_handle_custom_fpu_insn_option), hence the >= 0 test.  */
307 #define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
308 #define N2FPU_N(code) (*N2FPU(code).optvar)
309 #define N2FPU_NAME(code) (N2FPU(code).name)
310 #define N2FPU_ICODE(code) (N2FPU(code).icode)
311 #define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
/* Flag-bit tests; see the N2F_* defines near struct nios2_fpu_insn_info.  */
312 #define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
313 #define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
314 #define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
315 #define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
316 #define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
318 /* Same as above, but for cases where using only the op part is shorter. */
319 #define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
320 #define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
321 #define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
323 /* Export the FPU insn enabled predicate to nios2.md. */
325 nios2_fpu_insn_enabled (enum n2fpu_code code
)
327 return N2FPU_ENABLED_P (code
);
330 /* Return true if COND comparison for mode MODE is enabled under current
334 nios2_fpu_compare_enabled (enum rtx_code cond
, machine_mode mode
)
339 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqs
);
340 case NE
: return N2FPU_OP_ENABLED_P (fcmpnes
);
341 case GT
: return N2FPU_OP_ENABLED_P (fcmpgts
);
342 case GE
: return N2FPU_OP_ENABLED_P (fcmpges
);
343 case LT
: return N2FPU_OP_ENABLED_P (fcmplts
);
344 case LE
: return N2FPU_OP_ENABLED_P (fcmples
);
347 else if (mode
== DFmode
)
350 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqd
);
351 case NE
: return N2FPU_OP_ENABLED_P (fcmpned
);
352 case GT
: return N2FPU_OP_ENABLED_P (fcmpgtd
);
353 case GE
: return N2FPU_OP_ENABLED_P (fcmpged
);
354 case LT
: return N2FPU_OP_ENABLED_P (fcmpltd
);
355 case LE
: return N2FPU_OP_ENABLED_P (fcmpled
);
361 /* Stack layout and calling conventions. */
/* Round the byte count LOC up to the next multiple of the preferred
   stack boundary (converted from bits to bytes).  The add-then-mask
   idiom requires the boundary to be a power of two.  */
363 #define NIOS2_STACK_ALIGN(LOC) \
364 (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
365 & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
367 /* Return the bytes needed to compute the frame pointer from the current
370 nios2_compute_frame_layout (void)
373 unsigned int save_mask
= 0;
379 if (cfun
->machine
->initialized
)
380 return cfun
->machine
->total_size
;
382 var_size
= NIOS2_STACK_ALIGN (get_frame_size ());
383 out_args_size
= NIOS2_STACK_ALIGN (crtl
->outgoing_args_size
);
384 total_size
= var_size
+ out_args_size
;
386 /* Calculate space needed for gp registers. */
388 for (regno
= 0; regno
<= LAST_GP_REG
; regno
++)
389 if (prologue_saved_reg_p (regno
))
391 save_mask
|= 1 << regno
;
395 /* If we call eh_return, we need to save the EH data registers. */
396 if (crtl
->calls_eh_return
)
401 for (i
= 0; (r
= EH_RETURN_DATA_REGNO (i
)) != INVALID_REGNUM
; i
++)
402 if (!(save_mask
& (1 << r
)))
409 cfun
->machine
->fp_save_offset
= 0;
410 if (save_mask
& (1 << HARD_FRAME_POINTER_REGNUM
))
412 int fp_save_offset
= 0;
413 for (regno
= 0; regno
< HARD_FRAME_POINTER_REGNUM
; regno
++)
414 if (save_mask
& (1 << regno
))
417 cfun
->machine
->fp_save_offset
= fp_save_offset
;
420 save_reg_size
= NIOS2_STACK_ALIGN (save_reg_size
);
421 total_size
+= save_reg_size
;
422 total_size
+= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
424 /* Save other computed information. */
425 cfun
->machine
->save_mask
= save_mask
;
426 cfun
->machine
->total_size
= total_size
;
427 cfun
->machine
->var_size
= var_size
;
428 cfun
->machine
->args_size
= out_args_size
;
429 cfun
->machine
->save_reg_size
= save_reg_size
;
430 cfun
->machine
->initialized
= reload_completed
;
431 cfun
->machine
->save_regs_offset
= out_args_size
+ var_size
;
436 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
437 prologue/epilogue expand routines. */
439 save_reg (int regno
, unsigned offset
)
441 rtx reg
= gen_rtx_REG (SImode
, regno
);
442 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, offset
, false);
443 rtx_insn
*insn
= emit_move_insn (gen_frame_mem (Pmode
, addr
), reg
);
444 RTX_FRAME_RELATED_P (insn
) = 1;
448 restore_reg (int regno
, unsigned offset
)
450 rtx reg
= gen_rtx_REG (SImode
, regno
);
451 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, offset
, false);
452 rtx_insn
*insn
= emit_move_insn (reg
, gen_frame_mem (Pmode
, addr
));
453 /* Tag epilogue unwind note. */
454 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
455 RTX_FRAME_RELATED_P (insn
) = 1;
458 /* Emit conditional trap for checking stack limit. */
460 nios2_emit_stack_limit_check (void)
462 if (REG_P (stack_limit_rtx
))
463 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode
, stack_pointer_rtx
,
465 stack_pointer_rtx
, stack_limit_rtx
, GEN_INT (3)));
467 sorry ("only register based stack limit is supported");
470 /* Temp regno used inside prologue/epilogue. */
/* Hard register r8, used as scratch when an offset does not fit in a
   16-bit immediate (see nios2_emit_add_constant and the prologue code).
   NOTE(review): presumably call-clobbered under the Nios II ABI, which
   is what makes it safe to clobber here -- confirm against the ABI.  */
471 #define TEMP_REG_NUM 8
474 nios2_emit_add_constant (rtx reg
, HOST_WIDE_INT immed
)
477 if (SMALL_INT (immed
))
478 insn
= emit_insn (gen_add2_insn (reg
, gen_int_mode (immed
, Pmode
)));
481 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
482 emit_move_insn (tmp
, gen_int_mode (immed
, Pmode
));
483 insn
= emit_insn (gen_add2_insn (reg
, tmp
));
489 nios2_expand_prologue (void)
492 int total_frame_size
, save_offset
;
493 int sp_offset
; /* offset from base_reg to final stack value. */
494 int save_regs_base
; /* offset from base_reg to register save area. */
497 total_frame_size
= nios2_compute_frame_layout ();
499 if (flag_stack_usage_info
)
500 current_function_static_stack_size
= total_frame_size
;
502 /* Decrement the stack pointer. */
503 if (!SMALL_INT (total_frame_size
))
505 /* We need an intermediary point, this will point at the spill block. */
507 (gen_add2_insn (stack_pointer_rtx
,
508 gen_int_mode (cfun
->machine
->save_regs_offset
509 - total_frame_size
, Pmode
)));
510 RTX_FRAME_RELATED_P (insn
) = 1;
512 sp_offset
= -cfun
->machine
->save_regs_offset
;
514 else if (total_frame_size
)
516 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
517 gen_int_mode (-total_frame_size
,
519 RTX_FRAME_RELATED_P (insn
) = 1;
520 save_regs_base
= cfun
->machine
->save_regs_offset
;
524 save_regs_base
= sp_offset
= 0;
526 if (crtl
->limit_stack
)
527 nios2_emit_stack_limit_check ();
529 save_offset
= save_regs_base
+ cfun
->machine
->save_reg_size
;
531 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
532 if (cfun
->machine
->save_mask
& (1 << regno
))
535 save_reg (regno
, save_offset
);
538 if (frame_pointer_needed
)
540 int fp_save_offset
= save_regs_base
+ cfun
->machine
->fp_save_offset
;
541 insn
= emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
543 gen_int_mode (fp_save_offset
, Pmode
)));
544 RTX_FRAME_RELATED_P (insn
) = 1;
550 = gen_rtx_SET (stack_pointer_rtx
,
551 plus_constant (Pmode
, stack_pointer_rtx
, sp_offset
));
552 if (SMALL_INT (sp_offset
))
553 insn
= emit_insn (sp_adjust
);
556 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
557 emit_move_insn (tmp
, gen_int_mode (sp_offset
, Pmode
));
558 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
559 /* Attach the sp_adjust as a note indicating what happened. */
560 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, sp_adjust
);
562 RTX_FRAME_RELATED_P (insn
) = 1;
564 if (crtl
->limit_stack
)
565 nios2_emit_stack_limit_check ();
568 /* Load the PIC register if needed. */
569 if (crtl
->uses_pic_offset_table
)
570 nios2_load_pic_register ();
572 /* If we are profiling, make sure no instructions are scheduled before
573 the call to mcount. */
575 emit_insn (gen_blockage ());
579 nios2_expand_epilogue (bool sibcall_p
)
583 int total_frame_size
;
584 int sp_adjust
, save_offset
;
587 if (!sibcall_p
&& nios2_can_use_return_insn ())
589 emit_jump_insn (gen_return ());
593 emit_insn (gen_blockage ());
595 total_frame_size
= nios2_compute_frame_layout ();
596 if (frame_pointer_needed
)
598 /* Recover the stack pointer. */
599 insn
= emit_insn (gen_add3_insn
600 (stack_pointer_rtx
, hard_frame_pointer_rtx
,
601 gen_int_mode (-cfun
->machine
->fp_save_offset
, Pmode
)));
602 cfa_adj
= plus_constant (Pmode
, stack_pointer_rtx
,
604 - cfun
->machine
->save_regs_offset
));
605 add_reg_note (insn
, REG_CFA_DEF_CFA
, cfa_adj
);
606 RTX_FRAME_RELATED_P (insn
) = 1;
609 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
611 else if (!SMALL_INT (total_frame_size
))
613 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
614 emit_move_insn (tmp
, gen_int_mode (cfun
->machine
->save_regs_offset
,
616 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
617 cfa_adj
= gen_rtx_SET (stack_pointer_rtx
,
618 plus_constant (Pmode
, stack_pointer_rtx
,
619 cfun
->machine
->save_regs_offset
));
620 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa_adj
);
621 RTX_FRAME_RELATED_P (insn
) = 1;
623 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
627 save_offset
= cfun
->machine
->save_regs_offset
;
628 sp_adjust
= total_frame_size
;
631 save_offset
+= cfun
->machine
->save_reg_size
;
633 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
634 if (cfun
->machine
->save_mask
& (1 << regno
))
637 restore_reg (regno
, save_offset
);
642 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
643 gen_int_mode (sp_adjust
, Pmode
)));
644 cfa_adj
= gen_rtx_SET (stack_pointer_rtx
,
645 plus_constant (Pmode
, stack_pointer_rtx
,
647 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa_adj
);
648 RTX_FRAME_RELATED_P (insn
) = 1;
651 /* Add in the __builtin_eh_return stack adjustment. */
652 if (crtl
->calls_eh_return
)
653 emit_insn (gen_add2_insn (stack_pointer_rtx
, EH_RETURN_STACKADJ_RTX
));
656 emit_jump_insn (gen_simple_return ());
659 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
660 back to a previous frame. */
662 nios2_get_return_address (int count
)
667 return get_hard_reg_initial_val (Pmode
, RA_REGNO
);
670 /* Emit code to change the current function's return address to
671 ADDRESS. SCRATCH is available as a scratch register, if needed.
672 ADDRESS and SCRATCH are both word-mode GPRs. */
674 nios2_set_return_address (rtx address
, rtx scratch
)
676 nios2_compute_frame_layout ();
677 if (cfun
->machine
->save_mask
& (1 << RA_REGNO
))
679 unsigned offset
= cfun
->machine
->save_reg_size
- 4;
682 if (frame_pointer_needed
)
683 base
= hard_frame_pointer_rtx
;
686 base
= stack_pointer_rtx
;
687 offset
+= cfun
->machine
->save_regs_offset
;
689 if (!SMALL_INT (offset
))
691 emit_move_insn (scratch
, gen_int_mode (offset
, Pmode
));
692 emit_insn (gen_add2_insn (scratch
, base
));
698 base
= plus_constant (Pmode
, base
, offset
);
699 emit_move_insn (gen_rtx_MEM (Pmode
, base
), address
);
702 emit_move_insn (gen_rtx_REG (Pmode
, RA_REGNO
), address
);
705 /* Implement FUNCTION_PROFILER macro. */
707 nios2_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
709 fprintf (file
, "\tmov\tr8, ra\n");
712 fprintf (file
, "\tnextpc\tr2\n");
713 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
714 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
715 fprintf (file
, "\tadd\tr2, r2, r3\n");
716 fprintf (file
, "\tldw\tr2, %%call(_mcount)(r2)\n");
717 fprintf (file
, "\tcallr\tr2\n");
719 else if (flag_pic
== 2)
721 fprintf (file
, "\tnextpc\tr2\n");
722 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
723 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
724 fprintf (file
, "\tadd\tr2, r2, r3\n");
725 fprintf (file
, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
726 fprintf (file
, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
727 fprintf (file
, "\tadd\tr3, r2, r3\n");
728 fprintf (file
, "\tldw\tr2, 0(r3)\n");
729 fprintf (file
, "\tcallr\tr2\n");
732 fprintf (file
, "\tcall\t_mcount\n");
733 fprintf (file
, "\tmov\tra, r8\n");
736 /* Dump stack layout. */
738 nios2_dump_frame_layout (FILE *file
)
740 fprintf (file
, "\t%s Current Frame Info\n", ASM_COMMENT_START
);
741 fprintf (file
, "\t%s total_size = %d\n", ASM_COMMENT_START
,
742 cfun
->machine
->total_size
);
743 fprintf (file
, "\t%s var_size = %d\n", ASM_COMMENT_START
,
744 cfun
->machine
->var_size
);
745 fprintf (file
, "\t%s args_size = %d\n", ASM_COMMENT_START
,
746 cfun
->machine
->args_size
);
747 fprintf (file
, "\t%s save_reg_size = %d\n", ASM_COMMENT_START
,
748 cfun
->machine
->save_reg_size
);
749 fprintf (file
, "\t%s initialized = %d\n", ASM_COMMENT_START
,
750 cfun
->machine
->initialized
);
751 fprintf (file
, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START
,
752 cfun
->machine
->save_regs_offset
);
753 fprintf (file
, "\t%s is_leaf = %d\n", ASM_COMMENT_START
,
755 fprintf (file
, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START
,
756 frame_pointer_needed
);
757 fprintf (file
, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START
,
758 crtl
->args
.pretend_args_size
);
761 /* Return true if REGNO should be saved in the prologue. */
763 prologue_saved_reg_p (unsigned regno
)
765 gcc_assert (GP_REG_P (regno
));
767 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
770 if (regno
== HARD_FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
773 if (regno
== PIC_OFFSET_TABLE_REGNUM
&& crtl
->uses_pic_offset_table
)
776 if (regno
== RA_REGNO
&& df_regs_ever_live_p (RA_REGNO
))
782 /* Implement TARGET_CAN_ELIMINATE. */
784 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
786 if (to
== STACK_POINTER_REGNUM
)
787 return !frame_pointer_needed
;
791 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
793 nios2_initial_elimination_offset (int from
, int to
)
797 nios2_compute_frame_layout ();
799 /* Set OFFSET to the offset from the stack pointer. */
802 case FRAME_POINTER_REGNUM
:
803 offset
= cfun
->machine
->args_size
;
806 case ARG_POINTER_REGNUM
:
807 offset
= cfun
->machine
->total_size
;
808 offset
-= crtl
->args
.pretend_args_size
;
815 /* If we are asked for the frame pointer offset, then adjust OFFSET
816 by the offset from the frame pointer to the stack pointer. */
817 if (to
== HARD_FRAME_POINTER_REGNUM
)
818 offset
-= (cfun
->machine
->save_regs_offset
819 + cfun
->machine
->fp_save_offset
);
824 /* Return nonzero if this function is known to have a null epilogue.
825 This allows the optimizer to omit jumps to jumps if no stack
828 nios2_can_use_return_insn (void)
830 if (!reload_completed
|| crtl
->profile
)
833 return nios2_compute_frame_layout () == 0;
837 /* Check and signal some warnings/errors on FPU insn options. */
839 nios2_custom_check_insns (void)
844 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
845 if (N2FPU_ENABLED_P (i
) && N2FPU_DOUBLE_P (i
))
847 for (j
= 0; j
< ARRAY_SIZE (nios2_fpu_insn
); j
++)
848 if (N2FPU_DOUBLE_REQUIRED_P (j
) && ! N2FPU_ENABLED_P (j
))
850 error ("switch %<-mcustom-%s%> is required for double "
851 "precision floating point", N2FPU_NAME (j
));
857 /* Warn if the user has certain exotic operations that won't get used
858 without -funsafe-math-optimizations. See expand_builtin () in
860 if (!flag_unsafe_math_optimizations
)
861 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
862 if (N2FPU_ENABLED_P (i
) && N2FPU_UNSAFE_P (i
))
863 warning (0, "switch %<-mcustom-%s%> has no effect unless "
864 "-funsafe-math-optimizations is specified", N2FPU_NAME (i
));
866 /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
867 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
869 if (!flag_finite_math_only
)
870 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
871 if (N2FPU_ENABLED_P (i
) && N2FPU_FINITE_P (i
))
872 warning (0, "switch %<-mcustom-%s%> has no effect unless "
873 "-ffinite-math-only is specified", N2FPU_NAME (i
));
875 /* Warn if the user is trying to use a custom rounding instruction
876 that won't get used without -fno-math-errno. See
877 expand_builtin_int_roundingfn_2 () in builtins.c. */
879 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
880 if (N2FPU_ENABLED_P (i
) && N2FPU_NO_ERRNO_P (i
))
881 warning (0, "switch %<-mcustom-%s%> has no effect unless "
882 "-fno-math-errno is specified", N2FPU_NAME (i
));
884 if (errors
|| custom_code_conflict
)
885 fatal_error (input_location
,
886 "conflicting use of -mcustom switches, target attributes, "
887 "and/or __builtin_custom_ functions");
891 nios2_set_fpu_custom_code (enum n2fpu_code code
, int n
, bool override_p
)
893 if (override_p
|| N2FPU_N (code
) == -1)
895 nios2_register_custom_code (n
, CCS_FPU
, (int) code
);
898 /* Type to represent a standard FPU config. */
899 struct nios2_fpu_config
902 bool set_sp_constants
;
903 int code
[n2fpu_code_num
];
/* Table of the predefined -mcustom-fpu-cfg= configurations.  It is
   populated lazily-at-startup by nios2_init_fpu_configs and searched by
   name in nios2_match_custom_fpu_cfg.  */
906 #define NIOS2_FPU_CONFIG_NUM 3
907 static struct nios2_fpu_config custom_fpu_config
[NIOS2_FPU_CONFIG_NUM
];
910 nios2_init_fpu_configs (void)
912 struct nios2_fpu_config
* cfg
;
914 #define NEXT_FPU_CONFIG \
916 cfg = &custom_fpu_config[i++]; \
917 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
922 cfg
->set_sp_constants
= true;
923 cfg
->code
[n2fpu_fmuls
] = 252;
924 cfg
->code
[n2fpu_fadds
] = 253;
925 cfg
->code
[n2fpu_fsubs
] = 254;
929 cfg
->set_sp_constants
= true;
930 cfg
->code
[n2fpu_fmuls
] = 252;
931 cfg
->code
[n2fpu_fadds
] = 253;
932 cfg
->code
[n2fpu_fsubs
] = 254;
933 cfg
->code
[n2fpu_fdivs
] = 255;
937 cfg
->set_sp_constants
= true;
938 cfg
->code
[n2fpu_floatus
] = 243;
939 cfg
->code
[n2fpu_fixsi
] = 244;
940 cfg
->code
[n2fpu_floatis
] = 245;
941 cfg
->code
[n2fpu_fcmpgts
] = 246;
942 cfg
->code
[n2fpu_fcmples
] = 249;
943 cfg
->code
[n2fpu_fcmpeqs
] = 250;
944 cfg
->code
[n2fpu_fcmpnes
] = 251;
945 cfg
->code
[n2fpu_fmuls
] = 252;
946 cfg
->code
[n2fpu_fadds
] = 253;
947 cfg
->code
[n2fpu_fsubs
] = 254;
948 cfg
->code
[n2fpu_fdivs
] = 255;
950 #undef NEXT_FPU_CONFIG
951 gcc_assert (i
== NIOS2_FPU_CONFIG_NUM
);
954 static struct nios2_fpu_config
*
955 nios2_match_custom_fpu_cfg (const char *cfgname
, const char *endp
)
958 for (i
= 0; i
< NIOS2_FPU_CONFIG_NUM
; i
++)
960 bool match
= !(endp
!= NULL
961 ? strncmp (custom_fpu_config
[i
].name
, cfgname
,
963 : strcmp (custom_fpu_config
[i
].name
, cfgname
));
965 return &custom_fpu_config
[i
];
970 /* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
971 OVERRIDE is true if loaded config codes should overwrite current state. */
973 nios2_handle_custom_fpu_cfg (const char *cfgname
, const char *endp
,
976 struct nios2_fpu_config
*cfg
= nios2_match_custom_fpu_cfg (cfgname
, endp
);
980 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
981 if (cfg
->code
[i
] >= 0)
982 nios2_set_fpu_custom_code ((enum n2fpu_code
) i
, cfg
->code
[i
],
984 if (cfg
->set_sp_constants
)
985 flag_single_precision_constant
= 1;
988 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
989 "value %<%s%>", cfgname
);
991 /* Guard against errors in the standard configurations. */
992 nios2_custom_check_insns ();
995 /* Check individual FPU insn options, and register custom code. */
997 nios2_handle_custom_fpu_insn_option (int fpu_insn_index
)
999 int param
= N2FPU_N (fpu_insn_index
);
1001 if (0 <= param
&& param
<= 255)
1002 nios2_register_custom_code (param
, CCS_FPU
, fpu_insn_index
);
1004 /* Valid values are 0-255, but also allow -1 so that the
1005 -mno-custom-<opt> switches work. */
1006 else if (param
!= -1)
1007 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1008 N2FPU_NAME (fpu_insn_index
), param
);
1011 /* Allocate a chunk of memory for per-function machine-dependent data. */
1012 static struct machine_function
*
1013 nios2_init_machine_status (void)
1015 return ggc_cleared_alloc
<machine_function
> ();
1018 /* Implement TARGET_OPTION_OVERRIDE. */
1020 nios2_option_override (void)
1024 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1025 SUBTARGET_OVERRIDE_OPTIONS
;
1028 /* Check for unsupported options. */
1029 if (flag_pic
&& !TARGET_LINUX_ABI
)
1030 sorry ("position-independent code requires the Linux ABI");
1032 /* Function to allocate machine-dependent function status. */
1033 init_machine_status
= &nios2_init_machine_status
;
1035 nios2_section_threshold
1036 = (global_options_set
.x_g_switch_value
1037 ? g_switch_value
: NIOS2_DEFAULT_GVALUE
);
1039 if (nios2_gpopt_option
== gpopt_unspecified
)
1041 /* Default to -mgpopt unless -fpic or -fPIC. */
1043 nios2_gpopt_option
= gpopt_none
;
1045 nios2_gpopt_option
= gpopt_local
;
1048 /* If we don't have mul, we don't have mulx either! */
1049 if (!TARGET_HAS_MUL
&& TARGET_HAS_MULX
)
1050 target_flags
&= ~MASK_HAS_MULX
;
1052 /* Initialize default FPU configurations. */
1053 nios2_init_fpu_configs ();
1055 /* Set up default handling for floating point custom instructions.
1057 Putting things in this order means that the -mcustom-fpu-cfg=
1058 switch will always be overridden by individual -mcustom-fadds=
1059 switches, regardless of the order in which they were specified
1060 on the command line.
1062 This behavior of prioritization of individual -mcustom-<insn>=
1063 options before the -mcustom-fpu-cfg= switch is maintained for
1065 if (nios2_custom_fpu_cfg_string
&& *nios2_custom_fpu_cfg_string
)
1066 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string
, NULL
, false);
1068 /* Handle options for individual FPU insns. */
1069 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1070 nios2_handle_custom_fpu_insn_option (i
);
1072 nios2_custom_check_insns ();
1074 /* Save the initial options in case the user does function specific
1076 target_option_default_node
= target_option_current_node
1077 = build_target_option_node (&global_options
);
1081 /* Return true if CST is a constant within range of movi/movui/movhi. */
1083 nios2_simple_const_p (const_rtx cst
)
1085 HOST_WIDE_INT val
= INTVAL (cst
);
1086 return SMALL_INT (val
) || SMALL_INT_UNSIGNED (val
) || UPPER16_INT (val
);
1089 /* Compute a (partial) cost for rtx X. Return true if the complete
1090 cost has been computed, and false if subexpressions should be
1091 scanned. In either case, *TOTAL contains the cost result. */
1093 nios2_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
1094 int opno ATTRIBUTE_UNUSED
,
1095 int *total
, bool speed ATTRIBUTE_UNUSED
)
1100 if (INTVAL (x
) == 0)
1102 *total
= COSTS_N_INSNS (0);
1105 else if (nios2_simple_const_p (x
))
1107 *total
= COSTS_N_INSNS (2);
1112 *total
= COSTS_N_INSNS (4);
1121 *total
= COSTS_N_INSNS (4);
1127 /* Recognize 'nor' insn pattern. */
1128 if (GET_CODE (XEXP (x
, 0)) == NOT
1129 && GET_CODE (XEXP (x
, 1)) == NOT
)
1131 *total
= COSTS_N_INSNS (1);
1139 *total
= COSTS_N_INSNS (1);
1144 *total
= COSTS_N_INSNS (3);
1149 *total
= COSTS_N_INSNS (1);
1158 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1160 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, reg_class_t regclass
)
1162 return regclass
== NO_REGS
? GENERAL_REGS
: regclass
;
1165 /* Emit a call to __tls_get_addr. TI is the argument to this function.
1166 RET is an RTX for the return value location. The entire insn sequence
1168 static GTY(()) rtx nios2_tls_symbol
;
1171 nios2_call_tls_get_addr (rtx ti
)
1173 rtx arg
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
1174 rtx ret
= gen_rtx_REG (Pmode
, FIRST_RETVAL_REGNO
);
1178 if (!nios2_tls_symbol
)
1179 nios2_tls_symbol
= init_one_libfunc ("__tls_get_addr");
1181 emit_move_insn (arg
, ti
);
1182 fn
= gen_rtx_MEM (QImode
, nios2_tls_symbol
);
1183 insn
= emit_call_insn (gen_call_value (ret
, fn
, const0_rtx
));
1184 RTL_CONST_CALL_P (insn
) = 1;
1185 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), ret
);
1186 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), arg
);
1191 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1193 nios2_large_offset_p (int unspec
)
1195 gcc_assert (nios2_unspec_reloc_name (unspec
) != NULL
);
1198 /* FIXME: TLS GOT offset relocations will eventually also get this
1199 treatment, after binutils support for those are also completed. */
1200 && (unspec
== UNSPEC_PIC_SYM
|| unspec
== UNSPEC_PIC_CALL_SYM
))
1203 /* 'gotoff' offsets are always hiadj/lo. */
1204 if (unspec
== UNSPEC_PIC_GOTOFF_SYM
)
1210 /* Return true for conforming unspec relocations. Also used in
1211 constraints.md and predicates.md. */
1213 nios2_unspec_reloc_p (rtx op
)
1215 return (GET_CODE (op
) == CONST
1216 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1217 && ! nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1220 /* Helper to generate unspec constant. */
1222 nios2_unspec_offset (rtx loc
, int unspec
)
1224 return gen_rtx_CONST (Pmode
, gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, loc
),
1228 /* Generate GOT pointer based address with large offset. */
1230 nios2_large_got_address (rtx offset
, rtx tmp
)
1233 tmp
= gen_reg_rtx (Pmode
);
1234 emit_move_insn (tmp
, offset
);
1235 return gen_rtx_PLUS (Pmode
, tmp
, pic_offset_table_rtx
);
1238 /* Generate a GOT pointer based address. */
1240 nios2_got_address (rtx loc
, int unspec
)
1242 rtx offset
= nios2_unspec_offset (loc
, unspec
);
1243 crtl
->uses_pic_offset_table
= 1;
1245 if (nios2_large_offset_p (unspec
))
1246 return force_reg (Pmode
, nios2_large_got_address (offset
, NULL_RTX
));
1248 return gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
1251 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1252 return value will be a valid address and move_operand (either a REG
1255 nios2_legitimize_tls_address (rtx loc
)
1258 enum tls_model model
= SYMBOL_REF_TLS_MODEL (loc
);
1262 case TLS_MODEL_GLOBAL_DYNAMIC
:
1263 tmp
= gen_reg_rtx (Pmode
);
1264 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_GD
));
1265 return nios2_call_tls_get_addr (tmp
);
1267 case TLS_MODEL_LOCAL_DYNAMIC
:
1268 tmp
= gen_reg_rtx (Pmode
);
1269 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_LDM
));
1270 return gen_rtx_PLUS (Pmode
, nios2_call_tls_get_addr (tmp
),
1271 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LDO
));
1273 case TLS_MODEL_INITIAL_EXEC
:
1274 tmp
= gen_reg_rtx (Pmode
);
1275 mem
= gen_const_mem (Pmode
, nios2_got_address (loc
, UNSPEC_LOAD_TLS_IE
));
1276 emit_move_insn (tmp
, mem
);
1277 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1278 return gen_rtx_PLUS (Pmode
, tp
, tmp
);
1280 case TLS_MODEL_LOCAL_EXEC
:
1281 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1282 return gen_rtx_PLUS (Pmode
, tp
,
1283 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LE
));
1291 If -O3 is used, we want to output a table lookup for
1292 divides between small numbers (both num and den >= 0
1293 and < 0x10). The overhead of this method in the worst
1294 case is 40 bytes in the text section (10 insns) and
1295 256 bytes in the data section. Additional divides do
1296 not incur additional penalties in the data section.
1298 Code speed is improved for small divides by about 5x
1299 when using this method in the worse case (~9 cycles
1300 vs ~45). And in the worst case divides not within the
1301 table are penalized by about 10% (~5 cycles vs ~45).
1302 However in the typical case the penalty is not as bad
1303 because doing the long divide in only 45 cycles is
1306 ??? would be nice to have some benchmarks other
1307 than Dhrystone to back this up.
1309 This bit of expansion is to create this instruction
1316 add $12, $11, divide_table
1322 # continue here with result in $2
1324 ??? Ideally I would like the libcall block to contain all
1325 of this code, but I don't know how to do that. What it
1326 means is that if the divide can be eliminated, it may not
1327 completely disappear.
1329 ??? The __divsi3_table label should ideally be moved out
1330 of this block and into a global. If it is placed into the
1331 sdata section we can save even more cycles by doing things
1334 nios2_emit_expensive_div (rtx
*operands
, machine_mode mode
)
1336 rtx or_result
, shift_left_result
;
1338 rtx_code_label
*lab1
, *lab3
;
1345 /* It may look a little generic, but only SImode is supported for now. */
1346 gcc_assert (mode
== SImode
);
1347 libfunc
= optab_libfunc (sdiv_optab
, SImode
);
1349 lab1
= gen_label_rtx ();
1350 lab3
= gen_label_rtx ();
1352 or_result
= expand_simple_binop (SImode
, IOR
,
1353 operands
[1], operands
[2],
1354 0, 0, OPTAB_LIB_WIDEN
);
1356 emit_cmp_and_jump_insns (or_result
, GEN_INT (15), GTU
, 0,
1357 GET_MODE (or_result
), 0, lab3
);
1358 JUMP_LABEL (get_last_insn ()) = lab3
;
1360 shift_left_result
= expand_simple_binop (SImode
, ASHIFT
,
1361 operands
[1], GEN_INT (4),
1362 0, 0, OPTAB_LIB_WIDEN
);
1364 lookup_value
= expand_simple_binop (SImode
, IOR
,
1365 shift_left_result
, operands
[2],
1366 0, 0, OPTAB_LIB_WIDEN
);
1367 table
= gen_rtx_PLUS (SImode
, lookup_value
,
1368 gen_rtx_SYMBOL_REF (SImode
, "__divsi3_table"));
1369 convert_move (operands
[0], gen_rtx_MEM (QImode
, table
), 1);
1371 tmp
= emit_jump_insn (gen_jump (lab1
));
1372 JUMP_LABEL (tmp
) = lab1
;
1376 LABEL_NUSES (lab3
) = 1;
1379 final_result
= emit_library_call_value (libfunc
, NULL_RTX
,
1380 LCT_CONST
, SImode
, 2,
1381 operands
[1], SImode
,
1382 operands
[2], SImode
);
1384 insns
= get_insns ();
1386 emit_libcall_block (insns
, operands
[0], final_result
,
1387 gen_rtx_DIV (SImode
, operands
[1], operands
[2]));
1390 LABEL_NUSES (lab1
) = 1;
1394 /* Branches and compares. */
1396 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1397 comparison, e.g. >= 1 into > 0. */
1399 nios2_alternate_compare_const (enum rtx_code code
, rtx op
,
1400 enum rtx_code
*alt_code
, rtx
*alt_op
,
1403 HOST_WIDE_INT opval
= INTVAL (op
);
1404 enum rtx_code scode
= signed_condition (code
);
1405 bool dec_p
= (scode
== LT
|| scode
== GE
);
1407 if (code
== EQ
|| code
== NE
)
1415 ? gen_int_mode (opval
- 1, mode
)
1416 : gen_int_mode (opval
+ 1, mode
));
1418 /* The required conversion between [>,>=] and [<,<=] is captured
1419 by a reverse + swap of condition codes. */
1420 *alt_code
= reverse_condition (swap_condition (code
));
1423 /* Test if the incremented/decremented value crosses the over/underflow
1424 boundary. Supposedly, such boundary cases should already be transformed
1425 into always-true/false or EQ conditions, so use an assertion here. */
1426 unsigned HOST_WIDE_INT alt_opval
= INTVAL (*alt_op
);
1428 alt_opval
^= (1 << (GET_MODE_BITSIZE (mode
) - 1));
1429 alt_opval
&= GET_MODE_MASK (mode
);
1430 gcc_assert (dec_p
? alt_opval
!= GET_MODE_MASK (mode
) : alt_opval
!= 0);
1434 /* Return true if the constant comparison is supported by nios2. */
1436 nios2_valid_compare_const_p (enum rtx_code code
, rtx op
)
1440 case EQ
: case NE
: case GE
: case LT
:
1441 return SMALL_INT (INTVAL (op
));
1443 return SMALL_INT_UNSIGNED (INTVAL (op
));
1449 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1450 the current configuration. Perform modifications if MODIFY_P is true.
1451 Returns true if FPU compare can be done. */
1454 nios2_validate_fpu_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
,
1458 enum rtx_code code
= GET_CODE (*cmp
);
1460 if (!nios2_fpu_compare_enabled (code
, mode
))
1462 code
= swap_condition (code
);
1463 if (nios2_fpu_compare_enabled (code
, mode
))
1477 *op1
= force_reg (mode
, *op1
);
1478 *op2
= force_reg (mode
, *op2
);
1479 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1484 /* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
1485 nios2 supported form. Returns true if success. */
1487 nios2_validate_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
)
1489 enum rtx_code code
= GET_CODE (*cmp
);
1490 enum rtx_code alt_code
;
1493 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
1494 return nios2_validate_fpu_compare (mode
, cmp
, op1
, op2
, true);
1496 if (!reg_or_0_operand (*op2
, mode
))
1498 /* Create alternate constant compare. */
1499 nios2_alternate_compare_const (code
, *op2
, &alt_code
, &alt_op2
, mode
);
1501 /* If alterate op2 is zero(0), we can use it directly, possibly
1502 swapping the compare code. */
1503 if (alt_op2
== const0_rtx
)
1507 goto check_rebuild_cmp
;
1510 /* Check if either constant compare can be used. */
1511 if (nios2_valid_compare_const_p (code
, *op2
))
1513 else if (nios2_valid_compare_const_p (alt_code
, alt_op2
))
1520 /* We have to force op2 into a register now. Try to pick one
1521 with a lower cost. */
1522 if (! nios2_simple_const_p (*op2
)
1523 && nios2_simple_const_p (alt_op2
))
1528 *op2
= force_reg (SImode
, *op2
);
1531 if (code
== GT
|| code
== GTU
|| code
== LE
|| code
== LEU
)
1533 rtx t
= *op1
; *op1
= *op2
; *op2
= t
;
1534 code
= swap_condition (code
);
1537 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1542 /* Addressing Modes. */
1544 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1546 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1549 split_const (x
, &base
, &offset
);
1550 return GET_CODE (base
) != SYMBOL_REF
|| !SYMBOL_REF_TLS_MODEL (base
);
1553 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1555 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1557 return nios2_legitimate_constant_p (mode
, x
) == false;
1560 /* Return true if register REGNO is a valid base register.
1561 STRICT_P is true if REG_OK_STRICT is in effect. */
1564 nios2_regno_ok_for_base_p (int regno
, bool strict_p
)
1566 if (!HARD_REGISTER_NUM_P (regno
))
1574 regno
= reg_renumber
[regno
];
1577 /* The fake registers will be eliminated to either the stack or
1578 hard frame pointer, both of which are usually valid base registers.
1579 Reload deals with the cases where the eliminated form isn't valid. */
1580 return (GP_REG_P (regno
)
1581 || regno
== FRAME_POINTER_REGNUM
1582 || regno
== ARG_POINTER_REGNUM
);
1585 /* Return true if the address expression formed by BASE + OFFSET is
1588 nios2_valid_addr_expr_p (rtx base
, rtx offset
, bool strict_p
)
1590 if (!strict_p
&& GET_CODE (base
) == SUBREG
)
1591 base
= SUBREG_REG (base
);
1592 return (REG_P (base
)
1593 && nios2_regno_ok_for_base_p (REGNO (base
), strict_p
)
1594 && (offset
== NULL_RTX
1595 || const_arith_operand (offset
, Pmode
)
1596 || nios2_unspec_reloc_p (offset
)));
1599 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1601 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
1602 rtx operand
, bool strict_p
)
1604 switch (GET_CODE (operand
))
1608 if (SYMBOL_REF_TLS_MODEL (operand
))
1611 /* Else, fall through. */
1613 if (gprel_constant_p (operand
))
1616 /* Else, fall through. */
1622 /* Register indirect. */
1624 return nios2_regno_ok_for_base_p (REGNO (operand
), strict_p
);
1626 /* Register indirect with displacement. */
1629 rtx op0
= XEXP (operand
, 0);
1630 rtx op1
= XEXP (operand
, 1);
1632 return (nios2_valid_addr_expr_p (op0
, op1
, strict_p
)
1633 || nios2_valid_addr_expr_p (op1
, op0
, strict_p
));
/* Return true if SECTION is a small section name, i.e. .sbss/.sdata
   or any of their dotted subsections.  */
static bool
nios2_small_section_name_p (const char *section)
{
  return (strcmp (section, ".sbss") == 0
	  || strncmp (section, ".sbss.", 6) == 0
	  || strcmp (section, ".sdata") == 0
	  || strncmp (section, ".sdata.", 7) == 0);
}
1652 /* Return true if EXP should be placed in the small data section. */
1654 nios2_in_small_data_p (const_tree exp
)
1656 /* We want to merge strings, so we never consider them small data. */
1657 if (TREE_CODE (exp
) == STRING_CST
)
1660 if (TREE_CODE (exp
) == VAR_DECL
)
1662 if (DECL_SECTION_NAME (exp
))
1664 const char *section
= DECL_SECTION_NAME (exp
);
1665 if (nios2_small_section_name_p (section
))
1670 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
1672 /* If this is an incomplete type with size 0, then we can't put it
1673 in sdata because it might be too big when completed. */
1675 && (unsigned HOST_WIDE_INT
) size
<= nios2_section_threshold
)
1683 /* Return true if symbol is in small data section. */
1686 nios2_symbol_ref_in_small_data_p (rtx sym
)
1690 gcc_assert (GET_CODE (sym
) == SYMBOL_REF
);
1691 decl
= SYMBOL_REF_DECL (sym
);
1693 /* TLS variables are not accessed through the GP. */
1694 if (SYMBOL_REF_TLS_MODEL (sym
) != 0)
1697 /* If the user has explicitly placed the symbol in a small data section
1698 via an attribute, generate gp-relative addressing even if the symbol
1699 is external, weak, or larger than we'd automatically put in the
1700 small data section. OTOH, if the symbol is located in some
1701 non-small-data section, we can't use gp-relative accesses on it
1702 unless the user has requested gpopt_data or gpopt_all. */
1704 switch (nios2_gpopt_option
)
1707 /* Don't generate a gp-relative addressing mode if that's been
1712 /* Use GP-relative addressing for small data symbols that are
1713 not external or weak, plus any symbols that have explicitly
1714 been placed in a small data section. */
1715 if (decl
&& DECL_SECTION_NAME (decl
))
1716 return nios2_small_section_name_p (DECL_SECTION_NAME (decl
));
1717 return (SYMBOL_REF_SMALL_P (sym
)
1718 && !SYMBOL_REF_EXTERNAL_P (sym
)
1719 && !(decl
&& DECL_WEAK (decl
)));
1722 /* Use GP-relative addressing for small data symbols, even if
1723 they are external or weak. Note that SYMBOL_REF_SMALL_P
1724 is also true of symbols that have explicitly been placed
1725 in a small data section. */
1726 return SYMBOL_REF_SMALL_P (sym
);
1729 /* Use GP-relative addressing for all data symbols regardless
1730 of the object size, but not for code symbols. This option
1731 is equivalent to the user asserting that the entire data
1732 section is accessible from the GP. */
1733 return !SYMBOL_REF_FUNCTION_P (sym
);
1736 /* Use GP-relative addressing for everything, including code.
1737 Effectively, the user has asserted that the entire program
1738 fits within the 64K range of the GP offset. */
1742 /* We shouldn't get here. */
1747 /* Implement TARGET_SECTION_TYPE_FLAGS. */
1750 nios2_section_type_flags (tree decl
, const char *name
, int reloc
)
1754 flags
= default_section_type_flags (decl
, name
, reloc
);
1756 if (nios2_small_section_name_p (name
))
1757 flags
|= SECTION_SMALL
;
1762 /* Return true if SYMBOL_REF X binds locally. */
1765 nios2_symbol_binds_local_p (const_rtx x
)
1767 return (SYMBOL_REF_DECL (x
)
1768 ? targetm
.binds_local_p (SYMBOL_REF_DECL (x
))
1769 : SYMBOL_REF_LOCAL_P (x
));
1772 /* Position independent code related. */
1774 /* Emit code to load the PIC register. */
1776 nios2_load_pic_register (void)
1778 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
1780 emit_insn (gen_load_got_register (pic_offset_table_rtx
, tmp
));
1781 emit_insn (gen_add3_insn (pic_offset_table_rtx
, pic_offset_table_rtx
, tmp
));
1784 /* Generate a PIC address as a MEM rtx. */
1786 nios2_load_pic_address (rtx sym
, int unspec
, rtx tmp
)
1789 && GET_CODE (sym
) == SYMBOL_REF
1790 && nios2_symbol_binds_local_p (sym
))
1791 /* Under -fPIC, generate a GOTOFF address for local symbols. */
1793 rtx offset
= nios2_unspec_offset (sym
, UNSPEC_PIC_GOTOFF_SYM
);
1794 crtl
->uses_pic_offset_table
= 1;
1795 return nios2_large_got_address (offset
, tmp
);
1798 return gen_const_mem (Pmode
, nios2_got_address (sym
, unspec
));
1801 /* Nonzero if the constant value X is a legitimate general operand
1802 when generating PIC code. It is given that flag_pic is on and
1803 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1805 nios2_legitimate_pic_operand_p (rtx x
)
1807 if (GET_CODE (x
) == CONST
1808 && GET_CODE (XEXP (x
, 0)) == UNSPEC
1809 && nios2_large_offset_p (XINT (XEXP (x
, 0), 1)))
1812 return ! (GET_CODE (x
) == SYMBOL_REF
1813 || GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == CONST
);
1816 /* Return TRUE if X is a thread-local symbol. */
1818 nios2_tls_symbol_p (rtx x
)
1820 return (targetm
.have_tls
&& GET_CODE (x
) == SYMBOL_REF
1821 && SYMBOL_REF_TLS_MODEL (x
) != 0);
1824 /* Legitimize addresses that are CONSTANT_P expressions. */
1826 nios2_legitimize_constant_address (rtx addr
)
1829 split_const (addr
, &base
, &offset
);
1831 if (nios2_tls_symbol_p (base
))
1832 base
= nios2_legitimize_tls_address (base
);
1834 base
= nios2_load_pic_address (base
, UNSPEC_PIC_SYM
, NULL_RTX
);
1838 if (offset
!= const0_rtx
)
1840 gcc_assert (can_create_pseudo_p ());
1841 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, base
),
1842 (CONST_INT_P (offset
)
1843 ? (SMALL_INT (INTVAL (offset
))
1844 ? offset
: force_reg (Pmode
, offset
))
1850 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1852 nios2_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
1853 machine_mode mode ATTRIBUTE_UNUSED
)
1856 return nios2_legitimize_constant_address (x
);
1858 /* For the TLS LE (Local Exec) model, the compiler may try to
1859 combine constant offsets with unspec relocs, creating address RTXs
1861 (plus:SI (reg:SI 23 r23)
1864 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
1865 (const_int 48 [0x30]))))
1867 This usually happens when 'var' is a thread-local struct variable,
1868 and access of a field in var causes the addend.
1870 We typically want this combining, so transform the above into this
1871 form, which is allowed:
1872 (plus:SI (reg:SI 23 r23)
1876 (plus:SI (symbol_ref:SI ("var"))
1877 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
1879 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
1880 if (GET_CODE (x
) == PLUS
1881 && GET_CODE (XEXP (x
, 0)) == REG
1882 && GET_CODE (XEXP (x
, 1)) == CONST
)
1884 rtx unspec
, offset
, reg
= XEXP (x
, 0);
1885 split_const (XEXP (x
, 1), &unspec
, &offset
);
1886 if (GET_CODE (unspec
) == UNSPEC
1887 && !nios2_large_offset_p (XINT (unspec
, 1))
1888 && offset
!= const0_rtx
)
1890 unspec
= copy_rtx (unspec
);
1891 XVECEXP (unspec
, 0, 0)
1892 = plus_constant (Pmode
, XVECEXP (unspec
, 0, 0), INTVAL (offset
));
1893 x
= gen_rtx_PLUS (Pmode
, reg
, gen_rtx_CONST (Pmode
, unspec
));
1901 nios2_delegitimize_address (rtx x
)
1903 x
= delegitimize_mem_from_attrs (x
);
1905 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == UNSPEC
)
1907 switch (XINT (XEXP (x
, 0), 1))
1909 case UNSPEC_PIC_SYM
:
1910 case UNSPEC_PIC_CALL_SYM
:
1911 case UNSPEC_PIC_GOTOFF_SYM
:
1912 case UNSPEC_ADD_TLS_GD
:
1913 case UNSPEC_ADD_TLS_LDM
:
1914 case UNSPEC_LOAD_TLS_IE
:
1915 case UNSPEC_ADD_TLS_LE
:
1916 x
= XVECEXP (XEXP (x
, 0), 0, 0);
1917 gcc_assert (CONSTANT_P (x
));
1924 /* Main expander function for RTL moves. */
1926 nios2_emit_move_sequence (rtx
*operands
, machine_mode mode
)
1928 rtx to
= operands
[0];
1929 rtx from
= operands
[1];
1931 if (!register_operand (to
, mode
) && !reg_or_0_operand (from
, mode
))
1933 gcc_assert (can_create_pseudo_p ());
1934 from
= copy_to_mode_reg (mode
, from
);
1937 if (GET_CODE (from
) == SYMBOL_REF
|| GET_CODE (from
) == LABEL_REF
1938 || (GET_CODE (from
) == CONST
1939 && GET_CODE (XEXP (from
, 0)) != UNSPEC
))
1940 from
= nios2_legitimize_constant_address (from
);
1947 /* The function with address *ADDR is being called. If the address
1948 needs to be loaded from the GOT, emit the instruction to do so and
1949 update *ADDR to point to the rtx for the loaded value.
1950 If REG != NULL_RTX, it is used as the target/scratch register in the
1951 GOT address calculation. */
1953 nios2_adjust_call_address (rtx
*call_op
, rtx reg
)
1955 if (MEM_P (*call_op
))
1956 call_op
= &XEXP (*call_op
, 0);
1958 rtx addr
= *call_op
;
1959 if (flag_pic
&& CONSTANT_P (addr
))
1961 rtx tmp
= reg
? reg
: NULL_RTX
;
1963 reg
= gen_reg_rtx (Pmode
);
1964 addr
= nios2_load_pic_address (addr
, UNSPEC_PIC_CALL_SYM
, tmp
);
1965 emit_insn (gen_rtx_SET (reg
, addr
));
1971 /* Output assembly language related definitions. */
1973 /* Print the operand OP to file stream FILE modified by LETTER.
1974 LETTER can be one of:
1976 i: print "i" if OP is an immediate, except 0
1977 o: print "io" if OP is volatile
1978 z: for const0_rtx print $0 instead of 0
1981 U: for upper half of 32 bit value
1982 D: for the upper 32-bits of a 64-bit double value
1983 R: prints reverse condition.
1986 nios2_print_operand (FILE *file
, rtx op
, int letter
)
1992 if (CONSTANT_P (op
) && op
!= const0_rtx
)
1993 fprintf (file
, "i");
1997 if (GET_CODE (op
) == MEM
1998 && ((MEM_VOLATILE_P (op
) && TARGET_BYPASS_CACHE_VOLATILE
)
1999 || TARGET_BYPASS_CACHE
))
2000 fprintf (file
, "io");
2007 if (comparison_operator (op
, VOIDmode
))
2009 enum rtx_code cond
= GET_CODE (op
);
2012 fprintf (file
, "%s", GET_RTX_NAME (cond
));
2017 fprintf (file
, "%s", GET_RTX_NAME (reverse_condition (cond
)));
2022 switch (GET_CODE (op
))
2025 if (letter
== 0 || letter
== 'z')
2027 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
2030 else if (letter
== 'D')
2032 fprintf (file
, "%s", reg_names
[REGNO (op
)+1]);
2038 if (INTVAL (op
) == 0 && letter
== 'z')
2040 fprintf (file
, "zero");
2046 HOST_WIDE_INT val
= INTVAL (op
);
2047 val
= (val
>> 16) & 0xFFFF;
2048 output_addr_const (file
, gen_int_mode (val
, SImode
));
2051 /* Else, fall through. */
2057 if (letter
== 0 || letter
== 'z')
2059 output_addr_const (file
, op
);
2062 else if (letter
== 'H' || letter
== 'L')
2064 fprintf (file
, "%%");
2065 if (GET_CODE (op
) == CONST
2066 && GET_CODE (XEXP (op
, 0)) == UNSPEC
)
2068 rtx unspec
= XEXP (op
, 0);
2069 int unspec_reloc
= XINT (unspec
, 1);
2070 gcc_assert (nios2_large_offset_p (unspec_reloc
));
2071 fprintf (file
, "%s_", nios2_unspec_reloc_name (unspec_reloc
));
2072 op
= XVECEXP (unspec
, 0, 0);
2074 fprintf (file
, letter
== 'H' ? "hiadj(" : "lo(");
2075 output_addr_const (file
, op
);
2076 fprintf (file
, ")");
2085 output_address (op
);
2093 output_addr_const (file
, op
);
2102 output_operand_lossage ("Unsupported operand for code '%c'", letter
);
2106 /* Return true if this is a GP-relative accessible reference. */
2108 gprel_constant_p (rtx op
)
2110 if (GET_CODE (op
) == SYMBOL_REF
2111 && nios2_symbol_ref_in_small_data_p (op
))
2113 else if (GET_CODE (op
) == CONST
2114 && GET_CODE (XEXP (op
, 0)) == PLUS
)
2115 return gprel_constant_p (XEXP (XEXP (op
, 0), 0));
2120 /* Return the name string for a supported unspec reloc offset. */
2122 nios2_unspec_reloc_name (int unspec
)
2126 case UNSPEC_PIC_SYM
:
2128 case UNSPEC_PIC_CALL_SYM
:
2130 case UNSPEC_PIC_GOTOFF_SYM
:
2132 case UNSPEC_LOAD_TLS_IE
:
2134 case UNSPEC_ADD_TLS_LE
:
2136 case UNSPEC_ADD_TLS_GD
:
2138 case UNSPEC_ADD_TLS_LDM
:
2140 case UNSPEC_ADD_TLS_LDO
:
2147 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2149 nios2_output_addr_const_extra (FILE *file
, rtx op
)
2152 gcc_assert (GET_CODE (op
) == UNSPEC
);
2154 /* Support for printing out const unspec relocations. */
2155 name
= nios2_unspec_reloc_name (XINT (op
, 1));
2158 fprintf (file
, "%%%s(", name
);
2159 output_addr_const (file
, XVECEXP (op
, 0, 0));
2160 fprintf (file
, ")");
2166 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
2168 nios2_print_operand_address (FILE *file
, rtx op
)
2170 switch (GET_CODE (op
))
2177 if (gprel_constant_p (op
))
2179 fprintf (file
, "%%gprel(");
2180 output_addr_const (file
, op
);
2181 fprintf (file
, ")(%s)", reg_names
[GP_REGNO
]);
2189 rtx op0
= XEXP (op
, 0);
2190 rtx op1
= XEXP (op
, 1);
2192 if (REG_P (op0
) && CONSTANT_P (op1
))
2194 output_addr_const (file
, op1
);
2195 fprintf (file
, "(%s)", reg_names
[REGNO (op0
)]);
2198 else if (REG_P (op1
) && CONSTANT_P (op0
))
2200 output_addr_const (file
, op0
);
2201 fprintf (file
, "(%s)", reg_names
[REGNO (op1
)]);
2208 fprintf (file
, "0(%s)", reg_names
[REGNO (op
)]);
2213 rtx base
= XEXP (op
, 0);
2214 nios2_print_operand_address (file
, base
);
2221 fprintf (stderr
, "Missing way to print address\n");
2226 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2228 nios2_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
2230 gcc_assert (size
== 4);
2231 fprintf (file
, "\t.4byte\t%%tls_ldo(");
2232 output_addr_const (file
, x
);
2233 fprintf (file
, ")");
2236 /* Implemet TARGET_ASM_FILE_END. */
2239 nios2_asm_file_end (void)
2241 /* The Nios II Linux stack is mapped non-executable by default, so add a
2242 .note.GNU-stack section for switching to executable stacks only when
2243 trampolines are generated. */
2244 if (TARGET_LINUX_ABI
&& trampolines_created
)
2245 file_end_indicate_exec_stack ();
2248 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2250 nios2_asm_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
2252 if (flag_verbose_asm
|| flag_debug_asm
)
2254 nios2_compute_frame_layout ();
2255 nios2_dump_frame_layout (file
);
2259 /* Emit assembly of custom FPU instructions. */
2261 nios2_fpu_insn_asm (enum n2fpu_code code
)
2263 static char buf
[256];
2264 const char *op1
, *op2
, *op3
;
2265 int ln
= 256, n
= 0;
2267 int N
= N2FPU_N (code
);
2268 int num_operands
= N2FPU (code
).num_operands
;
2269 const char *insn_name
= N2FPU_NAME (code
);
2270 tree ftype
= nios2_ftype (N2FPU_FTCODE (code
));
2271 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (ftype
));
2272 machine_mode src_mode
= TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype
)));
2274 /* Prepare X register for DF input operands. */
2275 if (GET_MODE_SIZE (src_mode
) == 8 && num_operands
== 3)
2276 n
= snprintf (buf
, ln
, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2277 N2FPU_N (n2fpu_fwrx
));
2279 if (src_mode
== SFmode
)
2281 if (dst_mode
== VOIDmode
)
2283 /* The fwry case. */
2290 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2292 op3
= (num_operands
== 2 ? "zero" : "%2");
2295 else if (src_mode
== DFmode
)
2297 if (dst_mode
== VOIDmode
)
2299 /* The fwrx case. */
2307 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2308 op2
= (num_operands
== 2 ? "%1" : "%2");
2309 op3
= (num_operands
== 2 ? "%D1" : "%D2");
2312 else if (src_mode
== VOIDmode
)
2314 /* frdxlo, frdxhi, frdy cases. */
2315 gcc_assert (dst_mode
== SFmode
);
2319 else if (src_mode
== SImode
)
2321 /* Conversion operators. */
2322 gcc_assert (num_operands
== 2);
2323 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2330 /* Main instruction string. */
2331 n
+= snprintf (buf
+ n
, ln
- n
, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2332 N
, op1
, op2
, op3
, insn_name
,
2333 (num_operands
>= 2 ? ", %1" : ""),
2334 (num_operands
== 3 ? ", %2" : ""));
2336 /* Extraction of Y register for DF results. */
2337 if (dst_mode
== DFmode
)
2338 snprintf (buf
+ n
, ln
- n
, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2339 N2FPU_N (n2fpu_frdy
));
2345 /* Function argument related. */
2347 /* Define where to put the arguments to a function. Value is zero to
2348 push the argument on the stack, or a hard register in which to
2351 MODE is the argument's machine mode.
2352 TYPE is the data type of the argument (as a tree).
2353 This is null for libcalls where that information may
2355 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2356 the preceding args and about the function being called.
2357 NAMED is nonzero if this argument is a named parameter
2358 (otherwise it is an extra parameter matching an ellipsis). */
2361 nios2_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
2362 const_tree type ATTRIBUTE_UNUSED
,
2363 bool named ATTRIBUTE_UNUSED
)
2365 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2366 rtx return_rtx
= NULL_RTX
;
2368 if (cum
->regs_used
< NUM_ARG_REGS
)
2369 return_rtx
= gen_rtx_REG (mode
, FIRST_ARG_REGNO
+ cum
->regs_used
);
2374 /* Return number of bytes, at the beginning of the argument, that must be
2375 put in registers. 0 is the argument is entirely in registers or entirely
2379 nios2_arg_partial_bytes (cumulative_args_t cum_v
,
2380 machine_mode mode
, tree type ATTRIBUTE_UNUSED
,
2381 bool named ATTRIBUTE_UNUSED
)
2383 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2384 HOST_WIDE_INT param_size
;
2386 if (mode
== BLKmode
)
2388 param_size
= int_size_in_bytes (type
);
2389 gcc_assert (param_size
>= 0);
2392 param_size
= GET_MODE_SIZE (mode
);
2394 /* Convert to words (round up). */
2395 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2397 if (cum
->regs_used
< NUM_ARG_REGS
2398 && cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2399 return (NUM_ARG_REGS
- cum
->regs_used
) * UNITS_PER_WORD
;
2404 /* Update the data in CUM to advance over an argument of mode MODE
2405 and data type TYPE; TYPE is null for libcalls where that information
2406 may not be available. */
2409 nios2_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
2410 const_tree type ATTRIBUTE_UNUSED
,
2411 bool named ATTRIBUTE_UNUSED
)
2413 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2414 HOST_WIDE_INT param_size
;
2416 if (mode
== BLKmode
)
2418 param_size
= int_size_in_bytes (type
);
2419 gcc_assert (param_size
>= 0);
2422 param_size
= GET_MODE_SIZE (mode
);
2424 /* Convert to words (round up). */
2425 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2427 if (cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2428 cum
->regs_used
= NUM_ARG_REGS
;
2430 cum
->regs_used
+= param_size
;
2434 nios2_function_arg_padding (machine_mode mode
, const_tree type
)
2436 /* On little-endian targets, the first byte of every stack argument
2437 is passed in the first byte of the stack slot. */
2438 if (!BYTES_BIG_ENDIAN
)
2441 /* Otherwise, integral types are padded downward: the last byte of a
2442 stack argument is passed in the last byte of the stack slot. */
2444 ? INTEGRAL_TYPE_P (type
) || POINTER_TYPE_P (type
)
2445 : GET_MODE_CLASS (mode
) == MODE_INT
)
2448 /* Arguments smaller than a stack slot are padded downward. */
2449 if (mode
!= BLKmode
)
2450 return (GET_MODE_BITSIZE (mode
) >= PARM_BOUNDARY
) ? upward
: downward
;
2452 return ((int_size_in_bytes (type
) >= (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2453 ? upward
: downward
);
2457 nios2_block_reg_padding (machine_mode mode
, tree type
,
2458 int first ATTRIBUTE_UNUSED
)
2460 return nios2_function_arg_padding (mode
, type
);
2463 /* Emit RTL insns to initialize the variable parts of a trampoline.
2464 FNADDR is an RTX for the address of the function's pure code.
2465 CXT is an RTX for the static chain value for the function.
2466 On Nios II, we handle this by a library call. */
2468 nios2_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
2470 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2471 rtx ctx_reg
= force_reg (Pmode
, cxt
);
2472 rtx addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
2474 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__trampoline_setup"),
2475 LCT_NORMAL
, VOIDmode
, 3, addr
, Pmode
, fnaddr
, Pmode
,
2479 /* Implement TARGET_FUNCTION_VALUE. */
2481 nios2_function_value (const_tree ret_type
, const_tree fn ATTRIBUTE_UNUSED
,
2482 bool outgoing ATTRIBUTE_UNUSED
)
2484 return gen_rtx_REG (TYPE_MODE (ret_type
), FIRST_RETVAL_REGNO
);
2487 /* Implement TARGET_LIBCALL_VALUE. */
2489 nios2_libcall_value (machine_mode mode
, const_rtx fun ATTRIBUTE_UNUSED
)
2491 return gen_rtx_REG (mode
, FIRST_RETVAL_REGNO
);
2494 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
2496 nios2_function_value_regno_p (const unsigned int regno
)
2498 return regno
== FIRST_RETVAL_REGNO
;
2501 /* Implement TARGET_RETURN_IN_MEMORY. */
2503 nios2_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2505 return (int_size_in_bytes (type
) > (2 * UNITS_PER_WORD
)
2506 || int_size_in_bytes (type
) == -1);
2509 /* TODO: It may be possible to eliminate the copyback and implement
2512 nios2_setup_incoming_varargs (cumulative_args_t cum_v
,
2513 machine_mode mode
, tree type
,
2514 int *pretend_size
, int second_time
)
2516 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2517 CUMULATIVE_ARGS local_cum
;
2518 cumulative_args_t local_cum_v
= pack_cumulative_args (&local_cum
);
2523 nios2_function_arg_advance (local_cum_v
, mode
, type
, 1);
2525 regs_to_push
= NUM_ARG_REGS
- local_cum
.regs_used
;
2527 if (!second_time
&& regs_to_push
> 0)
2529 rtx ptr
= virtual_incoming_args_rtx
;
2530 rtx mem
= gen_rtx_MEM (BLKmode
, ptr
);
2531 emit_insn (gen_blockage ());
2532 move_block_from_reg (local_cum
.regs_used
+ FIRST_ARG_REGNO
, mem
,
2534 emit_insn (gen_blockage ());
2537 pret_size
= regs_to_push
* UNITS_PER_WORD
;
2539 *pretend_size
= pret_size
;
2544 /* Init FPU builtins. */
2546 nios2_init_fpu_builtins (int start_code
)
2549 char builtin_name
[64] = "__builtin_custom_";
2550 unsigned int i
, n
= strlen ("__builtin_custom_");
2552 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2554 snprintf (builtin_name
+ n
, sizeof (builtin_name
) - n
,
2555 "%s", N2FPU_NAME (i
));
2557 add_builtin_function (builtin_name
, nios2_ftype (N2FPU_FTCODE (i
)),
2558 start_code
+ i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
2559 nios2_register_builtin_fndecl (start_code
+ i
, fndecl
);
2563 /* Helper function for expanding FPU builtins. */
2565 nios2_expand_fpu_builtin (tree exp
, unsigned int code
, rtx target
)
2567 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2568 enum insn_code icode
= N2FPU_ICODE (code
);
2569 int nargs
, argno
, opno
= 0;
2570 int num_operands
= N2FPU (code
).num_operands
;
2571 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (exp
));
2572 bool has_target_p
= (dst_mode
!= VOIDmode
);
2574 if (N2FPU_N (code
) < 0)
2575 fatal_error (input_location
,
2576 "Cannot call %<__builtin_custom_%s%> without specifying switch"
2577 " %<-mcustom-%s%>", N2FPU_NAME (code
), N2FPU_NAME (code
));
2579 create_output_operand (&ops
[opno
++], target
, dst_mode
);
2581 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
2583 nargs
= call_expr_nargs (exp
);
2584 for (argno
= 0; argno
< nargs
; argno
++)
2586 tree arg
= CALL_EXPR_ARG (exp
, argno
);
2587 create_input_operand (&ops
[opno
++], expand_normal (arg
),
2588 TYPE_MODE (TREE_TYPE (arg
)));
2590 if (!maybe_expand_insn (icode
, num_operands
, ops
))
2592 error ("invalid argument to built-in function");
2593 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
2595 return has_target_p
? ops
[0].value
: const0_rtx
;
/* NOTE(review): this region was damaged during text extraction — the
   original statement layout is split across lines and the leading
   numbers are original-file line numbers, not code.  Preserved
   verbatim; restore from upstream before compiling.  */
2598 /* Nios II has custom instruction built-in functions of the forms:
2601    __builtin_custom_nXX
2603    __builtin_custom_XnX
2604    __builtin_custom_XnXX
2606    where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
2607    Therefore with 0-1 return values, and 0-2 arguments, we have a
2608    total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
/* Count of all __builtin_custom_* name variants; the names (sans
   prefix) are cached below for use in diagnostics.  */
2610 #define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
2611 static char custom_builtin_name
[NUM_CUSTOM_BUILTINS
][5];
2614 nios2_init_custom_builtins (int start_code
)
2616 tree builtin_ftype
, ret_type
, fndecl
;
2617 char builtin_name
[32] = "__builtin_custom_";
2618 int n
= strlen ("__builtin_custom_");
2619 int builtin_code
= 0;
2620 int lhs
, rhs1
, rhs2
;
2622 struct { tree type
; const char *c
; } op
[4];
2623 /* z */ op
[0].c
= ""; op
[0].type
= NULL_TREE
;
2624 /* f */ op
[1].c
= "f"; op
[1].type
= float_type_node
;
2625 /* i */ op
[2].c
= "i"; op
[2].type
= integer_type_node
;
2626 /* p */ op
[3].c
= "p"; op
[3].type
= ptr_type_node
;
2628 /* We enumerate through the possible operand types to create all the
2629 __builtin_custom_XnXX function tree types. Note that these may slightly
2630 overlap with the function types created for other fixed builtins. */
2632 for (lhs
= 0; lhs
< 4; lhs
++)
2633 for (rhs1
= 0; rhs1
< 4; rhs1
++)
2634 for (rhs2
= 0; rhs2
< 4; rhs2
++)
2636 if (rhs1
== 0 && rhs2
!= 0)
2638 ret_type
= (op
[lhs
].type
? op
[lhs
].type
: void_type_node
);
2640 = build_function_type_list (ret_type
, integer_type_node
,
2641 op
[rhs1
].type
, op
[rhs2
].type
,
2643 snprintf (builtin_name
+ n
, 32 - n
, "%sn%s%s",
2644 op
[lhs
].c
, op
[rhs1
].c
, op
[rhs2
].c
);
2645 /* Save copy of parameter string into custom_builtin_name[]. */
2646 strncpy (custom_builtin_name
[builtin_code
], builtin_name
+ n
, 5);
2648 add_builtin_function (builtin_name
, builtin_ftype
,
2649 start_code
+ builtin_code
,
2650 BUILT_IN_MD
, NULL
, NULL_TREE
);
2651 nios2_register_builtin_fndecl (start_code
+ builtin_code
, fndecl
);
2656 /* Helper function for expanding custom builtins. */
2658 nios2_expand_custom_builtin (tree exp
, unsigned int index
, rtx target
)
2660 bool has_target_p
= (TREE_TYPE (exp
) != void_type_node
);
2661 machine_mode tmode
= VOIDmode
;
2663 rtx value
, insn
, unspec_args
[3];
2669 tmode
= TYPE_MODE (TREE_TYPE (exp
));
2670 if (!target
|| GET_MODE (target
) != tmode
2672 target
= gen_reg_rtx (tmode
);
2675 nargs
= call_expr_nargs (exp
);
2676 for (argno
= 0; argno
< nargs
; argno
++)
2678 arg
= CALL_EXPR_ARG (exp
, argno
);
2679 value
= expand_normal (arg
);
2680 unspec_args
[argno
] = value
;
2683 if (!custom_insn_opcode (value
, VOIDmode
))
2684 error ("custom instruction opcode must be compile time "
2685 "constant in the range 0-255 for __builtin_custom_%s",
2686 custom_builtin_name
[index
]);
2689 /* For other arguments, force into a register. */
2690 unspec_args
[argno
] = force_reg (TYPE_MODE (TREE_TYPE (arg
)),
2691 unspec_args
[argno
]);
2693 /* Fill remaining unspec operands with zero. */
2694 for (; argno
< 3; argno
++)
2695 unspec_args
[argno
] = const0_rtx
;
2697 insn
= (has_target_p
2698 ? gen_rtx_SET (target
,
2699 gen_rtx_UNSPEC_VOLATILE (tmode
,
2700 gen_rtvec_v (3, unspec_args
),
2701 UNSPECV_CUSTOM_XNXX
))
2702 : gen_rtx_UNSPEC_VOLATILE (VOIDmode
, gen_rtvec_v (3, unspec_args
),
2703 UNSPECV_CUSTOM_NXX
));
2705 return has_target_p
? target
: const0_rtx
;
/* NOTE(review): this region was damaged during text extraction — the
   original statement layout is split across lines and the leading
   numbers are original-file line numbers, not code.  Preserved
   verbatim; restore from upstream before compiling.  */
2711 /* Main definition of built-in functions. Nios II has a small number of fixed
2712 builtins, plus a large number of FPU insn builtins, and builtins for
2713 generating custom instructions. */
/* Descriptor for one fixed builtin: its insn pattern and type code.  */
2715 struct nios2_builtin_desc
2717 enum insn_code icode
;
2718 enum nios2_ftcode ftype
;
/* X-macro table of the fixed builtins; expanded twice below to build
   the enum of codes and the descriptor array in matching order.  */
2722 #define N2_BUILTINS \
2723 N2_BUILTIN_DEF (sync, N2_FTYPE_VOID_VOID) \
2724 N2_BUILTIN_DEF (ldbio, N2_FTYPE_SI_CVPTR) \
2725 N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR) \
2726 N2_BUILTIN_DEF (ldhio, N2_FTYPE_SI_CVPTR) \
2727 N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR) \
2728 N2_BUILTIN_DEF (ldwio, N2_FTYPE_SI_CVPTR) \
2729 N2_BUILTIN_DEF (stbio, N2_FTYPE_VOID_VPTR_SI) \
2730 N2_BUILTIN_DEF (sthio, N2_FTYPE_VOID_VPTR_SI) \
2731 N2_BUILTIN_DEF (stwio, N2_FTYPE_VOID_VPTR_SI) \
2732 N2_BUILTIN_DEF (rdctl, N2_FTYPE_SI_SI) \
2733 N2_BUILTIN_DEF (wrctl, N2_FTYPE_VOID_SI_SI)
2735 enum nios2_builtin_code
{
2736 #define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
2738 #undef N2_BUILTIN_DEF
2739 NUM_FIXED_NIOS2_BUILTINS
2742 static const struct nios2_builtin_desc nios2_builtins
[] = {
2743 #define N2_BUILTIN_DEF(name, ftype) \
2744 { CODE_FOR_ ## name, ftype, "__builtin_" #name },
2746 #undef N2_BUILTIN_DEF
2749 /* Start/ends of FPU/custom insn builtin index ranges. */
2750 static unsigned int nios2_fpu_builtin_base
;
2751 static unsigned int nios2_custom_builtin_base
;
2752 static unsigned int nios2_custom_builtin_end
;
2754 /* Implement TARGET_INIT_BUILTINS. */
2756 nios2_init_builtins (void)
2760 /* Initialize fixed builtins. */
2761 for (i
= 0; i
< ARRAY_SIZE (nios2_builtins
); i
++)
2763 const struct nios2_builtin_desc
*d
= &nios2_builtins
[i
];
2765 add_builtin_function (d
->name
, nios2_ftype (d
->ftype
), i
,
2766 BUILT_IN_MD
, NULL
, NULL
);
2767 nios2_register_builtin_fndecl (i
, fndecl
);
2770 /* Initialize FPU builtins. */
2771 nios2_fpu_builtin_base
= ARRAY_SIZE (nios2_builtins
);
2772 nios2_init_fpu_builtins (nios2_fpu_builtin_base
);
2774 /* Initialize custom insn builtins. */
2775 nios2_custom_builtin_base
2776 = nios2_fpu_builtin_base
+ ARRAY_SIZE (nios2_fpu_insn
);
2777 nios2_custom_builtin_end
2778 = nios2_custom_builtin_base
+ NUM_CUSTOM_BUILTINS
;
2779 nios2_init_custom_builtins (nios2_custom_builtin_base
);
/* NOTE(review): damaged by extraction; leading numbers are
   original-file line numbers.  Preserved verbatim.  */
2782 /* Array of fndecls for TARGET_BUILTIN_DECL. */
2783 #define NIOS2_NUM_BUILTINS \
2784 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
2785 static GTY(()) tree nios2_builtin_decls
[NIOS2_NUM_BUILTINS
];
2788 nios2_register_builtin_fndecl (unsigned code
, tree fndecl
)
2790 nios2_builtin_decls
[code
] = fndecl
;
2793 /* Implement TARGET_BUILTIN_DECL. */
2795 nios2_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
2797 gcc_assert (nios2_custom_builtin_end
== ARRAY_SIZE (nios2_builtin_decls
));
2799 if (code
>= nios2_custom_builtin_end
)
2800 return error_mark_node
;
2802 if (code
>= nios2_fpu_builtin_base
2803 && code
< nios2_custom_builtin_base
2804 && ! N2FPU_ENABLED_P (code
- nios2_fpu_builtin_base
))
2805 return error_mark_node
;
2807 return nios2_builtin_decls
[code
];
2811 /* Low-level built-in expand routine. */
2813 nios2_expand_builtin_insn (const struct nios2_builtin_desc
*d
, int n
,
2814 struct expand_operand
*ops
, bool has_target_p
)
2816 if (maybe_expand_insn (d
->icode
, n
, ops
))
2817 return has_target_p
? ops
[0].value
: const0_rtx
;
2820 error ("invalid argument to built-in function %s", d
->name
);
2821 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
2825 /* Expand ldio/stio form load-store instruction builtins. */
2827 nios2_expand_ldstio_builtin (tree exp
, rtx target
,
2828 const struct nios2_builtin_desc
*d
)
2832 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2833 machine_mode mode
= insn_data
[d
->icode
].operand
[0].mode
;
2835 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
2836 mem
= gen_rtx_MEM (mode
, addr
);
2838 if (insn_data
[d
->icode
].operand
[0].allows_mem
)
2841 val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2842 if (CONST_INT_P (val
))
2843 val
= force_reg (mode
, gen_int_mode (INTVAL (val
), mode
));
2844 val
= simplify_gen_subreg (mode
, val
, GET_MODE (val
), 0);
2845 create_output_operand (&ops
[0], mem
, mode
);
2846 create_input_operand (&ops
[1], val
, mode
);
2847 has_target_p
= false;
2852 create_output_operand (&ops
[0], target
, mode
);
2853 create_input_operand (&ops
[1], mem
, mode
);
2854 has_target_p
= true;
2856 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
2859 /* Expand rdctl/wrctl builtins. */
2861 nios2_expand_rdwrctl_builtin (tree exp
, rtx target
,
2862 const struct nios2_builtin_desc
*d
)
2864 bool has_target_p
= (insn_data
[d
->icode
].operand
[0].predicate
2865 == register_operand
);
2866 rtx ctlcode
= expand_normal (CALL_EXPR_ARG (exp
, 0));
2867 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2868 if (!rdwrctl_operand (ctlcode
, VOIDmode
))
2870 error ("Control register number must be in range 0-31 for %s",
2872 return has_target_p
? gen_reg_rtx (SImode
) : const0_rtx
;
2876 create_output_operand (&ops
[0], target
, SImode
);
2877 create_integer_operand (&ops
[1], INTVAL (ctlcode
));
2881 rtx val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2882 create_integer_operand (&ops
[0], INTVAL (ctlcode
));
2883 create_input_operand (&ops
[1], val
, SImode
);
2885 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
2888 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
2889 a built-in function, with result going to TARGET if that's convenient
2890 (and in mode MODE if that's convenient).
2891 SUBTARGET may be used as the target for computing one of EXP's operands.
2892 IGNORE is nonzero if the value is to be ignored. */
2895 nios2_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
2896 machine_mode mode ATTRIBUTE_UNUSED
,
2897 int ignore ATTRIBUTE_UNUSED
)
2899 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
2900 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
2902 if (fcode
< nios2_fpu_builtin_base
)
2904 const struct nios2_builtin_desc
*d
= &nios2_builtins
[fcode
];
2908 case NIOS2_BUILTIN_sync
:
2909 emit_insn (gen_sync ());
2912 case NIOS2_BUILTIN_ldbio
:
2913 case NIOS2_BUILTIN_ldbuio
:
2914 case NIOS2_BUILTIN_ldhio
:
2915 case NIOS2_BUILTIN_ldhuio
:
2916 case NIOS2_BUILTIN_ldwio
:
2917 case NIOS2_BUILTIN_stbio
:
2918 case NIOS2_BUILTIN_sthio
:
2919 case NIOS2_BUILTIN_stwio
:
2920 return nios2_expand_ldstio_builtin (exp
, target
, d
);
2922 case NIOS2_BUILTIN_rdctl
:
2923 case NIOS2_BUILTIN_wrctl
:
2924 return nios2_expand_rdwrctl_builtin (exp
, target
, d
);
2930 else if (fcode
< nios2_custom_builtin_base
)
2931 /* FPU builtin range. */
2932 return nios2_expand_fpu_builtin (exp
, fcode
- nios2_fpu_builtin_base
,
2934 else if (fcode
< nios2_custom_builtin_end
)
2935 /* Custom insn builtin range. */
2936 return nios2_expand_custom_builtin (exp
, fcode
- nios2_custom_builtin_base
,
2942 /* Implement TARGET_INIT_LIBFUNCS. */
2944 nios2_init_libfuncs (void)
2946 /* For Linux, we have access to kernel support for atomic operations. */
2947 if (TARGET_LINUX_ABI
)
2948 init_sync_libfuncs (UNITS_PER_WORD
);
2953 /* Register a custom code use, and signal error if a conflict was found. */
2955 nios2_register_custom_code (unsigned int N
, enum nios2_ccs_code status
,
2958 gcc_assert (N
<= 255);
2960 if (status
== CCS_FPU
)
2962 if (custom_code_status
[N
] == CCS_FPU
&& index
!= custom_code_index
[N
])
2964 custom_code_conflict
= true;
2965 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
2966 N2FPU_NAME (custom_code_index
[N
]), N2FPU_NAME (index
));
2968 else if (custom_code_status
[N
] == CCS_BUILTIN_CALL
)
2970 custom_code_conflict
= true;
2971 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2972 "%<-mcustom-%s%>", custom_builtin_name
[custom_code_index
[N
]],
2973 N2FPU_NAME (index
));
2976 else if (status
== CCS_BUILTIN_CALL
)
2978 if (custom_code_status
[N
] == CCS_FPU
)
2980 custom_code_conflict
= true;
2981 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2982 "%<-mcustom-%s%>", custom_builtin_name
[index
],
2983 N2FPU_NAME (custom_code_index
[N
]));
2987 /* Note that code conflicts between different __builtin_custom_xnxx
2988 calls are not checked. */
2994 custom_code_status
[N
] = status
;
2995 custom_code_index
[N
] = index
;
2998 /* Mark a custom code as not in use. */
3000 nios2_deregister_custom_code (unsigned int N
)
3004 custom_code_status
[N
] = CCS_UNUSED
;
3005 custom_code_index
[N
] = 0;
3009 /* Target attributes can affect per-function option state, so we need to
3010 save/restore the custom code tracking info using the
3011 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
3014 nios2_option_save (struct cl_target_option
*ptr
,
3015 struct gcc_options
*opts ATTRIBUTE_UNUSED
)
3018 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3019 ptr
->saved_fpu_custom_code
[i
] = N2FPU_N (i
);
3020 memcpy (ptr
->saved_custom_code_status
, custom_code_status
,
3021 sizeof (custom_code_status
));
3022 memcpy (ptr
->saved_custom_code_index
, custom_code_index
,
3023 sizeof (custom_code_index
));
3027 nios2_option_restore (struct gcc_options
*opts ATTRIBUTE_UNUSED
,
3028 struct cl_target_option
*ptr
)
3031 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3032 N2FPU_N (i
) = ptr
->saved_fpu_custom_code
[i
];
3033 memcpy (custom_code_status
, ptr
->saved_custom_code_status
,
3034 sizeof (custom_code_status
));
3035 memcpy (custom_code_index
, ptr
->saved_custom_code_index
,
3036 sizeof (custom_code_index
));
3039 /* Inner function to process the attribute((target(...))), take an argument and
3040    set the current options from the argument. If we have a list, recursively
3041    go over the list. */
/* NOTE(review): this function body was badly damaged during text
   extraction — statements are split across lines, interior lines are
   missing, and the leading numbers are original-file line numbers.
   Preserved verbatim; it parses a comma-separated option string
   (handling "no-", "custom-fpu-cfg=", and "custom-<insn>[=N]" forms)
   but must be restored from upstream before compiling.  */
3044 nios2_valid_target_attribute_rec (tree args
)
3046   if (TREE_CODE (args
) == TREE_LIST
)
/* A TREE_LIST argument is processed element-by-element, recursively.  */
3049       for (; args
; args
 = TREE_CHAIN (args
))
3050 	if (TREE_VALUE (args
)
3051 	    && !nios2_valid_target_attribute_rec (TREE_VALUE (args
)))
3055   else if (TREE_CODE (args
) == STRING_CST
)
/* A STRING_CST is scanned as comma-separated "name[=value]" items.  */
3057       char *argstr
 = ASTRDUP (TREE_STRING_POINTER (args
));
3058       while (argstr
 && *argstr
 != '\0')
3060 	  bool no_opt
 = false, end_p
 = false;
3061 	  char *eq
 = NULL
, *p
;
3062 	  while (ISSPACE (*argstr
))
3065 	  while (*p
 != '\0' && *p
 != ',')
3067 	      if (!eq
 && *p
 == '=')
3077 	  if (!strncmp (argstr
, "no-", 3))
3082 	  if (!strncmp (argstr
, "custom-fpu-cfg", 14))
3087 		  error ("custom-fpu-cfg option does not support %<no-%>");
3092 		  error ("custom-fpu-cfg option requires configuration"
3096 	      /* Increment and skip whitespace.  */
3097 	      while (ISSPACE (*(++eq
))) ;
3098 	      /* Decrement and skip to before any trailing whitespace.  */
3099 	      while (ISSPACE (*(--end_eq
))) ;
3101 	      nios2_handle_custom_fpu_cfg (eq
, end_eq
 + 1, true);
3103 	  else if (!strncmp (argstr
, "custom-", 7))
/* Look the insn name up in the FPU insn table.  */
3107 	      for (i
 = 0; i
 < ARRAY_SIZE (nios2_fpu_insn
); i
++)
3108 		if (!strncmp (argstr
 + 7, N2FPU_NAME (i
),
3109 			      strlen (N2FPU_NAME (i
))))
3121 		      error ("%<no-custom-%s%> does not accept arguments",
3125 		      /* Disable option by setting to -1.  */
3126 		      nios2_deregister_custom_code (N2FPU_N (code
));
3127 		      N2FPU_N (code
) = -1;
3133 		      while (ISSPACE (*(++eq
))) ;
3136 			  error ("%<custom-%s=%> requires argument",
3140 		      for (t
 = eq
; t
 != p
; ++t
)
3146 			      error ("`custom-%s=' argument requires "
3147 				     "numeric digits", N2FPU_NAME (code
));
3151 		      /* Set option to argument.  */
3152 		      N2FPU_N (code
) = atoi (eq
);
3153 		      nios2_handle_custom_fpu_insn_option (code
);
3158 		  error ("%<custom-%s=%> is not recognised as FPU instruction",
3165 	      error ("%<%s%> is unknown", argstr
);
3180 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
3183 nios2_valid_target_attribute_tree (tree args
)
3185 if (!nios2_valid_target_attribute_rec (args
))
3187 nios2_custom_check_insns ();
3188 return build_target_option_node (&global_options
);
3191 /* Hook to validate attribute((target("string"))). */
3194 nios2_valid_target_attribute_p (tree fndecl
, tree
ARG_UNUSED (name
),
3195 tree args
, int ARG_UNUSED (flags
))
3197 struct cl_target_option cur_target
;
3199 tree old_optimize
= build_optimization_node (&global_options
);
3200 tree new_target
, new_optimize
;
3201 tree func_optimize
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
);
3203 /* If the function changed the optimization levels as well as setting target
3204 options, start with the optimizations specified. */
3205 if (func_optimize
&& func_optimize
!= old_optimize
)
3206 cl_optimization_restore (&global_options
,
3207 TREE_OPTIMIZATION (func_optimize
));
3209 /* The target attributes may also change some optimization flags, so update
3210 the optimization options if necessary. */
3211 cl_target_option_save (&cur_target
, &global_options
);
3212 new_target
= nios2_valid_target_attribute_tree (args
);
3213 new_optimize
= build_optimization_node (&global_options
);
3220 DECL_FUNCTION_SPECIFIC_TARGET (fndecl
) = new_target
;
3222 if (old_optimize
!= new_optimize
)
3223 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
) = new_optimize
;
3226 cl_target_option_restore (&global_options
, &cur_target
);
3228 if (old_optimize
!= new_optimize
)
3229 cl_optimization_restore (&global_options
,
3230 TREE_OPTIMIZATION (old_optimize
));
/* NOTE(review): damaged by extraction; leading numbers are
   original-file line numbers.  Preserved verbatim.  */
3234 /* Remember the last target of nios2_set_current_function. */
3235 static GTY(()) tree nios2_previous_fndecl
;
3237 /* Establish appropriate back-end context for processing the function
3238 FNDECL. The argument might be NULL to indicate processing at top
3239 level, outside of any function scope. */
3241 nios2_set_current_function (tree fndecl
)
3243 tree old_tree
= (nios2_previous_fndecl
3244 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl
)
3247 tree new_tree
= (fndecl
3248 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl
)
3251 if (fndecl
&& fndecl
!= nios2_previous_fndecl
)
3253 nios2_previous_fndecl
= fndecl
;
3254 if (old_tree
== new_tree
)
3259 cl_target_option_restore (&global_options
,
3260 TREE_TARGET_OPTION (new_tree
));
3266 struct cl_target_option
*def
3267 = TREE_TARGET_OPTION (target_option_current_node
);
3269 cl_target_option_restore (&global_options
, def
);
3275 /* Hook to validate the current #pragma GCC target and set the FPU custom
3276 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3279 nios2_pragma_target_parse (tree args
, tree pop_target
)
3284 cur_tree
= ((pop_target
)
3286 : target_option_default_node
);
3287 cl_target_option_restore (&global_options
,
3288 TREE_TARGET_OPTION (cur_tree
));
3292 cur_tree
= nios2_valid_target_attribute_tree (args
);
3297 target_option_current_node
= cur_tree
;
3301 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
3302 We are just using this hook to add some additional error checking to
3303 the default behavior. GCC does not provide a target hook for merging
3304 the target options, and only correctly handles merging empty vs non-empty
3305 option data; see merge_decls() in c-decl.c.
3306 So here we require either that at least one of the decls has empty
3307 target options, or that the target options/data be identical. */
3309 nios2_merge_decl_attributes (tree olddecl
, tree newdecl
)
3311 tree oldopts
= lookup_attribute ("target", DECL_ATTRIBUTES (olddecl
));
3312 tree newopts
= lookup_attribute ("target", DECL_ATTRIBUTES (newdecl
));
3313 if (newopts
&& oldopts
&& newopts
!= oldopts
)
3315 tree oldtree
= DECL_FUNCTION_SPECIFIC_TARGET (olddecl
);
3316 tree newtree
= DECL_FUNCTION_SPECIFIC_TARGET (newdecl
);
3317 if (oldtree
&& newtree
&& oldtree
!= newtree
)
3319 struct cl_target_option
*olddata
= TREE_TARGET_OPTION (oldtree
);
3320 struct cl_target_option
*newdata
= TREE_TARGET_OPTION (newtree
);
3321 if (olddata
!= newdata
3322 && memcmp (olddata
, newdata
, sizeof (struct cl_target_option
)))
3323 error ("%qE redeclared with conflicting %qs attributes",
3324 DECL_NAME (newdecl
), "target");
3327 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
3328 DECL_ATTRIBUTES (newdecl
));
3331 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
3333 nios2_asm_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
3334 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
3337 rtx this_rtx
, funexp
;
3340 /* Pretend to be a post-reload pass while generating rtl. */
3341 reload_completed
= 1;
3344 nios2_load_pic_register ();
3346 /* Mark the end of the (empty) prologue. */
3347 emit_note (NOTE_INSN_PROLOGUE_END
);
3349 /* Find the "this" pointer. If the function returns a structure,
3350 the structure return pointer is in $5. */
3351 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
3352 this_rtx
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
+ 1);
3354 this_rtx
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
3356 /* Add DELTA to THIS_RTX. */
3357 nios2_emit_add_constant (this_rtx
, delta
);
3359 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
3364 tmp
= gen_rtx_REG (Pmode
, 2);
3365 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this_rtx
));
3366 nios2_emit_add_constant (tmp
, vcall_offset
);
3367 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
3368 emit_insn (gen_add2_insn (this_rtx
, tmp
));
3371 /* Generate a tail call to the target function. */
3372 if (!TREE_USED (function
))
3374 assemble_external (function
);
3375 TREE_USED (function
) = 1;
3377 funexp
= XEXP (DECL_RTL (function
), 0);
3378 /* Function address needs to be constructed under PIC,
3379 provide r2 to use here. */
3380 nios2_adjust_call_address (&funexp
, gen_rtx_REG (Pmode
, 2));
3381 insn
= emit_call_insn (gen_sibcall_internal (funexp
, const0_rtx
));
3382 SIBLING_CALL_P (insn
) = 1;
3384 /* Run just enough of rest_of_compilation to get the insns emitted.
3385 There's not really enough bulk here to make other passes such as
3386 instruction scheduling worth while. Note that use_thunk calls
3387 assemble_start_function and assemble_end_function. */
3388 insn
= get_insns ();
3389 shorten_branches (insn
);
3390 final_start_function (insn
, file
, 1);
3391 final (insn
, file
, 1);
3392 final_end_function ();
3394 /* Stop pretending to be a post-reload pass. */
3395 reload_completed
= 0;
/* NOTE(review): this hook-table region was damaged during text
   extraction — the leading numbers are original-file line numbers, not
   code.  Preserved verbatim: it #defines the target hooks implemented
   in this file and instantiates targetm.  */
3399 /* Initialize the GCC target structure. */
3400 #undef TARGET_ASM_FUNCTION_PROLOGUE
3401 #define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
3403 #undef TARGET_IN_SMALL_DATA_P
3404 #define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
3406 #undef TARGET_SECTION_TYPE_FLAGS
3407 #define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
3409 #undef TARGET_INIT_BUILTINS
3410 #define TARGET_INIT_BUILTINS nios2_init_builtins
3411 #undef TARGET_EXPAND_BUILTIN
3412 #define TARGET_EXPAND_BUILTIN nios2_expand_builtin
3413 #undef TARGET_BUILTIN_DECL
3414 #define TARGET_BUILTIN_DECL nios2_builtin_decl
3416 #undef TARGET_INIT_LIBFUNCS
3417 #define TARGET_INIT_LIBFUNCS nios2_init_libfuncs
3419 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3420 #define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
3422 #undef TARGET_CAN_ELIMINATE
3423 #define TARGET_CAN_ELIMINATE nios2_can_eliminate
3425 #undef TARGET_FUNCTION_ARG
3426 #define TARGET_FUNCTION_ARG nios2_function_arg
3428 #undef TARGET_FUNCTION_ARG_ADVANCE
3429 #define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
3431 #undef TARGET_ARG_PARTIAL_BYTES
3432 #define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
3434 #undef TARGET_TRAMPOLINE_INIT
3435 #define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
3437 #undef TARGET_FUNCTION_VALUE
3438 #define TARGET_FUNCTION_VALUE nios2_function_value
3440 #undef TARGET_LIBCALL_VALUE
3441 #define TARGET_LIBCALL_VALUE nios2_libcall_value
3443 #undef TARGET_FUNCTION_VALUE_REGNO_P
3444 #define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
3446 #undef TARGET_RETURN_IN_MEMORY
3447 #define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
3449 #undef TARGET_PROMOTE_PROTOTYPES
3450 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3452 #undef TARGET_SETUP_INCOMING_VARARGS
3453 #define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
3455 #undef TARGET_MUST_PASS_IN_STACK
3456 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
3458 #undef TARGET_LEGITIMATE_CONSTANT_P
3459 #define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
3461 #undef TARGET_LEGITIMIZE_ADDRESS
3462 #define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
3464 #undef TARGET_DELEGITIMIZE_ADDRESS
3465 #define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address
3467 #undef TARGET_LEGITIMATE_ADDRESS_P
3468 #define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
3470 #undef TARGET_PREFERRED_RELOAD_CLASS
3471 #define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
3473 #undef TARGET_RTX_COSTS
3474 #define TARGET_RTX_COSTS nios2_rtx_costs
3476 #undef TARGET_HAVE_TLS
3477 #define TARGET_HAVE_TLS TARGET_LINUX_ABI
3479 #undef TARGET_CANNOT_FORCE_CONST_MEM
3480 #define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
3482 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
3483 #define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
3485 #undef TARGET_PRINT_OPERAND
3486 #define TARGET_PRINT_OPERAND nios2_print_operand
3488 #undef TARGET_PRINT_OPERAND_ADDRESS
3489 #define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
3491 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3492 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra
3494 #undef TARGET_ASM_FILE_END
3495 #define TARGET_ASM_FILE_END nios2_asm_file_end
3497 #undef TARGET_OPTION_OVERRIDE
3498 #define TARGET_OPTION_OVERRIDE nios2_option_override
3500 #undef TARGET_OPTION_SAVE
3501 #define TARGET_OPTION_SAVE nios2_option_save
3503 #undef TARGET_OPTION_RESTORE
3504 #define TARGET_OPTION_RESTORE nios2_option_restore
3506 #undef TARGET_SET_CURRENT_FUNCTION
3507 #define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
3509 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
3510 #define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
3512 #undef TARGET_OPTION_PRAGMA_PARSE
3513 #define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
3515 #undef TARGET_MERGE_DECL_ATTRIBUTES
3516 #define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
3518 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3519 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
3520 hook_bool_const_tree_hwi_hwi_const_tree_true
3522 #undef TARGET_ASM_OUTPUT_MI_THUNK
3523 #define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk
3525 struct gcc_target targetm
= TARGET_INITIALIZER
;
3527 #include "gt-nios2.h"