1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "diagnostic-core.h"
45 #include "target-def.h"
47 #include "langhooks.h"
52 #include "stor-layout.h"
57 /* Forward function declarations. */
58 static bool prologue_saved_reg_p (unsigned);
59 static void nios2_load_pic_register (void);
60 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code
, int);
61 static const char *nios2_unspec_reloc_name (int);
62 static void nios2_register_builtin_fndecl (unsigned, tree
);
64 /* Threshold for data being put into the small data/bss area, instead
65 of the normal data area (references to the small data/bss area take
66 1 instruction, and use the global pointer, references to the normal
67 data area takes 2 instructions). */
68 unsigned HOST_WIDE_INT nios2_section_threshold
= NIOS2_DEFAULT_GVALUE
;
70 struct GTY (()) machine_function
72 /* Current frame information, to be filled in by nios2_compute_frame_layout
73 with register save masks, and offsets for the current function. */
75 /* Mask of registers to save. */
76 unsigned int save_mask
;
77 /* Number of bytes that the entire frame takes up. */
79 /* Number of bytes that variables take up. */
81 /* Number of bytes that outgoing arguments take up. */
83 /* Number of bytes needed to store registers in frame. */
85 /* Offset from new stack pointer to store registers. */
87 /* Offset from save_regs_offset to store frame pointer register. */
89 /* != 0 if frame layout already calculated. */
93 /* State to track the assignment of custom codes to FPU/custom builtins. */
94 static enum nios2_ccs_code custom_code_status
[256];
95 static int custom_code_index
[256];
96 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
97 static bool custom_code_conflict
= false;
/* Definition of builtin function types for nios2.  Each entry names
   an operand count N and a parenthesized (RETURN, ARG...) list; the
   list is expanded through N2_FTYPE_OPn into enumerator names below
   and into type-building switch cases in nios2_ftype.  */
#define N2_FTYPES                               \
  N2_FTYPE(1, (VOID))                           \
  N2_FTYPE(2, (DF, DF))                         \
  N2_FTYPE(3, (DF, DF, DF))                     \
  N2_FTYPE(2, (DF, SF))                         \
  N2_FTYPE(2, (DF, SI))                         \
  N2_FTYPE(2, (DF, UI))                         \
  N2_FTYPE(2, (SF, DF))                         \
  N2_FTYPE(2, (SF, SF))                         \
  N2_FTYPE(3, (SF, SF, SF))                     \
  N2_FTYPE(2, (SF, SI))                         \
  N2_FTYPE(2, (SF, UI))                         \
  N2_FTYPE(2, (SI, CVPTR))                      \
  N2_FTYPE(2, (SI, DF))                         \
  N2_FTYPE(3, (SI, DF, DF))                     \
  N2_FTYPE(2, (SI, SF))                         \
  N2_FTYPE(3, (SI, SF, SF))                     \
  N2_FTYPE(2, (SI, SI))                         \
  N2_FTYPE(2, (UI, CVPTR))                      \
  N2_FTYPE(2, (UI, DF))                         \
  N2_FTYPE(2, (UI, SF))                         \
  N2_FTYPE(2, (VOID, DF))                       \
  N2_FTYPE(2, (VOID, SF))                       \
  N2_FTYPE(3, (VOID, SI, SI))                   \
  N2_FTYPE(3, (VOID, VPTR, SI))

#define N2_FTYPE_OP1(R)         N2_FTYPE_ ## R ## _VOID
#define N2_FTYPE_OP2(R, A1)     N2_FTYPE_ ## R ## _ ## A1
#define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2

/* Expand ftcode enumeration.  N2_FTYPE_MAX is referenced by
   nios2_ftype's cache table below.  */
enum nios2_ftcode {
#define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
N2_FTYPES
#undef N2_FTYPE
N2_FTYPE_MAX
};
141 /* Return the tree function type, based on the ftcode. */
143 nios2_ftype (enum nios2_ftcode ftcode
)
145 static tree types
[(int) N2_FTYPE_MAX
];
147 tree N2_TYPE_SF
= float_type_node
;
148 tree N2_TYPE_DF
= double_type_node
;
149 tree N2_TYPE_SI
= integer_type_node
;
150 tree N2_TYPE_UI
= unsigned_type_node
;
151 tree N2_TYPE_VOID
= void_type_node
;
153 static const_tree N2_TYPE_CVPTR
, N2_TYPE_VPTR
;
156 /* const volatile void *. */
158 = build_pointer_type (build_qualified_type (void_type_node
,
160 | TYPE_QUAL_VOLATILE
)));
161 /* volatile void *. */
163 = build_pointer_type (build_qualified_type (void_type_node
,
164 TYPE_QUAL_VOLATILE
));
166 if (types
[(int) ftcode
] == NULL_TREE
)
169 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
170 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
171 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
172 #define N2_FTYPE(N,ARGS) \
173 case N2_FTYPE_OP ## N ARGS: \
174 types[(int) ftcode] \
175 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
179 default: gcc_unreachable ();
181 return types
[(int) ftcode
];
185 /* Definition of FPU instruction descriptions. */
187 struct nios2_fpu_insn_info
190 int num_operands
, *optvar
;
193 #define N2F_DFREQ 0x2
194 #define N2F_UNSAFE 0x4
195 #define N2F_FINITE 0x8
196 #define N2F_NO_ERRNO 0x10
198 enum insn_code icode
;
199 enum nios2_ftcode ftcode
;
202 /* Base macro for defining FPU instructions. */
203 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
204 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
205 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
206 N2_FTYPE_OP ## nop args }
/* Arithmetic and math functions; 2 or 3 operand FP operations.  */
#define N2FPU_OP2(mode) (mode, mode)
#define N2FPU_OP3(mode) (mode, mode, mode)
#define N2FPU_INSN_DEF(code, icode, nop, flags, m, M)		\
  N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags,		\
		       icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
#define N2FPU_INSN_SF(code, nop, flags)		\
  N2FPU_INSN_DEF (code, code, nop, flags, s, S)
#define N2FPU_INSN_DF(code, nop, flags)		\
  N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)

/* Compare instructions, 3 operand FP operation with a SI result.  */
#define N2FPU_CMP_DEF(code, flags, m, M)		\
  N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags,	\
		       nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
#define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
#define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
226 /* The order of definition needs to be maintained consistent with
227 enum n2fpu_code in nios2-opts.h. */
228 struct nios2_fpu_insn_info nios2_fpu_insn
[] =
230 /* Single precision instructions. */
231 N2FPU_INSN_SF (add
, 3, 0),
232 N2FPU_INSN_SF (sub
, 3, 0),
233 N2FPU_INSN_SF (mul
, 3, 0),
234 N2FPU_INSN_SF (div
, 3, 0),
235 /* Due to textual difference between min/max and smin/smax. */
236 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, s
, S
),
237 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, s
, S
),
238 N2FPU_INSN_SF (neg
, 2, 0),
239 N2FPU_INSN_SF (abs
, 2, 0),
240 N2FPU_INSN_SF (sqrt
, 2, 0),
241 N2FPU_INSN_SF (sin
, 2, N2F_UNSAFE
),
242 N2FPU_INSN_SF (cos
, 2, N2F_UNSAFE
),
243 N2FPU_INSN_SF (tan
, 2, N2F_UNSAFE
),
244 N2FPU_INSN_SF (atan
, 2, N2F_UNSAFE
),
245 N2FPU_INSN_SF (exp
, 2, N2F_UNSAFE
),
246 N2FPU_INSN_SF (log
, 2, N2F_UNSAFE
),
247 /* Single precision compares. */
248 N2FPU_CMP_SF (eq
), N2FPU_CMP_SF (ne
),
249 N2FPU_CMP_SF (lt
), N2FPU_CMP_SF (le
),
250 N2FPU_CMP_SF (gt
), N2FPU_CMP_SF (ge
),
252 /* Double precision instructions. */
253 N2FPU_INSN_DF (add
, 3, 0),
254 N2FPU_INSN_DF (sub
, 3, 0),
255 N2FPU_INSN_DF (mul
, 3, 0),
256 N2FPU_INSN_DF (div
, 3, 0),
257 /* Due to textual difference between min/max and smin/smax. */
258 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, d
, D
),
259 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, d
, D
),
260 N2FPU_INSN_DF (neg
, 2, 0),
261 N2FPU_INSN_DF (abs
, 2, 0),
262 N2FPU_INSN_DF (sqrt
, 2, 0),
263 N2FPU_INSN_DF (sin
, 2, N2F_UNSAFE
),
264 N2FPU_INSN_DF (cos
, 2, N2F_UNSAFE
),
265 N2FPU_INSN_DF (tan
, 2, N2F_UNSAFE
),
266 N2FPU_INSN_DF (atan
, 2, N2F_UNSAFE
),
267 N2FPU_INSN_DF (exp
, 2, N2F_UNSAFE
),
268 N2FPU_INSN_DF (log
, 2, N2F_UNSAFE
),
269 /* Double precision compares. */
270 N2FPU_CMP_DF (eq
), N2FPU_CMP_DF (ne
),
271 N2FPU_CMP_DF (lt
), N2FPU_CMP_DF (le
),
272 N2FPU_CMP_DF (gt
), N2FPU_CMP_DF (ge
),
274 /* Conversion instructions. */
275 N2FPU_INSN_DEF_BASE (floatis
, 2, 0, floatsisf2
, (SF
, SI
)),
276 N2FPU_INSN_DEF_BASE (floatus
, 2, 0, floatunssisf2
, (SF
, UI
)),
277 N2FPU_INSN_DEF_BASE (floatid
, 2, 0, floatsidf2
, (DF
, SI
)),
278 N2FPU_INSN_DEF_BASE (floatud
, 2, 0, floatunssidf2
, (DF
, UI
)),
279 N2FPU_INSN_DEF_BASE (round
, 2, N2F_NO_ERRNO
, lroundsfsi2
, (SI
, SF
)),
280 N2FPU_INSN_DEF_BASE (fixsi
, 2, 0, fix_truncsfsi2
, (SI
, SF
)),
281 N2FPU_INSN_DEF_BASE (fixsu
, 2, 0, fixuns_truncsfsi2
, (UI
, SF
)),
282 N2FPU_INSN_DEF_BASE (fixdi
, 2, 0, fix_truncdfsi2
, (SI
, DF
)),
283 N2FPU_INSN_DEF_BASE (fixdu
, 2, 0, fixuns_truncdfsi2
, (UI
, DF
)),
284 N2FPU_INSN_DEF_BASE (fextsd
, 2, 0, extendsfdf2
, (DF
, SF
)),
285 N2FPU_INSN_DEF_BASE (ftruncds
, 2, 0, truncdfsf2
, (SF
, DF
)),
287 /* X, Y access instructions. */
288 N2FPU_INSN_DEF_BASE (fwrx
, 2, N2F_DFREQ
, nios2_fwrx
, (VOID
, DF
)),
289 N2FPU_INSN_DEF_BASE (fwry
, 2, N2F_DFREQ
, nios2_fwry
, (VOID
, SF
)),
290 N2FPU_INSN_DEF_BASE (frdxlo
, 1, N2F_DFREQ
, nios2_frdxlo
, (SF
)),
291 N2FPU_INSN_DEF_BASE (frdxhi
, 1, N2F_DFREQ
, nios2_frdxhi
, (SF
)),
292 N2FPU_INSN_DEF_BASE (frdy
, 1, N2F_DFREQ
, nios2_frdy
, (SF
))
/* Some macros for ease of access.  */
#define N2FPU(code) nios2_fpu_insn[(int) code]
#define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
#define N2FPU_N(code) (*N2FPU(code).optvar)
#define N2FPU_NAME(code) (N2FPU(code).name)
#define N2FPU_ICODE(code) (N2FPU(code).icode)
#define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
#define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
#define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
#define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
#define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
#define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)

/* Same as above, but for cases where using only the op part is shorter.  */
#define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
#define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
#define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
313 /* Export the FPU insn enabled predicate to nios2.md. */
315 nios2_fpu_insn_enabled (enum n2fpu_code code
)
317 return N2FPU_ENABLED_P (code
);
320 /* Return true if COND comparison for mode MODE is enabled under current
324 nios2_fpu_compare_enabled (enum rtx_code cond
, enum machine_mode mode
)
329 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqs
);
330 case NE
: return N2FPU_OP_ENABLED_P (fcmpnes
);
331 case GT
: return N2FPU_OP_ENABLED_P (fcmpgts
);
332 case GE
: return N2FPU_OP_ENABLED_P (fcmpges
);
333 case LT
: return N2FPU_OP_ENABLED_P (fcmplts
);
334 case LE
: return N2FPU_OP_ENABLED_P (fcmples
);
337 else if (mode
== DFmode
)
340 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqd
);
341 case NE
: return N2FPU_OP_ENABLED_P (fcmpned
);
342 case GT
: return N2FPU_OP_ENABLED_P (fcmpgtd
);
343 case GE
: return N2FPU_OP_ENABLED_P (fcmpged
);
344 case LT
: return N2FPU_OP_ENABLED_P (fcmpltd
);
345 case LE
: return N2FPU_OP_ENABLED_P (fcmpled
);
/* Stack layout and calling conventions.  */

/* Round LOC up to the preferred stack boundary (a power of two).  */
#define NIOS2_STACK_ALIGN(LOC)						\
  (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))		\
   & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
357 /* Return the bytes needed to compute the frame pointer from the current
360 nios2_compute_frame_layout (void)
363 unsigned int save_mask
= 0;
369 if (cfun
->machine
->initialized
)
370 return cfun
->machine
->total_size
;
372 var_size
= NIOS2_STACK_ALIGN (get_frame_size ());
373 out_args_size
= NIOS2_STACK_ALIGN (crtl
->outgoing_args_size
);
374 total_size
= var_size
+ out_args_size
;
376 /* Calculate space needed for gp registers. */
378 for (regno
= 0; regno
<= LAST_GP_REG
; regno
++)
379 if (prologue_saved_reg_p (regno
))
381 save_mask
|= 1 << regno
;
385 /* If we call eh_return, we need to save the EH data registers. */
386 if (crtl
->calls_eh_return
)
391 for (i
= 0; (r
= EH_RETURN_DATA_REGNO (i
)) != INVALID_REGNUM
; i
++)
392 if (!(save_mask
& (1 << r
)))
399 cfun
->machine
->fp_save_offset
= 0;
400 if (save_mask
& (1 << HARD_FRAME_POINTER_REGNUM
))
402 int fp_save_offset
= 0;
403 for (regno
= 0; regno
< HARD_FRAME_POINTER_REGNUM
; regno
++)
404 if (save_mask
& (1 << regno
))
407 cfun
->machine
->fp_save_offset
= fp_save_offset
;
410 save_reg_size
= NIOS2_STACK_ALIGN (save_reg_size
);
411 total_size
+= save_reg_size
;
412 total_size
+= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
414 /* Save other computed information. */
415 cfun
->machine
->save_mask
= save_mask
;
416 cfun
->machine
->total_size
= total_size
;
417 cfun
->machine
->var_size
= var_size
;
418 cfun
->machine
->args_size
= out_args_size
;
419 cfun
->machine
->save_reg_size
= save_reg_size
;
420 cfun
->machine
->initialized
= reload_completed
;
421 cfun
->machine
->save_regs_offset
= out_args_size
+ var_size
;
426 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
427 prologue/epilogue expand routines. */
429 save_reg (int regno
, unsigned offset
)
431 rtx reg
= gen_rtx_REG (SImode
, regno
);
432 rtx addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
433 gen_int_mode (offset
, Pmode
));
434 rtx insn
= emit_move_insn (gen_frame_mem (Pmode
, addr
), reg
);
435 RTX_FRAME_RELATED_P (insn
) = 1;
439 restore_reg (int regno
, unsigned offset
)
441 rtx reg
= gen_rtx_REG (SImode
, regno
);
442 rtx addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
443 gen_int_mode (offset
, Pmode
));
444 rtx insn
= emit_move_insn (reg
, gen_frame_mem (Pmode
, addr
));
445 /* Tag epilogue unwind note. */
446 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
447 RTX_FRAME_RELATED_P (insn
) = 1;
450 /* Emit conditional trap for checking stack limit. */
452 nios2_emit_stack_limit_check (void)
454 if (REG_P (stack_limit_rtx
))
455 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode
, stack_pointer_rtx
,
457 stack_pointer_rtx
, stack_limit_rtx
, GEN_INT (3)));
459 sorry ("only register based stack limit is supported");
462 /* Temp regno used inside prologue/epilogue. */
463 #define TEMP_REG_NUM 8
466 nios2_expand_prologue (void)
469 int total_frame_size
, save_offset
;
470 int sp_offset
; /* offset from base_reg to final stack value. */
471 int save_regs_base
; /* offset from base_reg to register save area. */
474 total_frame_size
= nios2_compute_frame_layout ();
476 if (flag_stack_usage_info
)
477 current_function_static_stack_size
= total_frame_size
;
479 /* Decrement the stack pointer. */
480 if (!SMALL_INT (total_frame_size
))
482 /* We need an intermediary point, this will point at the spill block. */
484 (gen_add2_insn (stack_pointer_rtx
,
485 gen_int_mode (cfun
->machine
->save_regs_offset
486 - total_frame_size
, Pmode
)));
487 RTX_FRAME_RELATED_P (insn
) = 1;
489 sp_offset
= -cfun
->machine
->save_regs_offset
;
491 else if (total_frame_size
)
493 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
494 gen_int_mode (-total_frame_size
,
496 RTX_FRAME_RELATED_P (insn
) = 1;
497 save_regs_base
= cfun
->machine
->save_regs_offset
;
501 save_regs_base
= sp_offset
= 0;
503 if (crtl
->limit_stack
)
504 nios2_emit_stack_limit_check ();
506 save_offset
= save_regs_base
+ cfun
->machine
->save_reg_size
;
508 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
509 if (cfun
->machine
->save_mask
& (1 << regno
))
512 save_reg (regno
, save_offset
);
515 if (frame_pointer_needed
)
517 int fp_save_offset
= save_regs_base
+ cfun
->machine
->fp_save_offset
;
518 insn
= emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
520 gen_int_mode (fp_save_offset
, Pmode
)));
521 RTX_FRAME_RELATED_P (insn
) = 1;
527 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
528 plus_constant (Pmode
, stack_pointer_rtx
, sp_offset
));
529 if (SMALL_INT (sp_offset
))
530 insn
= emit_insn (sp_adjust
);
533 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
534 emit_move_insn (tmp
, gen_int_mode (sp_offset
, Pmode
));
535 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
536 /* Attach the sp_adjust as a note indicating what happened. */
537 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
, sp_adjust
);
539 RTX_FRAME_RELATED_P (insn
) = 1;
541 if (crtl
->limit_stack
)
542 nios2_emit_stack_limit_check ();
545 /* Load the PIC register if needed. */
546 if (crtl
->uses_pic_offset_table
)
547 nios2_load_pic_register ();
549 /* If we are profiling, make sure no instructions are scheduled before
550 the call to mcount. */
552 emit_insn (gen_blockage ());
556 nios2_expand_epilogue (bool sibcall_p
)
559 int total_frame_size
;
560 int sp_adjust
, save_offset
;
563 if (!sibcall_p
&& nios2_can_use_return_insn ())
565 emit_jump_insn (gen_return ());
569 emit_insn (gen_blockage ());
571 total_frame_size
= nios2_compute_frame_layout ();
572 if (frame_pointer_needed
)
574 /* Recover the stack pointer. */
575 insn
= emit_insn (gen_add3_insn
576 (stack_pointer_rtx
, hard_frame_pointer_rtx
,
577 gen_int_mode (-cfun
->machine
->fp_save_offset
, Pmode
)));
578 cfa_adj
= plus_constant (Pmode
, stack_pointer_rtx
,
580 - cfun
->machine
->save_regs_offset
));
581 add_reg_note (insn
, REG_CFA_DEF_CFA
, cfa_adj
);
582 RTX_FRAME_RELATED_P (insn
) = 1;
585 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
587 else if (!SMALL_INT (total_frame_size
))
589 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
590 emit_move_insn (tmp
, gen_int_mode (cfun
->machine
->save_regs_offset
,
592 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
593 cfa_adj
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
594 plus_constant (Pmode
, stack_pointer_rtx
,
595 cfun
->machine
->save_regs_offset
));
596 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa_adj
);
597 RTX_FRAME_RELATED_P (insn
) = 1;
599 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
603 save_offset
= cfun
->machine
->save_regs_offset
;
604 sp_adjust
= total_frame_size
;
607 save_offset
+= cfun
->machine
->save_reg_size
;
609 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
610 if (cfun
->machine
->save_mask
& (1 << regno
))
613 restore_reg (regno
, save_offset
);
618 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
619 gen_int_mode (sp_adjust
, Pmode
)));
620 cfa_adj
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
621 plus_constant (Pmode
, stack_pointer_rtx
,
623 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa_adj
);
624 RTX_FRAME_RELATED_P (insn
) = 1;
627 /* Add in the __builtin_eh_return stack adjustment. */
628 if (crtl
->calls_eh_return
)
629 emit_insn (gen_add2_insn (stack_pointer_rtx
, EH_RETURN_STACKADJ_RTX
));
632 emit_jump_insn (gen_simple_return ());
635 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
636 back to a previous frame. */
638 nios2_get_return_address (int count
)
643 return get_hard_reg_initial_val (Pmode
, RA_REGNO
);
646 /* Emit code to change the current function's return address to
647 ADDRESS. SCRATCH is available as a scratch register, if needed.
648 ADDRESS and SCRATCH are both word-mode GPRs. */
650 nios2_set_return_address (rtx address
, rtx scratch
)
652 nios2_compute_frame_layout ();
653 if (cfun
->machine
->save_mask
& (1 << RA_REGNO
))
655 unsigned offset
= cfun
->machine
->save_reg_size
- 4;
658 if (frame_pointer_needed
)
659 base
= hard_frame_pointer_rtx
;
662 base
= stack_pointer_rtx
;
663 offset
+= cfun
->machine
->save_regs_offset
;
665 if (!SMALL_INT (offset
))
667 emit_move_insn (scratch
, gen_int_mode (offset
, Pmode
));
668 emit_insn (gen_add2_insn (scratch
, base
));
674 base
= plus_constant (Pmode
, base
, offset
);
675 emit_move_insn (gen_rtx_MEM (Pmode
, base
), address
);
678 emit_move_insn (gen_rtx_REG (Pmode
, RA_REGNO
), address
);
681 /* Implement FUNCTION_PROFILER macro. */
683 nios2_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
685 fprintf (file
, "\tmov\tr8, ra\n");
688 fprintf (file
, "\tnextpc\tr2\n");
689 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
690 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
691 fprintf (file
, "\tadd\tr2, r2, r3\n");
692 fprintf (file
, "\tldw\tr2, %%call(_mcount)(r2)\n");
693 fprintf (file
, "\tcallr\tr2\n");
695 else if (flag_pic
== 2)
697 fprintf (file
, "\tnextpc\tr2\n");
698 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
699 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
700 fprintf (file
, "\tadd\tr2, r2, r3\n");
701 fprintf (file
, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
702 fprintf (file
, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
703 fprintf (file
, "\tadd\tr3, r2, r3\n");
704 fprintf (file
, "\tldw\tr2, 0(r3)\n");
705 fprintf (file
, "\tcallr\tr2\n");
708 fprintf (file
, "\tcall\t_mcount\n");
709 fprintf (file
, "\tmov\tra, r8\n");
712 /* Dump stack layout. */
714 nios2_dump_frame_layout (FILE *file
)
716 fprintf (file
, "\t%s Current Frame Info\n", ASM_COMMENT_START
);
717 fprintf (file
, "\t%s total_size = %d\n", ASM_COMMENT_START
,
718 cfun
->machine
->total_size
);
719 fprintf (file
, "\t%s var_size = %d\n", ASM_COMMENT_START
,
720 cfun
->machine
->var_size
);
721 fprintf (file
, "\t%s args_size = %d\n", ASM_COMMENT_START
,
722 cfun
->machine
->args_size
);
723 fprintf (file
, "\t%s save_reg_size = %d\n", ASM_COMMENT_START
,
724 cfun
->machine
->save_reg_size
);
725 fprintf (file
, "\t%s initialized = %d\n", ASM_COMMENT_START
,
726 cfun
->machine
->initialized
);
727 fprintf (file
, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START
,
728 cfun
->machine
->save_regs_offset
);
729 fprintf (file
, "\t%s is_leaf = %d\n", ASM_COMMENT_START
,
731 fprintf (file
, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START
,
732 frame_pointer_needed
);
733 fprintf (file
, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START
,
734 crtl
->args
.pretend_args_size
);
737 /* Return true if REGNO should be saved in the prologue. */
739 prologue_saved_reg_p (unsigned regno
)
741 gcc_assert (GP_REG_P (regno
));
743 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
746 if (regno
== HARD_FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
749 if (regno
== PIC_OFFSET_TABLE_REGNUM
&& crtl
->uses_pic_offset_table
)
752 if (regno
== RA_REGNO
&& df_regs_ever_live_p (RA_REGNO
))
758 /* Implement TARGET_CAN_ELIMINATE. */
760 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
762 if (to
== STACK_POINTER_REGNUM
)
763 return !frame_pointer_needed
;
767 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
769 nios2_initial_elimination_offset (int from
, int to
)
773 nios2_compute_frame_layout ();
775 /* Set OFFSET to the offset from the stack pointer. */
778 case FRAME_POINTER_REGNUM
:
779 offset
= cfun
->machine
->args_size
;
782 case ARG_POINTER_REGNUM
:
783 offset
= cfun
->machine
->total_size
;
784 offset
-= crtl
->args
.pretend_args_size
;
791 /* If we are asked for the frame pointer offset, then adjust OFFSET
792 by the offset from the frame pointer to the stack pointer. */
793 if (to
== HARD_FRAME_POINTER_REGNUM
)
794 offset
-= (cfun
->machine
->save_regs_offset
795 + cfun
->machine
->fp_save_offset
);
800 /* Return nonzero if this function is known to have a null epilogue.
801 This allows the optimizer to omit jumps to jumps if no stack
804 nios2_can_use_return_insn (void)
806 if (!reload_completed
|| crtl
->profile
)
809 return nios2_compute_frame_layout () == 0;
813 /* Check and signal some warnings/errors on FPU insn options. */
815 nios2_custom_check_insns (void)
820 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
821 if (N2FPU_ENABLED_P (i
) && N2FPU_DOUBLE_P (i
))
823 for (j
= 0; j
< ARRAY_SIZE (nios2_fpu_insn
); j
++)
824 if (N2FPU_DOUBLE_REQUIRED_P (j
) && ! N2FPU_ENABLED_P (j
))
826 error ("switch %<-mcustom-%s%> is required for double "
827 "precision floating point", N2FPU_NAME (j
));
833 /* Warn if the user has certain exotic operations that won't get used
834 without -funsafe-math-optimizations. See expand_builtin () in
836 if (!flag_unsafe_math_optimizations
)
837 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
838 if (N2FPU_ENABLED_P (i
) && N2FPU_UNSAFE_P (i
))
839 warning (0, "switch %<-mcustom-%s%> has no effect unless "
840 "-funsafe-math-optimizations is specified", N2FPU_NAME (i
));
842 /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
843 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
845 if (!flag_finite_math_only
)
846 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
847 if (N2FPU_ENABLED_P (i
) && N2FPU_FINITE_P (i
))
848 warning (0, "switch %<-mcustom-%s%> has no effect unless "
849 "-ffinite-math-only is specified", N2FPU_NAME (i
));
851 /* Warn if the user is trying to use a custom rounding instruction
852 that won't get used without -fno-math-errno. See
853 expand_builtin_int_roundingfn_2 () in builtins.c. */
855 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
856 if (N2FPU_ENABLED_P (i
) && N2FPU_NO_ERRNO_P (i
))
857 warning (0, "switch %<-mcustom-%s%> has no effect unless "
858 "-fno-math-errno is specified", N2FPU_NAME (i
));
860 if (errors
|| custom_code_conflict
)
861 fatal_error ("conflicting use of -mcustom switches, target attributes, "
862 "and/or __builtin_custom_ functions");
866 nios2_set_fpu_custom_code (enum n2fpu_code code
, int n
, bool override_p
)
868 if (override_p
|| N2FPU_N (code
) == -1)
870 nios2_register_custom_code (n
, CCS_FPU
, (int) code
);
873 /* Type to represent a standard FPU config. */
874 struct nios2_fpu_config
877 bool set_sp_constants
;
878 int code
[n2fpu_code_num
];
881 #define NIOS2_FPU_CONFIG_NUM 3
882 static struct nios2_fpu_config custom_fpu_config
[NIOS2_FPU_CONFIG_NUM
];
885 nios2_init_fpu_configs (void)
887 struct nios2_fpu_config
* cfg
;
889 #define NEXT_FPU_CONFIG \
891 cfg = &custom_fpu_config[i++]; \
892 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
897 cfg
->set_sp_constants
= true;
898 cfg
->code
[n2fpu_fmuls
] = 252;
899 cfg
->code
[n2fpu_fadds
] = 253;
900 cfg
->code
[n2fpu_fsubs
] = 254;
904 cfg
->set_sp_constants
= true;
905 cfg
->code
[n2fpu_fmuls
] = 252;
906 cfg
->code
[n2fpu_fadds
] = 253;
907 cfg
->code
[n2fpu_fsubs
] = 254;
908 cfg
->code
[n2fpu_fdivs
] = 255;
912 cfg
->set_sp_constants
= true;
913 cfg
->code
[n2fpu_floatus
] = 243;
914 cfg
->code
[n2fpu_fixsi
] = 244;
915 cfg
->code
[n2fpu_floatis
] = 245;
916 cfg
->code
[n2fpu_fcmpgts
] = 246;
917 cfg
->code
[n2fpu_fcmples
] = 249;
918 cfg
->code
[n2fpu_fcmpeqs
] = 250;
919 cfg
->code
[n2fpu_fcmpnes
] = 251;
920 cfg
->code
[n2fpu_fmuls
] = 252;
921 cfg
->code
[n2fpu_fadds
] = 253;
922 cfg
->code
[n2fpu_fsubs
] = 254;
923 cfg
->code
[n2fpu_fdivs
] = 255;
925 #undef NEXT_FPU_CONFIG
926 gcc_assert (i
== NIOS2_FPU_CONFIG_NUM
);
929 static struct nios2_fpu_config
*
930 nios2_match_custom_fpu_cfg (const char *cfgname
, const char *endp
)
933 for (i
= 0; i
< NIOS2_FPU_CONFIG_NUM
; i
++)
935 bool match
= !(endp
!= NULL
936 ? strncmp (custom_fpu_config
[i
].name
, cfgname
,
938 : strcmp (custom_fpu_config
[i
].name
, cfgname
));
940 return &custom_fpu_config
[i
];
945 /* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
946 OVERRIDE is true if loaded config codes should overwrite current state. */
948 nios2_handle_custom_fpu_cfg (const char *cfgname
, const char *endp
,
951 struct nios2_fpu_config
*cfg
= nios2_match_custom_fpu_cfg (cfgname
, endp
);
955 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
956 if (cfg
->code
[i
] >= 0)
957 nios2_set_fpu_custom_code ((enum n2fpu_code
) i
, cfg
->code
[i
],
959 if (cfg
->set_sp_constants
)
960 flag_single_precision_constant
= 1;
963 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
964 "value %<%s%>", cfgname
);
966 /* Guard against errors in the standard configurations. */
967 nios2_custom_check_insns ();
970 /* Check individual FPU insn options, and register custom code. */
972 nios2_handle_custom_fpu_insn_option (int fpu_insn_index
)
974 int param
= N2FPU_N (fpu_insn_index
);
976 if (0 <= param
&& param
<= 255)
977 nios2_register_custom_code (param
, CCS_FPU
, fpu_insn_index
);
979 /* Valid values are 0-255, but also allow -1 so that the
980 -mno-custom-<opt> switches work. */
981 else if (param
!= -1)
982 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
983 N2FPU_NAME (fpu_insn_index
), param
);
986 /* Allocate a chunk of memory for per-function machine-dependent data. */
987 static struct machine_function
*
988 nios2_init_machine_status (void)
990 return ggc_cleared_alloc
<machine_function
> ();
993 /* Implement TARGET_OPTION_OVERRIDE. */
995 nios2_option_override (void)
999 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1000 SUBTARGET_OVERRIDE_OPTIONS
;
1003 /* Check for unsupported options. */
1004 if (flag_pic
&& !TARGET_LINUX_ABI
)
1005 sorry ("position-independent code requires the Linux ABI");
1007 /* Function to allocate machine-dependent function status. */
1008 init_machine_status
= &nios2_init_machine_status
;
1010 nios2_section_threshold
1011 = (global_options_set
.x_g_switch_value
1012 ? g_switch_value
: NIOS2_DEFAULT_GVALUE
);
1014 /* Default to -mgpopt unless -fpic or -fPIC. */
1015 if (TARGET_GPOPT
== -1 && flag_pic
)
1018 /* If we don't have mul, we don't have mulx either! */
1019 if (!TARGET_HAS_MUL
&& TARGET_HAS_MULX
)
1020 target_flags
&= ~MASK_HAS_MULX
;
1022 /* Initialize default FPU configurations. */
1023 nios2_init_fpu_configs ();
1025 /* Set up default handling for floating point custom instructions.
1027 Putting things in this order means that the -mcustom-fpu-cfg=
1028 switch will always be overridden by individual -mcustom-fadds=
1029 switches, regardless of the order in which they were specified
1030 on the command line.
1032 This behavior of prioritization of individual -mcustom-<insn>=
1033 options before the -mcustom-fpu-cfg= switch is maintained for
1035 if (nios2_custom_fpu_cfg_string
&& *nios2_custom_fpu_cfg_string
)
1036 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string
, NULL
, false);
1038 /* Handle options for individual FPU insns. */
1039 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1040 nios2_handle_custom_fpu_insn_option (i
);
1042 nios2_custom_check_insns ();
1044 /* Save the initial options in case the user does function specific
1046 target_option_default_node
= target_option_current_node
1047 = build_target_option_node (&global_options
);
1051 /* Return true if CST is a constant within range of movi/movui/movhi. */
1053 nios2_simple_const_p (const_rtx cst
)
1055 HOST_WIDE_INT val
= INTVAL (cst
);
1056 return SMALL_INT (val
) || SMALL_INT_UNSIGNED (val
) || UPPER16_INT (val
);
1059 /* Compute a (partial) cost for rtx X. Return true if the complete
1060 cost has been computed, and false if subexpressions should be
1061 scanned. In either case, *TOTAL contains the cost result. */
1063 nios2_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
1064 int opno ATTRIBUTE_UNUSED
,
1065 int *total
, bool speed ATTRIBUTE_UNUSED
)
1070 if (INTVAL (x
) == 0)
1072 *total
= COSTS_N_INSNS (0);
1075 else if (nios2_simple_const_p (x
))
1077 *total
= COSTS_N_INSNS (2);
1082 *total
= COSTS_N_INSNS (4);
1091 *total
= COSTS_N_INSNS (4);
1097 /* Recognize 'nor' insn pattern. */
1098 if (GET_CODE (XEXP (x
, 0)) == NOT
1099 && GET_CODE (XEXP (x
, 1)) == NOT
)
1101 *total
= COSTS_N_INSNS (1);
1109 *total
= COSTS_N_INSNS (1);
1114 *total
= COSTS_N_INSNS (3);
1119 *total
= COSTS_N_INSNS (1);
1128 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1130 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, reg_class_t regclass
)
1132 return regclass
== NO_REGS
? GENERAL_REGS
: regclass
;
1135 /* Emit a call to __tls_get_addr. TI is the argument to this function.
1136 RET is an RTX for the return value location. The entire insn sequence
1138 static GTY(()) rtx nios2_tls_symbol
;
1141 nios2_call_tls_get_addr (rtx ti
)
1143 rtx arg
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
1144 rtx ret
= gen_rtx_REG (Pmode
, FIRST_RETVAL_REGNO
);
1147 if (!nios2_tls_symbol
)
1148 nios2_tls_symbol
= init_one_libfunc ("__tls_get_addr");
1150 emit_move_insn (arg
, ti
);
1151 fn
= gen_rtx_MEM (QImode
, nios2_tls_symbol
);
1152 insn
= emit_call_insn (gen_call_value (ret
, fn
, const0_rtx
));
1153 RTL_CONST_CALL_P (insn
) = 1;
1154 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), ret
);
1155 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), arg
);
1160 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1162 nios2_large_offset_p (int unspec
)
1164 gcc_assert (nios2_unspec_reloc_name (unspec
) != NULL
);
1167 /* FIXME: TLS GOT offset relocations will eventually also get this
1168 treatment, after binutils support for those are also completed. */
1169 && (unspec
== UNSPEC_PIC_SYM
|| unspec
== UNSPEC_PIC_CALL_SYM
))
1172 /* 'gotoff' offsets are always hiadj/lo. */
1173 if (unspec
== UNSPEC_PIC_GOTOFF_SYM
)
1179 /* Return true for conforming unspec relocations. Also used in
1180 constraints.md and predicates.md. */
1182 nios2_unspec_reloc_p (rtx op
)
1184 return (GET_CODE (op
) == CONST
1185 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1186 && ! nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1189 /* Helper to generate unspec constant. */
1191 nios2_unspec_offset (rtx loc
, int unspec
)
1193 return gen_rtx_CONST (Pmode
, gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, loc
),
1197 /* Generate GOT pointer based address with large offset. */
1199 nios2_large_got_address (rtx offset
)
1201 rtx addr
= gen_reg_rtx (Pmode
);
1202 emit_insn (gen_add3_insn (addr
, pic_offset_table_rtx
,
1203 force_reg (Pmode
, offset
)));
1207 /* Generate a GOT pointer based address. */
1209 nios2_got_address (rtx loc
, int unspec
)
1211 rtx offset
= nios2_unspec_offset (loc
, unspec
);
1212 crtl
->uses_pic_offset_table
= 1;
1214 if (nios2_large_offset_p (unspec
))
1215 return nios2_large_got_address (offset
);
1217 return gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
1220 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1221 return value will be a valid address and move_operand (either a REG
1224 nios2_legitimize_tls_address (rtx loc
)
1227 enum tls_model model
= SYMBOL_REF_TLS_MODEL (loc
);
1231 case TLS_MODEL_GLOBAL_DYNAMIC
:
1232 tmp
= gen_reg_rtx (Pmode
);
1233 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_GD
));
1234 return nios2_call_tls_get_addr (tmp
);
1236 case TLS_MODEL_LOCAL_DYNAMIC
:
1237 tmp
= gen_reg_rtx (Pmode
);
1238 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_LDM
));
1239 return gen_rtx_PLUS (Pmode
, nios2_call_tls_get_addr (tmp
),
1240 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LDO
));
1242 case TLS_MODEL_INITIAL_EXEC
:
1243 tmp
= gen_reg_rtx (Pmode
);
1244 mem
= gen_const_mem (Pmode
, nios2_got_address (loc
, UNSPEC_LOAD_TLS_IE
));
1245 emit_move_insn (tmp
, mem
);
1246 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1247 return gen_rtx_PLUS (Pmode
, tp
, tmp
);
1249 case TLS_MODEL_LOCAL_EXEC
:
1250 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1251 return gen_rtx_PLUS (Pmode
, tp
,
1252 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LE
));
1260 If -O3 is used, we want to output a table lookup for
1261 divides between small numbers (both num and den >= 0
1262 and < 0x10). The overhead of this method in the worst
1263 case is 40 bytes in the text section (10 insns) and
1264 256 bytes in the data section. Additional divides do
1265 not incur additional penalties in the data section.
1267 Code speed is improved for small divides by about 5x
1268 when using this method in the worse case (~9 cycles
1269 vs ~45). And in the worst case divides not within the
1270 table are penalized by about 10% (~5 cycles vs ~45).
1271 However in the typical case the penalty is not as bad
1272 because doing the long divide in only 45 cycles is
1275 ??? would be nice to have some benchmarks other
1276 than Dhrystone to back this up.
1278 This bit of expansion is to create this instruction
1285 add $12, $11, divide_table
1291 # continue here with result in $2
1293 ??? Ideally I would like the libcall block to contain all
1294 of this code, but I don't know how to do that. What it
1295 means is that if the divide can be eliminated, it may not
1296 completely disappear.
1298 ??? The __divsi3_table label should ideally be moved out
1299 of this block and into a global. If it is placed into the
1300 sdata section we can save even more cycles by doing things
1303 nios2_emit_expensive_div (rtx
*operands
, enum machine_mode mode
)
1305 rtx or_result
, shift_left_result
;
1307 rtx_code_label
*lab1
, *lab3
;
1314 /* It may look a little generic, but only SImode is supported for now. */
1315 gcc_assert (mode
== SImode
);
1316 libfunc
= optab_libfunc (sdiv_optab
, SImode
);
1318 lab1
= gen_label_rtx ();
1319 lab3
= gen_label_rtx ();
1321 or_result
= expand_simple_binop (SImode
, IOR
,
1322 operands
[1], operands
[2],
1323 0, 0, OPTAB_LIB_WIDEN
);
1325 emit_cmp_and_jump_insns (or_result
, GEN_INT (15), GTU
, 0,
1326 GET_MODE (or_result
), 0, lab3
);
1327 JUMP_LABEL (get_last_insn ()) = lab3
;
1329 shift_left_result
= expand_simple_binop (SImode
, ASHIFT
,
1330 operands
[1], GEN_INT (4),
1331 0, 0, OPTAB_LIB_WIDEN
);
1333 lookup_value
= expand_simple_binop (SImode
, IOR
,
1334 shift_left_result
, operands
[2],
1335 0, 0, OPTAB_LIB_WIDEN
);
1336 table
= gen_rtx_PLUS (SImode
, lookup_value
,
1337 gen_rtx_SYMBOL_REF (SImode
, "__divsi3_table"));
1338 convert_move (operands
[0], gen_rtx_MEM (QImode
, table
), 1);
1340 tmp
= emit_jump_insn (gen_jump (lab1
));
1341 JUMP_LABEL (tmp
) = lab1
;
1345 LABEL_NUSES (lab3
) = 1;
1348 final_result
= emit_library_call_value (libfunc
, NULL_RTX
,
1349 LCT_CONST
, SImode
, 2,
1350 operands
[1], SImode
,
1351 operands
[2], SImode
);
1353 insns
= get_insns ();
1355 emit_libcall_block (insns
, operands
[0], final_result
,
1356 gen_rtx_DIV (SImode
, operands
[1], operands
[2]));
1359 LABEL_NUSES (lab1
) = 1;
1363 /* Branches and compares. */
1365 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1366 comparison, e.g. >= 1 into > 0. */
1368 nios2_alternate_compare_const (enum rtx_code code
, rtx op
,
1369 enum rtx_code
*alt_code
, rtx
*alt_op
,
1370 enum machine_mode mode
)
1372 HOST_WIDE_INT opval
= INTVAL (op
);
1373 enum rtx_code scode
= signed_condition (code
);
1374 bool dec_p
= (scode
== LT
|| scode
== GE
);
1376 if (code
== EQ
|| code
== NE
)
1384 ? gen_int_mode (opval
- 1, mode
)
1385 : gen_int_mode (opval
+ 1, mode
));
1387 /* The required conversion between [>,>=] and [<,<=] is captured
1388 by a reverse + swap of condition codes. */
1389 *alt_code
= reverse_condition (swap_condition (code
));
1392 /* Test if the incremented/decremented value crosses the over/underflow
1393 boundary. Supposedly, such boundary cases should already be transformed
1394 into always-true/false or EQ conditions, so use an assertion here. */
1395 unsigned HOST_WIDE_INT alt_opval
= INTVAL (*alt_op
);
1397 alt_opval
^= (1 << (GET_MODE_BITSIZE (mode
) - 1));
1398 alt_opval
&= GET_MODE_MASK (mode
);
1399 gcc_assert (dec_p
? alt_opval
!= GET_MODE_MASK (mode
) : alt_opval
!= 0);
1403 /* Return true if the constant comparison is supported by nios2. */
1405 nios2_valid_compare_const_p (enum rtx_code code
, rtx op
)
1409 case EQ
: case NE
: case GE
: case LT
:
1410 return SMALL_INT (INTVAL (op
));
1412 return SMALL_INT_UNSIGNED (INTVAL (op
));
1418 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1419 the current configuration. Perform modifications if MODIFY_P is true.
1420 Returns true if FPU compare can be done. */
1423 nios2_validate_fpu_compare (enum machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
,
1427 enum rtx_code code
= GET_CODE (*cmp
);
1429 if (!nios2_fpu_compare_enabled (code
, mode
))
1431 code
= swap_condition (code
);
1432 if (nios2_fpu_compare_enabled (code
, mode
))
1446 *op1
= force_reg (mode
, *op1
);
1447 *op2
= force_reg (mode
, *op2
);
1448 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1453 /* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
1454 nios2 supported form. Returns true if success. */
1456 nios2_validate_compare (enum machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
)
1458 enum rtx_code code
= GET_CODE (*cmp
);
1459 enum rtx_code alt_code
;
1462 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
1463 return nios2_validate_fpu_compare (mode
, cmp
, op1
, op2
, true);
1465 if (!reg_or_0_operand (*op2
, mode
))
1467 /* Create alternate constant compare. */
1468 nios2_alternate_compare_const (code
, *op2
, &alt_code
, &alt_op2
, mode
);
1470 /* If alterate op2 is zero(0), we can use it directly, possibly
1471 swapping the compare code. */
1472 if (alt_op2
== const0_rtx
)
1476 goto check_rebuild_cmp
;
1479 /* Check if either constant compare can be used. */
1480 if (nios2_valid_compare_const_p (code
, *op2
))
1482 else if (nios2_valid_compare_const_p (alt_code
, alt_op2
))
1489 /* We have to force op2 into a register now. Try to pick one
1490 with a lower cost. */
1491 if (! nios2_simple_const_p (*op2
)
1492 && nios2_simple_const_p (alt_op2
))
1497 *op2
= force_reg (SImode
, *op2
);
1500 if (code
== GT
|| code
== GTU
|| code
== LE
|| code
== LEU
)
1502 rtx t
= *op1
; *op1
= *op2
; *op2
= t
;
1503 code
= swap_condition (code
);
1506 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1511 /* Addressing Modes. */
1513 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1515 nios2_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1518 split_const (x
, &base
, &offset
);
1519 return GET_CODE (base
) != SYMBOL_REF
|| !SYMBOL_REF_TLS_MODEL (base
);
1522 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1524 nios2_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1526 return nios2_legitimate_constant_p (mode
, x
) == false;
1529 /* Return true if register REGNO is a valid base register.
1530 STRICT_P is true if REG_OK_STRICT is in effect. */
1533 nios2_regno_ok_for_base_p (int regno
, bool strict_p
)
1535 if (!HARD_REGISTER_NUM_P (regno
))
1543 regno
= reg_renumber
[regno
];
1546 /* The fake registers will be eliminated to either the stack or
1547 hard frame pointer, both of which are usually valid base registers.
1548 Reload deals with the cases where the eliminated form isn't valid. */
1549 return (GP_REG_P (regno
)
1550 || regno
== FRAME_POINTER_REGNUM
1551 || regno
== ARG_POINTER_REGNUM
);
1554 /* Return true if the address expression formed by BASE + OFFSET is
1557 nios2_valid_addr_expr_p (rtx base
, rtx offset
, bool strict_p
)
1559 if (!strict_p
&& GET_CODE (base
) == SUBREG
)
1560 base
= SUBREG_REG (base
);
1561 return (REG_P (base
)
1562 && nios2_regno_ok_for_base_p (REGNO (base
), strict_p
)
1563 && (offset
== NULL_RTX
1564 || const_arith_operand (offset
, Pmode
)
1565 || nios2_unspec_reloc_p (offset
)));
1568 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1570 nios2_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
1571 rtx operand
, bool strict_p
)
1573 switch (GET_CODE (operand
))
1577 if (SYMBOL_REF_TLS_MODEL (operand
))
1580 if (nios2_symbol_ref_in_small_data_p (operand
))
1583 /* Else, fall through. */
1590 /* Register indirect. */
1592 return nios2_regno_ok_for_base_p (REGNO (operand
), strict_p
);
1594 /* Register indirect with displacement. */
1597 rtx op0
= XEXP (operand
, 0);
1598 rtx op1
= XEXP (operand
, 1);
1600 return (nios2_valid_addr_expr_p (op0
, op1
, strict_p
)
1601 || nios2_valid_addr_expr_p (op1
, op0
, strict_p
));
/* Return true if SECTION is a small section name.  */
static bool
nios2_small_section_name_p (const char *section)
{
  return (strcmp (section, ".sbss") == 0
          || strncmp (section, ".sbss.", 6) == 0
          || strcmp (section, ".sdata") == 0
          || strncmp (section, ".sdata.", 7) == 0);
}
1620 /* Return true if EXP should be placed in the small data section. */
1622 nios2_in_small_data_p (const_tree exp
)
1624 /* We want to merge strings, so we never consider them small data. */
1625 if (TREE_CODE (exp
) == STRING_CST
)
1628 if (TREE_CODE (exp
) == VAR_DECL
)
1630 if (DECL_SECTION_NAME (exp
))
1632 const char *section
= DECL_SECTION_NAME (exp
);
1633 if (nios2_section_threshold
> 0
1634 && nios2_small_section_name_p (section
))
1639 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
1641 /* If this is an incomplete type with size 0, then we can't put it
1642 in sdata because it might be too big when completed. */
1644 && (unsigned HOST_WIDE_INT
) size
<= nios2_section_threshold
)
1652 /* Return true if symbol is in small data section. */
1655 nios2_symbol_ref_in_small_data_p (rtx sym
)
1657 gcc_assert (GET_CODE (sym
) == SYMBOL_REF
);
1660 /* GP-relative access cannot be used for externally defined symbols,
1661 because the compilation unit that defines the symbol may place it
1662 in a section that cannot be reached from GP. */
1663 && !SYMBOL_REF_EXTERNAL_P (sym
)
1664 /* True if a symbol is both small and not weak. */
1665 && SYMBOL_REF_SMALL_P (sym
)
1666 && !(SYMBOL_REF_DECL (sym
) && DECL_WEAK (SYMBOL_REF_DECL (sym
)))
1667 /* TLS variables are not accessed through the GP. */
1668 && SYMBOL_REF_TLS_MODEL (sym
) == 0);
1672 /* Implement TARGET_SECTION_TYPE_FLAGS. */
1675 nios2_section_type_flags (tree decl
, const char *name
, int reloc
)
1679 flags
= default_section_type_flags (decl
, name
, reloc
);
1681 if (nios2_small_section_name_p (name
))
1682 flags
|= SECTION_SMALL
;
1687 /* Return true if SYMBOL_REF X binds locally. */
1690 nios2_symbol_binds_local_p (const_rtx x
)
1692 return (SYMBOL_REF_DECL (x
)
1693 ? targetm
.binds_local_p (SYMBOL_REF_DECL (x
))
1694 : SYMBOL_REF_LOCAL_P (x
));
1697 /* Position independent code related. */
1699 /* Emit code to load the PIC register. */
1701 nios2_load_pic_register (void)
1703 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
1705 emit_insn (gen_load_got_register (pic_offset_table_rtx
, tmp
));
1706 emit_insn (gen_add3_insn (pic_offset_table_rtx
, pic_offset_table_rtx
, tmp
));
1709 /* Generate a PIC address as a MEM rtx. */
1711 nios2_load_pic_address (rtx sym
, int unspec
)
1714 && GET_CODE (sym
) == SYMBOL_REF
1715 && nios2_symbol_binds_local_p (sym
))
1716 /* Under -fPIC, generate a GOTOFF address for local symbols. */
1717 return nios2_got_address (sym
, UNSPEC_PIC_GOTOFF_SYM
);
1719 return gen_const_mem (Pmode
, nios2_got_address (sym
, unspec
));
1722 /* Nonzero if the constant value X is a legitimate general operand
1723 when generating PIC code. It is given that flag_pic is on and
1724 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1726 nios2_legitimate_pic_operand_p (rtx x
)
1728 if (GET_CODE (x
) == CONST
1729 && GET_CODE (XEXP (x
, 0)) == UNSPEC
1730 && nios2_large_offset_p (XINT (XEXP (x
, 0), 1)))
1733 return ! (GET_CODE (x
) == SYMBOL_REF
1734 || GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == CONST
);
1737 /* Return TRUE if X is a thread-local symbol. */
1739 nios2_tls_symbol_p (rtx x
)
1741 return (targetm
.have_tls
&& GET_CODE (x
) == SYMBOL_REF
1742 && SYMBOL_REF_TLS_MODEL (x
) != 0);
1745 /* Legitimize addresses that are CONSTANT_P expressions. */
1747 nios2_legitimize_constant_address (rtx addr
)
1750 split_const (addr
, &base
, &offset
);
1752 if (nios2_tls_symbol_p (base
))
1753 base
= nios2_legitimize_tls_address (base
);
1755 base
= nios2_load_pic_address (base
, UNSPEC_PIC_SYM
);
1759 if (offset
!= const0_rtx
)
1761 gcc_assert (can_create_pseudo_p ());
1762 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, base
),
1763 (CONST_INT_P (offset
)
1764 ? (SMALL_INT (INTVAL (offset
))
1765 ? offset
: force_reg (Pmode
, offset
))
1771 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1773 nios2_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
1774 enum machine_mode mode ATTRIBUTE_UNUSED
)
1777 return nios2_legitimize_constant_address (x
);
1779 /* For the TLS LE (Local Exec) model, the compiler may try to
1780 combine constant offsets with unspec relocs, creating address RTXs
1782 (plus:SI (reg:SI 23 r23)
1785 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
1786 (const_int 48 [0x30]))))
1788 This usually happens when 'var' is a thread-local struct variable,
1789 and access of a field in var causes the addend.
1791 We typically want this combining, so transform the above into this
1792 form, which is allowed:
1793 (plus:SI (reg:SI 23 r23)
1797 (plus:SI (symbol_ref:SI ("var"))
1798 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
1800 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
1801 if (GET_CODE (x
) == PLUS
1802 && GET_CODE (XEXP (x
, 0)) == REG
1803 && GET_CODE (XEXP (x
, 1)) == CONST
)
1805 rtx unspec
, offset
, reg
= XEXP (x
, 0);
1806 split_const (XEXP (x
, 1), &unspec
, &offset
);
1807 if (GET_CODE (unspec
) == UNSPEC
1808 && !nios2_large_offset_p (XINT (unspec
, 1))
1809 && offset
!= const0_rtx
)
1811 unspec
= copy_rtx (unspec
);
1812 XVECEXP (unspec
, 0, 0)
1813 = plus_constant (Pmode
, XVECEXP (unspec
, 0, 0), INTVAL (offset
));
1814 x
= gen_rtx_PLUS (Pmode
, reg
, gen_rtx_CONST (Pmode
, unspec
));
1822 nios2_delegitimize_address (rtx x
)
1824 x
= delegitimize_mem_from_attrs (x
);
1826 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == UNSPEC
)
1828 switch (XINT (XEXP (x
, 0), 1))
1830 case UNSPEC_PIC_SYM
:
1831 case UNSPEC_PIC_CALL_SYM
:
1832 case UNSPEC_PIC_GOTOFF_SYM
:
1833 case UNSPEC_ADD_TLS_GD
:
1834 case UNSPEC_ADD_TLS_LDM
:
1835 case UNSPEC_LOAD_TLS_IE
:
1836 case UNSPEC_ADD_TLS_LE
:
1837 x
= XVECEXP (XEXP (x
, 0), 0, 0);
1838 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
1845 /* Main expander function for RTL moves. */
1847 nios2_emit_move_sequence (rtx
*operands
, enum machine_mode mode
)
1849 rtx to
= operands
[0];
1850 rtx from
= operands
[1];
1852 if (!register_operand (to
, mode
) && !reg_or_0_operand (from
, mode
))
1854 gcc_assert (can_create_pseudo_p ());
1855 from
= copy_to_mode_reg (mode
, from
);
1858 if (GET_CODE (from
) == SYMBOL_REF
|| GET_CODE (from
) == LABEL_REF
1859 || (GET_CODE (from
) == CONST
1860 && GET_CODE (XEXP (from
, 0)) != UNSPEC
))
1861 from
= nios2_legitimize_constant_address (from
);
1868 /* The function with address *ADDR is being called. If the address
1869 needs to be loaded from the GOT, emit the instruction to do so and
1870 update *ADDR to point to the rtx for the loaded value. */
1872 nios2_adjust_call_address (rtx
*call_op
)
1875 gcc_assert (MEM_P (*call_op
));
1876 addr
= XEXP (*call_op
, 0);
1877 if (flag_pic
&& CONSTANT_P (addr
))
1879 rtx reg
= gen_reg_rtx (Pmode
);
1880 emit_move_insn (reg
, nios2_load_pic_address (addr
, UNSPEC_PIC_CALL_SYM
));
1881 XEXP (*call_op
, 0) = reg
;
1886 /* Output assembly language related definitions. */
1888 /* Print the operand OP to file stream FILE modified by LETTER.
1889 LETTER can be one of:
1891 i: print "i" if OP is an immediate, except 0
1892 o: print "io" if OP is volatile
1893 z: for const0_rtx print $0 instead of 0
1896 U: for upper half of 32 bit value
1897 D: for the upper 32-bits of a 64-bit double value
1898 R: prints reverse condition.
1901 nios2_print_operand (FILE *file
, rtx op
, int letter
)
1907 if (CONSTANT_P (op
) && op
!= const0_rtx
)
1908 fprintf (file
, "i");
1912 if (GET_CODE (op
) == MEM
1913 && ((MEM_VOLATILE_P (op
) && TARGET_BYPASS_CACHE_VOLATILE
)
1914 || TARGET_BYPASS_CACHE
))
1915 fprintf (file
, "io");
1922 if (comparison_operator (op
, VOIDmode
))
1924 enum rtx_code cond
= GET_CODE (op
);
1927 fprintf (file
, "%s", GET_RTX_NAME (cond
));
1932 fprintf (file
, "%s", GET_RTX_NAME (reverse_condition (cond
)));
1937 switch (GET_CODE (op
))
1940 if (letter
== 0 || letter
== 'z')
1942 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
1945 else if (letter
== 'D')
1947 fprintf (file
, "%s", reg_names
[REGNO (op
)+1]);
1953 if (INTVAL (op
) == 0 && letter
== 'z')
1955 fprintf (file
, "zero");
1961 HOST_WIDE_INT val
= INTVAL (op
);
1962 val
= (val
>> 16) & 0xFFFF;
1963 output_addr_const (file
, gen_int_mode (val
, SImode
));
1966 /* Else, fall through. */
1972 if (letter
== 0 || letter
== 'z')
1974 output_addr_const (file
, op
);
1977 else if (letter
== 'H' || letter
== 'L')
1979 fprintf (file
, "%%");
1980 if (GET_CODE (op
) == CONST
1981 && GET_CODE (XEXP (op
, 0)) == UNSPEC
)
1983 rtx unspec
= XEXP (op
, 0);
1984 int unspec_reloc
= XINT (unspec
, 1);
1985 gcc_assert (nios2_large_offset_p (unspec_reloc
));
1986 fprintf (file
, "%s_", nios2_unspec_reloc_name (unspec_reloc
));
1987 op
= XVECEXP (unspec
, 0, 0);
1989 fprintf (file
, letter
== 'H' ? "hiadj(" : "lo(");
1990 output_addr_const (file
, op
);
1991 fprintf (file
, ")");
2000 output_address (op
);
2008 output_addr_const (file
, op
);
2017 output_operand_lossage ("Unsupported operand for code '%c'", letter
);
2021 /* Return true if this is a GP-relative accessible reference. */
2023 gprel_constant_p (rtx op
)
2025 if (GET_CODE (op
) == SYMBOL_REF
2026 && nios2_symbol_ref_in_small_data_p (op
))
2028 else if (GET_CODE (op
) == CONST
2029 && GET_CODE (XEXP (op
, 0)) == PLUS
)
2030 return gprel_constant_p (XEXP (XEXP (op
, 0), 0));
2035 /* Return the name string for a supported unspec reloc offset. */
2037 nios2_unspec_reloc_name (int unspec
)
2041 case UNSPEC_PIC_SYM
:
2043 case UNSPEC_PIC_CALL_SYM
:
2045 case UNSPEC_PIC_GOTOFF_SYM
:
2047 case UNSPEC_LOAD_TLS_IE
:
2049 case UNSPEC_ADD_TLS_LE
:
2051 case UNSPEC_ADD_TLS_GD
:
2053 case UNSPEC_ADD_TLS_LDM
:
2055 case UNSPEC_ADD_TLS_LDO
:
2062 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2064 nios2_output_addr_const_extra (FILE *file
, rtx op
)
2067 gcc_assert (GET_CODE (op
) == UNSPEC
);
2069 /* Support for printing out const unspec relocations. */
2070 name
= nios2_unspec_reloc_name (XINT (op
, 1));
2073 fprintf (file
, "%%%s(", name
);
2074 output_addr_const (file
, XVECEXP (op
, 0, 0));
2075 fprintf (file
, ")");
2081 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
2083 nios2_print_operand_address (FILE *file
, rtx op
)
2085 switch (GET_CODE (op
))
2092 if (gprel_constant_p (op
))
2094 fprintf (file
, "%%gprel(");
2095 output_addr_const (file
, op
);
2096 fprintf (file
, ")(%s)", reg_names
[GP_REGNO
]);
2104 rtx op0
= XEXP (op
, 0);
2105 rtx op1
= XEXP (op
, 1);
2107 if (REG_P (op0
) && CONSTANT_P (op1
))
2109 output_addr_const (file
, op1
);
2110 fprintf (file
, "(%s)", reg_names
[REGNO (op0
)]);
2113 else if (REG_P (op1
) && CONSTANT_P (op0
))
2115 output_addr_const (file
, op0
);
2116 fprintf (file
, "(%s)", reg_names
[REGNO (op1
)]);
2123 fprintf (file
, "0(%s)", reg_names
[REGNO (op
)]);
2128 rtx base
= XEXP (op
, 0);
2129 nios2_print_operand_address (file
, base
);
2136 fprintf (stderr
, "Missing way to print address\n");
2141 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2143 nios2_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
2145 gcc_assert (size
== 4);
2146 fprintf (file
, "\t.4byte\t%%tls_ldo(");
2147 output_addr_const (file
, x
);
2148 fprintf (file
, ")");
2151 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2153 nios2_asm_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
2155 if (flag_verbose_asm
|| flag_debug_asm
)
2157 nios2_compute_frame_layout ();
2158 nios2_dump_frame_layout (file
);
2162 /* Emit assembly of custom FPU instructions. */
2164 nios2_fpu_insn_asm (enum n2fpu_code code
)
2166 static char buf
[256];
2167 const char *op1
, *op2
, *op3
;
2168 int ln
= 256, n
= 0;
2170 int N
= N2FPU_N (code
);
2171 int num_operands
= N2FPU (code
).num_operands
;
2172 const char *insn_name
= N2FPU_NAME (code
);
2173 tree ftype
= nios2_ftype (N2FPU_FTCODE (code
));
2174 enum machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (ftype
));
2175 enum machine_mode src_mode
= TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype
)));
2177 /* Prepare X register for DF input operands. */
2178 if (GET_MODE_SIZE (src_mode
) == 8 && num_operands
== 3)
2179 n
= snprintf (buf
, ln
, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2180 N2FPU_N (n2fpu_fwrx
));
2182 if (src_mode
== SFmode
)
2184 if (dst_mode
== VOIDmode
)
2186 /* The fwry case. */
2193 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2195 op3
= (num_operands
== 2 ? "zero" : "%2");
2198 else if (src_mode
== DFmode
)
2200 if (dst_mode
== VOIDmode
)
2202 /* The fwrx case. */
2210 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2211 op2
= (num_operands
== 2 ? "%1" : "%2");
2212 op3
= (num_operands
== 2 ? "%D1" : "%D2");
2215 else if (src_mode
== VOIDmode
)
2217 /* frdxlo, frdxhi, frdy cases. */
2218 gcc_assert (dst_mode
== SFmode
);
2222 else if (src_mode
== SImode
)
2224 /* Conversion operators. */
2225 gcc_assert (num_operands
== 2);
2226 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
2233 /* Main instruction string. */
2234 n
+= snprintf (buf
+ n
, ln
- n
, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2235 N
, op1
, op2
, op3
, insn_name
,
2236 (num_operands
>= 2 ? ", %1" : ""),
2237 (num_operands
== 3 ? ", %2" : ""));
2239 /* Extraction of Y register for DF results. */
2240 if (dst_mode
== DFmode
)
2241 snprintf (buf
+ n
, ln
- n
, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2242 N2FPU_N (n2fpu_frdy
));
2248 /* Function argument related. */
2250 /* Define where to put the arguments to a function. Value is zero to
2251 push the argument on the stack, or a hard register in which to
2254 MODE is the argument's machine mode.
2255 TYPE is the data type of the argument (as a tree).
2256 This is null for libcalls where that information may
2258 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2259 the preceding args and about the function being called.
2260 NAMED is nonzero if this argument is a named parameter
2261 (otherwise it is an extra parameter matching an ellipsis). */
2264 nios2_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2265 const_tree type ATTRIBUTE_UNUSED
,
2266 bool named ATTRIBUTE_UNUSED
)
2268 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2269 rtx return_rtx
= NULL_RTX
;
2271 if (cum
->regs_used
< NUM_ARG_REGS
)
2272 return_rtx
= gen_rtx_REG (mode
, FIRST_ARG_REGNO
+ cum
->regs_used
);
2277 /* Return number of bytes, at the beginning of the argument, that must be
2278 put in registers. 0 is the argument is entirely in registers or entirely
2282 nios2_arg_partial_bytes (cumulative_args_t cum_v
,
2283 enum machine_mode mode
, tree type ATTRIBUTE_UNUSED
,
2284 bool named ATTRIBUTE_UNUSED
)
2286 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2287 HOST_WIDE_INT param_size
;
2289 if (mode
== BLKmode
)
2291 param_size
= int_size_in_bytes (type
);
2292 gcc_assert (param_size
>= 0);
2295 param_size
= GET_MODE_SIZE (mode
);
2297 /* Convert to words (round up). */
2298 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2300 if (cum
->regs_used
< NUM_ARG_REGS
2301 && cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2302 return (NUM_ARG_REGS
- cum
->regs_used
) * UNITS_PER_WORD
;
2307 /* Update the data in CUM to advance over an argument of mode MODE
2308 and data type TYPE; TYPE is null for libcalls where that information
2309 may not be available. */
2312 nios2_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2313 const_tree type ATTRIBUTE_UNUSED
,
2314 bool named ATTRIBUTE_UNUSED
)
2316 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2317 HOST_WIDE_INT param_size
;
2319 if (mode
== BLKmode
)
2321 param_size
= int_size_in_bytes (type
);
2322 gcc_assert (param_size
>= 0);
2325 param_size
= GET_MODE_SIZE (mode
);
2327 /* Convert to words (round up). */
2328 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
2330 if (cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
2331 cum
->regs_used
= NUM_ARG_REGS
;
2333 cum
->regs_used
+= param_size
;
2337 nios2_function_arg_padding (enum machine_mode mode
, const_tree type
)
2339 /* On little-endian targets, the first byte of every stack argument
2340 is passed in the first byte of the stack slot. */
2341 if (!BYTES_BIG_ENDIAN
)
2344 /* Otherwise, integral types are padded downward: the last byte of a
2345 stack argument is passed in the last byte of the stack slot. */
2347 ? INTEGRAL_TYPE_P (type
) || POINTER_TYPE_P (type
)
2348 : GET_MODE_CLASS (mode
) == MODE_INT
)
2351 /* Arguments smaller than a stack slot are padded downward. */
2352 if (mode
!= BLKmode
)
2353 return (GET_MODE_BITSIZE (mode
) >= PARM_BOUNDARY
) ? upward
: downward
;
2355 return ((int_size_in_bytes (type
) >= (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2356 ? upward
: downward
);
2360 nios2_block_reg_padding (enum machine_mode mode
, tree type
,
2361 int first ATTRIBUTE_UNUSED
)
2363 return nios2_function_arg_padding (mode
, type
);
2366 /* Emit RTL insns to initialize the variable parts of a trampoline.
2367 FNADDR is an RTX for the address of the function's pure code.
2368 CXT is an RTX for the static chain value for the function.
2369 On Nios II, we handle this by a library call. */
2371 nios2_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
2373 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2374 rtx ctx_reg
= force_reg (Pmode
, cxt
);
2375 rtx addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
2377 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__trampoline_setup"),
2378 LCT_NORMAL
, VOIDmode
, 3, addr
, Pmode
, fnaddr
, Pmode
,
2382 /* Implement TARGET_FUNCTION_VALUE. */
2384 nios2_function_value (const_tree ret_type
, const_tree fn ATTRIBUTE_UNUSED
,
2385 bool outgoing ATTRIBUTE_UNUSED
)
2387 return gen_rtx_REG (TYPE_MODE (ret_type
), FIRST_RETVAL_REGNO
);
2390 /* Implement TARGET_LIBCALL_VALUE. */
2392 nios2_libcall_value (enum machine_mode mode
, const_rtx fun ATTRIBUTE_UNUSED
)
2394 return gen_rtx_REG (mode
, FIRST_RETVAL_REGNO
);
2397 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
2399 nios2_function_value_regno_p (const unsigned int regno
)
2401 return regno
== FIRST_RETVAL_REGNO
;
2404 /* Implement TARGET_RETURN_IN_MEMORY. */
2406 nios2_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2408 return (int_size_in_bytes (type
) > (2 * UNITS_PER_WORD
)
2409 || int_size_in_bytes (type
) == -1);
2412 /* TODO: It may be possible to eliminate the copyback and implement
2415 nios2_setup_incoming_varargs (cumulative_args_t cum_v
,
2416 enum machine_mode mode
, tree type
,
2417 int *pretend_size
, int second_time
)
2419 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2420 CUMULATIVE_ARGS local_cum
;
2421 cumulative_args_t local_cum_v
= pack_cumulative_args (&local_cum
);
2426 nios2_function_arg_advance (local_cum_v
, mode
, type
, 1);
2428 regs_to_push
= NUM_ARG_REGS
- local_cum
.regs_used
;
2430 if (!second_time
&& regs_to_push
> 0)
2432 rtx ptr
= virtual_incoming_args_rtx
;
2433 rtx mem
= gen_rtx_MEM (BLKmode
, ptr
);
2434 emit_insn (gen_blockage ());
2435 move_block_from_reg (local_cum
.regs_used
+ FIRST_ARG_REGNO
, mem
,
2437 emit_insn (gen_blockage ());
2440 pret_size
= regs_to_push
* UNITS_PER_WORD
;
2442 *pretend_size
= pret_size
;
2447 /* Init FPU builtins. */
2449 nios2_init_fpu_builtins (int start_code
)
2452 char builtin_name
[64] = "__builtin_custom_";
2453 unsigned int i
, n
= strlen ("__builtin_custom_");
2455 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2457 snprintf (builtin_name
+ n
, sizeof (builtin_name
) - n
,
2458 "%s", N2FPU_NAME (i
));
2460 add_builtin_function (builtin_name
, nios2_ftype (N2FPU_FTCODE (i
)),
2461 start_code
+ i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
2462 nios2_register_builtin_fndecl (start_code
+ i
, fndecl
);
2466 /* Helper function for expanding FPU builtins. */
2468 nios2_expand_fpu_builtin (tree exp
, unsigned int code
, rtx target
)
2470 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2471 enum insn_code icode
= N2FPU_ICODE (code
);
2472 int nargs
, argno
, opno
= 0;
2473 int num_operands
= N2FPU (code
).num_operands
;
2474 enum machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (exp
));
2475 bool has_target_p
= (dst_mode
!= VOIDmode
);
2477 if (N2FPU_N (code
) < 0)
2478 fatal_error ("Cannot call %<__builtin_custom_%s%> without specifying switch"
2479 " %<-mcustom-%s%>", N2FPU_NAME (code
), N2FPU_NAME (code
));
2481 create_output_operand (&ops
[opno
++], target
, dst_mode
);
2483 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
2485 nargs
= call_expr_nargs (exp
);
2486 for (argno
= 0; argno
< nargs
; argno
++)
2488 tree arg
= CALL_EXPR_ARG (exp
, argno
);
2489 create_input_operand (&ops
[opno
++], expand_normal (arg
),
2490 TYPE_MODE (TREE_TYPE (arg
)));
2492 if (!maybe_expand_insn (icode
, num_operands
, ops
))
2494 error ("invalid argument to built-in function");
2495 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
2497 return has_target_p
? ops
[0].value
: const0_rtx
;
/* Nios II has custom instruction built-in functions of the forms:

   __builtin_custom_n
   __builtin_custom_nX
   __builtin_custom_nXX
   __builtin_custom_Xn
   __builtin_custom_XnX
   __builtin_custom_XnXX

   where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
   Therefore with 0-1 return values, and 0-2 arguments, we have a
   total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.  */

#define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))

/* Parameter-letter suffix of each custom builtin ("nii", "fnf", ...),
   saved for use in diagnostics; max 4 chars plus NUL.  */
static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
2516 nios2_init_custom_builtins (int start_code
)
2518 tree builtin_ftype
, ret_type
, fndecl
;
2519 char builtin_name
[32] = "__builtin_custom_";
2520 int n
= strlen ("__builtin_custom_");
2521 int builtin_code
= 0;
2522 int lhs
, rhs1
, rhs2
;
2524 struct { tree type
; const char *c
; } op
[4];
2525 /* z */ op
[0].c
= ""; op
[0].type
= NULL_TREE
;
2526 /* f */ op
[1].c
= "f"; op
[1].type
= float_type_node
;
2527 /* i */ op
[2].c
= "i"; op
[2].type
= integer_type_node
;
2528 /* p */ op
[3].c
= "p"; op
[3].type
= ptr_type_node
;
2530 /* We enumerate through the possible operand types to create all the
2531 __builtin_custom_XnXX function tree types. Note that these may slightly
2532 overlap with the function types created for other fixed builtins. */
2534 for (lhs
= 0; lhs
< 4; lhs
++)
2535 for (rhs1
= 0; rhs1
< 4; rhs1
++)
2536 for (rhs2
= 0; rhs2
< 4; rhs2
++)
2538 if (rhs1
== 0 && rhs2
!= 0)
2540 ret_type
= (op
[lhs
].type
? op
[lhs
].type
: void_type_node
);
2542 = build_function_type_list (ret_type
, integer_type_node
,
2543 op
[rhs1
].type
, op
[rhs2
].type
,
2545 snprintf (builtin_name
+ n
, 32 - n
, "%sn%s%s",
2546 op
[lhs
].c
, op
[rhs1
].c
, op
[rhs2
].c
);
2547 /* Save copy of parameter string into custom_builtin_name[]. */
2548 strncpy (custom_builtin_name
[builtin_code
], builtin_name
+ n
, 5);
2550 add_builtin_function (builtin_name
, builtin_ftype
,
2551 start_code
+ builtin_code
,
2552 BUILT_IN_MD
, NULL
, NULL_TREE
);
2553 nios2_register_builtin_fndecl (start_code
+ builtin_code
, fndecl
);
2558 /* Helper function for expanding custom builtins. */
2560 nios2_expand_custom_builtin (tree exp
, unsigned int index
, rtx target
)
2562 bool has_target_p
= (TREE_TYPE (exp
) != void_type_node
);
2563 enum machine_mode tmode
= VOIDmode
;
2565 rtx value
, insn
, unspec_args
[3];
2571 tmode
= TYPE_MODE (TREE_TYPE (exp
));
2572 if (!target
|| GET_MODE (target
) != tmode
2574 target
= gen_reg_rtx (tmode
);
2577 nargs
= call_expr_nargs (exp
);
2578 for (argno
= 0; argno
< nargs
; argno
++)
2580 arg
= CALL_EXPR_ARG (exp
, argno
);
2581 value
= expand_normal (arg
);
2582 unspec_args
[argno
] = value
;
2585 if (!custom_insn_opcode (value
, VOIDmode
))
2586 error ("custom instruction opcode must be compile time "
2587 "constant in the range 0-255 for __builtin_custom_%s",
2588 custom_builtin_name
[index
]);
2591 /* For other arguments, force into a register. */
2592 unspec_args
[argno
] = force_reg (TYPE_MODE (TREE_TYPE (arg
)),
2593 unspec_args
[argno
]);
2595 /* Fill remaining unspec operands with zero. */
2596 for (; argno
< 3; argno
++)
2597 unspec_args
[argno
] = const0_rtx
;
2599 insn
= (has_target_p
2600 ? gen_rtx_SET (VOIDmode
, target
,
2601 gen_rtx_UNSPEC_VOLATILE (tmode
,
2602 gen_rtvec_v (3, unspec_args
),
2603 UNSPECV_CUSTOM_XNXX
))
2604 : gen_rtx_UNSPEC_VOLATILE (VOIDmode
, gen_rtvec_v (3, unspec_args
),
2605 UNSPECV_CUSTOM_NXX
));
2607 return has_target_p
? target
: const0_rtx
;
2613 /* Main definition of built-in functions. Nios II has a small number of fixed
2614 builtins, plus a large number of FPU insn builtins, and builtins for
2615 generating custom instructions. */
2617 struct nios2_builtin_desc
2619 enum insn_code icode
;
2620 enum nios2_ftcode ftype
;
2624 #define N2_BUILTINS \
2625 N2_BUILTIN_DEF (sync, N2_FTYPE_VOID_VOID) \
2626 N2_BUILTIN_DEF (ldbio, N2_FTYPE_SI_CVPTR) \
2627 N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR) \
2628 N2_BUILTIN_DEF (ldhio, N2_FTYPE_SI_CVPTR) \
2629 N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR) \
2630 N2_BUILTIN_DEF (ldwio, N2_FTYPE_SI_CVPTR) \
2631 N2_BUILTIN_DEF (stbio, N2_FTYPE_VOID_VPTR_SI) \
2632 N2_BUILTIN_DEF (sthio, N2_FTYPE_VOID_VPTR_SI) \
2633 N2_BUILTIN_DEF (stwio, N2_FTYPE_VOID_VPTR_SI) \
2634 N2_BUILTIN_DEF (rdctl, N2_FTYPE_SI_SI) \
2635 N2_BUILTIN_DEF (wrctl, N2_FTYPE_VOID_SI_SI)
2637 enum nios2_builtin_code
{
2638 #define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
2640 #undef N2_BUILTIN_DEF
2641 NUM_FIXED_NIOS2_BUILTINS
2644 static const struct nios2_builtin_desc nios2_builtins
[] = {
2645 #define N2_BUILTIN_DEF(name, ftype) \
2646 { CODE_FOR_ ## name, ftype, "__builtin_" #name },
2648 #undef N2_BUILTIN_DEF
2651 /* Start/ends of FPU/custom insn builtin index ranges. */
2652 static unsigned int nios2_fpu_builtin_base
;
2653 static unsigned int nios2_custom_builtin_base
;
2654 static unsigned int nios2_custom_builtin_end
;
2656 /* Implement TARGET_INIT_BUILTINS. */
2658 nios2_init_builtins (void)
2662 /* Initialize fixed builtins. */
2663 for (i
= 0; i
< ARRAY_SIZE (nios2_builtins
); i
++)
2665 const struct nios2_builtin_desc
*d
= &nios2_builtins
[i
];
2667 add_builtin_function (d
->name
, nios2_ftype (d
->ftype
), i
,
2668 BUILT_IN_MD
, NULL
, NULL
);
2669 nios2_register_builtin_fndecl (i
, fndecl
);
2672 /* Initialize FPU builtins. */
2673 nios2_fpu_builtin_base
= ARRAY_SIZE (nios2_builtins
);
2674 nios2_init_fpu_builtins (nios2_fpu_builtin_base
);
2676 /* Initialize custom insn builtins. */
2677 nios2_custom_builtin_base
2678 = nios2_fpu_builtin_base
+ ARRAY_SIZE (nios2_fpu_insn
);
2679 nios2_custom_builtin_end
2680 = nios2_custom_builtin_base
+ NUM_CUSTOM_BUILTINS
;
2681 nios2_init_custom_builtins (nios2_custom_builtin_base
);
2684 /* Array of fndecls for TARGET_BUILTIN_DECL. */
2685 #define NIOS2_NUM_BUILTINS \
2686 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
2687 static GTY(()) tree nios2_builtin_decls
[NIOS2_NUM_BUILTINS
];
2690 nios2_register_builtin_fndecl (unsigned code
, tree fndecl
)
2692 nios2_builtin_decls
[code
] = fndecl
;
2695 /* Implement TARGET_BUILTIN_DECL. */
2697 nios2_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
2699 gcc_assert (nios2_custom_builtin_end
== ARRAY_SIZE (nios2_builtin_decls
));
2701 if (code
>= nios2_custom_builtin_end
)
2702 return error_mark_node
;
2704 if (code
>= nios2_fpu_builtin_base
2705 && code
< nios2_custom_builtin_base
2706 && ! N2FPU_ENABLED_P (code
- nios2_fpu_builtin_base
))
2707 return error_mark_node
;
2709 return nios2_builtin_decls
[code
];
2713 /* Low-level built-in expand routine. */
2715 nios2_expand_builtin_insn (const struct nios2_builtin_desc
*d
, int n
,
2716 struct expand_operand
*ops
, bool has_target_p
)
2718 if (maybe_expand_insn (d
->icode
, n
, ops
))
2719 return has_target_p
? ops
[0].value
: const0_rtx
;
2722 error ("invalid argument to built-in function %s", d
->name
);
2723 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
2727 /* Expand ldio/stio form load-store instruction builtins. */
2729 nios2_expand_ldstio_builtin (tree exp
, rtx target
,
2730 const struct nios2_builtin_desc
*d
)
2734 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2735 enum machine_mode mode
= insn_data
[d
->icode
].operand
[0].mode
;
2737 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
2738 mem
= gen_rtx_MEM (mode
, addr
);
2740 if (insn_data
[d
->icode
].operand
[0].allows_mem
)
2743 val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2744 if (CONST_INT_P (val
))
2745 val
= force_reg (mode
, gen_int_mode (INTVAL (val
), mode
));
2746 val
= simplify_gen_subreg (mode
, val
, GET_MODE (val
), 0);
2747 create_output_operand (&ops
[0], mem
, mode
);
2748 create_input_operand (&ops
[1], val
, mode
);
2749 has_target_p
= false;
2754 create_output_operand (&ops
[0], target
, mode
);
2755 create_input_operand (&ops
[1], mem
, mode
);
2756 has_target_p
= true;
2758 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
2761 /* Expand rdctl/wrctl builtins. */
2763 nios2_expand_rdwrctl_builtin (tree exp
, rtx target
,
2764 const struct nios2_builtin_desc
*d
)
2766 bool has_target_p
= (insn_data
[d
->icode
].operand
[0].predicate
2767 == register_operand
);
2768 rtx ctlcode
= expand_normal (CALL_EXPR_ARG (exp
, 0));
2769 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
2770 if (!rdwrctl_operand (ctlcode
, VOIDmode
))
2772 error ("Control register number must be in range 0-31 for %s",
2774 return has_target_p
? gen_reg_rtx (SImode
) : const0_rtx
;
2778 create_output_operand (&ops
[0], target
, SImode
);
2779 create_integer_operand (&ops
[1], INTVAL (ctlcode
));
2783 rtx val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2784 create_integer_operand (&ops
[0], INTVAL (ctlcode
));
2785 create_input_operand (&ops
[1], val
, SImode
);
2787 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
2790 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
2791 a built-in function, with result going to TARGET if that's convenient
2792 (and in mode MODE if that's convenient).
2793 SUBTARGET may be used as the target for computing one of EXP's operands.
2794 IGNORE is nonzero if the value is to be ignored. */
2797 nios2_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
2798 enum machine_mode mode ATTRIBUTE_UNUSED
,
2799 int ignore ATTRIBUTE_UNUSED
)
2801 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
2802 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
2804 if (fcode
< nios2_fpu_builtin_base
)
2806 const struct nios2_builtin_desc
*d
= &nios2_builtins
[fcode
];
2810 case NIOS2_BUILTIN_sync
:
2811 emit_insn (gen_sync ());
2814 case NIOS2_BUILTIN_ldbio
:
2815 case NIOS2_BUILTIN_ldbuio
:
2816 case NIOS2_BUILTIN_ldhio
:
2817 case NIOS2_BUILTIN_ldhuio
:
2818 case NIOS2_BUILTIN_ldwio
:
2819 case NIOS2_BUILTIN_stbio
:
2820 case NIOS2_BUILTIN_sthio
:
2821 case NIOS2_BUILTIN_stwio
:
2822 return nios2_expand_ldstio_builtin (exp
, target
, d
);
2824 case NIOS2_BUILTIN_rdctl
:
2825 case NIOS2_BUILTIN_wrctl
:
2826 return nios2_expand_rdwrctl_builtin (exp
, target
, d
);
2832 else if (fcode
< nios2_custom_builtin_base
)
2833 /* FPU builtin range. */
2834 return nios2_expand_fpu_builtin (exp
, fcode
- nios2_fpu_builtin_base
,
2836 else if (fcode
< nios2_custom_builtin_end
)
2837 /* Custom insn builtin range. */
2838 return nios2_expand_custom_builtin (exp
, fcode
- nios2_custom_builtin_base
,
2844 /* Implement TARGET_INIT_LIBFUNCS. */
2846 nios2_init_libfuncs (void)
2848 /* For Linux, we have access to kernel support for atomic operations. */
2849 if (TARGET_LINUX_ABI
)
2850 init_sync_libfuncs (UNITS_PER_WORD
);
2855 /* Register a custom code use, and signal error if a conflict was found. */
2857 nios2_register_custom_code (unsigned int N
, enum nios2_ccs_code status
,
2860 gcc_assert (N
<= 255);
2862 if (status
== CCS_FPU
)
2864 if (custom_code_status
[N
] == CCS_FPU
&& index
!= custom_code_index
[N
])
2866 custom_code_conflict
= true;
2867 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
2868 N2FPU_NAME (custom_code_index
[N
]), N2FPU_NAME (index
));
2870 else if (custom_code_status
[N
] == CCS_BUILTIN_CALL
)
2872 custom_code_conflict
= true;
2873 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2874 "%<-mcustom-%s%>", custom_builtin_name
[custom_code_index
[N
]],
2875 N2FPU_NAME (index
));
2878 else if (status
== CCS_BUILTIN_CALL
)
2880 if (custom_code_status
[N
] == CCS_FPU
)
2882 custom_code_conflict
= true;
2883 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2884 "%<-mcustom-%s%>", custom_builtin_name
[index
],
2885 N2FPU_NAME (custom_code_index
[N
]));
2889 /* Note that code conflicts between different __builtin_custom_xnxx
2890 calls are not checked. */
2896 custom_code_status
[N
] = status
;
2897 custom_code_index
[N
] = index
;
2900 /* Mark a custom code as not in use. */
2902 nios2_deregister_custom_code (unsigned int N
)
2906 custom_code_status
[N
] = CCS_UNUSED
;
2907 custom_code_index
[N
] = 0;
2911 /* Target attributes can affect per-function option state, so we need to
2912 save/restore the custom code tracking info using the
2913 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
2916 nios2_option_save (struct cl_target_option
*ptr
,
2917 struct gcc_options
*opts ATTRIBUTE_UNUSED
)
2920 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2921 ptr
->saved_fpu_custom_code
[i
] = N2FPU_N (i
);
2922 memcpy (ptr
->saved_custom_code_status
, custom_code_status
,
2923 sizeof (custom_code_status
));
2924 memcpy (ptr
->saved_custom_code_index
, custom_code_index
,
2925 sizeof (custom_code_index
));
2929 nios2_option_restore (struct gcc_options
*opts ATTRIBUTE_UNUSED
,
2930 struct cl_target_option
*ptr
)
2933 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
2934 N2FPU_N (i
) = ptr
->saved_fpu_custom_code
[i
];
2935 memcpy (custom_code_status
, ptr
->saved_custom_code_status
,
2936 sizeof (custom_code_status
));
2937 memcpy (custom_code_index
, ptr
->saved_custom_code_index
,
2938 sizeof (custom_code_index
));
2941 /* Inner function to process the attribute((target(...))), take an argument and
2942 set the current options from the argument. If we have a list, recursively
2943 go over the list. */
2946 nios2_valid_target_attribute_rec (tree args
)
2948 if (TREE_CODE (args
) == TREE_LIST
)
2951 for (; args
; args
= TREE_CHAIN (args
))
2952 if (TREE_VALUE (args
)
2953 && !nios2_valid_target_attribute_rec (TREE_VALUE (args
)))
2957 else if (TREE_CODE (args
) == STRING_CST
)
2959 char *argstr
= ASTRDUP (TREE_STRING_POINTER (args
));
2960 while (argstr
&& *argstr
!= '\0')
2962 bool no_opt
= false, end_p
= false;
2963 char *eq
= NULL
, *p
;
2964 while (ISSPACE (*argstr
))
2967 while (*p
!= '\0' && *p
!= ',')
2969 if (!eq
&& *p
== '=')
2979 if (!strncmp (argstr
, "no-", 3))
2984 if (!strncmp (argstr
, "custom-fpu-cfg", 14))
2989 error ("custom-fpu-cfg option does not support %<no-%>");
2994 error ("custom-fpu-cfg option requires configuration"
2998 /* Increment and skip whitespace. */
2999 while (ISSPACE (*(++eq
))) ;
3000 /* Decrement and skip to before any trailing whitespace. */
3001 while (ISSPACE (*(--end_eq
))) ;
3003 nios2_handle_custom_fpu_cfg (eq
, end_eq
+ 1, true);
3005 else if (!strncmp (argstr
, "custom-", 7))
3009 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3010 if (!strncmp (argstr
+ 7, N2FPU_NAME (i
),
3011 strlen (N2FPU_NAME (i
))))
3023 error ("%<no-custom-%s%> does not accept arguments",
3027 /* Disable option by setting to -1. */
3028 nios2_deregister_custom_code (N2FPU_N (code
));
3029 N2FPU_N (code
) = -1;
3035 while (ISSPACE (*(++eq
))) ;
3038 error ("%<custom-%s=%> requires argument",
3042 for (t
= eq
; t
!= p
; ++t
)
3048 error ("`custom-%s=' argument requires "
3049 "numeric digits", N2FPU_NAME (code
));
3053 /* Set option to argument. */
3054 N2FPU_N (code
) = atoi (eq
);
3055 nios2_handle_custom_fpu_insn_option (code
);
3060 error ("%<custom-%s=%> is not recognised as FPU instruction",
3067 error ("%<%s%> is unknown", argstr
);
3082 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
3085 nios2_valid_target_attribute_tree (tree args
)
3087 if (!nios2_valid_target_attribute_rec (args
))
3089 nios2_custom_check_insns ();
3090 return build_target_option_node (&global_options
);
3093 /* Hook to validate attribute((target("string"))). */
3096 nios2_valid_target_attribute_p (tree fndecl
, tree
ARG_UNUSED (name
),
3097 tree args
, int ARG_UNUSED (flags
))
3099 struct cl_target_option cur_target
;
3101 tree old_optimize
= build_optimization_node (&global_options
);
3102 tree new_target
, new_optimize
;
3103 tree func_optimize
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
);
3105 /* If the function changed the optimization levels as well as setting target
3106 options, start with the optimizations specified. */
3107 if (func_optimize
&& func_optimize
!= old_optimize
)
3108 cl_optimization_restore (&global_options
,
3109 TREE_OPTIMIZATION (func_optimize
));
3111 /* The target attributes may also change some optimization flags, so update
3112 the optimization options if necessary. */
3113 cl_target_option_save (&cur_target
, &global_options
);
3114 new_target
= nios2_valid_target_attribute_tree (args
);
3115 new_optimize
= build_optimization_node (&global_options
);
3122 DECL_FUNCTION_SPECIFIC_TARGET (fndecl
) = new_target
;
3124 if (old_optimize
!= new_optimize
)
3125 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
) = new_optimize
;
3128 cl_target_option_restore (&global_options
, &cur_target
);
3130 if (old_optimize
!= new_optimize
)
3131 cl_optimization_restore (&global_options
,
3132 TREE_OPTIMIZATION (old_optimize
));
3136 /* Remember the last target of nios2_set_current_function. */
3137 static GTY(()) tree nios2_previous_fndecl
;
3139 /* Establish appropriate back-end context for processing the function
3140 FNDECL. The argument might be NULL to indicate processing at top
3141 level, outside of any function scope. */
3143 nios2_set_current_function (tree fndecl
)
3145 tree old_tree
= (nios2_previous_fndecl
3146 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl
)
3149 tree new_tree
= (fndecl
3150 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl
)
3153 if (fndecl
&& fndecl
!= nios2_previous_fndecl
)
3155 nios2_previous_fndecl
= fndecl
;
3156 if (old_tree
== new_tree
)
3161 cl_target_option_restore (&global_options
,
3162 TREE_TARGET_OPTION (new_tree
));
3168 struct cl_target_option
*def
3169 = TREE_TARGET_OPTION (target_option_current_node
);
3171 cl_target_option_restore (&global_options
, def
);
3177 /* Hook to validate the current #pragma GCC target and set the FPU custom
3178 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3181 nios2_pragma_target_parse (tree args
, tree pop_target
)
3186 cur_tree
= ((pop_target
)
3188 : target_option_default_node
);
3189 cl_target_option_restore (&global_options
,
3190 TREE_TARGET_OPTION (cur_tree
));
3194 cur_tree
= nios2_valid_target_attribute_tree (args
);
3199 target_option_current_node
= cur_tree
;
3203 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
3204 We are just using this hook to add some additional error checking to
3205 the default behavior. GCC does not provide a target hook for merging
3206 the target options, and only correctly handles merging empty vs non-empty
3207 option data; see merge_decls() in c-decl.c.
3208 So here we require either that at least one of the decls has empty
3209 target options, or that the target options/data be identical. */
3211 nios2_merge_decl_attributes (tree olddecl
, tree newdecl
)
3213 tree oldopts
= lookup_attribute ("target", DECL_ATTRIBUTES (olddecl
));
3214 tree newopts
= lookup_attribute ("target", DECL_ATTRIBUTES (newdecl
));
3215 if (newopts
&& oldopts
&& newopts
!= oldopts
)
3217 tree oldtree
= DECL_FUNCTION_SPECIFIC_TARGET (olddecl
);
3218 tree newtree
= DECL_FUNCTION_SPECIFIC_TARGET (newdecl
);
3219 if (oldtree
&& newtree
&& oldtree
!= newtree
)
3221 struct cl_target_option
*olddata
= TREE_TARGET_OPTION (oldtree
);
3222 struct cl_target_option
*newdata
= TREE_TARGET_OPTION (newtree
);
3223 if (olddata
!= newdata
3224 && memcmp (olddata
, newdata
, sizeof (struct cl_target_option
)))
3225 error ("%qE redeclared with conflicting %qs attributes",
3226 DECL_NAME (newdecl
), "target");
3229 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
3230 DECL_ATTRIBUTES (newdecl
));
3234 /* Initialize the GCC target structure. */
3235 #undef TARGET_ASM_FUNCTION_PROLOGUE
3236 #define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
3238 #undef TARGET_IN_SMALL_DATA_P
3239 #define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
3241 #undef TARGET_SECTION_TYPE_FLAGS
3242 #define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
3244 #undef TARGET_INIT_BUILTINS
3245 #define TARGET_INIT_BUILTINS nios2_init_builtins
3246 #undef TARGET_EXPAND_BUILTIN
3247 #define TARGET_EXPAND_BUILTIN nios2_expand_builtin
3248 #undef TARGET_BUILTIN_DECL
3249 #define TARGET_BUILTIN_DECL nios2_builtin_decl
3251 #undef TARGET_INIT_LIBFUNCS
3252 #define TARGET_INIT_LIBFUNCS nios2_init_libfuncs
3254 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3255 #define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
3257 #undef TARGET_CAN_ELIMINATE
3258 #define TARGET_CAN_ELIMINATE nios2_can_eliminate
3260 #undef TARGET_FUNCTION_ARG
3261 #define TARGET_FUNCTION_ARG nios2_function_arg
3263 #undef TARGET_FUNCTION_ARG_ADVANCE
3264 #define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
3266 #undef TARGET_ARG_PARTIAL_BYTES
3267 #define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
3269 #undef TARGET_TRAMPOLINE_INIT
3270 #define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
3272 #undef TARGET_FUNCTION_VALUE
3273 #define TARGET_FUNCTION_VALUE nios2_function_value
3275 #undef TARGET_LIBCALL_VALUE
3276 #define TARGET_LIBCALL_VALUE nios2_libcall_value
3278 #undef TARGET_FUNCTION_VALUE_REGNO_P
3279 #define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
3281 #undef TARGET_RETURN_IN_MEMORY
3282 #define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
3284 #undef TARGET_PROMOTE_PROTOTYPES
3285 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3287 #undef TARGET_SETUP_INCOMING_VARARGS
3288 #define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
3290 #undef TARGET_MUST_PASS_IN_STACK
3291 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
3293 #undef TARGET_LEGITIMATE_CONSTANT_P
3294 #define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
3296 #undef TARGET_LEGITIMIZE_ADDRESS
3297 #define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
3299 #undef TARGET_DELEGITIMIZE_ADDRESS
3300 #define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address
3302 #undef TARGET_LEGITIMATE_ADDRESS_P
3303 #define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
3305 #undef TARGET_PREFERRED_RELOAD_CLASS
3306 #define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
3308 #undef TARGET_RTX_COSTS
3309 #define TARGET_RTX_COSTS nios2_rtx_costs
3311 #undef TARGET_HAVE_TLS
3312 #define TARGET_HAVE_TLS TARGET_LINUX_ABI
3314 #undef TARGET_CANNOT_FORCE_CONST_MEM
3315 #define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
3317 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
3318 #define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
3320 #undef TARGET_PRINT_OPERAND
3321 #define TARGET_PRINT_OPERAND nios2_print_operand
3323 #undef TARGET_PRINT_OPERAND_ADDRESS
3324 #define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
3326 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3327 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra
3329 #undef TARGET_OPTION_OVERRIDE
3330 #define TARGET_OPTION_OVERRIDE nios2_option_override
3332 #undef TARGET_OPTION_SAVE
3333 #define TARGET_OPTION_SAVE nios2_option_save
3335 #undef TARGET_OPTION_RESTORE
3336 #define TARGET_OPTION_RESTORE nios2_option_restore
3338 #undef TARGET_SET_CURRENT_FUNCTION
3339 #define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
3341 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
3342 #define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
3344 #undef TARGET_OPTION_PRAGMA_PARSE
3345 #define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
3347 #undef TARGET_MERGE_DECL_ATTRIBUTES
3348 #define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
3350 struct gcc_target targetm
= TARGET_INITIALIZER
;
3352 #include "gt-nios2.h"