1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2019 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #define IN_TARGET_CODE 1
27 #include "coretypes.h"
32 #include "stringpool.h"
41 #include "diagnostic-core.h"
43 #include "insn-attr.h"
50 #include "langhooks.h"
51 #include "stor-layout.h"
53 #include "tree-pass.h"
56 /* This file should be included last. */
57 #include "target-def.h"
59 /* Forward function declarations. */
60 static bool nios2_symbolic_constant_p (rtx
);
61 static bool prologue_saved_reg_p (unsigned);
62 static void nios2_load_pic_register (void);
63 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code
, int);
64 static const char *nios2_unspec_reloc_name (int);
65 static void nios2_register_builtin_fndecl (unsigned, tree
);
66 static rtx
nios2_ldst_parallel (bool, bool, bool, rtx
, int,
67 unsigned HOST_WIDE_INT
, bool);
68 static int nios2_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
70 /* Threshold for data being put into the small data/bss area, instead
71 of the normal data area (references to the small data/bss area take
72 1 instruction, and use the global pointer, references to the normal
73 data area takes 2 instructions). */
74 unsigned HOST_WIDE_INT nios2_section_threshold
= NIOS2_DEFAULT_GVALUE
;
76 struct GTY (()) machine_function
78 /* Current frame information, to be filled in by nios2_compute_frame_layout
79 with register save masks, and offsets for the current function. */
81 /* Mask of registers to save. */
82 unsigned int save_mask
;
83 /* Number of bytes that the entire frame takes up. */
85 /* Number of bytes that variables take up. */
87 /* Number of bytes that outgoing arguments take up. */
89 /* Number of bytes needed to store registers in frame. */
91 /* Number of bytes used to store callee-saved registers. */
92 int callee_save_reg_size
;
93 /* Offset from new stack pointer to store registers. */
95 /* Offset from save_regs_offset to store frame pointer register. */
97 /* != 0 if function has a variable argument list. */
98 int uses_anonymous_args
;
99 /* != 0 if frame layout already calculated. */
103 /* State to track the assignment of custom codes to FPU/custom builtins. */
104 static enum nios2_ccs_code custom_code_status
[256];
105 static int custom_code_index
[256];
106 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
107 static bool custom_code_conflict
= false;
109 /* State for command-line options. */
110 regex_t nios2_gprel_sec_regex
;
111 regex_t nios2_r0rel_sec_regex
;
/* Definition of builtin function types for nios2.  Each N2_FTYPE
   entry gives the operand count and the (return, args...) modes.  */
#define N2_FTYPES				\
  N2_FTYPE(1, (VOID))				\
  N2_FTYPE(2, (DF, DF))				\
  N2_FTYPE(3, (DF, DF, DF))			\
  N2_FTYPE(2, (DF, SF))				\
  N2_FTYPE(2, (DF, SI))				\
  N2_FTYPE(2, (DF, UI))				\
  N2_FTYPE(2, (SF, DF))				\
  N2_FTYPE(2, (SF, SF))				\
  N2_FTYPE(3, (SF, SF, SF))			\
  N2_FTYPE(2, (SF, SI))				\
  N2_FTYPE(2, (SF, UI))				\
  N2_FTYPE(2, (SI, CVPTR))			\
  N2_FTYPE(2, (SI, DF))				\
  N2_FTYPE(3, (SI, DF, DF))			\
  N2_FTYPE(2, (SI, SF))				\
  N2_FTYPE(3, (SI, SF, SF))			\
  N2_FTYPE(2, (SI, SI))				\
  N2_FTYPE(3, (SI, SI, SI))			\
  N2_FTYPE(3, (SI, VPTR, SI))			\
  N2_FTYPE(2, (UI, CVPTR))			\
  N2_FTYPE(2, (UI, DF))				\
  N2_FTYPE(2, (UI, SF))				\
  N2_FTYPE(2, (VOID, DF))			\
  N2_FTYPE(2, (VOID, SF))			\
  N2_FTYPE(2, (VOID, SI))			\
  N2_FTYPE(3, (VOID, SI, SI))			\
  N2_FTYPE(2, (VOID, VPTR))			\
  N2_FTYPE(3, (VOID, VPTR, SI))

/* Build an N2_FTYPE_<ret>_<args...> identifier from an entry above.  */
#define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
#define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
#define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2

/* Expand ftcode enumeration.  */
enum nios2_ftcode {
#define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
  N2_FTYPES
#undef N2_FTYPE
  N2_FTYPE_MAX
};
159 /* Return the tree function type, based on the ftcode. */
161 nios2_ftype (enum nios2_ftcode ftcode
)
163 static tree types
[(int) N2_FTYPE_MAX
];
165 tree N2_TYPE_SF
= float_type_node
;
166 tree N2_TYPE_DF
= double_type_node
;
167 tree N2_TYPE_SI
= integer_type_node
;
168 tree N2_TYPE_UI
= unsigned_type_node
;
169 tree N2_TYPE_VOID
= void_type_node
;
171 static const_tree N2_TYPE_CVPTR
, N2_TYPE_VPTR
;
174 /* const volatile void *. */
176 = build_pointer_type (build_qualified_type (void_type_node
,
178 | TYPE_QUAL_VOLATILE
)));
179 /* volatile void *. */
181 = build_pointer_type (build_qualified_type (void_type_node
,
182 TYPE_QUAL_VOLATILE
));
184 if (types
[(int) ftcode
] == NULL_TREE
)
187 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
188 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
189 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
190 #define N2_FTYPE(N,ARGS) \
191 case N2_FTYPE_OP ## N ARGS: \
192 types[(int) ftcode] \
193 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
197 default: gcc_unreachable ();
199 return types
[(int) ftcode
];
203 /* Definition of FPU instruction descriptions. */
205 struct nios2_fpu_insn_info
208 int num_operands
, *optvar
;
211 #define N2F_DFREQ 0x2
212 #define N2F_UNSAFE 0x4
213 #define N2F_FINITE 0x8
214 #define N2F_NO_ERRNO 0x10
216 enum insn_code icode
;
217 enum nios2_ftcode ftcode
;
220 /* Base macro for defining FPU instructions. */
221 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
222 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
223 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
224 N2_FTYPE_OP ## nop args }
226 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
227 #define N2FPU_OP2(mode) (mode, mode)
228 #define N2FPU_OP3(mode) (mode, mode, mode)
229 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
230 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
231 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
232 #define N2FPU_INSN_SF(code, nop, flags) \
233 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
234 #define N2FPU_INSN_DF(code, nop, flags) \
235 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
237 /* Compare instructions, 3 operand FP operation with a SI result. */
238 #define N2FPU_CMP_DEF(code, flags, m, M) \
239 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
240 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
241 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
242 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
244 /* The order of definition needs to be maintained consistent with
245 enum n2fpu_code in nios2-opts.h. */
246 struct nios2_fpu_insn_info nios2_fpu_insn
[] =
248 /* Single precision instructions. */
249 N2FPU_INSN_SF (add
, 3, 0),
250 N2FPU_INSN_SF (sub
, 3, 0),
251 N2FPU_INSN_SF (mul
, 3, 0),
252 N2FPU_INSN_SF (div
, 3, 0),
253 /* Due to textual difference between min/max and smin/smax. */
254 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, s
, S
),
255 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, s
, S
),
256 N2FPU_INSN_SF (neg
, 2, 0),
257 N2FPU_INSN_SF (abs
, 2, 0),
258 N2FPU_INSN_SF (sqrt
, 2, 0),
259 N2FPU_INSN_SF (sin
, 2, N2F_UNSAFE
),
260 N2FPU_INSN_SF (cos
, 2, N2F_UNSAFE
),
261 N2FPU_INSN_SF (tan
, 2, N2F_UNSAFE
),
262 N2FPU_INSN_SF (atan
, 2, N2F_UNSAFE
),
263 N2FPU_INSN_SF (exp
, 2, N2F_UNSAFE
),
264 N2FPU_INSN_SF (log
, 2, N2F_UNSAFE
),
265 /* Single precision compares. */
266 N2FPU_CMP_SF (eq
), N2FPU_CMP_SF (ne
),
267 N2FPU_CMP_SF (lt
), N2FPU_CMP_SF (le
),
268 N2FPU_CMP_SF (gt
), N2FPU_CMP_SF (ge
),
270 /* Double precision instructions. */
271 N2FPU_INSN_DF (add
, 3, 0),
272 N2FPU_INSN_DF (sub
, 3, 0),
273 N2FPU_INSN_DF (mul
, 3, 0),
274 N2FPU_INSN_DF (div
, 3, 0),
275 /* Due to textual difference between min/max and smin/smax. */
276 N2FPU_INSN_DEF (min
, smin
, 3, N2F_FINITE
, d
, D
),
277 N2FPU_INSN_DEF (max
, smax
, 3, N2F_FINITE
, d
, D
),
278 N2FPU_INSN_DF (neg
, 2, 0),
279 N2FPU_INSN_DF (abs
, 2, 0),
280 N2FPU_INSN_DF (sqrt
, 2, 0),
281 N2FPU_INSN_DF (sin
, 2, N2F_UNSAFE
),
282 N2FPU_INSN_DF (cos
, 2, N2F_UNSAFE
),
283 N2FPU_INSN_DF (tan
, 2, N2F_UNSAFE
),
284 N2FPU_INSN_DF (atan
, 2, N2F_UNSAFE
),
285 N2FPU_INSN_DF (exp
, 2, N2F_UNSAFE
),
286 N2FPU_INSN_DF (log
, 2, N2F_UNSAFE
),
287 /* Double precision compares. */
288 N2FPU_CMP_DF (eq
), N2FPU_CMP_DF (ne
),
289 N2FPU_CMP_DF (lt
), N2FPU_CMP_DF (le
),
290 N2FPU_CMP_DF (gt
), N2FPU_CMP_DF (ge
),
292 /* Conversion instructions. */
293 N2FPU_INSN_DEF_BASE (floatis
, 2, 0, floatsisf2
, (SF
, SI
)),
294 N2FPU_INSN_DEF_BASE (floatus
, 2, 0, floatunssisf2
, (SF
, UI
)),
295 N2FPU_INSN_DEF_BASE (floatid
, 2, 0, floatsidf2
, (DF
, SI
)),
296 N2FPU_INSN_DEF_BASE (floatud
, 2, 0, floatunssidf2
, (DF
, UI
)),
297 N2FPU_INSN_DEF_BASE (round
, 2, N2F_NO_ERRNO
, lroundsfsi2
, (SI
, SF
)),
298 N2FPU_INSN_DEF_BASE (fixsi
, 2, 0, fix_truncsfsi2
, (SI
, SF
)),
299 N2FPU_INSN_DEF_BASE (fixsu
, 2, 0, fixuns_truncsfsi2
, (UI
, SF
)),
300 N2FPU_INSN_DEF_BASE (fixdi
, 2, 0, fix_truncdfsi2
, (SI
, DF
)),
301 N2FPU_INSN_DEF_BASE (fixdu
, 2, 0, fixuns_truncdfsi2
, (UI
, DF
)),
302 N2FPU_INSN_DEF_BASE (fextsd
, 2, 0, extendsfdf2
, (DF
, SF
)),
303 N2FPU_INSN_DEF_BASE (ftruncds
, 2, 0, truncdfsf2
, (SF
, DF
)),
305 /* X, Y access instructions. */
306 N2FPU_INSN_DEF_BASE (fwrx
, 2, N2F_DFREQ
, nios2_fwrx
, (VOID
, DF
)),
307 N2FPU_INSN_DEF_BASE (fwry
, 2, N2F_DFREQ
, nios2_fwry
, (VOID
, SF
)),
308 N2FPU_INSN_DEF_BASE (frdxlo
, 1, N2F_DFREQ
, nios2_frdxlo
, (SF
)),
309 N2FPU_INSN_DEF_BASE (frdxhi
, 1, N2F_DFREQ
, nios2_frdxhi
, (SF
)),
310 N2FPU_INSN_DEF_BASE (frdy
, 1, N2F_DFREQ
, nios2_frdy
, (SF
))
/* Some macros for ease of access.  N2FPU_N dereferences the option
   variable: >= 0 means the user assigned a custom instruction code.  */
#define N2FPU(code) nios2_fpu_insn[(int) code]
#define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
#define N2FPU_N(code) (*N2FPU(code).optvar)
#define N2FPU_NAME(code) (N2FPU(code).name)
#define N2FPU_ICODE(code) (N2FPU(code).icode)
#define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
#define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
#define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
#define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
#define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
#define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)

/* Same as above, but for cases where using only the op part is shorter.  */
#define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
#define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
#define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
331 /* Export the FPU insn enabled predicate to nios2.md. */
333 nios2_fpu_insn_enabled (enum n2fpu_code code
)
335 return N2FPU_ENABLED_P (code
);
338 /* Return true if COND comparison for mode MODE is enabled under current
342 nios2_fpu_compare_enabled (enum rtx_code cond
, machine_mode mode
)
347 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqs
);
348 case NE
: return N2FPU_OP_ENABLED_P (fcmpnes
);
349 case GT
: return N2FPU_OP_ENABLED_P (fcmpgts
);
350 case GE
: return N2FPU_OP_ENABLED_P (fcmpges
);
351 case LT
: return N2FPU_OP_ENABLED_P (fcmplts
);
352 case LE
: return N2FPU_OP_ENABLED_P (fcmples
);
355 else if (mode
== DFmode
)
358 case EQ
: return N2FPU_OP_ENABLED_P (fcmpeqd
);
359 case NE
: return N2FPU_OP_ENABLED_P (fcmpned
);
360 case GT
: return N2FPU_OP_ENABLED_P (fcmpgtd
);
361 case GE
: return N2FPU_OP_ENABLED_P (fcmpged
);
362 case LT
: return N2FPU_OP_ENABLED_P (fcmpltd
);
363 case LE
: return N2FPU_OP_ENABLED_P (fcmpled
);
/* Stack layout and calling conventions.  */

/* Round LOC up to the preferred stack boundary.  */
#define NIOS2_STACK_ALIGN(LOC)						\
  (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))		\
   & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
375 /* Return the bytes needed to compute the frame pointer from the current
378 nios2_compute_frame_layout (void)
381 unsigned int save_mask
= 0;
386 int callee_save_reg_size
;
388 if (cfun
->machine
->initialized
)
389 return cfun
->machine
->total_size
;
391 /* Calculate space needed for gp registers. */
393 for (regno
= 0; regno
<= LAST_GP_REG
; regno
++)
394 if (prologue_saved_reg_p (regno
))
396 save_mask
|= 1 << regno
;
400 /* If we are saving any callee-save register, then assume
401 push.n/pop.n should be used. Make sure RA is saved, and
402 contiguous registers starting from r16-- are all saved. */
403 if (TARGET_HAS_CDX
&& save_reg_size
!= 0)
405 if ((save_mask
& (1 << RA_REGNO
)) == 0)
407 save_mask
|= 1 << RA_REGNO
;
411 for (regno
= 23; regno
>= 16; regno
--)
412 if ((save_mask
& (1 << regno
)) != 0)
414 /* Starting from highest numbered callee-saved
415 register that is used, make sure all regs down
416 to r16 is saved, to maintain contiguous range
419 for (i
= regno
- 1; i
>= 16; i
--)
420 if ((save_mask
& (1 << i
)) == 0)
429 callee_save_reg_size
= save_reg_size
;
431 /* If we call eh_return, we need to save the EH data registers. */
432 if (crtl
->calls_eh_return
)
437 for (i
= 0; (r
= EH_RETURN_DATA_REGNO (i
)) != INVALID_REGNUM
; i
++)
438 if (!(save_mask
& (1 << r
)))
445 cfun
->machine
->fp_save_offset
= 0;
446 if (save_mask
& (1 << HARD_FRAME_POINTER_REGNUM
))
448 int fp_save_offset
= 0;
449 for (regno
= 0; regno
< HARD_FRAME_POINTER_REGNUM
; regno
++)
450 if (save_mask
& (1 << regno
))
453 cfun
->machine
->fp_save_offset
= fp_save_offset
;
456 var_size
= NIOS2_STACK_ALIGN (get_frame_size ());
457 out_args_size
= NIOS2_STACK_ALIGN (crtl
->outgoing_args_size
);
458 total_size
= var_size
+ out_args_size
;
460 save_reg_size
= NIOS2_STACK_ALIGN (save_reg_size
);
461 total_size
+= save_reg_size
;
462 total_size
+= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
464 /* Save other computed information. */
465 cfun
->machine
->save_mask
= save_mask
;
466 cfun
->machine
->total_size
= total_size
;
467 cfun
->machine
->var_size
= var_size
;
468 cfun
->machine
->args_size
= out_args_size
;
469 cfun
->machine
->save_reg_size
= save_reg_size
;
470 cfun
->machine
->callee_save_reg_size
= callee_save_reg_size
;
471 cfun
->machine
->initialized
= reload_completed
;
472 cfun
->machine
->save_regs_offset
= out_args_size
+ var_size
;
477 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
478 prologue/epilogue expand routines. */
480 save_reg (int regno
, unsigned offset
)
482 rtx reg
= gen_rtx_REG (SImode
, regno
);
483 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, offset
, false);
484 rtx_insn
*insn
= emit_move_insn (gen_frame_mem (Pmode
, addr
), reg
);
485 RTX_FRAME_RELATED_P (insn
) = 1;
489 restore_reg (int regno
, unsigned offset
)
491 rtx reg
= gen_rtx_REG (SImode
, regno
);
492 rtx addr
= plus_constant (Pmode
, stack_pointer_rtx
, offset
, false);
493 rtx_insn
*insn
= emit_move_insn (reg
, gen_frame_mem (Pmode
, addr
));
494 /* Tag epilogue unwind note. */
495 add_reg_note (insn
, REG_CFA_RESTORE
, reg
);
496 RTX_FRAME_RELATED_P (insn
) = 1;
499 /* This routine tests for the base register update SET in load/store
500 multiple RTL insns, used in pop_operation_p and ldstwm_operation_p. */
502 base_reg_adjustment_p (rtx set
, rtx
*base_reg
, rtx
*offset
)
504 if (GET_CODE (set
) == SET
505 && REG_P (SET_DEST (set
))
506 && GET_CODE (SET_SRC (set
)) == PLUS
507 && REG_P (XEXP (SET_SRC (set
), 0))
508 && rtx_equal_p (SET_DEST (set
), XEXP (SET_SRC (set
), 0))
509 && CONST_INT_P (XEXP (SET_SRC (set
), 1)))
511 *base_reg
= XEXP (SET_SRC (set
), 0);
512 *offset
= XEXP (SET_SRC (set
), 1);
518 /* Does the CFA note work for push/pop prologue/epilogue instructions. */
520 nios2_create_cfa_notes (rtx_insn
*insn
, bool epilogue_p
)
523 rtx base_reg
, offset
, elt
, pat
= PATTERN (insn
);
526 elt
= XVECEXP (pat
, 0, 0);
527 if (GET_CODE (elt
) == RETURN
)
529 elt
= XVECEXP (pat
, 0, i
);
530 if (base_reg_adjustment_p (elt
, &base_reg
, &offset
))
532 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, copy_rtx (elt
));
535 for (; i
< XVECLEN (pat
, 0); i
++)
537 elt
= SET_DEST (XVECEXP (pat
, 0, i
));
538 gcc_assert (REG_P (elt
));
539 add_reg_note (insn
, REG_CFA_RESTORE
, elt
);
544 /* Tag each of the prologue sets. */
545 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
546 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, i
)) = 1;
/* Temp regno used inside prologue/epilogue.  */
#define TEMP_REG_NUM 8
553 /* Emit conditional trap for checking stack limit. SIZE is the number of
554 additional bytes required.
556 GDB prologue analysis depends on this generating a direct comparison
557 to the SP register, so the adjustment to add SIZE needs to be done on
558 the other operand to the comparison. Use TEMP_REG_NUM as a temporary,
561 nios2_emit_stack_limit_check (int size
)
565 if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
)
567 /* This generates a %hiadj/%lo pair with the constant size
568 add handled by the relocations. */
569 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
570 emit_move_insn (sum
, plus_constant (Pmode
, stack_limit_rtx
, size
));
572 else if (!REG_P (stack_limit_rtx
))
573 sorry ("Unknown form for stack limit expression");
575 sum
= stack_limit_rtx
;
576 else if (SMALL_INT (size
))
578 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
579 emit_move_insn (sum
, plus_constant (Pmode
, stack_limit_rtx
, size
));
583 sum
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
584 emit_move_insn (sum
, gen_int_mode (size
, Pmode
));
585 emit_insn (gen_add2_insn (sum
, stack_limit_rtx
));
588 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode
, stack_pointer_rtx
, sum
),
589 stack_pointer_rtx
, sum
, GEN_INT (3)));
593 nios2_emit_add_constant (rtx reg
, HOST_WIDE_INT immed
)
596 if (SMALL_INT (immed
))
597 insn
= emit_insn (gen_add2_insn (reg
, gen_int_mode (immed
, Pmode
)));
600 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
601 emit_move_insn (tmp
, gen_int_mode (immed
, Pmode
));
602 insn
= emit_insn (gen_add2_insn (reg
, tmp
));
608 nios2_adjust_stack (int sp_adjust
, bool epilogue_p
)
610 enum reg_note note_kind
= REG_NOTE_MAX
;
611 rtx_insn
*insn
= NULL
;
614 if (SMALL_INT (sp_adjust
))
615 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
,
616 gen_int_mode (sp_adjust
, Pmode
)));
619 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
620 emit_move_insn (tmp
, gen_int_mode (sp_adjust
, Pmode
));
621 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, tmp
));
622 /* Attach a note indicating what happened. */
624 note_kind
= REG_FRAME_RELATED_EXPR
;
627 note_kind
= REG_CFA_ADJUST_CFA
;
628 if (note_kind
!= REG_NOTE_MAX
)
630 rtx cfa_adj
= gen_rtx_SET (stack_pointer_rtx
,
631 plus_constant (Pmode
, stack_pointer_rtx
,
633 add_reg_note (insn
, note_kind
, cfa_adj
);
635 RTX_FRAME_RELATED_P (insn
) = 1;
641 nios2_expand_prologue (void)
644 int total_frame_size
, save_offset
;
645 int sp_offset
; /* offset from base_reg to final stack value. */
646 int save_regs_base
; /* offset from base_reg to register save area. */
649 total_frame_size
= nios2_compute_frame_layout ();
651 if (flag_stack_usage_info
)
652 current_function_static_stack_size
= total_frame_size
;
654 /* When R2 CDX push.n/stwm is available, arrange for stack frame to be built
657 && (cfun
->machine
->save_reg_size
!= 0
658 || cfun
->machine
->uses_anonymous_args
))
660 unsigned int regmask
= cfun
->machine
->save_mask
;
661 unsigned int callee_save_regs
= regmask
& 0xffff0000;
662 unsigned int caller_save_regs
= regmask
& 0x0000ffff;
664 int pretend_args_size
= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
666 gen_frame_mem (SImode
, plus_constant (Pmode
, stack_pointer_rtx
, -4));
668 /* Check that there is room for the entire stack frame before doing
669 any SP adjustments or pushes. */
670 if (crtl
->limit_stack
)
671 nios2_emit_stack_limit_check (total_frame_size
);
673 if (pretend_args_size
)
675 if (cfun
->machine
->uses_anonymous_args
)
677 /* Emit a stwm to push copy of argument registers onto
678 the stack for va_arg processing. */
679 unsigned int r
, mask
= 0, n
= pretend_args_size
/ 4;
680 for (r
= LAST_ARG_REGNO
- n
+ 1; r
<= LAST_ARG_REGNO
; r
++)
682 insn
= emit_insn (nios2_ldst_parallel
683 (false, false, false, stack_mem
,
684 -pretend_args_size
, mask
, false));
685 /* Tag first SP adjustment as frame-related. */
686 RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn
), 0, 0)) = 1;
687 RTX_FRAME_RELATED_P (insn
) = 1;
690 nios2_adjust_stack (-pretend_args_size
, false);
692 if (callee_save_regs
)
694 /* Emit a push.n to save registers and optionally allocate
695 push_immed extra bytes on the stack. */
697 if (caller_save_regs
)
698 /* Can't allocate extra stack space yet. */
700 else if (cfun
->machine
->save_regs_offset
<= 60)
701 /* Stack adjustment fits entirely in the push.n. */
702 push_immed
= cfun
->machine
->save_regs_offset
;
703 else if (frame_pointer_needed
704 && cfun
->machine
->fp_save_offset
== 0)
705 /* Deferring the entire stack adjustment until later
706 allows us to use a mov.n instead of a 32-bit addi
707 instruction to set the frame pointer. */
710 /* Splitting the stack adjustment between the push.n
711 and an explicit adjustment makes it more likely that
712 we can use spdeci.n for the explicit part. */
714 sp_adjust
= -(cfun
->machine
->callee_save_reg_size
+ push_immed
);
715 insn
= emit_insn (nios2_ldst_parallel (false, false, false,
716 stack_mem
, sp_adjust
,
717 callee_save_regs
, false));
718 nios2_create_cfa_notes (insn
, false);
719 RTX_FRAME_RELATED_P (insn
) = 1;
722 if (caller_save_regs
)
724 /* Emit a stwm to save the EH data regs, r4-r7. */
725 int caller_save_size
= (cfun
->machine
->save_reg_size
726 - cfun
->machine
->callee_save_reg_size
);
727 gcc_assert ((caller_save_regs
& ~0xf0) == 0);
728 insn
= emit_insn (nios2_ldst_parallel
729 (false, false, false, stack_mem
,
730 -caller_save_size
, caller_save_regs
, false));
731 nios2_create_cfa_notes (insn
, false);
732 RTX_FRAME_RELATED_P (insn
) = 1;
735 save_regs_base
= push_immed
;
736 sp_offset
= -(cfun
->machine
->save_regs_offset
- push_immed
);
738 /* The non-CDX cases decrement the stack pointer, to prepare for individual
739 register saves to the stack. */
740 else if (!SMALL_INT (total_frame_size
))
742 /* We need an intermediary point, this will point at the spill block. */
743 nios2_adjust_stack (cfun
->machine
->save_regs_offset
- total_frame_size
,
746 sp_offset
= -cfun
->machine
->save_regs_offset
;
747 if (crtl
->limit_stack
)
748 nios2_emit_stack_limit_check (cfun
->machine
->save_regs_offset
);
750 else if (total_frame_size
)
752 nios2_adjust_stack (-total_frame_size
, false);
753 save_regs_base
= cfun
->machine
->save_regs_offset
;
755 if (crtl
->limit_stack
)
756 nios2_emit_stack_limit_check (0);
759 save_regs_base
= sp_offset
= 0;
761 /* Save the registers individually in the non-CDX case. */
764 save_offset
= save_regs_base
+ cfun
->machine
->save_reg_size
;
766 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
767 if (cfun
->machine
->save_mask
& (1 << regno
))
770 save_reg (regno
, save_offset
);
774 /* Set the hard frame pointer. */
775 if (frame_pointer_needed
)
777 int fp_save_offset
= save_regs_base
+ cfun
->machine
->fp_save_offset
;
780 ? emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
)
781 : emit_insn (gen_add3_insn (hard_frame_pointer_rtx
,
783 gen_int_mode (fp_save_offset
, Pmode
))));
784 RTX_FRAME_RELATED_P (insn
) = 1;
787 /* Allocate sp_offset more bytes in the stack frame. */
788 nios2_adjust_stack (sp_offset
, false);
790 /* Load the PIC register if needed. */
791 if (crtl
->uses_pic_offset_table
)
792 nios2_load_pic_register ();
794 /* If we are profiling, make sure no instructions are scheduled before
795 the call to mcount. */
797 emit_insn (gen_blockage ());
801 nios2_expand_epilogue (bool sibcall_p
)
805 int total_frame_size
;
806 int sp_adjust
, save_offset
;
809 if (!sibcall_p
&& nios2_can_use_return_insn ())
811 emit_jump_insn (gen_return ());
815 emit_insn (gen_blockage ());
817 total_frame_size
= nios2_compute_frame_layout ();
818 if (frame_pointer_needed
)
820 /* Recover the stack pointer. */
822 (cfun
->machine
->fp_save_offset
== 0
823 ? emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
)
824 : emit_insn (gen_add3_insn
825 (stack_pointer_rtx
, hard_frame_pointer_rtx
,
826 gen_int_mode (-cfun
->machine
->fp_save_offset
, Pmode
))));
827 cfa_adj
= plus_constant (Pmode
, stack_pointer_rtx
,
829 - cfun
->machine
->save_regs_offset
));
830 add_reg_note (insn
, REG_CFA_DEF_CFA
, cfa_adj
);
831 RTX_FRAME_RELATED_P (insn
) = 1;
834 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
836 else if (!SMALL_INT (total_frame_size
))
838 nios2_adjust_stack (cfun
->machine
->save_regs_offset
, true);
840 sp_adjust
= total_frame_size
- cfun
->machine
->save_regs_offset
;
844 save_offset
= cfun
->machine
->save_regs_offset
;
845 sp_adjust
= total_frame_size
;
850 /* Generate individual register restores. */
851 save_offset
+= cfun
->machine
->save_reg_size
;
853 for (regno
= LAST_GP_REG
; regno
> 0; regno
--)
854 if (cfun
->machine
->save_mask
& (1 << regno
))
857 restore_reg (regno
, save_offset
);
859 nios2_adjust_stack (sp_adjust
, true);
861 else if (cfun
->machine
->save_reg_size
== 0)
863 /* Nothing to restore, just recover the stack position. */
864 nios2_adjust_stack (sp_adjust
, true);
868 /* Emit CDX pop.n/ldwm to restore registers and optionally return. */
869 unsigned int regmask
= cfun
->machine
->save_mask
;
870 unsigned int callee_save_regs
= regmask
& 0xffff0000;
871 unsigned int caller_save_regs
= regmask
& 0x0000ffff;
872 int callee_save_size
= cfun
->machine
->callee_save_reg_size
;
873 int caller_save_size
= cfun
->machine
->save_reg_size
- callee_save_size
;
874 int pretend_args_size
= NIOS2_STACK_ALIGN (crtl
->args
.pretend_args_size
);
875 bool ret_p
= (!pretend_args_size
&& !crtl
->calls_eh_return
878 if (!ret_p
|| caller_save_size
> 0)
879 sp_adjust
= save_offset
;
881 sp_adjust
= (save_offset
> 60 ? save_offset
- 60 : 0);
883 save_offset
-= sp_adjust
;
885 nios2_adjust_stack (sp_adjust
, true);
887 if (caller_save_regs
)
889 /* Emit a ldwm to restore EH data regs. */
890 rtx stack_mem
= gen_frame_mem (SImode
, stack_pointer_rtx
);
891 insn
= emit_insn (nios2_ldst_parallel
892 (true, true, true, stack_mem
,
893 caller_save_size
, caller_save_regs
, false));
894 RTX_FRAME_RELATED_P (insn
) = 1;
895 nios2_create_cfa_notes (insn
, true);
898 if (callee_save_regs
)
900 int sp_adjust
= save_offset
+ callee_save_size
;
904 /* Emit a pop.n to restore regs and return. */
906 gen_frame_mem (SImode
,
907 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
908 gen_int_mode (sp_adjust
- 4,
911 emit_jump_insn (nios2_ldst_parallel (true, false, false,
912 stack_mem
, sp_adjust
,
913 callee_save_regs
, ret_p
));
914 RTX_FRAME_RELATED_P (insn
) = 1;
915 /* No need to attach CFA notes since we cannot step over
921 /* If no return, we have to use the ldwm form. */
922 stack_mem
= gen_frame_mem (SImode
, stack_pointer_rtx
);
924 emit_insn (nios2_ldst_parallel (true, true, true,
925 stack_mem
, sp_adjust
,
926 callee_save_regs
, ret_p
));
927 RTX_FRAME_RELATED_P (insn
) = 1;
928 nios2_create_cfa_notes (insn
, true);
932 if (pretend_args_size
)
933 nios2_adjust_stack (pretend_args_size
, true);
936 /* Add in the __builtin_eh_return stack adjustment. */
937 if (crtl
->calls_eh_return
)
938 emit_insn (gen_add2_insn (stack_pointer_rtx
, EH_RETURN_STACKADJ_RTX
));
941 emit_jump_insn (gen_simple_return ());
945 nios2_expand_return (void)
947 /* If CDX is available, generate a pop.n instruction to do both
948 the stack pop and return. */
951 int total_frame_size
= nios2_compute_frame_layout ();
952 int sp_adjust
= (cfun
->machine
->save_regs_offset
953 + cfun
->machine
->callee_save_reg_size
);
954 gcc_assert (sp_adjust
== total_frame_size
);
958 gen_frame_mem (SImode
,
959 plus_constant (Pmode
, stack_pointer_rtx
,
960 sp_adjust
- 4, false));
962 emit_jump_insn (nios2_ldst_parallel (true, false, false,
964 cfun
->machine
->save_mask
,
966 RTX_FRAME_RELATED_P (insn
) = 1;
967 /* No need to create CFA notes since we can't step over
975 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
976 back to a previous frame. */
978 nios2_get_return_address (int count
)
983 return get_hard_reg_initial_val (Pmode
, RA_REGNO
);
986 /* Emit code to change the current function's return address to
987 ADDRESS. SCRATCH is available as a scratch register, if needed.
988 ADDRESS and SCRATCH are both word-mode GPRs. */
990 nios2_set_return_address (rtx address
, rtx scratch
)
992 nios2_compute_frame_layout ();
993 if (cfun
->machine
->save_mask
& (1 << RA_REGNO
))
995 unsigned offset
= cfun
->machine
->save_reg_size
- 4;
998 if (frame_pointer_needed
)
999 base
= hard_frame_pointer_rtx
;
1002 base
= stack_pointer_rtx
;
1003 offset
+= cfun
->machine
->save_regs_offset
;
1005 if (!SMALL_INT (offset
))
1007 emit_move_insn (scratch
, gen_int_mode (offset
, Pmode
));
1008 emit_insn (gen_add2_insn (scratch
, base
));
1014 base
= plus_constant (Pmode
, base
, offset
);
1015 emit_move_insn (gen_rtx_MEM (Pmode
, base
), address
);
1018 emit_move_insn (gen_rtx_REG (Pmode
, RA_REGNO
), address
);
1021 /* Implement FUNCTION_PROFILER macro. */
1023 nios2_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
1025 fprintf (file
, "\tmov\tr8, ra\n");
1028 fprintf (file
, "\tnextpc\tr2\n");
1029 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1030 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1031 fprintf (file
, "\tadd\tr2, r2, r3\n");
1032 fprintf (file
, "\tldw\tr2, %%call(_mcount)(r2)\n");
1033 fprintf (file
, "\tcallr\tr2\n");
1035 else if (flag_pic
== 2)
1037 fprintf (file
, "\tnextpc\tr2\n");
1038 fprintf (file
, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1039 fprintf (file
, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1040 fprintf (file
, "\tadd\tr2, r2, r3\n");
1041 fprintf (file
, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
1042 fprintf (file
, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
1043 fprintf (file
, "\tadd\tr3, r2, r3\n");
1044 fprintf (file
, "\tldw\tr2, 0(r3)\n");
1045 fprintf (file
, "\tcallr\tr2\n");
1048 fprintf (file
, "\tcall\t_mcount\n");
1049 fprintf (file
, "\tmov\tra, r8\n");
1052 /* Dump stack layout. */
1054 nios2_dump_frame_layout (FILE *file
)
1056 fprintf (file
, "\t%s Current Frame Info\n", ASM_COMMENT_START
);
1057 fprintf (file
, "\t%s total_size = %d\n", ASM_COMMENT_START
,
1058 cfun
->machine
->total_size
);
1059 fprintf (file
, "\t%s var_size = %d\n", ASM_COMMENT_START
,
1060 cfun
->machine
->var_size
);
1061 fprintf (file
, "\t%s args_size = %d\n", ASM_COMMENT_START
,
1062 cfun
->machine
->args_size
);
1063 fprintf (file
, "\t%s save_reg_size = %d\n", ASM_COMMENT_START
,
1064 cfun
->machine
->save_reg_size
);
1065 fprintf (file
, "\t%s initialized = %d\n", ASM_COMMENT_START
,
1066 cfun
->machine
->initialized
);
1067 fprintf (file
, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START
,
1068 cfun
->machine
->save_regs_offset
);
1069 fprintf (file
, "\t%s is_leaf = %d\n", ASM_COMMENT_START
,
1071 fprintf (file
, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START
,
1072 frame_pointer_needed
);
1073 fprintf (file
, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START
,
1074 crtl
->args
.pretend_args_size
);
1077 /* Return true if REGNO should be saved in the prologue. */
1079 prologue_saved_reg_p (unsigned regno
)
1081 gcc_assert (GP_REG_P (regno
));
1083 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
1086 if (regno
== HARD_FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
1089 if (regno
== PIC_OFFSET_TABLE_REGNUM
&& crtl
->uses_pic_offset_table
)
1092 if (regno
== RA_REGNO
&& df_regs_ever_live_p (RA_REGNO
))
1098 /* Implement TARGET_CAN_ELIMINATE. */
1100 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1102 if (to
== STACK_POINTER_REGNUM
)
1103 return !frame_pointer_needed
;
1107 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
1109 nios2_initial_elimination_offset (int from
, int to
)
1113 nios2_compute_frame_layout ();
1115 /* Set OFFSET to the offset from the stack pointer. */
1118 case FRAME_POINTER_REGNUM
:
1119 /* This is the high end of the local variable storage, not the
1120 hard frame pointer. */
1121 offset
= cfun
->machine
->args_size
+ cfun
->machine
->var_size
;
1124 case ARG_POINTER_REGNUM
:
1125 offset
= cfun
->machine
->total_size
;
1126 offset
-= crtl
->args
.pretend_args_size
;
1133 /* If we are asked for the frame pointer offset, then adjust OFFSET
1134 by the offset from the frame pointer to the stack pointer. */
1135 if (to
== HARD_FRAME_POINTER_REGNUM
)
1136 offset
-= (cfun
->machine
->save_regs_offset
1137 + cfun
->machine
->fp_save_offset
);
1142 /* Return nonzero if this function is known to have a null epilogue.
1143 This allows the optimizer to omit jumps to jumps if no stack
1146 nios2_can_use_return_insn (void)
1148 int total_frame_size
;
1150 if (!reload_completed
|| crtl
->profile
)
1153 total_frame_size
= nios2_compute_frame_layout ();
1155 /* If CDX is available, check if we can return using a
1156 single pop.n instruction. */
1158 && !frame_pointer_needed
1159 && cfun
->machine
->save_regs_offset
<= 60
1160 && (cfun
->machine
->save_mask
& 0x80000000) != 0
1161 && (cfun
->machine
->save_mask
& 0xffff) == 0
1162 && crtl
->args
.pretend_args_size
== 0)
1165 return total_frame_size
== 0;
1169 /* Check and signal some warnings/errors on FPU insn options. */
1171 nios2_custom_check_insns (void)
1174 bool errors
= false;
1176 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1177 if (N2FPU_ENABLED_P (i
) && N2FPU_DOUBLE_P (i
))
1179 for (j
= 0; j
< ARRAY_SIZE (nios2_fpu_insn
); j
++)
1180 if (N2FPU_DOUBLE_REQUIRED_P (j
) && ! N2FPU_ENABLED_P (j
))
1182 error ("switch %<-mcustom-%s%> is required for double "
1183 "precision floating point", N2FPU_NAME (j
));
1189 /* Warn if the user has certain exotic operations that won't get used
1190 without -funsafe-math-optimizations. See expand_builtin () in
1192 if (!flag_unsafe_math_optimizations
)
1193 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1194 if (N2FPU_ENABLED_P (i
) && N2FPU_UNSAFE_P (i
))
1195 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1196 "%<-funsafe-math-optimizations%> is specified",
1199 /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
1200 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
1202 if (!flag_finite_math_only
)
1203 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1204 if (N2FPU_ENABLED_P (i
) && N2FPU_FINITE_P (i
))
1205 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1206 "%<-ffinite-math-only%> is specified", N2FPU_NAME (i
));
1208 /* Warn if the user is trying to use a custom rounding instruction
1209 that won't get used without -fno-math-errno. See
1210 expand_builtin_int_roundingfn_2 () in builtins.c. */
1211 if (flag_errno_math
)
1212 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1213 if (N2FPU_ENABLED_P (i
) && N2FPU_NO_ERRNO_P (i
))
1214 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1215 "%<-fno-math-errno%> is specified", N2FPU_NAME (i
));
1217 if (errors
|| custom_code_conflict
)
1218 fatal_error (input_location
,
1219 "conflicting use of %<-mcustom%> switches, target attributes, "
1220 "and/or %<__builtin_custom_%> functions");
1224 nios2_set_fpu_custom_code (enum n2fpu_code code
, int n
, bool override_p
)
1226 if (override_p
|| N2FPU_N (code
) == -1)
1228 nios2_register_custom_code (n
, CCS_FPU
, (int) code
);
1231 /* Type to represent a standard FPU config. */
1232 struct nios2_fpu_config
1235 bool set_sp_constants
;
1236 int code
[n2fpu_code_num
];
1239 #define NIOS2_FPU_CONFIG_NUM 3
1240 static struct nios2_fpu_config custom_fpu_config
[NIOS2_FPU_CONFIG_NUM
];
1243 nios2_init_fpu_configs (void)
1245 struct nios2_fpu_config
* cfg
;
1247 #define NEXT_FPU_CONFIG \
1249 cfg = &custom_fpu_config[i++]; \
1250 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
1255 cfg
->set_sp_constants
= true;
1256 cfg
->code
[n2fpu_fmuls
] = 252;
1257 cfg
->code
[n2fpu_fadds
] = 253;
1258 cfg
->code
[n2fpu_fsubs
] = 254;
1262 cfg
->set_sp_constants
= true;
1263 cfg
->code
[n2fpu_fmuls
] = 252;
1264 cfg
->code
[n2fpu_fadds
] = 253;
1265 cfg
->code
[n2fpu_fsubs
] = 254;
1266 cfg
->code
[n2fpu_fdivs
] = 255;
1270 cfg
->set_sp_constants
= true;
1271 cfg
->code
[n2fpu_floatus
] = 243;
1272 cfg
->code
[n2fpu_fixsi
] = 244;
1273 cfg
->code
[n2fpu_floatis
] = 245;
1274 cfg
->code
[n2fpu_fcmpgts
] = 246;
1275 cfg
->code
[n2fpu_fcmples
] = 249;
1276 cfg
->code
[n2fpu_fcmpeqs
] = 250;
1277 cfg
->code
[n2fpu_fcmpnes
] = 251;
1278 cfg
->code
[n2fpu_fmuls
] = 252;
1279 cfg
->code
[n2fpu_fadds
] = 253;
1280 cfg
->code
[n2fpu_fsubs
] = 254;
1281 cfg
->code
[n2fpu_fdivs
] = 255;
1283 #undef NEXT_FPU_CONFIG
1284 gcc_assert (i
== NIOS2_FPU_CONFIG_NUM
);
1287 static struct nios2_fpu_config
*
1288 nios2_match_custom_fpu_cfg (const char *cfgname
, const char *endp
)
1291 for (i
= 0; i
< NIOS2_FPU_CONFIG_NUM
; i
++)
1293 bool match
= !(endp
!= NULL
1294 ? strncmp (custom_fpu_config
[i
].name
, cfgname
,
1296 : strcmp (custom_fpu_config
[i
].name
, cfgname
));
1298 return &custom_fpu_config
[i
];
1303 /* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
1304 OVERRIDE is true if loaded config codes should overwrite current state. */
1306 nios2_handle_custom_fpu_cfg (const char *cfgname
, const char *endp
,
1309 struct nios2_fpu_config
*cfg
= nios2_match_custom_fpu_cfg (cfgname
, endp
);
1313 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1314 if (cfg
->code
[i
] >= 0)
1315 nios2_set_fpu_custom_code ((enum n2fpu_code
) i
, cfg
->code
[i
],
1317 if (cfg
->set_sp_constants
)
1318 flag_single_precision_constant
= 1;
1321 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
1322 "value %<%s%>", cfgname
);
1324 /* Guard against errors in the standard configurations. */
1325 nios2_custom_check_insns ();
1328 /* Check individual FPU insn options, and register custom code. */
1330 nios2_handle_custom_fpu_insn_option (int fpu_insn_index
)
1332 int param
= N2FPU_N (fpu_insn_index
);
1334 if (param
>= 0 && param
<= 255)
1335 nios2_register_custom_code (param
, CCS_FPU
, fpu_insn_index
);
1337 /* Valid values are 0-255, but also allow -1 so that the
1338 -mno-custom-<opt> switches work. */
1339 else if (param
!= -1)
1340 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1341 N2FPU_NAME (fpu_insn_index
), param
);
1344 /* Allocate a chunk of memory for per-function machine-dependent data. */
1345 static struct machine_function
*
1346 nios2_init_machine_status (void)
1348 return ggc_cleared_alloc
<machine_function
> ();
1351 /* Implement TARGET_OPTION_OVERRIDE. */
1353 nios2_option_override (void)
1357 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1358 SUBTARGET_OVERRIDE_OPTIONS
;
1361 /* Check for unsupported options. */
1362 if (flag_pic
&& !TARGET_LINUX_ABI
)
1363 sorry ("position-independent code requires the Linux ABI");
1364 if (flag_pic
&& stack_limit_rtx
1365 && GET_CODE (stack_limit_rtx
) == SYMBOL_REF
)
1366 sorry ("PIC support for %<-fstack-limit-symbol%>");
1368 /* Function to allocate machine-dependent function status. */
1369 init_machine_status
= &nios2_init_machine_status
;
1371 nios2_section_threshold
1372 = (global_options_set
.x_g_switch_value
1373 ? g_switch_value
: NIOS2_DEFAULT_GVALUE
);
1375 if (nios2_gpopt_option
== gpopt_unspecified
)
1377 /* Default to -mgpopt unless -fpic or -fPIC. */
1379 nios2_gpopt_option
= gpopt_none
;
1381 nios2_gpopt_option
= gpopt_local
;
1384 /* GP-relative and r0-relative addressing don't make sense for PIC. */
1387 if (nios2_gpopt_option
!= gpopt_none
)
1388 error ("%<-mgpopt%> not supported with PIC.");
1389 if (nios2_gprel_sec
)
1390 error ("%<-mgprel-sec=%> not supported with PIC.");
1391 if (nios2_r0rel_sec
)
1392 error ("%<-mr0rel-sec=%> not supported with PIC.");
1395 /* Process -mgprel-sec= and -m0rel-sec=. */
1396 if (nios2_gprel_sec
)
1398 if (regcomp (&nios2_gprel_sec_regex
, nios2_gprel_sec
,
1399 REG_EXTENDED
| REG_NOSUB
))
1400 error ("%<-mgprel-sec=%> argument is not a valid regular expression.");
1402 if (nios2_r0rel_sec
)
1404 if (regcomp (&nios2_r0rel_sec_regex
, nios2_r0rel_sec
,
1405 REG_EXTENDED
| REG_NOSUB
))
1406 error ("%<-mr0rel-sec=%> argument is not a valid regular expression.");
1409 /* If we don't have mul, we don't have mulx either! */
1410 if (!TARGET_HAS_MUL
&& TARGET_HAS_MULX
)
1411 target_flags
&= ~MASK_HAS_MULX
;
1413 /* Optional BMX and CDX instructions only make sense for R2. */
1414 if (!TARGET_ARCH_R2
)
1417 error ("BMX instructions are only supported with R2 architecture");
1419 error ("CDX instructions are only supported with R2 architecture");
1422 /* R2 is little-endian only. */
1423 if (TARGET_ARCH_R2
&& TARGET_BIG_ENDIAN
)
1424 error ("R2 architecture is little-endian only");
1426 /* Initialize default FPU configurations. */
1427 nios2_init_fpu_configs ();
1429 /* Set up default handling for floating point custom instructions.
1431 Putting things in this order means that the -mcustom-fpu-cfg=
1432 switch will always be overridden by individual -mcustom-fadds=
1433 switches, regardless of the order in which they were specified
1434 on the command line.
1436 This behavior of prioritization of individual -mcustom-<insn>=
1437 options before the -mcustom-fpu-cfg= switch is maintained for
1439 if (nios2_custom_fpu_cfg_string
&& *nios2_custom_fpu_cfg_string
)
1440 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string
, NULL
, false);
1442 /* Handle options for individual FPU insns. */
1443 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
1444 nios2_handle_custom_fpu_insn_option (i
);
1446 nios2_custom_check_insns ();
1448 /* Save the initial options in case the user does function specific
1450 target_option_default_node
= target_option_current_node
1451 = build_target_option_node (&global_options
);
1455 /* Return true if CST is a constant within range of movi/movui/movhi. */
1457 nios2_simple_const_p (const_rtx cst
)
1459 if (!CONST_INT_P (cst
))
1461 HOST_WIDE_INT val
= INTVAL (cst
);
1462 return SMALL_INT (val
) || SMALL_INT_UNSIGNED (val
) || UPPER16_INT (val
);
1465 /* Compute a (partial) cost for rtx X. Return true if the complete
1466 cost has been computed, and false if subexpressions should be
1467 scanned. In either case, *TOTAL contains the cost result. */
1469 nios2_rtx_costs (rtx x
, machine_mode mode
,
1472 int *total
, bool speed
)
1474 int code
= GET_CODE (x
);
1479 if (INTVAL (x
) == 0 || nios2_simple_const_p (x
))
1481 *total
= COSTS_N_INSNS (0);
1486 /* High + lo_sum. */
1487 *total
= COSTS_N_INSNS (1);
1495 if (gprel_constant_p (x
) || r0rel_constant_p (x
))
1497 *total
= COSTS_N_INSNS (1);
1502 /* High + lo_sum. */
1503 *total
= COSTS_N_INSNS (1);
1509 /* This is essentially a constant. */
1510 *total
= COSTS_N_INSNS (0);
1516 *total
= COSTS_N_INSNS (0);
1522 /* Recognize 'nor' insn pattern. */
1523 if (GET_CODE (XEXP (x
, 0)) == NOT
1524 && GET_CODE (XEXP (x
, 1)) == NOT
)
1526 *total
= COSTS_N_INSNS (1);
1532 /* For insns that have an execution latency (3 cycles), don't
1533 penalize by the full amount since we can often schedule
1537 if (!TARGET_HAS_MUL
)
1538 *total
= COSTS_N_INSNS (5); /* Guess? */
1540 *total
= COSTS_N_INSNS (2); /* Latency adjustment. */
1542 *total
= COSTS_N_INSNS (1);
1543 if (TARGET_HAS_MULX
&& GET_MODE (x
) == DImode
)
1545 enum rtx_code c0
= GET_CODE (XEXP (x
, 0));
1546 enum rtx_code c1
= GET_CODE (XEXP (x
, 1));
1547 if ((c0
== SIGN_EXTEND
&& c1
== SIGN_EXTEND
)
1548 || (c0
== ZERO_EXTEND
&& c1
== ZERO_EXTEND
))
1549 /* This is the <mul>sidi3 pattern, which expands into 4 insns,
1550 2 multiplies and 2 moves. */
1552 *total
= *total
* 2 + COSTS_N_INSNS (2);
1561 if (!TARGET_HAS_DIV
)
1562 *total
= COSTS_N_INSNS (5); /* Guess? */
1564 *total
= COSTS_N_INSNS (2); /* Latency adjustment. */
1566 *total
= COSTS_N_INSNS (1);
1576 *total
= COSTS_N_INSNS (1);
1578 *total
= COSTS_N_INSNS (2); /* Latency adjustment. */
1585 *total
= COSTS_N_INSNS (1);
1592 if (MEM_P (XEXP (x
, 0)))
1593 *total
= COSTS_N_INSNS (1);
1595 *total
= COSTS_N_INSNS (3);
1601 rtx addr
= XEXP (x
, 0);
1603 /* Account for cost of different addressing modes. */
1604 *total
= nios2_address_cost (addr
, mode
, ADDR_SPACE_GENERIC
, speed
);
1606 if (outer_code
== SET
&& opno
== 0)
1607 /* Stores execute in 1 cycle accounted for by
1610 else if (outer_code
== SET
|| outer_code
== SIGN_EXTEND
1611 || outer_code
== ZERO_EXTEND
)
1612 /* Latency adjustment. */
1615 *total
+= COSTS_N_INSNS (1);
1618 /* This is going to have to be split into a load. */
1619 *total
+= COSTS_N_INSNS (speed
? 2 : 1);
1628 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1630 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, reg_class_t regclass
)
1632 return regclass
== NO_REGS
? GENERAL_REGS
: regclass
;
1635 /* Emit a call to __tls_get_addr. TI is the argument to this function.
1636 RET is an RTX for the return value location. The entire insn sequence
1638 static GTY(()) rtx nios2_tls_symbol
;
1641 nios2_call_tls_get_addr (rtx ti
)
1643 rtx arg
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
1644 rtx ret
= gen_rtx_REG (Pmode
, FIRST_RETVAL_REGNO
);
1648 if (!nios2_tls_symbol
)
1649 nios2_tls_symbol
= init_one_libfunc ("__tls_get_addr");
1651 emit_move_insn (arg
, ti
);
1652 fn
= gen_rtx_MEM (QImode
, nios2_tls_symbol
);
1653 insn
= emit_call_insn (gen_call_value (ret
, fn
, const0_rtx
));
1654 RTL_CONST_CALL_P (insn
) = 1;
1655 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), ret
);
1656 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), arg
);
1661 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1663 nios2_large_offset_p (int unspec
)
1665 gcc_assert (nios2_unspec_reloc_name (unspec
) != NULL
);
1668 /* FIXME: TLS GOT offset relocations will eventually also get this
1669 treatment, after binutils support for those are also completed. */
1670 && (unspec
== UNSPEC_PIC_SYM
|| unspec
== UNSPEC_PIC_CALL_SYM
))
1673 /* 'gotoff' offsets are always hiadj/lo. */
1674 if (unspec
== UNSPEC_PIC_GOTOFF_SYM
)
1680 /* Return true for conforming unspec relocations. Also used in
1681 constraints.md and predicates.md. */
1683 nios2_unspec_reloc_p (rtx op
)
1685 return (GET_CODE (op
) == CONST
1686 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1687 && ! nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1691 nios2_large_unspec_reloc_p (rtx op
)
1693 return (GET_CODE (op
) == CONST
1694 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1695 && nios2_large_offset_p (XINT (XEXP (op
, 0), 1)));
1698 /* Helper to generate unspec constant. */
1700 nios2_unspec_offset (rtx loc
, int unspec
)
1702 return gen_rtx_CONST (Pmode
, gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, loc
),
1706 /* Generate GOT pointer based address with large offset. */
1708 nios2_large_got_address (rtx offset
, rtx tmp
)
1711 tmp
= gen_reg_rtx (Pmode
);
1712 emit_move_insn (tmp
, offset
);
1713 return gen_rtx_PLUS (Pmode
, tmp
, pic_offset_table_rtx
);
1716 /* Generate a GOT pointer based address. */
1718 nios2_got_address (rtx loc
, int unspec
)
1720 rtx offset
= nios2_unspec_offset (loc
, unspec
);
1721 crtl
->uses_pic_offset_table
= 1;
1723 if (nios2_large_offset_p (unspec
))
1724 return force_reg (Pmode
, nios2_large_got_address (offset
, NULL_RTX
));
1726 return gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
1729 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1730 return value will be a valid address and move_operand (either a REG
1733 nios2_legitimize_tls_address (rtx loc
)
1736 enum tls_model model
= SYMBOL_REF_TLS_MODEL (loc
);
1740 case TLS_MODEL_GLOBAL_DYNAMIC
:
1741 tmp
= gen_reg_rtx (Pmode
);
1742 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_GD
));
1743 return nios2_call_tls_get_addr (tmp
);
1745 case TLS_MODEL_LOCAL_DYNAMIC
:
1746 tmp
= gen_reg_rtx (Pmode
);
1747 emit_move_insn (tmp
, nios2_got_address (loc
, UNSPEC_ADD_TLS_LDM
));
1748 return gen_rtx_PLUS (Pmode
, nios2_call_tls_get_addr (tmp
),
1749 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LDO
));
1751 case TLS_MODEL_INITIAL_EXEC
:
1752 tmp
= gen_reg_rtx (Pmode
);
1753 mem
= gen_const_mem (Pmode
, nios2_got_address (loc
, UNSPEC_LOAD_TLS_IE
));
1754 emit_move_insn (tmp
, mem
);
1755 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1756 return gen_rtx_PLUS (Pmode
, tp
, tmp
);
1758 case TLS_MODEL_LOCAL_EXEC
:
1759 tp
= gen_rtx_REG (Pmode
, TP_REGNO
);
1760 return gen_rtx_PLUS (Pmode
, tp
,
1761 nios2_unspec_offset (loc
, UNSPEC_ADD_TLS_LE
));
1769 If -O3 is used, we want to output a table lookup for
1770 divides between small numbers (both num and den >= 0
1771 and < 0x10). The overhead of this method in the worst
1772 case is 40 bytes in the text section (10 insns) and
1773 256 bytes in the data section. Additional divides do
1774 not incur additional penalties in the data section.
1776 Code speed is improved for small divides by about 5x
1777 when using this method in the worse case (~9 cycles
1778 vs ~45). And in the worst case divides not within the
1779 table are penalized by about 10% (~5 cycles vs ~45).
1780 However in the typical case the penalty is not as bad
1781 because doing the long divide in only 45 cycles is
1784 ??? would be nice to have some benchmarks other
1785 than Dhrystone to back this up.
1787 This bit of expansion is to create this instruction
1794 add $12, $11, divide_table
1800 # continue here with result in $2
1802 ??? Ideally I would like the libcall block to contain all
1803 of this code, but I don't know how to do that. What it
1804 means is that if the divide can be eliminated, it may not
1805 completely disappear.
1807 ??? The __divsi3_table label should ideally be moved out
1808 of this block and into a global. If it is placed into the
1809 sdata section we can save even more cycles by doing things
1812 nios2_emit_expensive_div (rtx
*operands
, machine_mode mode
)
1814 rtx or_result
, shift_left_result
;
1816 rtx_code_label
*lab1
, *lab3
;
1823 /* It may look a little generic, but only SImode is supported for now. */
1824 gcc_assert (mode
== SImode
);
1825 libfunc
= optab_libfunc (sdiv_optab
, SImode
);
1827 lab1
= gen_label_rtx ();
1828 lab3
= gen_label_rtx ();
1830 or_result
= expand_simple_binop (SImode
, IOR
,
1831 operands
[1], operands
[2],
1832 0, 0, OPTAB_LIB_WIDEN
);
1834 emit_cmp_and_jump_insns (or_result
, GEN_INT (15), GTU
, 0,
1835 GET_MODE (or_result
), 0, lab3
);
1836 JUMP_LABEL (get_last_insn ()) = lab3
;
1838 shift_left_result
= expand_simple_binop (SImode
, ASHIFT
,
1839 operands
[1], GEN_INT (4),
1840 0, 0, OPTAB_LIB_WIDEN
);
1842 lookup_value
= expand_simple_binop (SImode
, IOR
,
1843 shift_left_result
, operands
[2],
1844 0, 0, OPTAB_LIB_WIDEN
);
1845 table
= gen_rtx_PLUS (SImode
, lookup_value
,
1846 gen_rtx_SYMBOL_REF (SImode
, "__divsi3_table"));
1847 convert_move (operands
[0], gen_rtx_MEM (QImode
, table
), 1);
1849 tmp
= emit_jump_insn (gen_jump (lab1
));
1850 JUMP_LABEL (tmp
) = lab1
;
1854 LABEL_NUSES (lab3
) = 1;
1857 final_result
= emit_library_call_value (libfunc
, NULL_RTX
,
1859 operands
[1], SImode
,
1860 operands
[2], SImode
);
1862 insns
= get_insns ();
1864 emit_libcall_block (insns
, operands
[0], final_result
,
1865 gen_rtx_DIV (SImode
, operands
[1], operands
[2]));
1868 LABEL_NUSES (lab1
) = 1;
1872 /* Branches and compares. */
1874 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1875 comparison, e.g. >= 1 into > 0. */
1877 nios2_alternate_compare_const (enum rtx_code code
, rtx op
,
1878 enum rtx_code
*alt_code
, rtx
*alt_op
,
1881 gcc_assert (CONST_INT_P (op
));
1883 HOST_WIDE_INT opval
= INTVAL (op
);
1884 enum rtx_code scode
= signed_condition (code
);
1885 bool dec_p
= (scode
== LT
|| scode
== GE
);
1887 if (code
== EQ
|| code
== NE
)
1895 ? gen_int_mode (opval
- 1, mode
)
1896 : gen_int_mode (opval
+ 1, mode
));
1898 /* The required conversion between [>,>=] and [<,<=] is captured
1899 by a reverse + swap of condition codes. */
1900 *alt_code
= reverse_condition (swap_condition (code
));
1903 /* Test if the incremented/decremented value crosses the over/underflow
1904 boundary. Supposedly, such boundary cases should already be transformed
1905 into always-true/false or EQ conditions, so use an assertion here. */
1906 unsigned HOST_WIDE_INT alt_opval
= INTVAL (*alt_op
);
1908 alt_opval
^= (1 << (GET_MODE_BITSIZE (mode
) - 1));
1909 alt_opval
&= GET_MODE_MASK (mode
);
1910 gcc_assert (dec_p
? alt_opval
!= GET_MODE_MASK (mode
) : alt_opval
!= 0);
1914 /* Return true if the constant comparison is supported by nios2. */
1916 nios2_valid_compare_const_p (enum rtx_code code
, rtx op
)
1918 gcc_assert (CONST_INT_P (op
));
1921 case EQ
: case NE
: case GE
: case LT
:
1922 return SMALL_INT (INTVAL (op
));
1924 return SMALL_INT_UNSIGNED (INTVAL (op
));
1930 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1931 the current configuration. Perform modifications if MODIFY_P is true.
1932 Returns true if FPU compare can be done. */
1935 nios2_validate_fpu_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
,
1939 enum rtx_code code
= GET_CODE (*cmp
);
1941 if (!nios2_fpu_compare_enabled (code
, mode
))
1943 code
= swap_condition (code
);
1944 if (nios2_fpu_compare_enabled (code
, mode
))
1958 *op1
= force_reg (mode
, *op1
);
1959 *op2
= force_reg (mode
, *op2
);
1960 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
1965 /* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
1966 nios2 supported form. Returns true if success. */
1968 nios2_validate_compare (machine_mode mode
, rtx
*cmp
, rtx
*op1
, rtx
*op2
)
1970 enum rtx_code code
= GET_CODE (*cmp
);
1971 enum rtx_code alt_code
;
1974 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
1975 return nios2_validate_fpu_compare (mode
, cmp
, op1
, op2
, true);
1977 if (CONST_INT_P (*op2
) && *op2
!= const0_rtx
)
1979 /* Create alternate constant compare. */
1980 nios2_alternate_compare_const (code
, *op2
, &alt_code
, &alt_op2
, mode
);
1982 /* If alterate op2 is zero(0), we can use it directly, possibly
1983 swapping the compare code. */
1984 if (alt_op2
== const0_rtx
)
1988 goto check_rebuild_cmp
;
1991 /* Check if either constant compare can be used. */
1992 if (nios2_valid_compare_const_p (code
, *op2
))
1994 else if (nios2_valid_compare_const_p (alt_code
, alt_op2
))
2001 /* We have to force op2 into a register now. Try to pick one
2002 with a lower cost. */
2003 if (! nios2_simple_const_p (*op2
)
2004 && nios2_simple_const_p (alt_op2
))
2009 *op2
= force_reg (mode
, *op2
);
2011 else if (!reg_or_0_operand (*op2
, mode
))
2012 *op2
= force_reg (mode
, *op2
);
2015 if (code
== GT
|| code
== GTU
|| code
== LE
|| code
== LEU
)
2017 rtx t
= *op1
; *op1
= *op2
; *op2
= t
;
2018 code
= swap_condition (code
);
2021 *cmp
= gen_rtx_fmt_ee (code
, mode
, *op1
, *op2
);
2026 /* Addressing modes and constants. */
2028 /* Symbol references and other 32-bit constants are split into
2029 high/lo_sum pairs during the split1 pass. After that, they are not
2030 considered legitimate addresses.
2031 This function returns true if in a pre-split context where these
2032 constants are allowed. */
2034 nios2_large_constant_allowed (void)
2036 /* The reload_completed check is for the benefit of
2037 nios2_asm_output_mi_thunk and perhaps other places that try to
2038 emulate a post-reload pass. */
2039 return !(cfun
->curr_properties
& PROP_rtl_split_insns
) && !reload_completed
;
2042 /* Return true if X is constant expression with a reference to an
2043 "ordinary" symbol; not GOT-relative, not GP-relative, not TLS. */
2045 nios2_symbolic_constant_p (rtx x
)
2051 if (GET_CODE (x
) == LABEL_REF
)
2053 else if (CONSTANT_P (x
))
2055 split_const (x
, &base
, &offset
);
2056 return (SYMBOL_REF_P (base
)
2057 && !SYMBOL_REF_TLS_MODEL (base
)
2058 && !gprel_constant_p (base
)
2059 && !r0rel_constant_p (base
)
2060 && SMALL_INT (INTVAL (offset
)));
2065 /* Return true if X is an expression of the form
2066 (PLUS reg large_constant). */
2068 nios2_plus_large_constant_p (rtx x
)
2070 return (GET_CODE (x
) == PLUS
2071 && REG_P (XEXP (x
, 0))
2072 && nios2_large_constant_p (XEXP (x
, 1)));
2075 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
2077 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2080 split_const (x
, &base
, &offset
);
2081 return GET_CODE (base
) != SYMBOL_REF
|| !SYMBOL_REF_TLS_MODEL (base
);
2084 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
2086 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2088 return nios2_legitimate_constant_p (mode
, x
) == false;
2091 /* Return true if register REGNO is a valid base register.
2092 STRICT_P is true if REG_OK_STRICT is in effect. */
2095 nios2_regno_ok_for_base_p (int regno
, bool strict_p
)
2097 if (!HARD_REGISTER_NUM_P (regno
))
2105 regno
= reg_renumber
[regno
];
2108 /* The fake registers will be eliminated to either the stack or
2109 hard frame pointer, both of which are usually valid base registers.
2110 Reload deals with the cases where the eliminated form isn't valid. */
2111 return (GP_REG_P (regno
)
2112 || regno
== FRAME_POINTER_REGNUM
2113 || regno
== ARG_POINTER_REGNUM
);
2116 /* Return true if OFFSET is permitted in a load/store address expression.
2117 Normally any 16-bit value is permitted, but on R2 if we may be emitting
2118 the IO forms of these instructions we must restrict the offset to fit
2119 in a 12-bit field instead. */
2122 nios2_valid_addr_offset_p (rtx offset
)
2124 return (CONST_INT_P (offset
)
2125 && ((TARGET_ARCH_R2
&& (TARGET_BYPASS_CACHE
2126 || TARGET_BYPASS_CACHE_VOLATILE
))
2127 ? SMALL_INT12 (INTVAL (offset
))
2128 : SMALL_INT (INTVAL (offset
))));
2131 /* Return true if the address expression formed by BASE + OFFSET is
2134 nios2_valid_addr_expr_p (rtx base
, rtx offset
, bool strict_p
)
2136 if (!strict_p
&& GET_CODE (base
) == SUBREG
)
2137 base
= SUBREG_REG (base
);
2138 return (REG_P (base
)
2139 && nios2_regno_ok_for_base_p (REGNO (base
), strict_p
)
2140 && (offset
== NULL_RTX
2141 || nios2_valid_addr_offset_p (offset
)
2142 || (nios2_large_constant_allowed ()
2143 && nios2_symbolic_constant_p (offset
))
2144 || nios2_unspec_reloc_p (offset
)));
2147 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2149 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
,
2150 rtx operand
, bool strict_p
)
2152 switch (GET_CODE (operand
))
2156 if (SYMBOL_REF_TLS_MODEL (operand
))
2159 /* Else, fall through. */
2161 if (gprel_constant_p (operand
) || r0rel_constant_p (operand
))
2164 /* Else, fall through. */
2166 if (nios2_large_constant_allowed ()
2167 && nios2_symbolic_constant_p (operand
))
2172 if (r0rel_constant_p (operand
))
2174 return nios2_large_constant_allowed ();
2179 /* Register indirect. */
2181 return nios2_regno_ok_for_base_p (REGNO (operand
), strict_p
);
2183 /* Register indirect with displacement. */
2186 rtx op0
= XEXP (operand
, 0);
2187 rtx op1
= XEXP (operand
, 1);
2189 if (nios2_valid_addr_expr_p (op0
, op1
, strict_p
)
2190 || nios2_valid_addr_expr_p (op1
, op0
, strict_p
))
2195 /* %lo(constant)(reg)
2196 This requires a 16-bit relocation and isn't valid with R2
2197 io-variant load/stores. */
2200 && (TARGET_BYPASS_CACHE
|| TARGET_BYPASS_CACHE_VOLATILE
))
2204 rtx op0
= XEXP (operand
, 0);
2205 rtx op1
= XEXP (operand
, 1);
2208 && nios2_regno_ok_for_base_p (REGNO (op0
), strict_p
)
2209 && nios2_large_constant_p (op1
));
2218 /* Implement TARGET_ADDRESS_COST.
2219 Experimentation has shown that we get better code by penalizing the
2220 the (plus reg symbolic_constant) and (plus reg (const ...)) forms
2221 but giving (plus reg symbol_ref) address modes the same cost as those
2222 that don't require splitting. Also, from a theoretical point of view:
2223 - This is in line with the recommendation in the GCC internals
2224 documentation to make address forms involving multiple
2225 registers more expensive than single-register forms.
2226 - OTOH it still encourages fwprop1 to propagate constants into
2227 address expressions more aggressively.
2228 - We should discourage splitting (symbol + offset) into hi/lo pairs
2229 to allow CSE'ing the symbol when it's used with more than one offset,
2230 but not so heavily as to avoid this addressing mode at all. */
2232 nios2_address_cost (rtx address
,
2233 machine_mode mode ATTRIBUTE_UNUSED
,
2234 addr_space_t as ATTRIBUTE_UNUSED
,
2235 bool speed ATTRIBUTE_UNUSED
)
2237 if (nios2_plus_large_constant_p (address
))
2238 return COSTS_N_INSNS (1);
2239 if (nios2_large_constant_p (address
))
2241 if (GET_CODE (address
) == CONST
)
2242 return COSTS_N_INSNS (1);
2244 return COSTS_N_INSNS (0);
2246 return COSTS_N_INSNS (0);
2249 /* Return true if X is a MEM whose address expression involves a large (32-bit)
2252 nios2_large_constant_memory_operand_p (rtx x
)
2256 if (GET_CODE (x
) != MEM
)
2260 return (nios2_large_constant_p (addr
)
2261 || nios2_plus_large_constant_p (addr
));
2265 /* Return true if X is something that needs to be split into a
2266 high/lo_sum pair. */
2268 nios2_large_constant_p (rtx x
)
2270 return (nios2_symbolic_constant_p (x
)
2271 || nios2_large_unspec_reloc_p (x
)
2272 || (CONST_INT_P (x
) && !SMALL_INT (INTVAL (x
))));
2275 /* Given an RTX X that satisfies nios2_large_constant_p, split it into
2276 high and lo_sum parts using TEMP as a scratch register. Emit the high
2277 instruction and return the lo_sum expression.
2278 Also handle special cases involving constant integers. */
2280 nios2_split_large_constant (rtx x
, rtx temp
)
2282 if (CONST_INT_P (x
))
2284 HOST_WIDE_INT val
= INTVAL (x
);
2285 if (SMALL_INT (val
))
2287 else if (SMALL_INT_UNSIGNED (val
) || UPPER16_INT (val
))
2289 emit_move_insn (temp
, x
);
2294 HOST_WIDE_INT high
= (val
+ 0x8000) & ~0xffff;
2295 HOST_WIDE_INT low
= val
- high
;
2296 emit_move_insn (temp
, gen_int_mode (high
, Pmode
));
2297 return gen_rtx_PLUS (Pmode
, temp
, gen_int_mode (low
, Pmode
));
2301 emit_insn (gen_rtx_SET (temp
, gen_rtx_HIGH (Pmode
, copy_rtx (x
))));
2302 return gen_rtx_LO_SUM (Pmode
, temp
, copy_rtx (x
));
2305 /* Split an RTX of the form
2307 where op1 is a large constant into
2308 (set temp (high op1))
2309 (set temp (plus op0 temp))
2311 returning the lo_sum expression as the value. */
2313 nios2_split_plus_large_constant (rtx op0
, rtx op1
)
2315 rtx temp
= gen_reg_rtx (Pmode
);
2316 op0
= force_reg (Pmode
, op0
);
2318 emit_insn (gen_rtx_SET (temp
, gen_rtx_HIGH (Pmode
, copy_rtx (op1
))));
2319 emit_insn (gen_rtx_SET (temp
, gen_rtx_PLUS (Pmode
, op0
, temp
)));
2320 return gen_rtx_LO_SUM (Pmode
, temp
, copy_rtx (op1
));
2323 /* Given a MEM OP with an address that includes a splittable symbol or
2324 other large constant, emit some instructions to do the split and
2325 return a new MEM. */
2327 nios2_split_large_constant_memory_operand (rtx op
)
2329 rtx addr
= XEXP (op
, 0);
2331 if (nios2_large_constant_p (addr
))
2332 addr
= nios2_split_large_constant (addr
, gen_reg_rtx (Pmode
));
2333 else if (nios2_plus_large_constant_p (addr
))
2334 addr
= nios2_split_plus_large_constant (XEXP (addr
, 0), XEXP (addr
, 1));
2337 return replace_equiv_address (op
, addr
, false);
2340 /* Return true if SECTION is a small section name. */
2342 nios2_small_section_name_p (const char *section
)
2344 return (strcmp (section
, ".sbss") == 0
2345 || strncmp (section
, ".sbss.", 6) == 0
2346 || strcmp (section
, ".sdata") == 0
2347 || strncmp (section
, ".sdata.", 7) == 0
2349 && regexec (&nios2_gprel_sec_regex
, section
, 0, NULL
, 0) == 0));
2352 /* Return true if SECTION is a r0-relative section name. */
2354 nios2_r0rel_section_name_p (const char *section
)
2356 return (nios2_r0rel_sec
2357 && regexec (&nios2_r0rel_sec_regex
, section
, 0, NULL
, 0) == 0);
2360 /* Return true if EXP should be placed in the small data section. */
2362 nios2_in_small_data_p (const_tree exp
)
2364 /* We want to merge strings, so we never consider them small data. */
2365 if (TREE_CODE (exp
) == STRING_CST
)
2368 if (TREE_CODE (exp
) == VAR_DECL
)
2370 if (DECL_SECTION_NAME (exp
))
2372 const char *section
= DECL_SECTION_NAME (exp
);
2373 if (nios2_small_section_name_p (section
))
2378 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
2380 /* If this is an incomplete type with size 0, then we can't put it
2381 in sdata because it might be too big when completed. */
2383 && (unsigned HOST_WIDE_INT
) size
<= nios2_section_threshold
)
2391 /* Return true if symbol is in small data section. */
2394 nios2_symbol_ref_in_small_data_p (rtx sym
)
2398 gcc_assert (GET_CODE (sym
) == SYMBOL_REF
);
2399 decl
= SYMBOL_REF_DECL (sym
);
2401 /* TLS variables are not accessed through the GP. */
2402 if (SYMBOL_REF_TLS_MODEL (sym
) != 0)
2405 /* On Nios II R2, there is no GP-relative relocation that can be
2406 used with "io" instructions. So, if we are implicitly generating
2407 those instructions, we cannot emit GP-relative accesses. */
2409 && (TARGET_BYPASS_CACHE
|| TARGET_BYPASS_CACHE_VOLATILE
))
2412 /* If the user has explicitly placed the symbol in a small data section
2413 via an attribute, generate gp-relative addressing even if the symbol
2414 is external, weak, or larger than we'd automatically put in the
2415 small data section. OTOH, if the symbol is located in some
2416 non-small-data section, we can't use gp-relative accesses on it
2417 unless the user has requested gpopt_data or gpopt_all. */
2419 switch (nios2_gpopt_option
)
2422 /* Don't generate a gp-relative addressing mode if that's been
2427 /* Use GP-relative addressing for small data symbols that are
2428 not external or weak or uninitialized common, plus any symbols
2429 that have explicitly been placed in a small data section. */
2430 if (decl
&& DECL_SECTION_NAME (decl
))
2431 return nios2_small_section_name_p (DECL_SECTION_NAME (decl
));
2432 return (SYMBOL_REF_SMALL_P (sym
)
2433 && !SYMBOL_REF_EXTERNAL_P (sym
)
2434 && !(decl
&& DECL_WEAK (decl
))
2435 && !(decl
&& DECL_COMMON (decl
)
2436 && (DECL_INITIAL (decl
) == NULL
2438 && DECL_INITIAL (decl
) == error_mark_node
))));
2441 /* Use GP-relative addressing for small data symbols, even if
2442 they are external or weak. Note that SYMBOL_REF_SMALL_P
2443 is also true of symbols that have explicitly been placed
2444 in a small data section. */
2445 return SYMBOL_REF_SMALL_P (sym
);
2448 /* Use GP-relative addressing for all data symbols regardless
2449 of the object size, but not for code symbols. This option
2450 is equivalent to the user asserting that the entire data
2451 section is accessible from the GP. */
2452 return !SYMBOL_REF_FUNCTION_P (sym
);
2455 /* Use GP-relative addressing for everything, including code.
2456 Effectively, the user has asserted that the entire program
2457 fits within the 64K range of the GP offset. */
2461 /* We shouldn't get here. */
2466 /* Likewise for r0-relative addressing. */
2468 nios2_symbol_ref_in_r0rel_data_p (rtx sym
)
2472 gcc_assert (GET_CODE (sym
) == SYMBOL_REF
);
2473 decl
= SYMBOL_REF_DECL (sym
);
2475 /* TLS variables are not accessed through r0. */
2476 if (SYMBOL_REF_TLS_MODEL (sym
) != 0)
2479 /* On Nios II R2, there is no r0-relative relocation that can be
2480 used with "io" instructions. So, if we are implicitly generating
2481 those instructions, we cannot emit r0-relative accesses. */
2483 && (TARGET_BYPASS_CACHE
|| TARGET_BYPASS_CACHE_VOLATILE
))
2486 /* If the user has explicitly placed the symbol in a r0rel section
2487 via an attribute, generate r0-relative addressing. */
2488 if (decl
&& DECL_SECTION_NAME (decl
))
2489 return nios2_r0rel_section_name_p (DECL_SECTION_NAME (decl
));
2493 /* Implement TARGET_SECTION_TYPE_FLAGS. */
2496 nios2_section_type_flags (tree decl
, const char *name
, int reloc
)
2500 flags
= default_section_type_flags (decl
, name
, reloc
);
2502 if (nios2_small_section_name_p (name
))
2503 flags
|= SECTION_SMALL
;
2508 /* Return true if SYMBOL_REF X binds locally. */
2511 nios2_symbol_binds_local_p (const_rtx x
)
2513 return (SYMBOL_REF_DECL (x
)
2514 ? targetm
.binds_local_p (SYMBOL_REF_DECL (x
))
2515 : SYMBOL_REF_LOCAL_P (x
));
2518 /* Position independent code related. */
2520 /* Emit code to load the PIC register. */
2522 nios2_load_pic_register (void)
2524 rtx tmp
= gen_rtx_REG (Pmode
, TEMP_REG_NUM
);
2526 emit_insn (gen_load_got_register (pic_offset_table_rtx
, tmp
));
2527 emit_insn (gen_add3_insn (pic_offset_table_rtx
, pic_offset_table_rtx
, tmp
));
2530 /* Generate a PIC address as a MEM rtx. */
2532 nios2_load_pic_address (rtx sym
, int unspec
, rtx tmp
)
2535 && GET_CODE (sym
) == SYMBOL_REF
2536 && nios2_symbol_binds_local_p (sym
))
2537 /* Under -fPIC, generate a GOTOFF address for local symbols. */
2539 rtx offset
= nios2_unspec_offset (sym
, UNSPEC_PIC_GOTOFF_SYM
);
2540 crtl
->uses_pic_offset_table
= 1;
2541 return nios2_large_got_address (offset
, tmp
);
2544 return gen_const_mem (Pmode
, nios2_got_address (sym
, unspec
));
2547 /* Nonzero if the constant value X is a legitimate general operand
2548 when generating PIC code. It is given that flag_pic is on and
2549 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2551 nios2_legitimate_pic_operand_p (rtx x
)
2553 if (nios2_large_unspec_reloc_p (x
))
2556 return ! (GET_CODE (x
) == SYMBOL_REF
2557 || GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == CONST
);
2560 /* Return TRUE if X is a thread-local symbol. */
2562 nios2_tls_symbol_p (rtx x
)
2564 return (targetm
.have_tls
&& GET_CODE (x
) == SYMBOL_REF
2565 && SYMBOL_REF_TLS_MODEL (x
) != 0);
2568 /* Legitimize addresses that are CONSTANT_P expressions. */
2570 nios2_legitimize_constant_address (rtx addr
)
2573 split_const (addr
, &base
, &offset
);
2575 if (nios2_tls_symbol_p (base
))
2576 base
= nios2_legitimize_tls_address (base
);
2578 base
= nios2_load_pic_address (base
, UNSPEC_PIC_SYM
, NULL_RTX
);
2579 else if (!nios2_large_constant_allowed ()
2580 && nios2_symbolic_constant_p (addr
))
2581 return nios2_split_large_constant (addr
, gen_reg_rtx (Pmode
));
2582 else if (CONST_INT_P (addr
))
2584 HOST_WIDE_INT val
= INTVAL (addr
);
2585 if (SMALL_INT (val
))
2586 /* Use r0-relative addressing. */
2588 else if (!nios2_large_constant_allowed ())
2589 /* Split into high/lo pair. */
2590 return nios2_split_large_constant (addr
, gen_reg_rtx (Pmode
));
2595 if (offset
!= const0_rtx
)
2597 gcc_assert (can_create_pseudo_p ());
2598 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, base
),
2599 (CONST_INT_P (offset
)
2600 ? (SMALL_INT (INTVAL (offset
))
2601 ? offset
: force_reg (Pmode
, offset
))
2607 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
2609 nios2_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
2610 machine_mode mode ATTRIBUTE_UNUSED
)
2615 return nios2_legitimize_constant_address (x
);
2617 /* Remaining cases all involve something + a constant. */
2618 if (GET_CODE (x
) != PLUS
)
2624 /* Target-independent code turns (exp + constant) into plain
2625 register indirect. Although subsequent optimization passes will
2626 eventually sort that out, ivopts uses the unoptimized form for
2627 computing its cost model, so we get better results by generating
2628 the correct form from the start. */
2629 if (nios2_valid_addr_offset_p (op1
))
2630 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, op0
), copy_rtx (op1
));
2632 /* We may need to split symbolic constants now. */
2633 else if (nios2_symbolic_constant_p (op1
))
2635 if (nios2_large_constant_allowed ())
2636 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, op0
), copy_rtx (op1
));
2638 return nios2_split_plus_large_constant (op0
, op1
);
2641 /* For the TLS LE (Local Exec) model, the compiler may try to
2642 combine constant offsets with unspec relocs, creating address RTXs
2644 (plus:SI (reg:SI 23 r23)
2647 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
2648 (const_int 48 [0x30]))))
2650 This usually happens when 'var' is a thread-local struct variable,
2651 and access of a field in var causes the addend.
2653 We typically want this combining, so transform the above into this
2654 form, which is allowed:
2655 (plus:SI (reg:SI 23 r23)
2659 (plus:SI (symbol_ref:SI ("var"))
2660 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
2662 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
2663 else if (GET_CODE (op1
) == CONST
)
2666 split_const (op1
, &unspec
, &offset
);
2667 if (GET_CODE (unspec
) == UNSPEC
2668 && !nios2_large_offset_p (XINT (unspec
, 1))
2669 && offset
!= const0_rtx
)
2671 rtx reg
= force_reg (Pmode
, op0
);
2672 unspec
= copy_rtx (unspec
);
2673 XVECEXP (unspec
, 0, 0)
2674 = plus_constant (Pmode
, XVECEXP (unspec
, 0, 0), INTVAL (offset
));
2675 return gen_rtx_PLUS (Pmode
, reg
, gen_rtx_CONST (Pmode
, unspec
));
2683 nios2_delegitimize_address (rtx x
)
2685 x
= delegitimize_mem_from_attrs (x
);
2687 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == UNSPEC
)
2689 switch (XINT (XEXP (x
, 0), 1))
2691 case UNSPEC_PIC_SYM
:
2692 case UNSPEC_PIC_CALL_SYM
:
2693 case UNSPEC_PIC_GOTOFF_SYM
:
2694 case UNSPEC_ADD_TLS_GD
:
2695 case UNSPEC_ADD_TLS_LDM
:
2696 case UNSPEC_LOAD_TLS_IE
:
2697 case UNSPEC_ADD_TLS_LE
:
2698 x
= XVECEXP (XEXP (x
, 0), 0, 0);
2699 gcc_assert (CONSTANT_P (x
));
2706 /* Main expander function for RTL moves. */
2708 nios2_emit_move_sequence (rtx
*operands
, machine_mode mode
)
2710 rtx to
= operands
[0];
2711 rtx from
= operands
[1];
2713 if (!register_operand (to
, mode
) && !reg_or_0_operand (from
, mode
))
2715 gcc_assert (can_create_pseudo_p ());
2716 from
= copy_to_mode_reg (mode
, from
);
2719 if (CONSTANT_P (from
))
2721 if (CONST_INT_P (from
))
2723 if (!SMALL_INT (INTVAL (from
))
2724 && !SMALL_INT_UNSIGNED (INTVAL (from
))
2725 && !UPPER16_INT (INTVAL (from
)))
2727 HOST_WIDE_INT high
= (INTVAL (from
) + 0x8000) & ~0xffff;
2728 HOST_WIDE_INT low
= INTVAL (from
) & 0xffff;
2729 emit_move_insn (to
, gen_int_mode (high
, SImode
));
2730 emit_insn (gen_add2_insn (to
, gen_int_mode (low
, HImode
)));
2731 set_unique_reg_note (get_last_insn (), REG_EQUAL
,
2736 else if (gprel_constant_p (from
) || r0rel_constant_p (from
))
2737 /* Handled directly by movsi_internal as gp + offset
2740 else if (nios2_large_constant_p (from
))
2741 /* This case covers either a regular symbol reference or an UNSPEC
2742 representing a 32-bit offset. We split the former
2743 only conditionally and the latter always. */
2745 if (!nios2_large_constant_allowed ()
2746 || nios2_large_unspec_reloc_p (from
))
2748 rtx lo
= nios2_split_large_constant (from
, to
);
2749 emit_insn (gen_rtx_SET (to
, lo
));
2750 set_unique_reg_note (get_last_insn (), REG_EQUAL
,
2751 copy_rtx (operands
[1]));
2756 /* This is a TLS or PIC symbol. */
2758 from
= nios2_legitimize_constant_address (from
);
2759 if (CONSTANT_P (from
))
2761 emit_insn (gen_rtx_SET (to
,
2762 gen_rtx_HIGH (Pmode
, copy_rtx (from
))));
2763 emit_insn (gen_rtx_SET (to
, gen_rtx_LO_SUM (Pmode
, to
, from
)));
2764 set_unique_reg_note (get_last_insn (), REG_EQUAL
,
2765 copy_rtx (operands
[1]));
2776 /* The function with address *ADDR is being called. If the address
2777 needs to be loaded from the GOT, emit the instruction to do so and
2778 update *ADDR to point to the rtx for the loaded value.
2779 If REG != NULL_RTX, it is used as the target/scratch register in the
2780 GOT address calculation. */
2782 nios2_adjust_call_address (rtx
*call_op
, rtx reg
)
2784 if (MEM_P (*call_op
))
2785 call_op
= &XEXP (*call_op
, 0);
2787 rtx addr
= *call_op
;
2788 if (flag_pic
&& CONSTANT_P (addr
))
2790 rtx tmp
= reg
? reg
: NULL_RTX
;
2792 reg
= gen_reg_rtx (Pmode
);
2793 addr
= nios2_load_pic_address (addr
, UNSPEC_PIC_CALL_SYM
, tmp
);
2794 emit_insn (gen_rtx_SET (reg
, addr
));
2800 /* Output assembly language related definitions. */
2802 /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
2804 nios2_print_operand_punct_valid_p (unsigned char code
)
2806 return (code
== '.' || code
== '!');
2810 /* Print the operand OP to file stream FILE modified by LETTER.
2811 LETTER can be one of:
2813 i: print i/hi/ui suffixes (used for mov instruction variants),
2814 when OP is the appropriate immediate operand.
2816 u: like 'i', except without "ui" suffix case (used for cmpgeu/cmpltu)
2818 o: print "io" if OP needs volatile access (due to TARGET_BYPASS_CACHE
2819 or TARGET_BYPASS_CACHE_VOLATILE).
2821 x: print i/hi/ci/chi suffixes for the and instruction,
2822 when OP is the appropriate immediate operand.
2824 z: prints the third register immediate operand in assembly
2825 instructions. Outputs const0_rtx as the 'zero' register
2828 y: same as 'z', but for specifically for logical instructions,
2829 where the processing for immediates are slightly different.
2833 D: for the upper 32-bits of a 64-bit double value
2834 R: prints reverse condition.
2835 A: prints (reg) operand for ld[s]ex and st[s]ex.
2837 .: print .n suffix for 16-bit instructions.
2838 !: print r.n suffix for 16-bit instructions. Used for jmpr.n.
2841 nios2_print_operand (FILE *file
, rtx op
, int letter
)
2844 /* First take care of the format letters that just insert a string
2845 into the output stream. */
2849 if (current_output_insn
&& get_attr_length (current_output_insn
) == 2)
2850 fprintf (file
, ".n");
2854 if (current_output_insn
&& get_attr_length (current_output_insn
) == 2)
2855 fprintf (file
, "r.n");
2859 if (CONST_INT_P (op
))
2861 HOST_WIDE_INT val
= INTVAL (op
);
2862 HOST_WIDE_INT low
= val
& 0xffff;
2863 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2871 gcc_assert (TARGET_ARCH_R2
);
2873 fprintf (file
, "c");
2874 else if (low
== 0xffff)
2875 fprintf (file
, "ch");
2880 fprintf (file
, "h");
2882 fprintf (file
, "i");
2889 if (CONST_INT_P (op
))
2891 HOST_WIDE_INT val
= INTVAL (op
);
2892 HOST_WIDE_INT low
= val
& 0xffff;
2893 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2896 if (low
== 0 && high
!= 0)
2897 fprintf (file
, "h");
2898 else if (high
== 0 && (low
& 0x8000) != 0 && letter
!= 'u')
2899 fprintf (file
, "u");
2902 if (CONSTANT_P (op
) && op
!= const0_rtx
)
2903 fprintf (file
, "i");
2907 if (GET_CODE (op
) == MEM
2908 && ((MEM_VOLATILE_P (op
) && TARGET_BYPASS_CACHE_VOLATILE
)
2909 || TARGET_BYPASS_CACHE
))
2911 gcc_assert (current_output_insn
2912 && get_attr_length (current_output_insn
) == 4);
2913 fprintf (file
, "io");
2921 /* Handle comparison operator names. */
2922 if (comparison_operator (op
, VOIDmode
))
2924 enum rtx_code cond
= GET_CODE (op
);
2927 fprintf (file
, "%s", GET_RTX_NAME (cond
));
2932 fprintf (file
, "%s", GET_RTX_NAME (reverse_condition (cond
)));
2937 /* Now handle the cases where we actually need to format an operand. */
2938 switch (GET_CODE (op
))
2941 if (letter
== 0 || letter
== 'z' || letter
== 'y')
2943 fprintf (file
, "%s", reg_names
[REGNO (op
)]);
2946 else if (letter
== 'D')
2948 fprintf (file
, "%s", reg_names
[REGNO (op
)+1]);
2956 HOST_WIDE_INT val
= INTVAL (int_rtx
);
2957 HOST_WIDE_INT low
= val
& 0xffff;
2958 HOST_WIDE_INT high
= (val
>> 16) & 0xffff;
2963 fprintf (file
, "zero");
2970 gcc_assert (TARGET_ARCH_R2
);
2973 int_rtx
= gen_int_mode (low
, SImode
);
2974 else if (low
== 0xffff)
2976 int_rtx
= gen_int_mode (high
, SImode
);
2982 int_rtx
= gen_int_mode (high
, SImode
);
2986 int_rtx
= gen_int_mode (low
, SImode
);
2987 output_addr_const (file
, int_rtx
);
2991 else if (letter
== 'z')
2994 fprintf (file
, "zero");
2997 if (low
== 0 && high
!= 0)
2998 int_rtx
= gen_int_mode (high
, SImode
);
3001 gcc_assert (high
== 0 || high
== 0xffff);
3002 int_rtx
= gen_int_mode (low
, high
== 0 ? SImode
: HImode
);
3006 output_addr_const (file
, int_rtx
);
3012 /* Else, fall through. */
3018 if (letter
== 0 || letter
== 'z')
3020 output_addr_const (file
, op
);
3023 else if (letter
== 'H' || letter
== 'L')
3025 fprintf (file
, "%%");
3026 if (GET_CODE (op
) == CONST
3027 && GET_CODE (XEXP (op
, 0)) == UNSPEC
)
3029 rtx unspec
= XEXP (op
, 0);
3030 int unspec_reloc
= XINT (unspec
, 1);
3031 gcc_assert (nios2_large_offset_p (unspec_reloc
));
3032 fprintf (file
, "%s_", nios2_unspec_reloc_name (unspec_reloc
));
3033 op
= XVECEXP (unspec
, 0, 0);
3035 fprintf (file
, letter
== 'H' ? "hiadj(" : "lo(");
3036 output_addr_const (file
, op
);
3037 fprintf (file
, ")");
3046 /* Address of '(reg)' form, with no index. */
3047 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (op
, 0))]);
3052 output_address (VOIDmode
, op
);
3060 output_addr_const (file
, op
);
3070 output_operand_lossage ("Unsupported operand for code '%c'", letter
);
3074 /* Return true if this is a GP-relative accessible reference. */
3076 gprel_constant_p (rtx op
)
3078 if (GET_CODE (op
) == SYMBOL_REF
3079 && nios2_symbol_ref_in_small_data_p (op
))
3081 else if (GET_CODE (op
) == CONST
3082 && GET_CODE (XEXP (op
, 0)) == PLUS
)
3083 return gprel_constant_p (XEXP (XEXP (op
, 0), 0));
3088 /* Likewise if this is a zero-relative accessible reference. */
3090 r0rel_constant_p (rtx op
)
3092 if (GET_CODE (op
) == SYMBOL_REF
3093 && nios2_symbol_ref_in_r0rel_data_p (op
))
3095 else if (GET_CODE (op
) == CONST
3096 && GET_CODE (XEXP (op
, 0)) == PLUS
)
3097 return r0rel_constant_p (XEXP (XEXP (op
, 0), 0));
3098 else if (GET_CODE (op
) == CONST_INT
3099 && SMALL_INT (INTVAL (op
)))
3105 /* Return the name string for a supported unspec reloc offset. */
3107 nios2_unspec_reloc_name (int unspec
)
3111 case UNSPEC_PIC_SYM
:
3113 case UNSPEC_PIC_CALL_SYM
:
3115 case UNSPEC_PIC_GOTOFF_SYM
:
3117 case UNSPEC_LOAD_TLS_IE
:
3119 case UNSPEC_ADD_TLS_LE
:
3121 case UNSPEC_ADD_TLS_GD
:
3123 case UNSPEC_ADD_TLS_LDM
:
3125 case UNSPEC_ADD_TLS_LDO
:
3132 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
3134 nios2_output_addr_const_extra (FILE *file
, rtx op
)
3137 gcc_assert (GET_CODE (op
) == UNSPEC
);
3139 /* Support for printing out const unspec relocations. */
3140 name
= nios2_unspec_reloc_name (XINT (op
, 1));
3143 fprintf (file
, "%%%s(", name
);
3144 output_addr_const (file
, XVECEXP (op
, 0, 0));
3145 fprintf (file
, ")");
3151 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
3153 nios2_print_operand_address (FILE *file
, machine_mode mode
, rtx op
)
3155 switch (GET_CODE (op
))
3162 if (gprel_constant_p (op
))
3164 fprintf (file
, "%%gprel(");
3165 output_addr_const (file
, op
);
3166 fprintf (file
, ")(%s)", reg_names
[GP_REGNO
]);
3169 else if (r0rel_constant_p (op
))
3171 if (CONST_INT_P (op
))
3173 output_addr_const (file
, op
);
3174 fprintf (file
, "(r0)");
3179 fprintf (file
, "%%lo(");
3180 output_addr_const (file
, op
);
3181 fprintf (file
, ")(r0)");
3189 rtx op0
= XEXP (op
, 0);
3190 rtx op1
= XEXP (op
, 1);
3192 if (REG_P (op0
) && CONSTANT_P (op1
))
3194 output_addr_const (file
, op1
);
3195 fprintf (file
, "(%s)", reg_names
[REGNO (op0
)]);
3198 else if (REG_P (op1
) && CONSTANT_P (op0
))
3200 output_addr_const (file
, op0
);
3201 fprintf (file
, "(%s)", reg_names
[REGNO (op1
)]);
3209 rtx op0
= XEXP (op
, 0);
3210 rtx op1
= XEXP (op
, 1);
3212 if (REG_P (op0
) && CONSTANT_P (op1
))
3214 nios2_print_operand (file
, op1
, 'L');
3215 fprintf (file
, "(%s)", reg_names
[REGNO (op0
)]);
3222 fprintf (file
, "0(%s)", reg_names
[REGNO (op
)]);
3227 rtx base
= XEXP (op
, 0);
3228 nios2_print_operand_address (file
, mode
, base
);
3235 fprintf (stderr
, "Missing way to print address\n");
3240 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
3242 nios2_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
3244 gcc_assert (size
== 4);
3245 fprintf (file
, "\t.4byte\t%%tls_ldo(");
3246 output_addr_const (file
, x
);
3247 fprintf (file
, ")");
3250 /* Implemet TARGET_ASM_FILE_END. */
3253 nios2_asm_file_end (void)
3255 /* The Nios II Linux stack is mapped non-executable by default, so add a
3256 .note.GNU-stack section for switching to executable stacks only when
3257 trampolines are generated. */
3258 if (TARGET_LINUX_ABI
&& trampolines_created
)
3259 file_end_indicate_exec_stack ();
3262 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
3264 nios2_asm_function_prologue (FILE *file
)
3266 if (flag_verbose_asm
|| flag_debug_asm
)
3268 nios2_compute_frame_layout ();
3269 nios2_dump_frame_layout (file
);
3273 /* Emit assembly of custom FPU instructions. */
3275 nios2_fpu_insn_asm (enum n2fpu_code code
)
3277 static char buf
[256];
3278 const char *op1
, *op2
, *op3
;
3279 int ln
= 256, n
= 0;
3281 int N
= N2FPU_N (code
);
3282 int num_operands
= N2FPU (code
).num_operands
;
3283 const char *insn_name
= N2FPU_NAME (code
);
3284 tree ftype
= nios2_ftype (N2FPU_FTCODE (code
));
3285 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (ftype
));
3286 machine_mode src_mode
= TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype
)));
3288 /* Prepare X register for DF input operands. */
3289 if (GET_MODE_SIZE (src_mode
) == 8 && num_operands
== 3)
3290 n
= snprintf (buf
, ln
, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
3291 N2FPU_N (n2fpu_fwrx
));
3293 if (src_mode
== SFmode
)
3295 if (dst_mode
== VOIDmode
)
3297 /* The fwry case. */
3304 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
3306 op3
= (num_operands
== 2 ? "zero" : "%2");
3309 else if (src_mode
== DFmode
)
3311 if (dst_mode
== VOIDmode
)
3313 /* The fwrx case. */
3321 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
3322 op2
= (num_operands
== 2 ? "%1" : "%2");
3323 op3
= (num_operands
== 2 ? "%D1" : "%D2");
3326 else if (src_mode
== VOIDmode
)
3328 /* frdxlo, frdxhi, frdy cases. */
3329 gcc_assert (dst_mode
== SFmode
);
3333 else if (src_mode
== SImode
)
3335 /* Conversion operators. */
3336 gcc_assert (num_operands
== 2);
3337 op1
= (dst_mode
== DFmode
? "%D0" : "%0");
3344 /* Main instruction string. */
3345 n
+= snprintf (buf
+ n
, ln
- n
, "custom\t%d, %s, %s, %s # %s %%0%s%s",
3346 N
, op1
, op2
, op3
, insn_name
,
3347 (num_operands
>= 2 ? ", %1" : ""),
3348 (num_operands
== 3 ? ", %2" : ""));
3350 /* Extraction of Y register for DF results. */
3351 if (dst_mode
== DFmode
)
3352 snprintf (buf
+ n
, ln
- n
, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
3353 N2FPU_N (n2fpu_frdy
));
3359 /* Function argument related. */
3361 /* Define where to put the arguments to a function. Value is zero to
3362 push the argument on the stack, or a hard register in which to
3365 MODE is the argument's machine mode.
3366 TYPE is the data type of the argument (as a tree).
3367 This is null for libcalls where that information may
3369 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3370 the preceding args and about the function being called.
3371 NAMED is nonzero if this argument is a named parameter
3372 (otherwise it is an extra parameter matching an ellipsis). */
3375 nios2_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
3376 const_tree type ATTRIBUTE_UNUSED
,
3377 bool named ATTRIBUTE_UNUSED
)
3379 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3380 rtx return_rtx
= NULL_RTX
;
3382 if (cum
->regs_used
< NUM_ARG_REGS
)
3383 return_rtx
= gen_rtx_REG (mode
, FIRST_ARG_REGNO
+ cum
->regs_used
);
3388 /* Return number of bytes, at the beginning of the argument, that must be
3389 put in registers. 0 is the argument is entirely in registers or entirely
3393 nios2_arg_partial_bytes (cumulative_args_t cum_v
,
3394 machine_mode mode
, tree type ATTRIBUTE_UNUSED
,
3395 bool named ATTRIBUTE_UNUSED
)
3397 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3398 HOST_WIDE_INT param_size
;
3400 if (mode
== BLKmode
)
3402 param_size
= int_size_in_bytes (type
);
3403 gcc_assert (param_size
>= 0);
3406 param_size
= GET_MODE_SIZE (mode
);
3408 /* Convert to words (round up). */
3409 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
3411 if (cum
->regs_used
< NUM_ARG_REGS
3412 && cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
3413 return (NUM_ARG_REGS
- cum
->regs_used
) * UNITS_PER_WORD
;
3418 /* Update the data in CUM to advance over an argument of mode MODE
3419 and data type TYPE; TYPE is null for libcalls where that information
3420 may not be available. */
3423 nios2_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
3424 const_tree type ATTRIBUTE_UNUSED
,
3425 bool named ATTRIBUTE_UNUSED
)
3427 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3428 HOST_WIDE_INT param_size
;
3430 if (mode
== BLKmode
)
3432 param_size
= int_size_in_bytes (type
);
3433 gcc_assert (param_size
>= 0);
3436 param_size
= GET_MODE_SIZE (mode
);
3438 /* Convert to words (round up). */
3439 param_size
= (UNITS_PER_WORD
- 1 + param_size
) / UNITS_PER_WORD
;
3441 if (cum
->regs_used
+ param_size
> NUM_ARG_REGS
)
3442 cum
->regs_used
= NUM_ARG_REGS
;
3444 cum
->regs_used
+= param_size
;
3447 static pad_direction
3448 nios2_function_arg_padding (machine_mode mode
, const_tree type
)
3450 /* On little-endian targets, the first byte of every stack argument
3451 is passed in the first byte of the stack slot. */
3452 if (!BYTES_BIG_ENDIAN
)
3455 /* Otherwise, integral types are padded downward: the last byte of a
3456 stack argument is passed in the last byte of the stack slot. */
3458 ? INTEGRAL_TYPE_P (type
) || POINTER_TYPE_P (type
)
3459 : GET_MODE_CLASS (mode
) == MODE_INT
)
3460 return PAD_DOWNWARD
;
3462 /* Arguments smaller than a stack slot are padded downward. */
3463 if (mode
!= BLKmode
)
3464 return (GET_MODE_BITSIZE (mode
) >= PARM_BOUNDARY
3465 ? PAD_UPWARD
: PAD_DOWNWARD
);
3467 return ((int_size_in_bytes (type
) >= (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3468 ? PAD_UPWARD
: PAD_DOWNWARD
);
3472 nios2_block_reg_padding (machine_mode mode
, tree type
,
3473 int first ATTRIBUTE_UNUSED
)
3475 return nios2_function_arg_padding (mode
, type
);
3478 /* Emit RTL insns to initialize the variable parts of a trampoline.
3479 FNADDR is an RTX for the address of the function's pure code.
3480 CXT is an RTX for the static chain value for the function.
3481 On Nios II, we handle this by a library call. */
3483 nios2_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
3485 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3486 rtx ctx_reg
= force_reg (Pmode
, cxt
);
3487 rtx addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
3489 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__trampoline_setup"),
3490 LCT_NORMAL
, VOIDmode
, addr
, Pmode
, fnaddr
, Pmode
,
3494 /* Implement TARGET_FUNCTION_VALUE. */
3496 nios2_function_value (const_tree ret_type
, const_tree fn ATTRIBUTE_UNUSED
,
3497 bool outgoing ATTRIBUTE_UNUSED
)
3499 return gen_rtx_REG (TYPE_MODE (ret_type
), FIRST_RETVAL_REGNO
);
3502 /* Implement TARGET_LIBCALL_VALUE. */
3504 nios2_libcall_value (machine_mode mode
, const_rtx fun ATTRIBUTE_UNUSED
)
3506 return gen_rtx_REG (mode
, FIRST_RETVAL_REGNO
);
3509 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
3511 nios2_function_value_regno_p (const unsigned int regno
)
3513 return regno
== FIRST_RETVAL_REGNO
;
3516 /* Implement TARGET_RETURN_IN_MEMORY. */
3518 nios2_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
3520 return (int_size_in_bytes (type
) > (2 * UNITS_PER_WORD
)
3521 || int_size_in_bytes (type
) == -1);
3524 /* TODO: It may be possible to eliminate the copyback and implement
3527 nios2_setup_incoming_varargs (cumulative_args_t cum_v
,
3528 machine_mode mode
, tree type
,
3529 int *pretend_size
, int second_time
)
3531 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3532 CUMULATIVE_ARGS local_cum
;
3533 cumulative_args_t local_cum_v
= pack_cumulative_args (&local_cum
);
3537 cfun
->machine
->uses_anonymous_args
= 1;
3539 nios2_function_arg_advance (local_cum_v
, mode
, type
, true);
3541 regs_to_push
= NUM_ARG_REGS
- local_cum
.regs_used
;
3543 /* If we can use CDX stwm to push the arguments on the stack,
3544 nios2_expand_prologue will do that instead. */
3545 if (!TARGET_HAS_CDX
&& !second_time
&& regs_to_push
> 0)
3547 rtx ptr
= virtual_incoming_args_rtx
;
3548 rtx mem
= gen_rtx_MEM (BLKmode
, ptr
);
3549 emit_insn (gen_blockage ());
3550 move_block_from_reg (local_cum
.regs_used
+ FIRST_ARG_REGNO
, mem
,
3552 emit_insn (gen_blockage ());
3555 pret_size
= regs_to_push
* UNITS_PER_WORD
;
3557 *pretend_size
= pret_size
;
3562 /* Init FPU builtins. */
3564 nios2_init_fpu_builtins (int start_code
)
3567 char builtin_name
[64] = "__builtin_custom_";
3568 unsigned int i
, n
= strlen ("__builtin_custom_");
3570 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
3572 snprintf (builtin_name
+ n
, sizeof (builtin_name
) - n
,
3573 "%s", N2FPU_NAME (i
));
3575 add_builtin_function (builtin_name
, nios2_ftype (N2FPU_FTCODE (i
)),
3576 start_code
+ i
, BUILT_IN_MD
, NULL
, NULL_TREE
);
3577 nios2_register_builtin_fndecl (start_code
+ i
, fndecl
);
3581 /* Helper function for expanding FPU builtins. */
3583 nios2_expand_fpu_builtin (tree exp
, unsigned int code
, rtx target
)
3585 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3586 enum insn_code icode
= N2FPU_ICODE (code
);
3587 int nargs
, argno
, opno
= 0;
3588 int num_operands
= N2FPU (code
).num_operands
;
3589 machine_mode dst_mode
= TYPE_MODE (TREE_TYPE (exp
));
3590 bool has_target_p
= (dst_mode
!= VOIDmode
);
3592 if (N2FPU_N (code
) < 0)
3593 fatal_error (input_location
,
3594 "Cannot call %<__builtin_custom_%s%> without specifying switch"
3595 " %<-mcustom-%s%>", N2FPU_NAME (code
), N2FPU_NAME (code
));
3597 create_output_operand (&ops
[opno
++], target
, dst_mode
);
3599 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
3601 nargs
= call_expr_nargs (exp
);
3602 for (argno
= 0; argno
< nargs
; argno
++)
3604 tree arg
= CALL_EXPR_ARG (exp
, argno
);
3605 create_input_operand (&ops
[opno
++], expand_normal (arg
),
3606 TYPE_MODE (TREE_TYPE (arg
)));
3608 if (!maybe_expand_insn (icode
, num_operands
, ops
))
3610 error ("invalid argument to built-in function");
3611 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
3613 return has_target_p
? ops
[0].value
: const0_rtx
;
3616 /* Nios II has custom instruction built-in functions of the forms:
3619 __builtin_custom_nXX
3621 __builtin_custom_XnX
3622 __builtin_custom_XnXX
3624 where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
3625 Therefore with 0-1 return values, and 0-2 arguments, we have a
3626 total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
3628 #define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
3629 static char custom_builtin_name
[NUM_CUSTOM_BUILTINS
][5];
3632 nios2_init_custom_builtins (int start_code
)
3634 tree builtin_ftype
, ret_type
, fndecl
;
3635 char builtin_name
[32] = "__builtin_custom_";
3636 int n
= strlen ("__builtin_custom_");
3637 int builtin_code
= 0;
3638 int lhs
, rhs1
, rhs2
;
3640 struct { tree type
; const char *c
; } op
[4];
3641 /* z */ op
[0].c
= ""; op
[0].type
= NULL_TREE
;
3642 /* f */ op
[1].c
= "f"; op
[1].type
= float_type_node
;
3643 /* i */ op
[2].c
= "i"; op
[2].type
= integer_type_node
;
3644 /* p */ op
[3].c
= "p"; op
[3].type
= ptr_type_node
;
3646 /* We enumerate through the possible operand types to create all the
3647 __builtin_custom_XnXX function tree types. Note that these may slightly
3648 overlap with the function types created for other fixed builtins. */
3650 for (lhs
= 0; lhs
< 4; lhs
++)
3651 for (rhs1
= 0; rhs1
< 4; rhs1
++)
3652 for (rhs2
= 0; rhs2
< 4; rhs2
++)
3654 if (rhs1
== 0 && rhs2
!= 0)
3656 ret_type
= (op
[lhs
].type
? op
[lhs
].type
: void_type_node
);
3658 = build_function_type_list (ret_type
, integer_type_node
,
3659 op
[rhs1
].type
, op
[rhs2
].type
,
3661 snprintf (builtin_name
+ n
, 32 - n
, "%sn%s%s",
3662 op
[lhs
].c
, op
[rhs1
].c
, op
[rhs2
].c
);
3663 /* Save copy of parameter string into custom_builtin_name[]. */
3664 strncpy (custom_builtin_name
[builtin_code
], builtin_name
+ n
, 5);
3666 add_builtin_function (builtin_name
, builtin_ftype
,
3667 start_code
+ builtin_code
,
3668 BUILT_IN_MD
, NULL
, NULL_TREE
);
3669 nios2_register_builtin_fndecl (start_code
+ builtin_code
, fndecl
);
3674 /* Helper function for expanding custom builtins. */
3676 nios2_expand_custom_builtin (tree exp
, unsigned int index
, rtx target
)
3678 bool has_target_p
= (TREE_TYPE (exp
) != void_type_node
);
3679 machine_mode tmode
= VOIDmode
;
3681 rtx value
, insn
, unspec_args
[3];
3687 tmode
= TYPE_MODE (TREE_TYPE (exp
));
3688 if (!target
|| GET_MODE (target
) != tmode
3690 target
= gen_reg_rtx (tmode
);
3693 nargs
= call_expr_nargs (exp
);
3694 for (argno
= 0; argno
< nargs
; argno
++)
3696 arg
= CALL_EXPR_ARG (exp
, argno
);
3697 value
= expand_normal (arg
);
3698 unspec_args
[argno
] = value
;
3701 if (!custom_insn_opcode (value
, VOIDmode
))
3702 error ("custom instruction opcode must be compile time "
3703 "constant in the range 0-255 for %<__builtin_custom_%s%>",
3704 custom_builtin_name
[index
]);
3707 /* For other arguments, force into a register. */
3708 unspec_args
[argno
] = force_reg (TYPE_MODE (TREE_TYPE (arg
)),
3709 unspec_args
[argno
]);
3711 /* Fill remaining unspec operands with zero. */
3712 for (; argno
< 3; argno
++)
3713 unspec_args
[argno
] = const0_rtx
;
3715 insn
= (has_target_p
3716 ? gen_rtx_SET (target
,
3717 gen_rtx_UNSPEC_VOLATILE (tmode
,
3718 gen_rtvec_v (3, unspec_args
),
3719 UNSPECV_CUSTOM_XNXX
))
3720 : gen_rtx_UNSPEC_VOLATILE (VOIDmode
, gen_rtvec_v (3, unspec_args
),
3721 UNSPECV_CUSTOM_NXX
));
3723 return has_target_p
? target
: const0_rtx
;
3729 /* Main definition of built-in functions. Nios II has a small number of fixed
3730 builtins, plus a large number of FPU insn builtins, and builtins for
3731 generating custom instructions. */
3733 struct nios2_builtin_desc
3735 enum insn_code icode
;
3736 enum nios2_arch_type arch
;
3737 enum nios2_ftcode ftype
;
3741 #define N2_BUILTINS \
3742 N2_BUILTIN_DEF (sync, R1, N2_FTYPE_VOID_VOID) \
3743 N2_BUILTIN_DEF (ldbio, R1, N2_FTYPE_SI_CVPTR) \
3744 N2_BUILTIN_DEF (ldbuio, R1, N2_FTYPE_UI_CVPTR) \
3745 N2_BUILTIN_DEF (ldhio, R1, N2_FTYPE_SI_CVPTR) \
3746 N2_BUILTIN_DEF (ldhuio, R1, N2_FTYPE_UI_CVPTR) \
3747 N2_BUILTIN_DEF (ldwio, R1, N2_FTYPE_SI_CVPTR) \
3748 N2_BUILTIN_DEF (stbio, R1, N2_FTYPE_VOID_VPTR_SI) \
3749 N2_BUILTIN_DEF (sthio, R1, N2_FTYPE_VOID_VPTR_SI) \
3750 N2_BUILTIN_DEF (stwio, R1, N2_FTYPE_VOID_VPTR_SI) \
3751 N2_BUILTIN_DEF (rdctl, R1, N2_FTYPE_SI_SI) \
3752 N2_BUILTIN_DEF (wrctl, R1, N2_FTYPE_VOID_SI_SI) \
3753 N2_BUILTIN_DEF (rdprs, R1, N2_FTYPE_SI_SI_SI) \
3754 N2_BUILTIN_DEF (flushd, R1, N2_FTYPE_VOID_VPTR) \
3755 N2_BUILTIN_DEF (flushda, R1, N2_FTYPE_VOID_VPTR) \
3756 N2_BUILTIN_DEF (wrpie, R2, N2_FTYPE_SI_SI) \
3757 N2_BUILTIN_DEF (eni, R2, N2_FTYPE_VOID_SI) \
3758 N2_BUILTIN_DEF (ldex, R2, N2_FTYPE_SI_CVPTR) \
3759 N2_BUILTIN_DEF (ldsex, R2, N2_FTYPE_SI_CVPTR) \
3760 N2_BUILTIN_DEF (stex, R2, N2_FTYPE_SI_VPTR_SI) \
3761 N2_BUILTIN_DEF (stsex, R2, N2_FTYPE_SI_VPTR_SI)
3763 enum nios2_builtin_code
{
3764 #define N2_BUILTIN_DEF(name, arch, ftype) NIOS2_BUILTIN_ ## name,
3766 #undef N2_BUILTIN_DEF
3767 NUM_FIXED_NIOS2_BUILTINS
3770 static const struct nios2_builtin_desc nios2_builtins
[] = {
3771 #define N2_BUILTIN_DEF(name, arch, ftype) \
3772 { CODE_FOR_ ## name, ARCH_ ## arch, ftype, "__builtin_" #name },
3774 #undef N2_BUILTIN_DEF
3777 /* Start/ends of FPU/custom insn builtin index ranges. */
3778 static unsigned int nios2_fpu_builtin_base
;
3779 static unsigned int nios2_custom_builtin_base
;
3780 static unsigned int nios2_custom_builtin_end
;
3782 /* Implement TARGET_INIT_BUILTINS. */
3784 nios2_init_builtins (void)
3788 /* Initialize fixed builtins. */
3789 for (i
= 0; i
< ARRAY_SIZE (nios2_builtins
); i
++)
3791 const struct nios2_builtin_desc
*d
= &nios2_builtins
[i
];
3793 add_builtin_function (d
->name
, nios2_ftype (d
->ftype
), i
,
3794 BUILT_IN_MD
, NULL
, NULL
);
3795 nios2_register_builtin_fndecl (i
, fndecl
);
3798 /* Initialize FPU builtins. */
3799 nios2_fpu_builtin_base
= ARRAY_SIZE (nios2_builtins
);
3800 nios2_init_fpu_builtins (nios2_fpu_builtin_base
);
3802 /* Initialize custom insn builtins. */
3803 nios2_custom_builtin_base
3804 = nios2_fpu_builtin_base
+ ARRAY_SIZE (nios2_fpu_insn
);
3805 nios2_custom_builtin_end
3806 = nios2_custom_builtin_base
+ NUM_CUSTOM_BUILTINS
;
3807 nios2_init_custom_builtins (nios2_custom_builtin_base
);
3810 /* Array of fndecls for TARGET_BUILTIN_DECL. */
3811 #define NIOS2_NUM_BUILTINS \
3812 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
3813 static GTY(()) tree nios2_builtin_decls
[NIOS2_NUM_BUILTINS
];
3816 nios2_register_builtin_fndecl (unsigned code
, tree fndecl
)
3818 nios2_builtin_decls
[code
] = fndecl
;
3821 /* Implement TARGET_BUILTIN_DECL. */
3823 nios2_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
3825 gcc_assert (nios2_custom_builtin_end
== ARRAY_SIZE (nios2_builtin_decls
));
3827 if (code
>= nios2_custom_builtin_end
)
3828 return error_mark_node
;
3830 if (code
>= nios2_fpu_builtin_base
3831 && code
< nios2_custom_builtin_base
3832 && ! N2FPU_ENABLED_P (code
- nios2_fpu_builtin_base
))
3833 return error_mark_node
;
3835 return nios2_builtin_decls
[code
];
3839 /* Low-level built-in expand routine. */
3841 nios2_expand_builtin_insn (const struct nios2_builtin_desc
*d
, int n
,
3842 struct expand_operand
*ops
, bool has_target_p
)
3844 if (maybe_expand_insn (d
->icode
, n
, ops
))
3845 return has_target_p
? ops
[0].value
: const0_rtx
;
3848 error ("invalid argument to built-in function %s", d
->name
);
3849 return has_target_p
? gen_reg_rtx (ops
[0].mode
) : const0_rtx
;
3853 /* Expand ldio/stio and ldex/ldsex/stex/stsex form load-store
3854 instruction builtins. */
3856 nios2_expand_ldst_builtin (tree exp
, rtx target
,
3857 const struct nios2_builtin_desc
*d
)
3861 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3862 machine_mode mode
= insn_data
[d
->icode
].operand
[0].mode
;
3864 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3865 mem
= gen_rtx_MEM (mode
, addr
);
3867 if (insn_data
[d
->icode
].operand
[0].allows_mem
)
3869 /* stxio/stex/stsex. */
3870 val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
3871 if (CONST_INT_P (val
))
3872 val
= force_reg (mode
, gen_int_mode (INTVAL (val
), mode
));
3873 val
= simplify_gen_subreg (mode
, val
, GET_MODE (val
), 0);
3874 create_output_operand (&ops
[0], mem
, mode
);
3875 create_input_operand (&ops
[1], val
, mode
);
3876 if (insn_data
[d
->icode
].n_operands
== 3)
3878 /* stex/stsex status value, returned as result of function. */
3879 create_output_operand (&ops
[2], target
, mode
);
3880 has_target_p
= true;
3883 has_target_p
= false;
3888 create_output_operand (&ops
[0], target
, mode
);
3889 create_input_operand (&ops
[1], mem
, mode
);
3890 has_target_p
= true;
3892 return nios2_expand_builtin_insn (d
, insn_data
[d
->icode
].n_operands
, ops
,
3896 /* Expand rdctl/wrctl builtins. */
3898 nios2_expand_rdwrctl_builtin (tree exp
, rtx target
,
3899 const struct nios2_builtin_desc
*d
)
3901 bool has_target_p
= (insn_data
[d
->icode
].operand
[0].predicate
3902 == register_operand
);
3903 rtx ctlcode
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3904 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3905 if (!rdwrctl_operand (ctlcode
, VOIDmode
))
3907 error ("Control register number must be in range 0-31 for %s",
3909 return has_target_p
? gen_reg_rtx (SImode
) : const0_rtx
;
3913 create_output_operand (&ops
[0], target
, SImode
);
3914 create_integer_operand (&ops
[1], INTVAL (ctlcode
));
3918 rtx val
= expand_normal (CALL_EXPR_ARG (exp
, 1));
3919 create_integer_operand (&ops
[0], INTVAL (ctlcode
));
3920 create_input_operand (&ops
[1], val
, SImode
);
3922 return nios2_expand_builtin_insn (d
, 2, ops
, has_target_p
);
3926 nios2_expand_rdprs_builtin (tree exp
, rtx target
,
3927 const struct nios2_builtin_desc
*d
)
3929 rtx reg
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3930 rtx imm
= expand_normal (CALL_EXPR_ARG (exp
, 1));
3931 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3933 if (!rdwrctl_operand (reg
, VOIDmode
))
3935 error ("Register number must be in range 0-31 for %s",
3937 return gen_reg_rtx (SImode
);
3940 if (!rdprs_dcache_operand (imm
, VOIDmode
))
3942 error ("The immediate value must fit into a %d-bit integer for %s",
3943 (TARGET_ARCH_R2
) ? 12 : 16, d
->name
);
3944 return gen_reg_rtx (SImode
);
3947 create_output_operand (&ops
[0], target
, SImode
);
3948 create_input_operand (&ops
[1], reg
, SImode
);
3949 create_integer_operand (&ops
[2], INTVAL (imm
));
3951 return nios2_expand_builtin_insn (d
, 3, ops
, true);
3955 nios2_expand_cache_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
3956 const struct nios2_builtin_desc
*d
)
3959 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3961 addr
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3962 mem
= gen_rtx_MEM (SImode
, addr
);
3964 create_input_operand (&ops
[0], mem
, SImode
);
3966 return nios2_expand_builtin_insn (d
, 1, ops
, false);
3970 nios2_expand_wrpie_builtin (tree exp
, rtx target
,
3971 const struct nios2_builtin_desc
*d
)
3974 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3976 val
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3977 create_input_operand (&ops
[1], val
, SImode
);
3978 create_output_operand (&ops
[0], target
, SImode
);
3980 return nios2_expand_builtin_insn (d
, 2, ops
, true);
3984 nios2_expand_eni_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
3985 const struct nios2_builtin_desc
*d
)
3987 rtx imm
= expand_normal (CALL_EXPR_ARG (exp
, 0));
3988 struct expand_operand ops
[MAX_RECOG_OPERANDS
];
3990 if (INTVAL (imm
) != 0 && INTVAL (imm
) != 1)
3992 error ("The ENI instruction operand must be either 0 or 1");
3995 create_integer_operand (&ops
[0], INTVAL (imm
));
3997 return nios2_expand_builtin_insn (d
, 1, ops
, false);
4000 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
4001 a built-in function, with result going to TARGET if that's convenient
4002 (and in mode MODE if that's convenient).
4003 SUBTARGET may be used as the target for computing one of EXP's operands.
4004 IGNORE is nonzero if the value is to be ignored. */
4007 nios2_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
4008 machine_mode mode ATTRIBUTE_UNUSED
,
4009 int ignore ATTRIBUTE_UNUSED
)
4011 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
4012 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4014 if (fcode
< nios2_fpu_builtin_base
)
4016 const struct nios2_builtin_desc
*d
= &nios2_builtins
[fcode
];
4018 if (d
->arch
> nios2_arch_option
)
4020 error ("Builtin function %s requires Nios II R%d",
4021 d
->name
, (int) d
->arch
);
4022 /* Given it is invalid, just generate a normal call. */
4023 return expand_call (exp
, target
, ignore
);
4028 case NIOS2_BUILTIN_sync
:
4029 emit_insn (gen_sync ());
4032 case NIOS2_BUILTIN_ldbio
:
4033 case NIOS2_BUILTIN_ldbuio
:
4034 case NIOS2_BUILTIN_ldhio
:
4035 case NIOS2_BUILTIN_ldhuio
:
4036 case NIOS2_BUILTIN_ldwio
:
4037 case NIOS2_BUILTIN_stbio
:
4038 case NIOS2_BUILTIN_sthio
:
4039 case NIOS2_BUILTIN_stwio
:
4040 case NIOS2_BUILTIN_ldex
:
4041 case NIOS2_BUILTIN_ldsex
:
4042 case NIOS2_BUILTIN_stex
:
4043 case NIOS2_BUILTIN_stsex
:
4044 return nios2_expand_ldst_builtin (exp
, target
, d
);
4046 case NIOS2_BUILTIN_rdctl
:
4047 case NIOS2_BUILTIN_wrctl
:
4048 return nios2_expand_rdwrctl_builtin (exp
, target
, d
);
4050 case NIOS2_BUILTIN_rdprs
:
4051 return nios2_expand_rdprs_builtin (exp
, target
, d
);
4053 case NIOS2_BUILTIN_flushd
:
4054 case NIOS2_BUILTIN_flushda
:
4055 return nios2_expand_cache_builtin (exp
, target
, d
);
4057 case NIOS2_BUILTIN_wrpie
:
4058 return nios2_expand_wrpie_builtin (exp
, target
, d
);
4060 case NIOS2_BUILTIN_eni
:
4061 return nios2_expand_eni_builtin (exp
, target
, d
);
4067 else if (fcode
< nios2_custom_builtin_base
)
4068 /* FPU builtin range. */
4069 return nios2_expand_fpu_builtin (exp
, fcode
- nios2_fpu_builtin_base
,
4071 else if (fcode
< nios2_custom_builtin_end
)
4072 /* Custom insn builtin range. */
4073 return nios2_expand_custom_builtin (exp
, fcode
- nios2_custom_builtin_base
,
4079 /* Implement TARGET_INIT_LIBFUNCS. */
4080 static void ATTRIBUTE_UNUSED
4081 nios2_init_libfuncs (void)
4083 init_sync_libfuncs (UNITS_PER_WORD
);
4088 /* Register a custom code use, and signal error if a conflict was found. */
4090 nios2_register_custom_code (unsigned int N
, enum nios2_ccs_code status
,
4093 gcc_assert (N
<= 255);
4095 if (status
== CCS_FPU
)
4097 if (custom_code_status
[N
] == CCS_FPU
&& index
!= custom_code_index
[N
])
4099 custom_code_conflict
= true;
4100 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
4101 N2FPU_NAME (custom_code_index
[N
]), N2FPU_NAME (index
));
4103 else if (custom_code_status
[N
] == CCS_BUILTIN_CALL
)
4105 custom_code_conflict
= true;
4106 error ("call to %<__builtin_custom_%s%> conflicts with switch "
4107 "%<-mcustom-%s%>", custom_builtin_name
[custom_code_index
[N
]],
4108 N2FPU_NAME (index
));
4111 else if (status
== CCS_BUILTIN_CALL
)
4113 if (custom_code_status
[N
] == CCS_FPU
)
4115 custom_code_conflict
= true;
4116 error ("call to %<__builtin_custom_%s%> conflicts with switch "
4117 "%<-mcustom-%s%>", custom_builtin_name
[index
],
4118 N2FPU_NAME (custom_code_index
[N
]));
4122 /* Note that code conflicts between different __builtin_custom_xnxx
4123 calls are not checked. */
4129 custom_code_status
[N
] = status
;
4130 custom_code_index
[N
] = index
;
4133 /* Mark a custom code as not in use. */
4135 nios2_deregister_custom_code (unsigned int N
)
4139 custom_code_status
[N
] = CCS_UNUSED
;
4140 custom_code_index
[N
] = 0;
4144 /* Target attributes can affect per-function option state, so we need to
4145 save/restore the custom code tracking info using the
4146 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
4149 nios2_option_save (struct cl_target_option
*ptr
,
4150 struct gcc_options
*opts ATTRIBUTE_UNUSED
)
4153 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
4154 ptr
->saved_fpu_custom_code
[i
] = N2FPU_N (i
);
4155 memcpy (ptr
->saved_custom_code_status
, custom_code_status
,
4156 sizeof (custom_code_status
));
4157 memcpy (ptr
->saved_custom_code_index
, custom_code_index
,
4158 sizeof (custom_code_index
));
4162 nios2_option_restore (struct gcc_options
*opts ATTRIBUTE_UNUSED
,
4163 struct cl_target_option
*ptr
)
4166 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
4167 N2FPU_N (i
) = ptr
->saved_fpu_custom_code
[i
];
4168 memcpy (custom_code_status
, ptr
->saved_custom_code_status
,
4169 sizeof (custom_code_status
));
4170 memcpy (custom_code_index
, ptr
->saved_custom_code_index
,
4171 sizeof (custom_code_index
));
4174 /* Inner function to process the attribute((target(...))), take an argument and
4175 set the current options from the argument. If we have a list, recursively
4176 go over the list. */
4179 nios2_valid_target_attribute_rec (tree args
)
4181 if (TREE_CODE (args
) == TREE_LIST
)
4184 for (; args
; args
= TREE_CHAIN (args
))
4185 if (TREE_VALUE (args
)
4186 && !nios2_valid_target_attribute_rec (TREE_VALUE (args
)))
4190 else if (TREE_CODE (args
) == STRING_CST
)
4192 char *argstr
= ASTRDUP (TREE_STRING_POINTER (args
));
4193 while (argstr
&& *argstr
!= '\0')
4195 bool no_opt
= false, end_p
= false;
4196 char *eq
= NULL
, *p
;
4197 while (ISSPACE (*argstr
))
4200 while (*p
!= '\0' && *p
!= ',')
4202 if (!eq
&& *p
== '=')
4212 if (!strncmp (argstr
, "no-", 3))
4217 if (!strncmp (argstr
, "custom-fpu-cfg", 14))
4222 error ("custom-fpu-cfg option does not support %<no-%>");
4227 error ("custom-fpu-cfg option requires configuration"
4231 /* Increment and skip whitespace. */
4232 while (ISSPACE (*(++eq
))) ;
4233 /* Decrement and skip to before any trailing whitespace. */
4234 while (ISSPACE (*(--end_eq
))) ;
4236 nios2_handle_custom_fpu_cfg (eq
, end_eq
+ 1, true);
4238 else if (!strncmp (argstr
, "custom-", 7))
4242 for (i
= 0; i
< ARRAY_SIZE (nios2_fpu_insn
); i
++)
4243 if (!strncmp (argstr
+ 7, N2FPU_NAME (i
),
4244 strlen (N2FPU_NAME (i
))))
4256 error ("%<no-custom-%s%> does not accept arguments",
4260 /* Disable option by setting to -1. */
4261 nios2_deregister_custom_code (N2FPU_N (code
));
4262 N2FPU_N (code
) = -1;
4268 while (ISSPACE (*(++eq
))) ;
4271 error ("%<custom-%s=%> requires argument",
4275 for (t
= eq
; t
!= p
; ++t
)
4281 error ("%<custom-%s=%> argument should be "
4282 "a non-negative integer", N2FPU_NAME (code
));
4286 /* Set option to argument. */
4287 N2FPU_N (code
) = atoi (eq
);
4288 nios2_handle_custom_fpu_insn_option (code
);
4293 error ("%<custom-%s=%> is not recognized as FPU instruction",
4300 error ("%<%s%> is unknown", argstr
);
4315 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
4318 nios2_valid_target_attribute_tree (tree args
)
4320 if (!nios2_valid_target_attribute_rec (args
))
4322 nios2_custom_check_insns ();
4323 return build_target_option_node (&global_options
);
4326 /* Hook to validate attribute((target("string"))). */
4329 nios2_valid_target_attribute_p (tree fndecl
, tree
ARG_UNUSED (name
),
4330 tree args
, int ARG_UNUSED (flags
))
4332 struct cl_target_option cur_target
;
4334 tree old_optimize
= build_optimization_node (&global_options
);
4335 tree new_target
, new_optimize
;
4336 tree func_optimize
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
);
4338 /* If the function changed the optimization levels as well as setting target
4339 options, start with the optimizations specified. */
4340 if (func_optimize
&& func_optimize
!= old_optimize
)
4341 cl_optimization_restore (&global_options
,
4342 TREE_OPTIMIZATION (func_optimize
));
4344 /* The target attributes may also change some optimization flags, so update
4345 the optimization options if necessary. */
4346 cl_target_option_save (&cur_target
, &global_options
);
4347 new_target
= nios2_valid_target_attribute_tree (args
);
4348 new_optimize
= build_optimization_node (&global_options
);
4355 DECL_FUNCTION_SPECIFIC_TARGET (fndecl
) = new_target
;
4357 if (old_optimize
!= new_optimize
)
4358 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl
) = new_optimize
;
4361 cl_target_option_restore (&global_options
, &cur_target
);
4363 if (old_optimize
!= new_optimize
)
4364 cl_optimization_restore (&global_options
,
4365 TREE_OPTIMIZATION (old_optimize
));
4369 /* Remember the last target of nios2_set_current_function. */
4370 static GTY(()) tree nios2_previous_fndecl
;
4372 /* Establish appropriate back-end context for processing the function
4373 FNDECL. The argument might be NULL to indicate processing at top
4374 level, outside of any function scope. */
4376 nios2_set_current_function (tree fndecl
)
4378 tree old_tree
= (nios2_previous_fndecl
4379 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl
)
4382 tree new_tree
= (fndecl
4383 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl
)
4386 if (fndecl
&& fndecl
!= nios2_previous_fndecl
)
4388 nios2_previous_fndecl
= fndecl
;
4389 if (old_tree
== new_tree
)
4394 cl_target_option_restore (&global_options
,
4395 TREE_TARGET_OPTION (new_tree
));
4401 struct cl_target_option
*def
4402 = TREE_TARGET_OPTION (target_option_current_node
);
4404 cl_target_option_restore (&global_options
, def
);
4410 /* Hook to validate the current #pragma GCC target and set the FPU custom
4411 code option state. If ARGS is NULL, then POP_TARGET is used to reset
4414 nios2_pragma_target_parse (tree args
, tree pop_target
)
4419 cur_tree
= ((pop_target
)
4421 : target_option_default_node
);
4422 cl_target_option_restore (&global_options
,
4423 TREE_TARGET_OPTION (cur_tree
));
4427 cur_tree
= nios2_valid_target_attribute_tree (args
);
4432 target_option_current_node
= cur_tree
;
4436 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
4437 We are just using this hook to add some additional error checking to
4438 the default behavior. GCC does not provide a target hook for merging
4439 the target options, and only correctly handles merging empty vs non-empty
4440 option data; see merge_decls() in c-decl.c.
4441 So here we require either that at least one of the decls has empty
4442 target options, or that the target options/data be identical. */
4444 nios2_merge_decl_attributes (tree olddecl
, tree newdecl
)
4446 tree oldopts
= lookup_attribute ("target", DECL_ATTRIBUTES (olddecl
));
4447 tree newopts
= lookup_attribute ("target", DECL_ATTRIBUTES (newdecl
));
4448 if (newopts
&& oldopts
&& newopts
!= oldopts
)
4450 tree oldtree
= DECL_FUNCTION_SPECIFIC_TARGET (olddecl
);
4451 tree newtree
= DECL_FUNCTION_SPECIFIC_TARGET (newdecl
);
4452 if (oldtree
&& newtree
&& oldtree
!= newtree
)
4454 struct cl_target_option
*olddata
= TREE_TARGET_OPTION (oldtree
);
4455 struct cl_target_option
*newdata
= TREE_TARGET_OPTION (newtree
);
4456 if (olddata
!= newdata
4457 && memcmp (olddata
, newdata
, sizeof (struct cl_target_option
)))
4458 error ("%qE redeclared with conflicting %qs attributes",
4459 DECL_NAME (newdecl
), "target");
4462 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
4463 DECL_ATTRIBUTES (newdecl
));
4466 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
4468 nios2_asm_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
4469 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
4472 const char *fnname
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl
));
4473 rtx this_rtx
, funexp
;
4476 /* Pretend to be a post-reload pass while generating rtl. */
4477 reload_completed
= 1;
4480 nios2_load_pic_register ();
4482 /* Mark the end of the (empty) prologue. */
4483 emit_note (NOTE_INSN_PROLOGUE_END
);
4485 /* Find the "this" pointer. If the function returns a structure,
4486 the structure return pointer is in $5. */
4487 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
4488 this_rtx
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
+ 1);
4490 this_rtx
= gen_rtx_REG (Pmode
, FIRST_ARG_REGNO
);
4492 /* Add DELTA to THIS_RTX. */
4493 nios2_emit_add_constant (this_rtx
, delta
);
4495 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
4500 tmp
= gen_rtx_REG (Pmode
, 2);
4501 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this_rtx
));
4502 nios2_emit_add_constant (tmp
, vcall_offset
);
4503 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
4504 emit_insn (gen_add2_insn (this_rtx
, tmp
));
4507 /* Generate a tail call to the target function. */
4508 if (!TREE_USED (function
))
4510 assemble_external (function
);
4511 TREE_USED (function
) = 1;
4513 funexp
= XEXP (DECL_RTL (function
), 0);
4514 /* Function address needs to be constructed under PIC,
4515 provide r2 to use here. */
4516 nios2_adjust_call_address (&funexp
, gen_rtx_REG (Pmode
, 2));
4517 insn
= emit_call_insn (gen_sibcall_internal (funexp
, const0_rtx
));
4518 SIBLING_CALL_P (insn
) = 1;
4520 /* Run just enough of rest_of_compilation to get the insns emitted.
4521 There's not really enough bulk here to make other passes such as
4522 instruction scheduling worth while. Note that use_thunk calls
4523 assemble_start_function and assemble_end_function. */
4524 insn
= get_insns ();
4525 shorten_branches (insn
);
4526 assemble_start_function (thunk_fndecl
, fnname
);
4527 final_start_function (insn
, file
, 1);
4528 final (insn
, file
, 1);
4529 final_end_function ();
4530 assemble_end_function (thunk_fndecl
, fnname
);
4532 /* Stop pretending to be a post-reload pass. */
4533 reload_completed
= 0;
4537 /* Utility function to break a memory address into
4538 base register + constant offset. Return false if something
4539 unexpected is seen. */
4541 split_mem_address (rtx addr
, rtx
*base_reg
, rtx
*offset
)
4546 *offset
= const0_rtx
;
4549 else if (GET_CODE (addr
) == PLUS
)
4551 *base_reg
= XEXP (addr
, 0);
4552 *offset
= XEXP (addr
, 1);
4558 /* Splits out the operands of an ALU insn, places them in *LHS, *RHS1, *RHS2. */
4560 split_alu_insn (rtx_insn
*insn
, rtx
*lhs
, rtx
*rhs1
, rtx
*rhs2
)
4562 rtx pat
= PATTERN (insn
);
4563 gcc_assert (GET_CODE (pat
) == SET
);
4564 *lhs
= SET_DEST (pat
);
4565 *rhs1
= XEXP (SET_SRC (pat
), 0);
4566 if (GET_RTX_CLASS (GET_CODE (SET_SRC (pat
))) != RTX_UNARY
)
4567 *rhs2
= XEXP (SET_SRC (pat
), 1);
4571 /* Returns true if OP is a REG and assigned a CDX reg. */
4575 return REG_P (op
) && (!reload_completed
|| CDX_REG_P (REGNO (op
)));
4578 /* Returns true if OP is within range of CDX addi.n immediates. */
4580 cdx_add_immed (rtx op
)
4582 if (CONST_INT_P (op
))
4584 HOST_WIDE_INT ival
= INTVAL (op
);
4585 return ival
<= 128 && ival
> 0 && (ival
& (ival
- 1)) == 0;
4590 /* Returns true if OP is within range of CDX andi.n immediates. */
4592 cdx_and_immed (rtx op
)
4594 if (CONST_INT_P (op
))
4596 HOST_WIDE_INT ival
= INTVAL (op
);
4597 return (ival
== 1 || ival
== 2 || ival
== 3 || ival
== 4
4598 || ival
== 8 || ival
== 0xf || ival
== 0x10
4599 || ival
== 0x1f || ival
== 0x20
4600 || ival
== 0x3f || ival
== 0x7f
4601 || ival
== 0x80 || ival
== 0xff || ival
== 0x7ff
4602 || ival
== 0xff00 || ival
== 0xffff);
4607 /* Returns true if OP is within range of CDX movi.n immediates. */
4609 cdx_mov_immed (rtx op
)
4611 if (CONST_INT_P (op
))
4613 HOST_WIDE_INT ival
= INTVAL (op
);
4614 return ((ival
>= 0 && ival
<= 124)
4615 || ival
== 0xff || ival
== -2 || ival
== -1);
4620 /* Returns true if OP is within range of CDX slli.n/srli.n immediates. */
4622 cdx_shift_immed (rtx op
)
4624 if (CONST_INT_P (op
))
4626 HOST_WIDE_INT ival
= INTVAL (op
);
4627 return (ival
== 1 || ival
== 2 || ival
== 3 || ival
== 8
4628 || ival
== 12 || ival
== 16 || ival
== 24
4636 /* Classification of different kinds of add instructions. */
4637 enum nios2_add_insn_kind
{
4641 nios2_spaddi_n_kind
,
4642 nios2_spinci_n_kind
,
4643 nios2_spdeci_n_kind
,
4648 static const char *nios2_add_insn_names
[] = {
4649 "add.n", "addi.n", "subi.n", "spaddi.n", "spinci.n", "spdeci.n",
4651 static bool nios2_add_insn_narrow
[] = {
4652 true, true, true, true, true, true,
4655 /* Function to classify kinds of add instruction patterns. */
4656 static enum nios2_add_insn_kind
4657 nios2_add_insn_classify (rtx_insn
*insn ATTRIBUTE_UNUSED
,
4658 rtx lhs
, rtx rhs1
, rtx rhs2
)
4662 if (cdxreg (lhs
) && cdxreg (rhs1
))
4665 return nios2_add_n_kind
;
4666 if (CONST_INT_P (rhs2
))
4668 HOST_WIDE_INT ival
= INTVAL (rhs2
);
4669 if (ival
> 0 && cdx_add_immed (rhs2
))
4670 return nios2_addi_n_kind
;
4671 if (ival
< 0 && cdx_add_immed (GEN_INT (-ival
)))
4672 return nios2_subi_n_kind
;
4675 else if (rhs1
== stack_pointer_rtx
4676 && CONST_INT_P (rhs2
))
4678 HOST_WIDE_INT imm7
= INTVAL (rhs2
) >> 2;
4679 HOST_WIDE_INT rem
= INTVAL (rhs2
) & 3;
4680 if (rem
== 0 && (imm7
& ~0x7f) == 0)
4683 return nios2_spaddi_n_kind
;
4684 if (lhs
== stack_pointer_rtx
)
4685 return nios2_spinci_n_kind
;
4687 imm7
= -INTVAL(rhs2
) >> 2;
4688 rem
= -INTVAL (rhs2
) & 3;
4689 if (lhs
== stack_pointer_rtx
4690 && rem
== 0 && (imm7
& ~0x7f) == 0)
4691 return nios2_spdeci_n_kind
;
4694 return ((REG_P (rhs2
) || rhs2
== const0_rtx
)
4695 ? nios2_add_kind
: nios2_addi_kind
);
4698 /* Emit assembly language for the different kinds of add instructions. */
4700 nios2_add_insn_asm (rtx_insn
*insn
, rtx
*operands
)
4702 static char buf
[256];
4704 enum nios2_add_insn_kind kind
4705 = nios2_add_insn_classify (insn
, operands
[0], operands
[1], operands
[2]);
4706 if (kind
== nios2_subi_n_kind
)
4707 snprintf (buf
, ln
, "subi.n\t%%0, %%1, %d", (int) -INTVAL (operands
[2]));
4708 else if (kind
== nios2_spaddi_n_kind
)
4709 snprintf (buf
, ln
, "spaddi.n\t%%0, %%2");
4710 else if (kind
== nios2_spinci_n_kind
)
4711 snprintf (buf
, ln
, "spinci.n\t%%2");
4712 else if (kind
== nios2_spdeci_n_kind
)
4713 snprintf (buf
, ln
, "spdeci.n\t%d", (int) -INTVAL (operands
[2]));
4715 snprintf (buf
, ln
, "%s\t%%0, %%1, %%z2", nios2_add_insn_names
[(int)kind
]);
4719 /* This routine, which the default "length" attribute computation is
4720 based on, encapsulates information about all the cases where CDX
4721 provides a narrow 2-byte instruction form. */
4723 nios2_cdx_narrow_form_p (rtx_insn
*insn
)
4725 rtx pat
, lhs
, rhs1
, rhs2
;
4726 enum attr_type type
;
4727 if (!TARGET_HAS_CDX
)
4729 type
= get_attr_type (insn
);
4730 pat
= PATTERN (insn
);
4731 gcc_assert (reload_completed
);
4735 if (GET_CODE (pat
) == SIMPLE_RETURN
)
4737 if (GET_CODE (pat
) == PARALLEL
)
4738 pat
= XVECEXP (pat
, 0, 0);
4739 if (GET_CODE (pat
) == SET
)
4740 pat
= SET_SRC (pat
);
4741 if (GET_CODE (pat
) == IF_THEN_ELSE
)
4743 /* Conditional branch patterns; for these we
4744 only check the comparison to find beqz.n/bnez.n cases.
4745 For the 'nios2_cbranch' pattern, we cannot also check
4746 the branch range here. That will be done at the md
4747 pattern "length" attribute computation. */
4748 rtx cmp
= XEXP (pat
, 0);
4749 return ((GET_CODE (cmp
) == EQ
|| GET_CODE (cmp
) == NE
)
4750 && cdxreg (XEXP (cmp
, 0))
4751 && XEXP (cmp
, 1) == const0_rtx
);
4753 if (GET_CODE (pat
) == TRAP_IF
)
4754 /* trap.n is always usable. */
4756 if (GET_CODE (pat
) == CALL
)
4757 pat
= XEXP (XEXP (pat
, 0), 0);
4759 /* Control instructions taking a register operand are indirect
4760 jumps and calls. The CDX instructions have a 5-bit register
4761 field so any reg is valid. */
4765 gcc_assert (!insn_variable_length_p (insn
));
4770 enum nios2_add_insn_kind kind
;
4771 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4772 kind
= nios2_add_insn_classify (insn
, lhs
, rhs1
, rhs2
);
4773 return nios2_add_insn_narrow
[(int)kind
];
4778 HOST_WIDE_INT offset
, rem
= 0;
4779 rtx addr
, reg
= SET_DEST (pat
), mem
= SET_SRC (pat
);
4780 if (GET_CODE (mem
) == SIGN_EXTEND
)
4781 /* No CDX form for sign-extended load. */
4783 if (GET_CODE (mem
) == ZERO_EXTEND
)
4784 /* The load alternatives in the zero_extend* patterns. */
4785 mem
= XEXP (mem
, 0);
4789 if ((MEM_VOLATILE_P (mem
) && TARGET_BYPASS_CACHE_VOLATILE
)
4790 || TARGET_BYPASS_CACHE
)
4792 addr
= XEXP (mem
, 0);
4793 /* GP-based and R0-based references are never narrow. */
4794 if (gprel_constant_p (addr
) || r0rel_constant_p (addr
))
4796 /* %lo requires a 16-bit relocation and is never narrow. */
4797 if (GET_CODE (addr
) == LO_SUM
)
4799 ret
= split_mem_address (addr
, &rhs1
, &rhs2
);
4805 offset
= INTVAL (rhs2
);
4806 if (GET_MODE (mem
) == SImode
)
4811 if (rtx_equal_p (rhs1
, stack_pointer_rtx
)
4812 && rem
== 0 && (offset
& ~0x1f) == 0)
4815 else if (GET_MODE (mem
) == HImode
)
4820 /* ldbu.n, ldhu.n, ldw.n cases. */
4821 return (cdxreg (reg
) && cdxreg (rhs1
)
4822 && rem
== 0 && (offset
& ~0xf) == 0);
4825 if (GET_CODE (pat
) == PARALLEL
)
4831 HOST_WIDE_INT offset
, rem
= 0;
4832 rtx addr
, reg
= SET_SRC (pat
), mem
= SET_DEST (pat
);
4836 if ((MEM_VOLATILE_P (mem
) && TARGET_BYPASS_CACHE_VOLATILE
)
4837 || TARGET_BYPASS_CACHE
)
4839 addr
= XEXP (mem
, 0);
4840 /* GP-based and r0-based references are never narrow. */
4841 if (gprel_constant_p (addr
) || r0rel_constant_p (addr
))
4843 /* %lo requires a 16-bit relocation and is never narrow. */
4844 if (GET_CODE (addr
) == LO_SUM
)
4846 ret
= split_mem_address (addr
, &rhs1
, &rhs2
);
4848 offset
= INTVAL (rhs2
);
4849 if (GET_MODE (mem
) == SImode
)
4854 if (rtx_equal_p (rhs1
, stack_pointer_rtx
)
4855 && rem
== 0 && (offset
& ~0x1f) == 0)
4858 else if (reg
== const0_rtx
&& cdxreg (rhs1
)
4859 && rem
== 0 && (offset
& ~0x3f) == 0)
4862 else if (GET_MODE (mem
) == HImode
)
4869 gcc_assert (GET_MODE (mem
) == QImode
);
4871 if (reg
== const0_rtx
&& cdxreg (rhs1
)
4872 && (offset
& ~0x3f) == 0)
4876 /* stbu.n, sthu.n, stw.n cases. */
4877 return (cdxreg (reg
) && cdxreg (rhs1
)
4878 && rem
== 0 && (offset
& ~0xf) == 0);
4881 lhs
= SET_DEST (pat
);
4882 rhs1
= SET_SRC (pat
);
4883 if (CONST_INT_P (rhs1
))
4884 return (cdxreg (lhs
) && cdx_mov_immed (rhs1
));
4885 gcc_assert (REG_P (lhs
) && REG_P (rhs1
));
4889 /* Some zero_extend* alternatives are and insns. */
4890 if (GET_CODE (SET_SRC (pat
)) == ZERO_EXTEND
)
4891 return (cdxreg (SET_DEST (pat
))
4892 && cdxreg (XEXP (SET_SRC (pat
), 0)));
4893 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4894 if (CONST_INT_P (rhs2
))
4895 return (cdxreg (lhs
) && cdxreg (rhs1
) && cdx_and_immed (rhs2
));
4896 return (cdxreg (lhs
) && cdxreg (rhs2
)
4897 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)));
4901 /* Note the two-address limitation for CDX form. */
4902 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4903 return (cdxreg (lhs
) && cdxreg (rhs2
)
4904 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)));
4907 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4908 return (cdxreg (lhs
) && cdxreg (rhs1
) && cdxreg (rhs2
));
4912 split_alu_insn (insn
, &lhs
, &rhs1
, NULL
);
4913 return (cdxreg (lhs
) && cdxreg (rhs1
));
4917 split_alu_insn (insn
, &lhs
, &rhs1
, &rhs2
);
4918 return (cdxreg (lhs
)
4919 && ((cdxreg (rhs1
) && cdx_shift_immed (rhs2
))
4921 && (!reload_completed
|| rtx_equal_p (lhs
, rhs1
)))));
4932 /* Main function to implement the pop_operation predicate that
4933 check pop.n insn pattern integrity. The CDX pop.n patterns mostly
4934 hardcode the restored registers, so the main checking is for the
4937 pop_operation_p (rtx op
)
4940 HOST_WIDE_INT last_offset
= -1, len
= XVECLEN (op
, 0);
4941 rtx base_reg
, offset
;
4943 if (len
< 3 /* At least has a return, SP-update, and RA restore. */
4944 || GET_CODE (XVECEXP (op
, 0, 0)) != RETURN
4945 || !base_reg_adjustment_p (XVECEXP (op
, 0, 1), &base_reg
, &offset
)
4946 || !rtx_equal_p (base_reg
, stack_pointer_rtx
)
4947 || !CONST_INT_P (offset
)
4948 || (INTVAL (offset
) & 3) != 0)
4951 for (i
= len
- 1; i
> 1; i
--)
4953 rtx set
= XVECEXP (op
, 0, i
);
4954 rtx curr_base_reg
, curr_offset
;
4956 if (GET_CODE (set
) != SET
|| !MEM_P (SET_SRC (set
))
4957 || !split_mem_address (XEXP (SET_SRC (set
), 0),
4958 &curr_base_reg
, &curr_offset
)
4959 || !rtx_equal_p (base_reg
, curr_base_reg
)
4960 || !CONST_INT_P (curr_offset
))
4964 last_offset
= INTVAL (curr_offset
);
4965 if ((last_offset
& 3) != 0 || last_offset
> 60)
4971 if (INTVAL (curr_offset
) != last_offset
)
4975 if (last_offset
< 0 || last_offset
+ 4 != INTVAL (offset
))
4982 /* Masks of registers that are valid for CDX ldwm/stwm instructions.
4983 The instruction can encode subsets drawn from either R2-R13 or
4984 R14-R23 + FP + RA. */
4985 #define CDX_LDSTWM_VALID_REGS_0 0x00003ffc
4986 #define CDX_LDSTWM_VALID_REGS_1 0x90ffc000
4989 nios2_ldstwm_regset_p (unsigned int regno
, unsigned int *regset
)
4993 if (CDX_LDSTWM_VALID_REGS_0
& (1 << regno
))
4994 *regset
= CDX_LDSTWM_VALID_REGS_0
;
4995 else if (CDX_LDSTWM_VALID_REGS_1
& (1 << regno
))
4996 *regset
= CDX_LDSTWM_VALID_REGS_1
;
5002 return (*regset
& (1 << regno
)) != 0;
5005 /* Main function to implement ldwm_operation/stwm_operation
5006 predicates that check ldwm/stwm insn pattern integrity. */
5008 ldstwm_operation_p (rtx op
, bool load_p
)
5010 int start
, i
, end
= XVECLEN (op
, 0) - 1, last_regno
= -1;
5011 unsigned int regset
= 0;
5012 rtx base_reg
, offset
;
5013 rtx first_elt
= XVECEXP (op
, 0, 0);
5015 bool wb_p
= base_reg_adjustment_p (first_elt
, &base_reg
, &offset
);
5016 if (GET_CODE (XVECEXP (op
, 0, end
)) == RETURN
)
5018 start
= wb_p
? 1 : 0;
5019 for (i
= start
; i
<= end
; i
++)
5022 rtx reg
, mem
, elt
= XVECEXP (op
, 0, i
);
5023 /* Return early if not a SET at all. */
5024 if (GET_CODE (elt
) != SET
)
5026 reg
= load_p
? SET_DEST (elt
) : SET_SRC (elt
);
5027 mem
= load_p
? SET_SRC (elt
) : SET_DEST (elt
);
5028 if (!REG_P (reg
) || !MEM_P (mem
))
5030 regno
= REGNO (reg
);
5031 if (!nios2_ldstwm_regset_p (regno
, ®set
))
5033 /* If no writeback to determine direction, use offset of first MEM. */
5035 inc_p
= INTVAL (offset
) > 0;
5036 else if (i
== start
)
5038 rtx first_base
, first_offset
;
5039 if (!split_mem_address (XEXP (mem
, 0),
5040 &first_base
, &first_offset
))
5042 if (!REG_P (first_base
) || !CONST_INT_P (first_offset
))
5044 base_reg
= first_base
;
5045 inc_p
= INTVAL (first_offset
) >= 0;
5047 /* Ensure that the base register is not loaded into. */
5048 if (load_p
&& regno
== (int) REGNO (base_reg
))
5050 /* Check for register order inc/dec integrity. */
5051 if (last_regno
>= 0)
5053 if (inc_p
&& last_regno
>= regno
)
5055 if (!inc_p
&& last_regno
<= regno
)
5063 /* Helper for nios2_ldst_parallel, for generating a parallel vector
5066 gen_ldst (bool load_p
, int regno
, rtx base_mem
, int offset
)
5068 rtx reg
= gen_rtx_REG (SImode
, regno
);
5069 rtx mem
= adjust_address_nv (base_mem
, SImode
, offset
);
5070 return gen_rtx_SET (load_p
? reg
: mem
,
5071 load_p
? mem
: reg
);
5074 /* A general routine for creating the body RTL pattern of
5075 ldwm/stwm/push.n/pop.n insns.
5076 LOAD_P: true/false for load/store direction.
5077 REG_INC_P: whether registers are incrementing/decrementing in the
5078 *RTL vector* (not necessarily the order defined in the ISA specification).
5079 OFFSET_INC_P: Same as REG_INC_P, but for the memory offset order.
5080 BASE_MEM: starting MEM.
5081 BASE_UPDATE: amount to update base register; zero means no writeback.
5082 REGMASK: register mask to load/store.
5083 RET_P: true if to tag a (return) element at the end.
5085 Note that this routine does not do any checking. It's the job of the
5086 caller to do the right thing, and the insn patterns to do the
5089 nios2_ldst_parallel (bool load_p
, bool reg_inc_p
, bool offset_inc_p
,
5090 rtx base_mem
, int base_update
,
5091 unsigned HOST_WIDE_INT regmask
, bool ret_p
)
5094 int regno
, b
= 0, i
= 0, n
= 0, len
= popcount_hwi (regmask
);
5095 if (ret_p
) len
++, i
++, b
++;
5096 if (base_update
!= 0) len
++, i
++;
5097 p
= rtvec_alloc (len
);
5098 for (regno
= (reg_inc_p
? 0 : 31);
5099 regno
!= (reg_inc_p
? 32 : -1);
5100 regno
+= (reg_inc_p
? 1 : -1))
5101 if ((regmask
& (1 << regno
)) != 0)
5103 int offset
= (offset_inc_p
? 4 : -4) * n
++;
5104 RTVEC_ELT (p
, i
++) = gen_ldst (load_p
, regno
, base_mem
, offset
);
5107 RTVEC_ELT (p
, 0) = ret_rtx
;
5108 if (base_update
!= 0)
5111 if (!split_mem_address (XEXP (base_mem
, 0), ®
, &offset
))
5114 gen_rtx_SET (reg
, plus_constant (Pmode
, reg
, base_update
));
5116 return gen_rtx_PARALLEL (VOIDmode
, p
);
5119 /* CDX ldwm/stwm peephole optimization pattern related routines. */
5121 /* Data structure and sorting function for ldwm/stwm peephole optimizers. */
5122 struct ldstwm_operand
5124 int offset
; /* Offset from base register. */
5125 rtx reg
; /* Register to store at this offset. */
5126 rtx mem
; /* Original mem. */
5127 bool bad
; /* True if this load/store can't be combined. */
5128 bool rewrite
; /* True if we should rewrite using scratch. */
5132 compare_ldstwm_operands (const void *arg1
, const void *arg2
)
5134 const struct ldstwm_operand
*op1
= (const struct ldstwm_operand
*) arg1
;
5135 const struct ldstwm_operand
*op2
= (const struct ldstwm_operand
*) arg2
;
5137 return op2
->bad
? 0 : 1;
5141 return op1
->offset
- op2
->offset
;
5144 /* Helper function: return true if a load/store using REGNO with address
5145 BASEREG and offset OFFSET meets the constraints for a 2-byte CDX ldw.n,
5146 stw.n, ldwsp.n, or stwsp.n instruction. */
5148 can_use_cdx_ldstw (int regno
, int basereg
, int offset
)
5150 if (CDX_REG_P (regno
) && CDX_REG_P (basereg
)
5151 && (offset
& 0x3) == 0 && offset
>= 0 && offset
< 0x40)
5153 else if (basereg
== SP_REGNO
5154 && offset
>= 0 && offset
< 0x80 && (offset
& 0x3) == 0)
5159 /* This function is called from peephole2 optimizers to try to merge
5160 a series of individual loads and stores into a ldwm or stwm. It
5161 can also rewrite addresses inside the individual loads and stores
5162 using a common base register using a scratch register and smaller
5163 offsets if that allows them to use CDX ldw.n or stw.n instructions
5164 instead of 4-byte loads or stores.
5165 N is the number of insns we are trying to merge. SCRATCH is non-null
5166 if there is a scratch register available. The OPERANDS array contains
5167 alternating REG (even) and MEM (odd) operands. */
5169 gen_ldstwm_peep (bool load_p
, int n
, rtx scratch
, rtx
*operands
)
5171 /* CDX ldwm/stwm instructions allow a maximum of 12 registers to be
5173 #define MAX_LDSTWM_OPS 12
5174 struct ldstwm_operand sort
[MAX_LDSTWM_OPS
];
5177 int i
, m
, lastoffset
, lastreg
;
5178 unsigned int regmask
= 0, usemask
= 0, regset
;
5183 if (!TARGET_HAS_CDX
)
5185 if (n
< 2 || n
> MAX_LDSTWM_OPS
)
5188 /* Check all the operands for validity and initialize the sort array.
5189 The places where we return false here are all situations that aren't
5190 expected to ever happen -- invalid patterns, invalid registers, etc. */
5191 for (i
= 0; i
< n
; i
++)
5194 rtx reg
= operands
[i
];
5195 rtx mem
= operands
[i
+ n
];
5199 if (!REG_P (reg
) || !MEM_P (mem
))
5202 regno
= REGNO (reg
);
5205 if (load_p
&& (regmask
& (1 << regno
)) != 0)
5207 regmask
|= 1 << regno
;
5209 if (!split_mem_address (XEXP (mem
, 0), &base
, &offset
))
5212 o
= INTVAL (offset
);
5216 else if (r
!= basereg
)
5221 sort
[i
].rewrite
= false;
5227 /* If we are doing a series of register loads, we can't safely reorder
5228 them if any of the regs used in addr expressions are also being set. */
5229 if (load_p
&& (regmask
& usemask
))
5232 /* Sort the array by increasing mem offset order, then check that
5233 offsets are valid and register order matches mem order. At the
5234 end of this loop, m is the number of loads/stores we will try to
5235 combine; the rest are leftovers. */
5236 qsort (sort
, n
, sizeof (struct ldstwm_operand
), compare_ldstwm_operands
);
5238 baseoffset
= sort
[0].offset
;
5239 needscratch
= baseoffset
!= 0;
5240 if (needscratch
&& !scratch
)
5243 lastreg
= regmask
= regset
= 0;
5244 lastoffset
= baseoffset
;
5245 for (m
= 0; m
< n
&& !sort
[m
].bad
; m
++)
5247 int thisreg
= REGNO (sort
[m
].reg
);
5248 if (sort
[m
].offset
!= lastoffset
5249 || (m
> 0 && lastreg
>= thisreg
)
5250 || !nios2_ldstwm_regset_p (thisreg
, ®set
))
5254 regmask
|= (1 << thisreg
);
5257 /* For loads, make sure we are not overwriting the scratch reg.
5258 The peephole2 pattern isn't supposed to match unless the register is
5259 unused all the way through, so this isn't supposed to happen anyway. */
5262 && ((1 << REGNO (scratch
)) & regmask
) != 0)
5264 newbasereg
= needscratch
? (int) REGNO (scratch
) : basereg
;
5266 /* We may be able to combine only the first m of the n total loads/stores
5267 into a single instruction. If m < 2, there's no point in emitting
5268 a ldwm/stwm at all, but we might be able to do further optimizations
5269 if we have a scratch. We will count the instruction lengths of the
5270 old and new patterns and store the savings in nbytes. */
5279 nbytes
= -4; /* Size of ldwm/stwm. */
5282 int bo
= baseoffset
> 0 ? baseoffset
: -baseoffset
;
5283 if (CDX_REG_P (newbasereg
)
5284 && CDX_REG_P (basereg
)
5285 && bo
<= 128 && bo
> 0 && (bo
& (bo
- 1)) == 0)
5286 nbytes
-= 2; /* Size of addi.n/subi.n. */
5288 nbytes
-= 4; /* Size of non-CDX addi. */
5291 /* Count the size of the input load/store instructions being replaced. */
5292 for (i
= 0; i
< m
; i
++)
5293 if (can_use_cdx_ldstw (REGNO (sort
[i
].reg
), basereg
, sort
[i
].offset
))
5298 /* We may also be able to save a bit if we can rewrite non-CDX
5299 load/stores that can't be combined into the ldwm/stwm into CDX
5300 load/stores using the scratch reg. For example, this might happen
5301 if baseoffset is large, by bringing in the offsets in the load/store
5302 instructions within the range that fits in the CDX instruction. */
5303 if (needscratch
&& CDX_REG_P (newbasereg
))
5304 for (i
= m
; i
< n
&& !sort
[i
].bad
; i
++)
5305 if (!can_use_cdx_ldstw (REGNO (sort
[i
].reg
), basereg
, sort
[i
].offset
)
5306 && can_use_cdx_ldstw (REGNO (sort
[i
].reg
), newbasereg
,
5307 sort
[i
].offset
- baseoffset
))
5309 sort
[i
].rewrite
= true;
5313 /* Are we good to go? */
5317 /* Emit the scratch load. */
5319 emit_insn (gen_rtx_SET (scratch
, XEXP (sort
[0].mem
, 0)));
5321 /* Emit the ldwm/stwm insn. */
5324 rtvec p
= rtvec_alloc (m
);
5325 for (i
= 0; i
< m
; i
++)
5327 int offset
= sort
[i
].offset
;
5328 rtx mem
, reg
= sort
[i
].reg
;
5329 rtx base_reg
= gen_rtx_REG (Pmode
, newbasereg
);
5331 offset
-= baseoffset
;
5332 mem
= gen_rtx_MEM (SImode
, plus_constant (Pmode
, base_reg
, offset
));
5334 RTVEC_ELT (p
, i
) = gen_rtx_SET (reg
, mem
);
5336 RTVEC_ELT (p
, i
) = gen_rtx_SET (mem
, reg
);
5338 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
5341 /* Emit any leftover load/stores as individual instructions, doing
5342 the previously-noted rewrites to use the scratch reg. */
5343 for (i
= m
; i
< n
; i
++)
5345 rtx reg
= sort
[i
].reg
;
5346 rtx mem
= sort
[i
].mem
;
5347 if (sort
[i
].rewrite
)
5349 int offset
= sort
[i
].offset
- baseoffset
;
5350 mem
= gen_rtx_MEM (SImode
, plus_constant (Pmode
, scratch
, offset
));
5353 emit_move_insn (reg
, mem
);
5355 emit_move_insn (mem
, reg
);
5360 /* Implement TARGET_MACHINE_DEPENDENT_REORG:
5361 We use this hook when emitting CDX code to enforce the 4-byte
5362 alignment requirement for labels that are used as the targets of
5363 jmpi instructions. CDX code can otherwise contain a mix of 16-bit
5364 and 32-bit instructions aligned on any 16-bit boundary, but functions
5365 and jmpi labels have to be 32-bit aligned because of the way the address
5366 is encoded in the instruction. */
5368 static unsigned char *label_align
;
5369 static int min_labelno
, max_labelno
;
5374 bool changed
= true;
5377 if (!TARGET_HAS_CDX
)
5380 /* Initialize the data structures. */
5383 max_labelno
= max_label_num ();
5384 min_labelno
= get_first_label_num ();
5385 label_align
= XCNEWVEC (unsigned char, max_labelno
- min_labelno
+ 1);
5387 /* Iterate on inserting alignment and adjusting branch lengths until
5392 shorten_branches (get_insns ());
5394 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
5395 if (JUMP_P (insn
) && insn_variable_length_p (insn
))
5397 rtx label
= JUMP_LABEL (insn
);
5398 /* We use the current fact that all cases of 'jmpi'
5399 doing the actual branch in the machine description
5400 has a computed length of 6 or 8. Length 4 and below
5401 are all PC-relative 'br' branches without the jump-align
5403 if (label
&& LABEL_P (label
) && get_attr_length (insn
) > 4)
5405 int index
= CODE_LABEL_NUMBER (label
) - min_labelno
;
5406 if (label_align
[index
] != 2)
5408 label_align
[index
] = 2;
5416 /* Implement LABEL_ALIGN, using the information gathered in nios2_reorg. */
5418 nios2_label_align (rtx label
)
5420 int n
= CODE_LABEL_NUMBER (label
);
5422 if (label_align
&& n
>= min_labelno
&& n
<= max_labelno
)
5423 return MAX (label_align
[n
- min_labelno
], align_labels
.levels
[0].log
);
5424 return align_labels
.levels
[0].log
;
5427 /* Implement ADJUST_REG_ALLOC_ORDER. We use the default ordering
5428 for R1 and non-CDX R2 code; for CDX we tweak thing to prefer
5429 the registers that can be used as operands to instructions that
5430 have 3-bit register fields. */
5432 nios2_adjust_reg_alloc_order (void)
5434 const int cdx_reg_alloc_order
[] =
5436 /* Call-clobbered GPRs within CDX 3-bit encoded range. */
5438 /* Call-saved GPRs within CDX 3-bit encoded range. */
5440 /* Other call-clobbered GPRs. */
5441 8, 9, 10, 11, 12, 13, 14, 15,
5442 /* Other call-saved GPRs. RA placed first since it is always saved. */
5443 31, 18, 19, 20, 21, 22, 23, 28,
5444 /* Fixed GPRs, not used by the register allocator. */
5445 0, 1, 24, 25, 26, 27, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39
5449 memcpy (reg_alloc_order
, cdx_reg_alloc_order
,
5450 sizeof (int) * FIRST_PSEUDO_REGISTER
);
5454 /* Initialize the GCC target structure. */
5455 #undef TARGET_ASM_FUNCTION_PROLOGUE
5456 #define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
5458 #undef TARGET_IN_SMALL_DATA_P
5459 #define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
5461 #undef TARGET_SECTION_TYPE_FLAGS
5462 #define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
5464 #undef TARGET_INIT_BUILTINS
5465 #define TARGET_INIT_BUILTINS nios2_init_builtins
5466 #undef TARGET_EXPAND_BUILTIN
5467 #define TARGET_EXPAND_BUILTIN nios2_expand_builtin
5468 #undef TARGET_BUILTIN_DECL
5469 #define TARGET_BUILTIN_DECL nios2_builtin_decl
5471 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5472 #define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
5474 #undef TARGET_CAN_ELIMINATE
5475 #define TARGET_CAN_ELIMINATE nios2_can_eliminate
5477 #undef TARGET_FUNCTION_ARG
5478 #define TARGET_FUNCTION_ARG nios2_function_arg
5480 #undef TARGET_FUNCTION_ARG_ADVANCE
5481 #define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
5483 #undef TARGET_FUNCTION_ARG_PADDING
5484 #define TARGET_FUNCTION_ARG_PADDING nios2_function_arg_padding
5486 #undef TARGET_ARG_PARTIAL_BYTES
5487 #define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
5489 #undef TARGET_TRAMPOLINE_INIT
5490 #define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
5492 #undef TARGET_FUNCTION_VALUE
5493 #define TARGET_FUNCTION_VALUE nios2_function_value
5495 #undef TARGET_LIBCALL_VALUE
5496 #define TARGET_LIBCALL_VALUE nios2_libcall_value
5498 #undef TARGET_FUNCTION_VALUE_REGNO_P
5499 #define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
5501 #undef TARGET_RETURN_IN_MEMORY
5502 #define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
5504 #undef TARGET_PROMOTE_PROTOTYPES
5505 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
5507 #undef TARGET_SETUP_INCOMING_VARARGS
5508 #define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
5510 #undef TARGET_MUST_PASS_IN_STACK
5511 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
5513 #undef TARGET_LEGITIMATE_CONSTANT_P
5514 #define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
5516 #undef TARGET_LEGITIMIZE_ADDRESS
5517 #define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
5519 #undef TARGET_DELEGITIMIZE_ADDRESS
5520 #define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address
5522 #undef TARGET_LEGITIMATE_ADDRESS_P
5523 #define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
5525 #undef TARGET_PREFERRED_RELOAD_CLASS
5526 #define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
5528 #undef TARGET_RTX_COSTS
5529 #define TARGET_RTX_COSTS nios2_rtx_costs
5531 #undef TARGET_ADDRESS_COST
5532 #define TARGET_ADDRESS_COST nios2_address_cost
5534 #undef TARGET_HAVE_TLS
5535 #define TARGET_HAVE_TLS TARGET_LINUX_ABI
5537 #undef TARGET_CANNOT_FORCE_CONST_MEM
5538 #define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
5540 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
5541 #define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
5543 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
5544 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P nios2_print_operand_punct_valid_p
5546 #undef TARGET_PRINT_OPERAND
5547 #define TARGET_PRINT_OPERAND nios2_print_operand
5549 #undef TARGET_PRINT_OPERAND_ADDRESS
5550 #define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
5552 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
5553 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra
5555 #undef TARGET_ASM_FILE_END
5556 #define TARGET_ASM_FILE_END nios2_asm_file_end
5558 #undef TARGET_OPTION_OVERRIDE
5559 #define TARGET_OPTION_OVERRIDE nios2_option_override
5561 #undef TARGET_OPTION_SAVE
5562 #define TARGET_OPTION_SAVE nios2_option_save
5564 #undef TARGET_OPTION_RESTORE
5565 #define TARGET_OPTION_RESTORE nios2_option_restore
5567 #undef TARGET_SET_CURRENT_FUNCTION
5568 #define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
5570 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
5571 #define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
5573 #undef TARGET_OPTION_PRAGMA_PARSE
5574 #define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
5576 #undef TARGET_MERGE_DECL_ATTRIBUTES
5577 #define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
5579 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5580 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
5581 hook_bool_const_tree_hwi_hwi_const_tree_true
5583 #undef TARGET_ASM_OUTPUT_MI_THUNK
5584 #define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk
5586 #undef TARGET_MACHINE_DEPENDENT_REORG
5587 #define TARGET_MACHINE_DEPENDENT_REORG nios2_reorg
5589 #undef TARGET_CONSTANT_ALIGNMENT
5590 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
5592 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
5593 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
5595 struct gcc_target targetm
= TARGET_INITIALIZER
;
5597 #include "gt-nios2.h"