gcc/config/nios2/nios2.c
1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "alias.h"
29 #include "symtab.h"
30 #include "tree.h"
31 #include "fold-const.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
36 #include "output.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "recog.h"
40 #include "function.h"
41 #include "expmed.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "emit-rtl.h"
46 #include "varasm.h"
47 #include "stmt.h"
48 #include "expr.h"
49 #include "insn-codes.h"
50 #include "optabs.h"
51 #include "predict.h"
52 #include "dominance.h"
53 #include "cfg.h"
54 #include "cfgrtl.h"
55 #include "cfganal.h"
56 #include "lcm.h"
57 #include "cfgbuild.h"
58 #include "cfgcleanup.h"
59 #include "basic-block.h"
60 #include "diagnostic-core.h"
61 #include "toplev.h"
62 #include "target.h"
63 #include "target-def.h"
64 #include "tm_p.h"
65 #include "langhooks.h"
66 #include "df.h"
67 #include "debug.h"
68 #include "reload.h"
69 #include "stor-layout.h"
70 #include "builtins.h"
71
72 /* Forward function declarations. */
73 static bool prologue_saved_reg_p (unsigned);
74 static void nios2_load_pic_register (void);
75 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
76 static const char *nios2_unspec_reloc_name (int);
77 static void nios2_register_builtin_fndecl (unsigned, tree);
78
79 /* Threshold for data being put into the small data/bss area, instead
80 of the normal data area (references to the small data/bss area take
81       1 instruction and use the global pointer; references to the normal
82       data area take 2 instructions). */
83 unsigned HOST_WIDE_INT nios2_section_threshold = NIOS2_DEFAULT_GVALUE;
84
85 struct GTY (()) machine_function
86 {
87 /* Current frame information, to be filled in by nios2_compute_frame_layout
88 with register save masks, and offsets for the current function. */
89
90 /* Mask of registers to save. */
91 unsigned int save_mask;
92 /* Number of bytes that the entire frame takes up. */
93 int total_size;
94 /* Number of bytes that variables take up. */
95 int var_size;
96 /* Number of bytes that outgoing arguments take up. */
97 int args_size;
98 /* Number of bytes needed to store registers in frame. */
99 int save_reg_size;
100 /* Offset from new stack pointer to store registers. */
101 int save_regs_offset;
102 /* Offset from save_regs_offset to store frame pointer register. */
103 int fp_save_offset;
104 /* != 0 if frame layout already calculated. */
105 int initialized;
106 };
107
108 /* State to track the assignment of custom codes to FPU/custom builtins. */
109 static enum nios2_ccs_code custom_code_status[256];
110 static int custom_code_index[256];
111 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
112 static bool custom_code_conflict = false;
113
114 \f
115 /* Definition of builtin function types for nios2. */
116
117 #define N2_FTYPES \
118 N2_FTYPE(1, (SF)) \
119 N2_FTYPE(1, (VOID)) \
120 N2_FTYPE(2, (DF, DF)) \
121 N2_FTYPE(3, (DF, DF, DF)) \
122 N2_FTYPE(2, (DF, SF)) \
123 N2_FTYPE(2, (DF, SI)) \
124 N2_FTYPE(2, (DF, UI)) \
125 N2_FTYPE(2, (SF, DF)) \
126 N2_FTYPE(2, (SF, SF)) \
127 N2_FTYPE(3, (SF, SF, SF)) \
128 N2_FTYPE(2, (SF, SI)) \
129 N2_FTYPE(2, (SF, UI)) \
130 N2_FTYPE(2, (SI, CVPTR)) \
131 N2_FTYPE(2, (SI, DF)) \
132 N2_FTYPE(3, (SI, DF, DF)) \
133 N2_FTYPE(2, (SI, SF)) \
134 N2_FTYPE(3, (SI, SF, SF)) \
135 N2_FTYPE(2, (SI, SI)) \
136 N2_FTYPE(2, (UI, CVPTR)) \
137 N2_FTYPE(2, (UI, DF)) \
138 N2_FTYPE(2, (UI, SF)) \
139 N2_FTYPE(2, (VOID, DF)) \
140 N2_FTYPE(2, (VOID, SF)) \
141 N2_FTYPE(3, (VOID, SI, SI)) \
142 N2_FTYPE(3, (VOID, VPTR, SI))
143
144 #define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
145 #define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
146 #define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
147
148 /* Expand ftcode enumeration. */
149 enum nios2_ftcode {
150 #define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
151 N2_FTYPES
152 #undef N2_FTYPE
153 N2_FTYPE_MAX
154 };
155
156 /* Return the tree function type, based on the ftcode. */
157 static tree
158 nios2_ftype (enum nios2_ftcode ftcode)
159 {
160 static tree types[(int) N2_FTYPE_MAX];
161
162 tree N2_TYPE_SF = float_type_node;
163 tree N2_TYPE_DF = double_type_node;
164 tree N2_TYPE_SI = integer_type_node;
165 tree N2_TYPE_UI = unsigned_type_node;
166 tree N2_TYPE_VOID = void_type_node;
167
168 static const_tree N2_TYPE_CVPTR, N2_TYPE_VPTR;
169 if (!N2_TYPE_CVPTR)
170 {
171 /* const volatile void *. */
172 N2_TYPE_CVPTR
173 = build_pointer_type (build_qualified_type (void_type_node,
174 (TYPE_QUAL_CONST
175 | TYPE_QUAL_VOLATILE)));
176 /* volatile void *. */
177 N2_TYPE_VPTR
178 = build_pointer_type (build_qualified_type (void_type_node,
179 TYPE_QUAL_VOLATILE));
180 }
181 if (types[(int) ftcode] == NULL_TREE)
182 switch (ftcode)
183 {
184 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
185 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
186 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
187 #define N2_FTYPE(N,ARGS) \
188 case N2_FTYPE_OP ## N ARGS: \
189 types[(int) ftcode] \
190 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
191 break;
192 N2_FTYPES
193 #undef N2_FTYPE
194 default: gcc_unreachable ();
195 }
196 return types[(int) ftcode];
197 }
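
/* As an example, the entry N2_FTYPE(3, (SF, SF, SF)) above expands to the
   enumerator N2_FTYPE_SF_SF_SF, and nios2_ftype (N2_FTYPE_SF_SF_SF) builds
   (and caches) roughly

     build_function_type_list (float_type_node,
                               float_type_node, float_type_node, NULL_TREE)

   i.e. a "float (float, float)" function type, which is later attached to
   the corresponding FPU builtins.  */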
198
199 \f
200 /* Definition of FPU instruction descriptions. */
201
202 struct nios2_fpu_insn_info
203 {
204 const char *name;
205 int num_operands, *optvar;
206 int opt, no_opt;
207 #define N2F_DF 0x1
208 #define N2F_DFREQ 0x2
209 #define N2F_UNSAFE 0x4
210 #define N2F_FINITE 0x8
211 #define N2F_NO_ERRNO 0x10
212 unsigned int flags;
213 enum insn_code icode;
214 enum nios2_ftcode ftcode;
215 };
216
217 /* Base macro for defining FPU instructions. */
218 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
219 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
220 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
221 N2_FTYPE_OP ## nop args }
222
223 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
224 #define N2FPU_OP2(mode) (mode, mode)
225 #define N2FPU_OP3(mode) (mode, mode, mode)
226 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
227 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
228 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
229 #define N2FPU_INSN_SF(code, nop, flags) \
230 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
231 #define N2FPU_INSN_DF(code, nop, flags) \
232 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
233
234 /* Compare instructions, 3 operand FP operation with a SI result. */
235 #define N2FPU_CMP_DEF(code, flags, m, M) \
236 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
237 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
238 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
239 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
240
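/* As a concrete illustration, N2FPU_INSN_SF (add, 3, 0) expands roughly to

     { "fadds", 3, &nios2_custom_fadds, OPT_mcustom_fadds_,
       OPT_mno_custom_fadds, 0, CODE_FOR_addsf3, N2_FTYPE_SF_SF_SF }

   describing the single-precision add as a 3-operand FPU custom insn.  */
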
241  /* The order of definition needs to be kept consistent with
242 enum n2fpu_code in nios2-opts.h. */
243 struct nios2_fpu_insn_info nios2_fpu_insn[] =
244 {
245 /* Single precision instructions. */
246 N2FPU_INSN_SF (add, 3, 0),
247 N2FPU_INSN_SF (sub, 3, 0),
248 N2FPU_INSN_SF (mul, 3, 0),
249 N2FPU_INSN_SF (div, 3, 0),
250 /* Due to textual difference between min/max and smin/smax. */
251 N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, s, S),
252 N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, s, S),
253 N2FPU_INSN_SF (neg, 2, 0),
254 N2FPU_INSN_SF (abs, 2, 0),
255 N2FPU_INSN_SF (sqrt, 2, 0),
256 N2FPU_INSN_SF (sin, 2, N2F_UNSAFE),
257 N2FPU_INSN_SF (cos, 2, N2F_UNSAFE),
258 N2FPU_INSN_SF (tan, 2, N2F_UNSAFE),
259 N2FPU_INSN_SF (atan, 2, N2F_UNSAFE),
260 N2FPU_INSN_SF (exp, 2, N2F_UNSAFE),
261 N2FPU_INSN_SF (log, 2, N2F_UNSAFE),
262 /* Single precision compares. */
263 N2FPU_CMP_SF (eq), N2FPU_CMP_SF (ne),
264 N2FPU_CMP_SF (lt), N2FPU_CMP_SF (le),
265 N2FPU_CMP_SF (gt), N2FPU_CMP_SF (ge),
266
267 /* Double precision instructions. */
268 N2FPU_INSN_DF (add, 3, 0),
269 N2FPU_INSN_DF (sub, 3, 0),
270 N2FPU_INSN_DF (mul, 3, 0),
271 N2FPU_INSN_DF (div, 3, 0),
272 /* Due to textual difference between min/max and smin/smax. */
273 N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, d, D),
274 N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, d, D),
275 N2FPU_INSN_DF (neg, 2, 0),
276 N2FPU_INSN_DF (abs, 2, 0),
277 N2FPU_INSN_DF (sqrt, 2, 0),
278 N2FPU_INSN_DF (sin, 2, N2F_UNSAFE),
279 N2FPU_INSN_DF (cos, 2, N2F_UNSAFE),
280 N2FPU_INSN_DF (tan, 2, N2F_UNSAFE),
281 N2FPU_INSN_DF (atan, 2, N2F_UNSAFE),
282 N2FPU_INSN_DF (exp, 2, N2F_UNSAFE),
283 N2FPU_INSN_DF (log, 2, N2F_UNSAFE),
284 /* Double precision compares. */
285 N2FPU_CMP_DF (eq), N2FPU_CMP_DF (ne),
286 N2FPU_CMP_DF (lt), N2FPU_CMP_DF (le),
287 N2FPU_CMP_DF (gt), N2FPU_CMP_DF (ge),
288
289 /* Conversion instructions. */
290 N2FPU_INSN_DEF_BASE (floatis, 2, 0, floatsisf2, (SF, SI)),
291 N2FPU_INSN_DEF_BASE (floatus, 2, 0, floatunssisf2, (SF, UI)),
292 N2FPU_INSN_DEF_BASE (floatid, 2, 0, floatsidf2, (DF, SI)),
293 N2FPU_INSN_DEF_BASE (floatud, 2, 0, floatunssidf2, (DF, UI)),
294 N2FPU_INSN_DEF_BASE (round, 2, N2F_NO_ERRNO, lroundsfsi2, (SI, SF)),
295 N2FPU_INSN_DEF_BASE (fixsi, 2, 0, fix_truncsfsi2, (SI, SF)),
296 N2FPU_INSN_DEF_BASE (fixsu, 2, 0, fixuns_truncsfsi2, (UI, SF)),
297 N2FPU_INSN_DEF_BASE (fixdi, 2, 0, fix_truncdfsi2, (SI, DF)),
298 N2FPU_INSN_DEF_BASE (fixdu, 2, 0, fixuns_truncdfsi2, (UI, DF)),
299 N2FPU_INSN_DEF_BASE (fextsd, 2, 0, extendsfdf2, (DF, SF)),
300 N2FPU_INSN_DEF_BASE (ftruncds, 2, 0, truncdfsf2, (SF, DF)),
301
302 /* X, Y access instructions. */
303 N2FPU_INSN_DEF_BASE (fwrx, 2, N2F_DFREQ, nios2_fwrx, (VOID, DF)),
304 N2FPU_INSN_DEF_BASE (fwry, 2, N2F_DFREQ, nios2_fwry, (VOID, SF)),
305 N2FPU_INSN_DEF_BASE (frdxlo, 1, N2F_DFREQ, nios2_frdxlo, (SF)),
306 N2FPU_INSN_DEF_BASE (frdxhi, 1, N2F_DFREQ, nios2_frdxhi, (SF)),
307 N2FPU_INSN_DEF_BASE (frdy, 1, N2F_DFREQ, nios2_frdy, (SF))
308 };
309
310 /* Some macros for ease of access. */
311 #define N2FPU(code) nios2_fpu_insn[(int) code]
312 #define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
313 #define N2FPU_N(code) (*N2FPU(code).optvar)
314 #define N2FPU_NAME(code) (N2FPU(code).name)
315 #define N2FPU_ICODE(code) (N2FPU(code).icode)
316 #define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
317 #define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
318 #define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
319 #define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
320 #define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
321 #define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
322
323 /* Same as above, but for cases where using only the op part is shorter. */
324 #define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
325 #define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
326 #define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
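
/* For instance, N2FPU_OP_ENABLED_P (fadds) is true exactly when the option
   variable nios2_custom_fadds holds an assigned custom-insn code (>= 0);
   it stays at -1 when -mcustom-fadds= was not given or was cancelled by
   -mno-custom-fadds.  */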
327
328 /* Export the FPU insn enabled predicate to nios2.md. */
329 bool
330 nios2_fpu_insn_enabled (enum n2fpu_code code)
331 {
332 return N2FPU_ENABLED_P (code);
333 }
334
335 /* Return true if COND comparison for mode MODE is enabled under current
336 settings. */
337
338 static bool
339 nios2_fpu_compare_enabled (enum rtx_code cond, machine_mode mode)
340 {
341 if (mode == SFmode)
342 switch (cond)
343 {
344 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqs);
345 case NE: return N2FPU_OP_ENABLED_P (fcmpnes);
346 case GT: return N2FPU_OP_ENABLED_P (fcmpgts);
347 case GE: return N2FPU_OP_ENABLED_P (fcmpges);
348 case LT: return N2FPU_OP_ENABLED_P (fcmplts);
349 case LE: return N2FPU_OP_ENABLED_P (fcmples);
350 default: break;
351 }
352 else if (mode == DFmode)
353 switch (cond)
354 {
355 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqd);
356 case NE: return N2FPU_OP_ENABLED_P (fcmpned);
357 case GT: return N2FPU_OP_ENABLED_P (fcmpgtd);
358 case GE: return N2FPU_OP_ENABLED_P (fcmpged);
359 case LT: return N2FPU_OP_ENABLED_P (fcmpltd);
360 case LE: return N2FPU_OP_ENABLED_P (fcmpled);
361 default: break;
362 }
363 return false;
364 }
365
366 /* Stack layout and calling conventions. */
367
368 #define NIOS2_STACK_ALIGN(LOC) \
369 (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
370 & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
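
/* Worked example: assuming the usual 32-bit PREFERRED_STACK_BOUNDARY
   (4 bytes), NIOS2_STACK_ALIGN (9) == (9 + 3) & ~3 == 12.  */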
371
372 /* Return the bytes needed to compute the frame pointer from the current
373 stack pointer. */
374 static int
375 nios2_compute_frame_layout (void)
376 {
377 unsigned int regno;
378 unsigned int save_mask = 0;
379 int total_size;
380 int var_size;
381 int out_args_size;
382 int save_reg_size;
383
384 if (cfun->machine->initialized)
385 return cfun->machine->total_size;
386
387 var_size = NIOS2_STACK_ALIGN (get_frame_size ());
388 out_args_size = NIOS2_STACK_ALIGN (crtl->outgoing_args_size);
389 total_size = var_size + out_args_size;
390
391 /* Calculate space needed for gp registers. */
392 save_reg_size = 0;
393 for (regno = 0; regno <= LAST_GP_REG; regno++)
394 if (prologue_saved_reg_p (regno))
395 {
396 save_mask |= 1 << regno;
397 save_reg_size += 4;
398 }
399
400 /* If we call eh_return, we need to save the EH data registers. */
401 if (crtl->calls_eh_return)
402 {
403 unsigned i;
404 unsigned r;
405
406 for (i = 0; (r = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
407 if (!(save_mask & (1 << r)))
408 {
409 save_mask |= 1 << r;
410 save_reg_size += 4;
411 }
412 }
413
414 cfun->machine->fp_save_offset = 0;
415 if (save_mask & (1 << HARD_FRAME_POINTER_REGNUM))
416 {
417 int fp_save_offset = 0;
418 for (regno = 0; regno < HARD_FRAME_POINTER_REGNUM; regno++)
419 if (save_mask & (1 << regno))
420 fp_save_offset += 4;
421
422 cfun->machine->fp_save_offset = fp_save_offset;
423 }
424
425 save_reg_size = NIOS2_STACK_ALIGN (save_reg_size);
426 total_size += save_reg_size;
427 total_size += NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
428
429 /* Save other computed information. */
430 cfun->machine->save_mask = save_mask;
431 cfun->machine->total_size = total_size;
432 cfun->machine->var_size = var_size;
433 cfun->machine->args_size = out_args_size;
434 cfun->machine->save_reg_size = save_reg_size;
435 cfun->machine->initialized = reload_completed;
436 cfun->machine->save_regs_offset = out_args_size + var_size;
437
438 return total_size;
439 }
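
/* The resulting frame, from the new (lowest) stack pointer upwards, is
   laid out approximately as follows:

     sp + 0                  outgoing argument area (args_size)
     sp + args_size          local variables (var_size)
     sp + save_regs_offset   callee-saved registers (save_reg_size)
     ...                     pretend (stdarg) arguments
     sp + total_size         caller's frame

   where save_regs_offset == args_size + var_size, as computed above.  */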
440
441 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
442 prologue/epilogue expand routines. */
443 static void
444 save_reg (int regno, unsigned offset)
445 {
446 rtx reg = gen_rtx_REG (SImode, regno);
447 rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
448 gen_int_mode (offset, Pmode));
449 rtx insn = emit_move_insn (gen_frame_mem (Pmode, addr), reg);
450 RTX_FRAME_RELATED_P (insn) = 1;
451 }
452
453 static void
454 restore_reg (int regno, unsigned offset)
455 {
456 rtx reg = gen_rtx_REG (SImode, regno);
457 rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
458 gen_int_mode (offset, Pmode));
459 rtx insn = emit_move_insn (reg, gen_frame_mem (Pmode, addr));
460 /* Tag epilogue unwind note. */
461 add_reg_note (insn, REG_CFA_RESTORE, reg);
462 RTX_FRAME_RELATED_P (insn) = 1;
463 }
464
465 /* Emit conditional trap for checking stack limit. */
466 static void
467 nios2_emit_stack_limit_check (void)
468 {
469 if (REG_P (stack_limit_rtx))
470 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode, stack_pointer_rtx,
471 stack_limit_rtx),
472 stack_pointer_rtx, stack_limit_rtx, GEN_INT (3)));
473 else
474 sorry ("only register based stack limit is supported");
475 }
476
477 /* Temp regno used inside prologue/epilogue. */
478 #define TEMP_REG_NUM 8
479
480 static rtx
481 nios2_emit_add_constant (rtx reg, HOST_WIDE_INT immed)
482 {
483 rtx insn;
484 if (SMALL_INT (immed))
485 insn = emit_insn (gen_add2_insn (reg, gen_int_mode (immed, Pmode)));
486 else
487 {
488 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
489 emit_move_insn (tmp, gen_int_mode (immed, Pmode));
490 insn = emit_insn (gen_add2_insn (reg, tmp));
491 }
492 return insn;
493 }
494
495 void
496 nios2_expand_prologue (void)
497 {
498 unsigned int regno;
499 int total_frame_size, save_offset;
500 int sp_offset; /* offset from base_reg to final stack value. */
501 int save_regs_base; /* offset from base_reg to register save area. */
502 rtx insn;
503
504 total_frame_size = nios2_compute_frame_layout ();
505
506 if (flag_stack_usage_info)
507 current_function_static_stack_size = total_frame_size;
508
509 /* Decrement the stack pointer. */
510 if (!SMALL_INT (total_frame_size))
511 {
512       /* We need an intermediate point; this will point at the spill block. */
513 insn = emit_insn
514 (gen_add2_insn (stack_pointer_rtx,
515 gen_int_mode (cfun->machine->save_regs_offset
516 - total_frame_size, Pmode)));
517 RTX_FRAME_RELATED_P (insn) = 1;
518 save_regs_base = 0;
519 sp_offset = -cfun->machine->save_regs_offset;
520 }
521 else if (total_frame_size)
522 {
523 insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
524 gen_int_mode (-total_frame_size,
525 Pmode)));
526 RTX_FRAME_RELATED_P (insn) = 1;
527 save_regs_base = cfun->machine->save_regs_offset;
528 sp_offset = 0;
529 }
530 else
531 save_regs_base = sp_offset = 0;
532
533 if (crtl->limit_stack)
534 nios2_emit_stack_limit_check ();
535
536 save_offset = save_regs_base + cfun->machine->save_reg_size;
537
538 for (regno = LAST_GP_REG; regno > 0; regno--)
539 if (cfun->machine->save_mask & (1 << regno))
540 {
541 save_offset -= 4;
542 save_reg (regno, save_offset);
543 }
544
545 if (frame_pointer_needed)
546 {
547 int fp_save_offset = save_regs_base + cfun->machine->fp_save_offset;
548 insn = emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
549 stack_pointer_rtx,
550 gen_int_mode (fp_save_offset, Pmode)));
551 RTX_FRAME_RELATED_P (insn) = 1;
552 }
553
554 if (sp_offset)
555 {
556 rtx sp_adjust
557 = gen_rtx_SET (stack_pointer_rtx,
558 plus_constant (Pmode, stack_pointer_rtx, sp_offset));
559 if (SMALL_INT (sp_offset))
560 insn = emit_insn (sp_adjust);
561 else
562 {
563 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
564 emit_move_insn (tmp, gen_int_mode (sp_offset, Pmode));
565 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
566 /* Attach the sp_adjust as a note indicating what happened. */
567 add_reg_note (insn, REG_FRAME_RELATED_EXPR, sp_adjust);
568 }
569 RTX_FRAME_RELATED_P (insn) = 1;
570
571 if (crtl->limit_stack)
572 nios2_emit_stack_limit_check ();
573 }
574
575 /* Load the PIC register if needed. */
576 if (crtl->uses_pic_offset_table)
577 nios2_load_pic_register ();
578
579 /* If we are profiling, make sure no instructions are scheduled before
580 the call to mcount. */
581 if (crtl->profile)
582 emit_insn (gen_blockage ());
583 }
584
585 void
586 nios2_expand_epilogue (bool sibcall_p)
587 {
588 rtx insn, cfa_adj;
589 int total_frame_size;
590 int sp_adjust, save_offset;
591 unsigned int regno;
592
593 if (!sibcall_p && nios2_can_use_return_insn ())
594 {
595 emit_jump_insn (gen_return ());
596 return;
597 }
598
599 emit_insn (gen_blockage ());
600
601 total_frame_size = nios2_compute_frame_layout ();
602 if (frame_pointer_needed)
603 {
604 /* Recover the stack pointer. */
605 insn = emit_insn (gen_add3_insn
606 (stack_pointer_rtx, hard_frame_pointer_rtx,
607 gen_int_mode (-cfun->machine->fp_save_offset, Pmode)));
608 cfa_adj = plus_constant (Pmode, stack_pointer_rtx,
609 (total_frame_size
610 - cfun->machine->save_regs_offset));
611 add_reg_note (insn, REG_CFA_DEF_CFA, cfa_adj);
612 RTX_FRAME_RELATED_P (insn) = 1;
613
614 save_offset = 0;
615 sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
616 }
617 else if (!SMALL_INT (total_frame_size))
618 {
619 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
620 emit_move_insn (tmp, gen_int_mode (cfun->machine->save_regs_offset,
621 Pmode));
622 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
623 cfa_adj = gen_rtx_SET (stack_pointer_rtx,
624 plus_constant (Pmode, stack_pointer_rtx,
625 cfun->machine->save_regs_offset));
626 add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa_adj);
627 RTX_FRAME_RELATED_P (insn) = 1;
628 save_offset = 0;
629 sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
630 }
631 else
632 {
633 save_offset = cfun->machine->save_regs_offset;
634 sp_adjust = total_frame_size;
635 }
636
637 save_offset += cfun->machine->save_reg_size;
638
639 for (regno = LAST_GP_REG; regno > 0; regno--)
640 if (cfun->machine->save_mask & (1 << regno))
641 {
642 save_offset -= 4;
643 restore_reg (regno, save_offset);
644 }
645
646 if (sp_adjust)
647 {
648 insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
649 gen_int_mode (sp_adjust, Pmode)));
650 cfa_adj = gen_rtx_SET (stack_pointer_rtx,
651 plus_constant (Pmode, stack_pointer_rtx,
652 sp_adjust));
653 add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa_adj);
654 RTX_FRAME_RELATED_P (insn) = 1;
655 }
656
657 /* Add in the __builtin_eh_return stack adjustment. */
658 if (crtl->calls_eh_return)
659 emit_insn (gen_add2_insn (stack_pointer_rtx, EH_RETURN_STACKADJ_RTX));
660
661 if (!sibcall_p)
662 emit_jump_insn (gen_simple_return ());
663 }
664
665 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
666 back to a previous frame. */
667 rtx
668 nios2_get_return_address (int count)
669 {
670 if (count != 0)
671 return const0_rtx;
672
673 return get_hard_reg_initial_val (Pmode, RA_REGNO);
674 }
675
676 /* Emit code to change the current function's return address to
677 ADDRESS. SCRATCH is available as a scratch register, if needed.
678 ADDRESS and SCRATCH are both word-mode GPRs. */
679 void
680 nios2_set_return_address (rtx address, rtx scratch)
681 {
682 nios2_compute_frame_layout ();
683 if (cfun->machine->save_mask & (1 << RA_REGNO))
684 {
685 unsigned offset = cfun->machine->save_reg_size - 4;
686 rtx base;
687
688 if (frame_pointer_needed)
689 base = hard_frame_pointer_rtx;
690 else
691 {
692 base = stack_pointer_rtx;
693 offset += cfun->machine->save_regs_offset;
694
695 if (!SMALL_INT (offset))
696 {
697 emit_move_insn (scratch, gen_int_mode (offset, Pmode));
698 emit_insn (gen_add2_insn (scratch, base));
699 base = scratch;
700 offset = 0;
701 }
702 }
703 if (offset)
704 base = plus_constant (Pmode, base, offset);
705 emit_move_insn (gen_rtx_MEM (Pmode, base), address);
706 }
707 else
708 emit_move_insn (gen_rtx_REG (Pmode, RA_REGNO), address);
709 }
710
711 /* Implement FUNCTION_PROFILER macro. */
712 void
713 nios2_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
714 {
715 fprintf (file, "\tmov\tr8, ra\n");
716 if (flag_pic == 1)
717 {
718 fprintf (file, "\tnextpc\tr2\n");
719 fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
720 fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
721 fprintf (file, "\tadd\tr2, r2, r3\n");
722 fprintf (file, "\tldw\tr2, %%call(_mcount)(r2)\n");
723 fprintf (file, "\tcallr\tr2\n");
724 }
725 else if (flag_pic == 2)
726 {
727 fprintf (file, "\tnextpc\tr2\n");
728 fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
729 fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
730 fprintf (file, "\tadd\tr2, r2, r3\n");
731 fprintf (file, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
732 fprintf (file, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
733 fprintf (file, "\tadd\tr3, r2, r3\n");
734 fprintf (file, "\tldw\tr2, 0(r3)\n");
735 fprintf (file, "\tcallr\tr2\n");
736 }
737 else
738 fprintf (file, "\tcall\t_mcount\n");
739 fprintf (file, "\tmov\tra, r8\n");
740 }
741
742 /* Dump stack layout. */
743 static void
744 nios2_dump_frame_layout (FILE *file)
745 {
746 fprintf (file, "\t%s Current Frame Info\n", ASM_COMMENT_START);
747 fprintf (file, "\t%s total_size = %d\n", ASM_COMMENT_START,
748 cfun->machine->total_size);
749 fprintf (file, "\t%s var_size = %d\n", ASM_COMMENT_START,
750 cfun->machine->var_size);
751 fprintf (file, "\t%s args_size = %d\n", ASM_COMMENT_START,
752 cfun->machine->args_size);
753 fprintf (file, "\t%s save_reg_size = %d\n", ASM_COMMENT_START,
754 cfun->machine->save_reg_size);
755 fprintf (file, "\t%s initialized = %d\n", ASM_COMMENT_START,
756 cfun->machine->initialized);
757 fprintf (file, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START,
758 cfun->machine->save_regs_offset);
759 fprintf (file, "\t%s is_leaf = %d\n", ASM_COMMENT_START,
760 crtl->is_leaf);
761 fprintf (file, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START,
762 frame_pointer_needed);
763 fprintf (file, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START,
764 crtl->args.pretend_args_size);
765 }
766
767 /* Return true if REGNO should be saved in the prologue. */
768 static bool
769 prologue_saved_reg_p (unsigned regno)
770 {
771 gcc_assert (GP_REG_P (regno));
772
773 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
774 return true;
775
776 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
777 return true;
778
779 if (regno == PIC_OFFSET_TABLE_REGNUM && crtl->uses_pic_offset_table)
780 return true;
781
782 if (regno == RA_REGNO && df_regs_ever_live_p (RA_REGNO))
783 return true;
784
785 return false;
786 }
787
788 /* Implement TARGET_CAN_ELIMINATE. */
789 static bool
790 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
791 {
792 if (to == STACK_POINTER_REGNUM)
793 return !frame_pointer_needed;
794 return true;
795 }
796
797 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
798 int
799 nios2_initial_elimination_offset (int from, int to)
800 {
801 int offset;
802
803 nios2_compute_frame_layout ();
804
805 /* Set OFFSET to the offset from the stack pointer. */
806 switch (from)
807 {
808 case FRAME_POINTER_REGNUM:
809 offset = cfun->machine->args_size;
810 break;
811
812 case ARG_POINTER_REGNUM:
813 offset = cfun->machine->total_size;
814 offset -= crtl->args.pretend_args_size;
815 break;
816
817 default:
818 gcc_unreachable ();
819 }
820
821 /* If we are asked for the frame pointer offset, then adjust OFFSET
822 by the offset from the frame pointer to the stack pointer. */
823 if (to == HARD_FRAME_POINTER_REGNUM)
824 offset -= (cfun->machine->save_regs_offset
825 + cfun->machine->fp_save_offset);
826
827 return offset;
828 }
829
830 /* Return nonzero if this function is known to have a null epilogue.
831 This allows the optimizer to omit jumps to jumps if no stack
832 was created. */
833 int
834 nios2_can_use_return_insn (void)
835 {
836 if (!reload_completed || crtl->profile)
837 return 0;
838
839 return nios2_compute_frame_layout () == 0;
840 }
841
842 \f
843 /* Check and signal some warnings/errors on FPU insn options. */
844 static void
845 nios2_custom_check_insns (void)
846 {
847 unsigned int i, j;
848 bool errors = false;
849
850 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
851 if (N2FPU_ENABLED_P (i) && N2FPU_DOUBLE_P (i))
852 {
853 for (j = 0; j < ARRAY_SIZE (nios2_fpu_insn); j++)
854 if (N2FPU_DOUBLE_REQUIRED_P (j) && ! N2FPU_ENABLED_P (j))
855 {
856 error ("switch %<-mcustom-%s%> is required for double "
857 "precision floating point", N2FPU_NAME (j));
858 errors = true;
859 }
860 break;
861 }
862
863 /* Warn if the user has certain exotic operations that won't get used
864 without -funsafe-math-optimizations. See expand_builtin () in
865 builtins.c. */
866 if (!flag_unsafe_math_optimizations)
867 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
868 if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
869 warning (0, "switch %<-mcustom-%s%> has no effect unless "
870 "-funsafe-math-optimizations is specified", N2FPU_NAME (i));
871
872   /* Warn if the user is trying to use -mcustom-fmins et al., which won't
873 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
874 in builtins.c. */
875 if (!flag_finite_math_only)
876 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
877 if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
878 warning (0, "switch %<-mcustom-%s%> has no effect unless "
879 "-ffinite-math-only is specified", N2FPU_NAME (i));
880
881 /* Warn if the user is trying to use a custom rounding instruction
882 that won't get used without -fno-math-errno. See
883 expand_builtin_int_roundingfn_2 () in builtins.c. */
884 if (flag_errno_math)
885 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
886 if (N2FPU_ENABLED_P (i) && N2FPU_NO_ERRNO_P (i))
887 warning (0, "switch %<-mcustom-%s%> has no effect unless "
888 "-fno-math-errno is specified", N2FPU_NAME (i));
889
890 if (errors || custom_code_conflict)
891 fatal_error (input_location,
892 "conflicting use of -mcustom switches, target attributes, "
893 "and/or __builtin_custom_ functions");
894 }
895
896 static void
897 nios2_set_fpu_custom_code (enum n2fpu_code code, int n, bool override_p)
898 {
899 if (override_p || N2FPU_N (code) == -1)
900 N2FPU_N (code) = n;
901 nios2_register_custom_code (n, CCS_FPU, (int) code);
902 }
903
904 /* Type to represent a standard FPU config. */
905 struct nios2_fpu_config
906 {
907 const char *name;
908 bool set_sp_constants;
909 int code[n2fpu_code_num];
910 };
911
912 #define NIOS2_FPU_CONFIG_NUM 3
913 static struct nios2_fpu_config custom_fpu_config[NIOS2_FPU_CONFIG_NUM];
914
915 static void
916 nios2_init_fpu_configs (void)
917 {
918 struct nios2_fpu_config* cfg;
919 int i = 0;
920 #define NEXT_FPU_CONFIG \
921 do { \
922 cfg = &custom_fpu_config[i++]; \
923 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
924 } while (0)
925
926 NEXT_FPU_CONFIG;
927 cfg->name = "60-1";
928 cfg->set_sp_constants = true;
929 cfg->code[n2fpu_fmuls] = 252;
930 cfg->code[n2fpu_fadds] = 253;
931 cfg->code[n2fpu_fsubs] = 254;
932
933 NEXT_FPU_CONFIG;
934 cfg->name = "60-2";
935 cfg->set_sp_constants = true;
936 cfg->code[n2fpu_fmuls] = 252;
937 cfg->code[n2fpu_fadds] = 253;
938 cfg->code[n2fpu_fsubs] = 254;
939 cfg->code[n2fpu_fdivs] = 255;
940
941 NEXT_FPU_CONFIG;
942 cfg->name = "72-3";
943 cfg->set_sp_constants = true;
944 cfg->code[n2fpu_floatus] = 243;
945 cfg->code[n2fpu_fixsi] = 244;
946 cfg->code[n2fpu_floatis] = 245;
947 cfg->code[n2fpu_fcmpgts] = 246;
948 cfg->code[n2fpu_fcmples] = 249;
949 cfg->code[n2fpu_fcmpeqs] = 250;
950 cfg->code[n2fpu_fcmpnes] = 251;
951 cfg->code[n2fpu_fmuls] = 252;
952 cfg->code[n2fpu_fadds] = 253;
953 cfg->code[n2fpu_fsubs] = 254;
954 cfg->code[n2fpu_fdivs] = 255;
955
956 #undef NEXT_FPU_CONFIG
957 gcc_assert (i == NIOS2_FPU_CONFIG_NUM);
958 }
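
/* Thus -mcustom-fpu-cfg=60-1 behaves like
     -mcustom-fmuls=252 -mcustom-fadds=253 -mcustom-fsubs=254
     -fsingle-precision-constant
   while "60-2" additionally assigns fdivs, and "72-3" adds the
   single-precision compares and conversions listed above.  */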
959
960 static struct nios2_fpu_config *
961 nios2_match_custom_fpu_cfg (const char *cfgname, const char *endp)
962 {
963 int i;
964 for (i = 0; i < NIOS2_FPU_CONFIG_NUM; i++)
965 {
966 bool match = !(endp != NULL
967 ? strncmp (custom_fpu_config[i].name, cfgname,
968 endp - cfgname)
969 : strcmp (custom_fpu_config[i].name, cfgname));
970 if (match)
971 return &custom_fpu_config[i];
972 }
973 return NULL;
974 }
975
976  /* Use CFGNAME to look up the FPU config; ENDP, if not NULL, marks the end of the string.
977 OVERRIDE is true if loaded config codes should overwrite current state. */
978 static void
979 nios2_handle_custom_fpu_cfg (const char *cfgname, const char *endp,
980 bool override)
981 {
982 struct nios2_fpu_config *cfg = nios2_match_custom_fpu_cfg (cfgname, endp);
983 if (cfg)
984 {
985 unsigned int i;
986 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
987 if (cfg->code[i] >= 0)
988 nios2_set_fpu_custom_code ((enum n2fpu_code) i, cfg->code[i],
989 override);
990 if (cfg->set_sp_constants)
991 flag_single_precision_constant = 1;
992 }
993 else
994 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
995 "value %<%s%>", cfgname);
996
997 /* Guard against errors in the standard configurations. */
998 nios2_custom_check_insns ();
999 }
1000
1001 /* Check individual FPU insn options, and register custom code. */
1002 static void
1003 nios2_handle_custom_fpu_insn_option (int fpu_insn_index)
1004 {
1005 int param = N2FPU_N (fpu_insn_index);
1006
1007 if (0 <= param && param <= 255)
1008 nios2_register_custom_code (param, CCS_FPU, fpu_insn_index);
1009
1010 /* Valid values are 0-255, but also allow -1 so that the
1011 -mno-custom-<opt> switches work. */
1012 else if (param != -1)
1013 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1014 N2FPU_NAME (fpu_insn_index), param);
1015 }
1016
1017 /* Allocate a chunk of memory for per-function machine-dependent data. */
1018 static struct machine_function *
1019 nios2_init_machine_status (void)
1020 {
1021 return ggc_cleared_alloc<machine_function> ();
1022 }
1023
1024 /* Implement TARGET_OPTION_OVERRIDE. */
1025 static void
1026 nios2_option_override (void)
1027 {
1028 unsigned int i;
1029
1030 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1031 SUBTARGET_OVERRIDE_OPTIONS;
1032 #endif
1033
1034 /* Check for unsupported options. */
1035 if (flag_pic && !TARGET_LINUX_ABI)
1036 sorry ("position-independent code requires the Linux ABI");
1037
1038 /* Function to allocate machine-dependent function status. */
1039 init_machine_status = &nios2_init_machine_status;
1040
1041 nios2_section_threshold
1042 = (global_options_set.x_g_switch_value
1043 ? g_switch_value : NIOS2_DEFAULT_GVALUE);
1044
1045 if (nios2_gpopt_option == gpopt_unspecified)
1046 {
1047 /* Default to -mgpopt unless -fpic or -fPIC. */
1048 if (flag_pic)
1049 nios2_gpopt_option = gpopt_none;
1050 else
1051 nios2_gpopt_option = gpopt_local;
1052 }
1053
1054 /* If we don't have mul, we don't have mulx either! */
1055 if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
1056 target_flags &= ~MASK_HAS_MULX;
1057
1058 /* Initialize default FPU configurations. */
1059 nios2_init_fpu_configs ();
1060
1061 /* Set up default handling for floating point custom instructions.
1062
1063 Putting things in this order means that the -mcustom-fpu-cfg=
1064 switch will always be overridden by individual -mcustom-fadds=
1065 switches, regardless of the order in which they were specified
1066 on the command line.
1067
1068 This behavior of prioritization of individual -mcustom-<insn>=
1069 options before the -mcustom-fpu-cfg= switch is maintained for
1070 compatibility. */
1071 if (nios2_custom_fpu_cfg_string && *nios2_custom_fpu_cfg_string)
1072 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string, NULL, false);
1073
1074 /* Handle options for individual FPU insns. */
1075 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1076 nios2_handle_custom_fpu_insn_option (i);
1077
1078 nios2_custom_check_insns ();
1079
1080 /* Save the initial options in case the user does function specific
1081 options. */
1082 target_option_default_node = target_option_current_node
1083 = build_target_option_node (&global_options);
1084 }
1085
1086 \f
1087 /* Return true if CST is a constant within range of movi/movui/movhi. */
1088 static bool
1089 nios2_simple_const_p (const_rtx cst)
1090 {
1091 HOST_WIDE_INT val = INTVAL (cst);
1092 return SMALL_INT (val) || SMALL_INT_UNSIGNED (val) || UPPER16_INT (val);
1093 }
1094
1095 /* Compute a (partial) cost for rtx X. Return true if the complete
1096 cost has been computed, and false if subexpressions should be
1097 scanned. In either case, *TOTAL contains the cost result. */
1098 static bool
1099 nios2_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
1100 int opno ATTRIBUTE_UNUSED,
1101 int *total, bool speed ATTRIBUTE_UNUSED)
1102 {
1103 switch (code)
1104 {
1105 case CONST_INT:
1106 if (INTVAL (x) == 0)
1107 {
1108 *total = COSTS_N_INSNS (0);
1109 return true;
1110 }
1111 else if (nios2_simple_const_p (x))
1112 {
1113 *total = COSTS_N_INSNS (2);
1114 return true;
1115 }
1116 else
1117 {
1118 *total = COSTS_N_INSNS (4);
1119 return true;
1120 }
1121
1122 case LABEL_REF:
1123 case SYMBOL_REF:
1124 case CONST:
1125 case CONST_DOUBLE:
1126 {
1127 *total = COSTS_N_INSNS (4);
1128 return true;
1129 }
1130
1131 case AND:
1132 {
1133 /* Recognize 'nor' insn pattern. */
1134 if (GET_CODE (XEXP (x, 0)) == NOT
1135 && GET_CODE (XEXP (x, 1)) == NOT)
1136 {
1137 *total = COSTS_N_INSNS (1);
1138 return true;
1139 }
1140 return false;
1141 }
1142
1143 case MULT:
1144 {
1145 *total = COSTS_N_INSNS (1);
1146 return false;
1147 }
1148 case SIGN_EXTEND:
1149 {
1150 *total = COSTS_N_INSNS (3);
1151 return false;
1152 }
1153 case ZERO_EXTEND:
1154 {
1155 *total = COSTS_N_INSNS (1);
1156 return false;
1157 }
1158
1159 default:
1160 return false;
1161 }
1162 }
1163
1164 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1165 static reg_class_t
1166 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t regclass)
1167 {
1168 return regclass == NO_REGS ? GENERAL_REGS : regclass;
1169 }
1170
1171 /* Emit a call to __tls_get_addr. TI is the argument to this function.
1172    The call is emitted into the current insn stream, and the REG rtx
1173    holding the return value is returned. */
1174 static GTY(()) rtx nios2_tls_symbol;
1175
1176 static rtx
1177 nios2_call_tls_get_addr (rtx ti)
1178 {
1179 rtx arg = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
1180 rtx ret = gen_rtx_REG (Pmode, FIRST_RETVAL_REGNO);
1181 rtx fn, insn;
1182
1183 if (!nios2_tls_symbol)
1184 nios2_tls_symbol = init_one_libfunc ("__tls_get_addr");
1185
1186 emit_move_insn (arg, ti);
1187 fn = gen_rtx_MEM (QImode, nios2_tls_symbol);
1188 insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
1189 RTL_CONST_CALL_P (insn) = 1;
1190 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
1191 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);
1192
1193 return ret;
1194 }
1195
1196 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1197 static bool
1198 nios2_large_offset_p (int unspec)
1199 {
1200 gcc_assert (nios2_unspec_reloc_name (unspec) != NULL);
1201
1202 if (flag_pic == 2
1203 /* FIXME: TLS GOT offset relocations will eventually also get this
1204          treatment, after binutils support for those is also completed. */
1205 && (unspec == UNSPEC_PIC_SYM || unspec == UNSPEC_PIC_CALL_SYM))
1206 return true;
1207
1208 /* 'gotoff' offsets are always hiadj/lo. */
1209 if (unspec == UNSPEC_PIC_GOTOFF_SYM)
1210 return true;
1211
1212 return false;
1213 }
1214
1215 /* Return true for conforming unspec relocations. Also used in
1216 constraints.md and predicates.md. */
1217 bool
1218 nios2_unspec_reloc_p (rtx op)
1219 {
1220 return (GET_CODE (op) == CONST
1221 && GET_CODE (XEXP (op, 0)) == UNSPEC
1222 && ! nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1223 }
1224
1225 /* Helper to generate unspec constant. */
1226 static rtx
1227 nios2_unspec_offset (rtx loc, int unspec)
1228 {
1229 return gen_rtx_CONST (Pmode, gen_rtx_UNSPEC (Pmode, gen_rtvec (1, loc),
1230 unspec));
1231 }
1232
1233 /* Generate GOT pointer based address with large offset. */
1234 static rtx
1235 nios2_large_got_address (rtx offset, rtx tmp)
1236 {
1237 if (!tmp)
1238 tmp = gen_reg_rtx (Pmode);
1239 emit_move_insn (tmp, offset);
1240 return gen_rtx_PLUS (Pmode, tmp, pic_offset_table_rtx);
1241 }
1242
1243 /* Generate a GOT pointer based address. */
1244 static rtx
1245 nios2_got_address (rtx loc, int unspec)
1246 {
1247 rtx offset = nios2_unspec_offset (loc, unspec);
1248 crtl->uses_pic_offset_table = 1;
1249
1250 if (nios2_large_offset_p (unspec))
1251 return force_reg (Pmode, nios2_large_got_address (offset, NULL_RTX));
1252
1253 return gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
1254 }
1255
1256 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1257 return value will be a valid address and move_operand (either a REG
1258 or a LO_SUM). */
1259 static rtx
1260 nios2_legitimize_tls_address (rtx loc)
1261 {
1262 rtx tmp, mem, tp;
1263 enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);
1264
1265 switch (model)
1266 {
1267 case TLS_MODEL_GLOBAL_DYNAMIC:
1268 tmp = gen_reg_rtx (Pmode);
1269 emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_GD));
1270 return nios2_call_tls_get_addr (tmp);
1271
1272 case TLS_MODEL_LOCAL_DYNAMIC:
1273 tmp = gen_reg_rtx (Pmode);
1274 emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_LDM));
1275 return gen_rtx_PLUS (Pmode, nios2_call_tls_get_addr (tmp),
1276 nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LDO));
1277
1278 case TLS_MODEL_INITIAL_EXEC:
1279 tmp = gen_reg_rtx (Pmode);
1280 mem = gen_const_mem (Pmode, nios2_got_address (loc, UNSPEC_LOAD_TLS_IE));
1281 emit_move_insn (tmp, mem);
1282 tp = gen_rtx_REG (Pmode, TP_REGNO);
1283 return gen_rtx_PLUS (Pmode, tp, tmp);
1284
1285 case TLS_MODEL_LOCAL_EXEC:
1286 tp = gen_rtx_REG (Pmode, TP_REGNO);
1287 return gen_rtx_PLUS (Pmode, tp,
1288 nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LE));
1289 default:
1290 gcc_unreachable ();
1291 }
1292 }
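
/* To summarize the cases above: the global-dynamic and local-dynamic models
   route the access through a __tls_get_addr call on a GOT-based argument
   (with an extra per-symbol addend in the local-dynamic case); initial-exec
   loads the symbol's offset from the GOT and adds it to the thread pointer
   (TP_REGNO); and local-exec simply adds a link-time constant to the thread
   pointer.  */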
1293
1294 /* Divide Support
1295
1296 If -O3 is used, we want to output a table lookup for
1297 divides between small numbers (both num and den >= 0
1298 and < 0x10). The overhead of this method in the worst
1299 case is 40 bytes in the text section (10 insns) and
1300 256 bytes in the data section. Additional divides do
1301 not incur additional penalties in the data section.
1302
1303 Code speed is improved for small divides by about 5x
304     when using this method in the worst case (~9 cycles
305     vs ~45). And in the worst case, divides not within the
306     table are penalized by about 10% (an extra ~5 cycles on top of ~45).
1307 However in the typical case the penalty is not as bad
1308 because doing the long divide in only 45 cycles is
1309 quite optimistic.
1310
1311 ??? would be nice to have some benchmarks other
1312 than Dhrystone to back this up.
1313
1314 This bit of expansion is to create this instruction
1315 sequence as rtl.
1316 or $8, $4, $5
1317 slli $9, $4, 4
1318 cmpgeui $3, $8, 16
1319 beq $3, $0, .L3
1320 or $10, $9, $5
1321 add $12, $11, divide_table
1322 ldbu $2, 0($12)
1323 br .L1
1324 .L3:
1325 call slow_div
1326 .L1:
1327 # continue here with result in $2
1328
1329 ??? Ideally I would like the libcall block to contain all
1330 of this code, but I don't know how to do that. What it
1331 means is that if the divide can be eliminated, it may not
1332 completely disappear.
1333
1334 ??? The __divsi3_table label should ideally be moved out
1335 of this block and into a global. If it is placed into the
1336 sdata section we can save even more cycles by doing things
1337 gp relative. */
1338 void
1339 nios2_emit_expensive_div (rtx *operands, machine_mode mode)
1340 {
1341 rtx or_result, shift_left_result;
1342 rtx lookup_value;
1343 rtx_code_label *lab1, *lab3;
1344 rtx insns;
1345 rtx libfunc;
1346 rtx final_result;
1347 rtx tmp;
1348 rtx table;
1349
1350 /* It may look a little generic, but only SImode is supported for now. */
1351 gcc_assert (mode == SImode);
1352 libfunc = optab_libfunc (sdiv_optab, SImode);
1353
1354 lab1 = gen_label_rtx ();
1355 lab3 = gen_label_rtx ();
1356
1357 or_result = expand_simple_binop (SImode, IOR,
1358 operands[1], operands[2],
1359 0, 0, OPTAB_LIB_WIDEN);
1360
1361 emit_cmp_and_jump_insns (or_result, GEN_INT (15), GTU, 0,
1362 GET_MODE (or_result), 0, lab3);
1363 JUMP_LABEL (get_last_insn ()) = lab3;
1364
1365 shift_left_result = expand_simple_binop (SImode, ASHIFT,
1366 operands[1], GEN_INT (4),
1367 0, 0, OPTAB_LIB_WIDEN);
1368
1369 lookup_value = expand_simple_binop (SImode, IOR,
1370 shift_left_result, operands[2],
1371 0, 0, OPTAB_LIB_WIDEN);
1372 table = gen_rtx_PLUS (SImode, lookup_value,
1373 gen_rtx_SYMBOL_REF (SImode, "__divsi3_table"));
1374 convert_move (operands[0], gen_rtx_MEM (QImode, table), 1);
1375
1376 tmp = emit_jump_insn (gen_jump (lab1));
1377 JUMP_LABEL (tmp) = lab1;
1378 emit_barrier ();
1379
1380 emit_label (lab3);
1381 LABEL_NUSES (lab3) = 1;
1382
1383 start_sequence ();
1384 final_result = emit_library_call_value (libfunc, NULL_RTX,
1385 LCT_CONST, SImode, 2,
1386 operands[1], SImode,
1387 operands[2], SImode);
1388
1389 insns = get_insns ();
1390 end_sequence ();
1391 emit_libcall_block (insns, operands[0], final_result,
1392 gen_rtx_DIV (SImode, operands[1], operands[2]));
1393
1394 emit_label (lab1);
1395 LABEL_NUSES (lab1) = 1;
1396 }
1397
1398 \f
1399 /* Branches and compares. */
1400
1401 /* Return, in *ALT_CODE and *ALT_OP, an alternate but equivalent constant
1402    comparison, e.g. turning >= 1 into > 0. */
1403 static void
1404 nios2_alternate_compare_const (enum rtx_code code, rtx op,
1405 enum rtx_code *alt_code, rtx *alt_op,
1406 machine_mode mode)
1407 {
1408 HOST_WIDE_INT opval = INTVAL (op);
1409 enum rtx_code scode = signed_condition (code);
1410 bool dec_p = (scode == LT || scode == GE);
1411
1412 if (code == EQ || code == NE)
1413 {
1414 *alt_code = code;
1415 *alt_op = op;
1416 return;
1417 }
1418
1419 *alt_op = (dec_p
1420 ? gen_int_mode (opval - 1, mode)
1421 : gen_int_mode (opval + 1, mode));
1422
1423 /* The required conversion between [>,>=] and [<,<=] is captured
1424 by a reverse + swap of condition codes. */
1425 *alt_code = reverse_condition (swap_condition (code));
1426
1427 {
1428 /* Test if the incremented/decremented value crosses the over/underflow
1429 boundary. Supposedly, such boundary cases should already be transformed
1430 into always-true/false or EQ conditions, so use an assertion here. */
1431 unsigned HOST_WIDE_INT alt_opval = INTVAL (*alt_op);
1432 if (code == scode)
1433 alt_opval ^= (1 << (GET_MODE_BITSIZE (mode) - 1));
1434 alt_opval &= GET_MODE_MASK (mode);
1435 gcc_assert (dec_p ? alt_opval != GET_MODE_MASK (mode) : alt_opval != 0);
1436 }
1437 }
1438
1439 /* Return true if the constant comparison is supported by nios2. */
1440 static bool
1441 nios2_valid_compare_const_p (enum rtx_code code, rtx op)
1442 {
1443 switch (code)
1444 {
1445 case EQ: case NE: case GE: case LT:
1446 return SMALL_INT (INTVAL (op));
1447 case GEU: case LTU:
1448 return SMALL_INT_UNSIGNED (INTVAL (op));
1449 default:
1450 return false;
1451 }
1452 }
1453
1454 /* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
1455 the current configuration. Perform modifications if MODIFY_P is true.
1456 Returns true if FPU compare can be done. */
1457
1458 bool
1459 nios2_validate_fpu_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
1460 bool modify_p)
1461 {
1462 bool rev_p = false;
1463 enum rtx_code code = GET_CODE (*cmp);
1464
1465 if (!nios2_fpu_compare_enabled (code, mode))
1466 {
1467 code = swap_condition (code);
1468 if (nios2_fpu_compare_enabled (code, mode))
1469 rev_p = true;
1470 else
1471 return false;
1472 }
1473
1474 if (modify_p)
1475 {
1476 if (rev_p)
1477 {
1478 rtx tmp = *op1;
1479 *op1 = *op2;
1480 *op2 = tmp;
1481 }
1482 *op1 = force_reg (mode, *op1);
1483 *op2 = force_reg (mode, *op2);
1484 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
1485 }
1486 return true;
1487 }
1488
1489 /* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
1490    nios2-supported form. Returns true on success. */
1491 bool
1492 nios2_validate_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
1493 {
1494 enum rtx_code code = GET_CODE (*cmp);
1495 enum rtx_code alt_code;
1496 rtx alt_op2;
1497
1498 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1499 return nios2_validate_fpu_compare (mode, cmp, op1, op2, true);
1500
1501 if (!reg_or_0_operand (*op2, mode))
1502 {
1503 /* Create alternate constant compare. */
1504 nios2_alternate_compare_const (code, *op2, &alt_code, &alt_op2, mode);
1505
506       /* If the alternate op2 is zero (0), we can use it directly, possibly
1507 swapping the compare code. */
1508 if (alt_op2 == const0_rtx)
1509 {
1510 code = alt_code;
1511 *op2 = alt_op2;
1512 goto check_rebuild_cmp;
1513 }
1514
1515 /* Check if either constant compare can be used. */
1516 if (nios2_valid_compare_const_p (code, *op2))
1517 return true;
1518 else if (nios2_valid_compare_const_p (alt_code, alt_op2))
1519 {
1520 code = alt_code;
1521 *op2 = alt_op2;
1522 goto rebuild_cmp;
1523 }
1524
1525 /* We have to force op2 into a register now. Try to pick one
1526 with a lower cost. */
1527 if (! nios2_simple_const_p (*op2)
1528 && nios2_simple_const_p (alt_op2))
1529 {
1530 code = alt_code;
1531 *op2 = alt_op2;
1532 }
1533 *op2 = force_reg (SImode, *op2);
1534 }
1535 check_rebuild_cmp:
1536 if (code == GT || code == GTU || code == LE || code == LEU)
1537 {
1538 rtx t = *op1; *op1 = *op2; *op2 = t;
1539 code = swap_condition (code);
1540 }
1541 rebuild_cmp:
1542 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
1543 return true;
1544 }
1545
1546
1547 /* Addressing Modes. */
1548
1549 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1550 static bool
1551 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1552 {
1553 rtx base, offset;
1554 split_const (x, &base, &offset);
1555 return GET_CODE (base) != SYMBOL_REF || !SYMBOL_REF_TLS_MODEL (base);
1556 }
1557
1558 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1559 static bool
1560 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1561 {
1562 return nios2_legitimate_constant_p (mode, x) == false;
1563 }
1564
1565 /* Return true if register REGNO is a valid base register.
1566 STRICT_P is true if REG_OK_STRICT is in effect. */
1567
1568 bool
1569 nios2_regno_ok_for_base_p (int regno, bool strict_p)
1570 {
1571 if (!HARD_REGISTER_NUM_P (regno))
1572 {
1573 if (!strict_p)
1574 return true;
1575
1576 if (!reg_renumber)
1577 return false;
1578
1579 regno = reg_renumber[regno];
1580 }
1581
1582 /* The fake registers will be eliminated to either the stack or
1583 hard frame pointer, both of which are usually valid base registers.
1584 Reload deals with the cases where the eliminated form isn't valid. */
1585 return (GP_REG_P (regno)
1586 || regno == FRAME_POINTER_REGNUM
1587 || regno == ARG_POINTER_REGNUM);
1588 }
1589
1590 /* Return true if the address expression formed by BASE + OFFSET is
1591 valid. */
1592 static bool
1593 nios2_valid_addr_expr_p (rtx base, rtx offset, bool strict_p)
1594 {
1595 if (!strict_p && GET_CODE (base) == SUBREG)
1596 base = SUBREG_REG (base);
1597 return (REG_P (base)
1598 && nios2_regno_ok_for_base_p (REGNO (base), strict_p)
1599 && (offset == NULL_RTX
1600 || const_arith_operand (offset, Pmode)
1601 || nios2_unspec_reloc_p (offset)));
1602 }
1603
1604 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1605 static bool
1606 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
1607 rtx operand, bool strict_p)
1608 {
1609 switch (GET_CODE (operand))
1610 {
1611 /* Direct. */
1612 case SYMBOL_REF:
1613 if (SYMBOL_REF_TLS_MODEL (operand))
1614 return false;
1615
1616 if (nios2_symbol_ref_in_small_data_p (operand))
1617 return true;
1618
1619 /* Else, fall through. */
1620 case LABEL_REF:
1621 case CONST_INT:
1622 case CONST:
1623 case CONST_DOUBLE:
1624 return false;
1625
1626 /* Register indirect. */
1627 case REG:
1628 return nios2_regno_ok_for_base_p (REGNO (operand), strict_p);
1629
1630 /* Register indirect with displacement. */
1631 case PLUS:
1632 {
1633 rtx op0 = XEXP (operand, 0);
1634 rtx op1 = XEXP (operand, 1);
1635
1636 return (nios2_valid_addr_expr_p (op0, op1, strict_p)
1637 || nios2_valid_addr_expr_p (op1, op0, strict_p));
1638 }
1639
1640 default:
1641 break;
1642 }
1643 return false;
1644 }
1645
1646 /* Return true if SECTION is a small section name. */
1647 static bool
1648 nios2_small_section_name_p (const char *section)
1649 {
1650 return (strcmp (section, ".sbss") == 0
1651 || strncmp (section, ".sbss.", 6) == 0
1652 || strcmp (section, ".sdata") == 0
1653 || strncmp (section, ".sdata.", 7) == 0);
1654 }
1655
1656 /* Return true if EXP should be placed in the small data section. */
1657 static bool
1658 nios2_in_small_data_p (const_tree exp)
1659 {
1660 /* We want to merge strings, so we never consider them small data. */
1661 if (TREE_CODE (exp) == STRING_CST)
1662 return false;
1663
1664 if (TREE_CODE (exp) == VAR_DECL)
1665 {
1666 if (DECL_SECTION_NAME (exp))
1667 {
1668 const char *section = DECL_SECTION_NAME (exp);
1669 if (nios2_small_section_name_p (section))
1670 return true;
1671 }
1672 else
1673 {
1674 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
1675
1676 /* If this is an incomplete type with size 0, then we can't put it
1677 in sdata because it might be too big when completed. */
1678 if (size > 0
1679 && (unsigned HOST_WIDE_INT) size <= nios2_section_threshold)
1680 return true;
1681 }
1682 }
1683
1684 return false;
1685 }
1686
1687 /* Return true if symbol is in small data section. */
1688
1689 bool
1690 nios2_symbol_ref_in_small_data_p (rtx sym)
1691 {
1692 tree decl;
1693
1694 gcc_assert (GET_CODE (sym) == SYMBOL_REF);
1695 decl = SYMBOL_REF_DECL (sym);
1696
1697 /* TLS variables are not accessed through the GP. */
1698 if (SYMBOL_REF_TLS_MODEL (sym) != 0)
1699 return false;
1700
1701 /* If the user has explicitly placed the symbol in a small data section
1702 via an attribute, generate gp-relative addressing even if the symbol
1703 is external, weak, or larger than we'd automatically put in the
1704 small data section. OTOH, if the symbol is located in some
1705 non-small-data section, we can't use gp-relative accesses on it
1706 unless the user has requested gpopt_data or gpopt_all. */
1707
1708 switch (nios2_gpopt_option)
1709 {
1710 case gpopt_none:
1711 /* Don't generate a gp-relative addressing mode if that's been
1712 disabled. */
1713 return false;
1714
1715 case gpopt_local:
1716 /* Use GP-relative addressing for small data symbols that are
1717 not external or weak, plus any symbols that have explicitly
1718 been placed in a small data section. */
1719 if (decl && DECL_SECTION_NAME (decl))
1720 return nios2_small_section_name_p (DECL_SECTION_NAME (decl));
1721 return (SYMBOL_REF_SMALL_P (sym)
1722 && !SYMBOL_REF_EXTERNAL_P (sym)
1723 && !(decl && DECL_WEAK (decl)));
1724
1725 case gpopt_global:
1726 /* Use GP-relative addressing for small data symbols, even if
1727 they are external or weak. Note that SYMBOL_REF_SMALL_P
1728 is also true of symbols that have explicitly been placed
1729 in a small data section. */
1730 return SYMBOL_REF_SMALL_P (sym);
1731
1732 case gpopt_data:
1733 /* Use GP-relative addressing for all data symbols regardless
1734 of the object size, but not for code symbols. This option
1735 is equivalent to the user asserting that the entire data
1736 section is accessible from the GP. */
1737 return !SYMBOL_REF_FUNCTION_P (sym);
1738
1739 case gpopt_all:
1740 /* Use GP-relative addressing for everything, including code.
1741 Effectively, the user has asserted that the entire program
1742 fits within the 64K range of the GP offset. */
1743 return true;
1744
1745 default:
1746 /* We shouldn't get here. */
1747 return false;
1748 }
1749 }
1750
1751 /* Implement TARGET_SECTION_TYPE_FLAGS. */
1752
1753 static unsigned int
1754 nios2_section_type_flags (tree decl, const char *name, int reloc)
1755 {
1756 unsigned int flags;
1757
1758 flags = default_section_type_flags (decl, name, reloc);
1759
1760 if (nios2_small_section_name_p (name))
1761 flags |= SECTION_SMALL;
1762
1763 return flags;
1764 }
1765
1766 /* Return true if SYMBOL_REF X binds locally. */
1767
1768 static bool
1769 nios2_symbol_binds_local_p (const_rtx x)
1770 {
1771 return (SYMBOL_REF_DECL (x)
1772 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
1773 : SYMBOL_REF_LOCAL_P (x));
1774 }
1775
1776 /* Position independent code related. */
1777
1778 /* Emit code to load the PIC register. */
1779 static void
1780 nios2_load_pic_register (void)
1781 {
1782 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
1783
1784 emit_insn (gen_load_got_register (pic_offset_table_rtx, tmp));
1785 emit_insn (gen_add3_insn (pic_offset_table_rtx, pic_offset_table_rtx, tmp));
1786 }
1787
1788 /* Generate a PIC address as a MEM rtx. */
1789 static rtx
1790 nios2_load_pic_address (rtx sym, int unspec, rtx tmp)
1791 {
1792 if (flag_pic == 2
1793 && GET_CODE (sym) == SYMBOL_REF
1794 && nios2_symbol_binds_local_p (sym))
1795 /* Under -fPIC, generate a GOTOFF address for local symbols. */
1796 {
1797 rtx offset = nios2_unspec_offset (sym, UNSPEC_PIC_GOTOFF_SYM);
1798 crtl->uses_pic_offset_table = 1;
1799 return nios2_large_got_address (offset, tmp);
1800 }
1801
1802 return gen_const_mem (Pmode, nios2_got_address (sym, unspec));
1803 }
1804
1805 /* Nonzero if the constant value X is a legitimate general operand
1806 when generating PIC code. It is given that flag_pic is on and
1807 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1808 bool
1809 nios2_legitimate_pic_operand_p (rtx x)
1810 {
1811 if (GET_CODE (x) == CONST
1812 && GET_CODE (XEXP (x, 0)) == UNSPEC
1813 && nios2_large_offset_p (XINT (XEXP (x, 0), 1)))
1814 return true;
1815
1816 return ! (GET_CODE (x) == SYMBOL_REF
1817 || GET_CODE (x) == LABEL_REF || GET_CODE (x) == CONST);
1818 }
1819
1820 /* Return TRUE if X is a thread-local symbol. */
1821 static bool
1822 nios2_tls_symbol_p (rtx x)
1823 {
1824 return (targetm.have_tls && GET_CODE (x) == SYMBOL_REF
1825 && SYMBOL_REF_TLS_MODEL (x) != 0);
1826 }
1827
1828 /* Legitimize addresses that are CONSTANT_P expressions. */
1829 static rtx
1830 nios2_legitimize_constant_address (rtx addr)
1831 {
1832 rtx base, offset;
1833 split_const (addr, &base, &offset);
1834
1835 if (nios2_tls_symbol_p (base))
1836 base = nios2_legitimize_tls_address (base);
1837 else if (flag_pic)
1838 base = nios2_load_pic_address (base, UNSPEC_PIC_SYM, NULL_RTX);
1839 else
1840 return addr;
1841
1842 if (offset != const0_rtx)
1843 {
1844 gcc_assert (can_create_pseudo_p ());
1845 return gen_rtx_PLUS (Pmode, force_reg (Pmode, base),
1846 (CONST_INT_P (offset)
1847 ? (SMALL_INT (INTVAL (offset))
1848 ? offset : force_reg (Pmode, offset))
1849 : offset));
1850 }
1851 return base;
1852 }
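/* Worked example, not part of the original sources (the symbol name is
   hypothetical):  under -fPIC, an address such as &x + 8 for an external
   int x is split by split_const into base = (symbol_ref "x") and
   offset = (const_int 8).  The base is replaced by a GOT (or GOTOFF) load
   into a pseudo register, and because 8 satisfies SMALL_INT the result is

     (plus:SI (reg:SI <pseudo>) (const_int 8))

   with the offset left as an immediate rather than forced into a register.  */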
1853
1854 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1855 static rtx
1856 nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1857 machine_mode mode ATTRIBUTE_UNUSED)
1858 {
1859 if (CONSTANT_P (x))
1860 return nios2_legitimize_constant_address (x);
1861
1862 /* For the TLS LE (Local Exec) model, the compiler may try to
1863 combine constant offsets with unspec relocs, creating address RTXs
1864 looking like this:
1865 (plus:SI (reg:SI 23 r23)
1866 (const:SI
1867 (plus:SI
1868 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
1869 (const_int 48 [0x30]))))
1870
1871 This usually happens when 'var' is a thread-local struct variable,
1872 and access of a field in var causes the addend.
1873
1874 We typically want this combining, so transform the above into this
1875 form, which is allowed:
1876 (plus:SI (reg:SI 23 r23)
1877 (const:SI
1878 (unspec:SI
1879 [(const:SI
1880 (plus:SI (symbol_ref:SI ("var"))
1881 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
1882
1883 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
1884 if (GET_CODE (x) == PLUS
1885 && GET_CODE (XEXP (x, 0)) == REG
1886 && GET_CODE (XEXP (x, 1)) == CONST)
1887 {
1888 rtx unspec, offset, reg = XEXP (x, 0);
1889 split_const (XEXP (x, 1), &unspec, &offset);
1890 if (GET_CODE (unspec) == UNSPEC
1891 && !nios2_large_offset_p (XINT (unspec, 1))
1892 && offset != const0_rtx)
1893 {
1894 unspec = copy_rtx (unspec);
1895 XVECEXP (unspec, 0, 0)
1896 = plus_constant (Pmode, XVECEXP (unspec, 0, 0), INTVAL (offset));
1897 x = gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
1898 }
1899 }
1900
1901 return x;
1902 }
1903
1904 static rtx
1905 nios2_delegitimize_address (rtx x)
1906 {
1907 x = delegitimize_mem_from_attrs (x);
1908
1909 if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
1910 {
1911 switch (XINT (XEXP (x, 0), 1))
1912 {
1913 case UNSPEC_PIC_SYM:
1914 case UNSPEC_PIC_CALL_SYM:
1915 case UNSPEC_PIC_GOTOFF_SYM:
1916 case UNSPEC_ADD_TLS_GD:
1917 case UNSPEC_ADD_TLS_LDM:
1918 case UNSPEC_LOAD_TLS_IE:
1919 case UNSPEC_ADD_TLS_LE:
1920 x = XVECEXP (XEXP (x, 0), 0, 0);
1921 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1922 break;
1923 }
1924 }
1925 return x;
1926 }
1927
1928 /* Main expander function for RTL moves. */
1929 int
1930 nios2_emit_move_sequence (rtx *operands, machine_mode mode)
1931 {
1932 rtx to = operands[0];
1933 rtx from = operands[1];
1934
1935 if (!register_operand (to, mode) && !reg_or_0_operand (from, mode))
1936 {
1937 gcc_assert (can_create_pseudo_p ());
1938 from = copy_to_mode_reg (mode, from);
1939 }
1940
1941 if (GET_CODE (from) == SYMBOL_REF || GET_CODE (from) == LABEL_REF
1942 || (GET_CODE (from) == CONST
1943 && GET_CODE (XEXP (from, 0)) != UNSPEC))
1944 from = nios2_legitimize_constant_address (from);
1945
1946 operands[0] = to;
1947 operands[1] = from;
1948 return 0;
1949 }
1950
1951 /* The function with address *CALL_OP is being called.  If the address
1952 needs to be loaded from the GOT, emit the instruction to do so and
1953 update *CALL_OP to point to the rtx for the loaded value.
1954 If REG != NULL_RTX, it is used as the target/scratch register in the
1955 GOT address calculation. */
1956 void
1957 nios2_adjust_call_address (rtx *call_op, rtx reg)
1958 {
1959 if (MEM_P (*call_op))
1960 call_op = &XEXP (*call_op, 0);
1961
1962 rtx addr = *call_op;
1963 if (flag_pic && CONSTANT_P (addr))
1964 {
1965 rtx tmp = reg ? reg : NULL_RTX;
1966 if (!reg)
1967 reg = gen_reg_rtx (Pmode);
1968 addr = nios2_load_pic_address (addr, UNSPEC_PIC_CALL_SYM, tmp);
1969 emit_insn (gen_rtx_SET (reg, addr));
1970 *call_op = reg;
1971 }
1972 }
1973
1974 \f
1975 /* Output assembly language related definitions. */
1976
1977 /* Print the operand OP to file stream FILE modified by LETTER.
1978 LETTER can be one of:
1979
1980 i: print "i" if OP is an immediate, except 0
1981 o: print "io" if OP is volatile
1982 z: for const0_rtx print $0 instead of 0
1983 H: for %hiadj
1984 L: for %lo
1985 U: for upper half of 32 bit value
1986 D: for the upper 32-bits of a 64-bit double value
1987 R: prints reverse condition.
1988 */
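/* Usage sketch, not part of the original sources (operand numbers are
   illustrative):  these letters are referenced from output templates in
   the machine description, e.g. a template string such as

     "add\t%0, %1, %z2"

   prints "zero" for operand 2 when it is const0_rtx and the register name
   otherwise, while %H1 and %L1 wrap operand 1 in %hiadj(...) and %lo(...)
   relocations respectively.  */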
1989 static void
1990 nios2_print_operand (FILE *file, rtx op, int letter)
1991 {
1992
1993 switch (letter)
1994 {
1995 case 'i':
1996 if (CONSTANT_P (op) && op != const0_rtx)
1997 fprintf (file, "i");
1998 return;
1999
2000 case 'o':
2001 if (GET_CODE (op) == MEM
2002 && ((MEM_VOLATILE_P (op) && TARGET_BYPASS_CACHE_VOLATILE)
2003 || TARGET_BYPASS_CACHE))
2004 fprintf (file, "io");
2005 return;
2006
2007 default:
2008 break;
2009 }
2010
2011 if (comparison_operator (op, VOIDmode))
2012 {
2013 enum rtx_code cond = GET_CODE (op);
2014 if (letter == 0)
2015 {
2016 fprintf (file, "%s", GET_RTX_NAME (cond));
2017 return;
2018 }
2019 if (letter == 'R')
2020 {
2021 fprintf (file, "%s", GET_RTX_NAME (reverse_condition (cond)));
2022 return;
2023 }
2024 }
2025
2026 switch (GET_CODE (op))
2027 {
2028 case REG:
2029 if (letter == 0 || letter == 'z')
2030 {
2031 fprintf (file, "%s", reg_names[REGNO (op)]);
2032 return;
2033 }
2034 else if (letter == 'D')
2035 {
2036 fprintf (file, "%s", reg_names[REGNO (op)+1]);
2037 return;
2038 }
2039 break;
2040
2041 case CONST_INT:
2042 if (INTVAL (op) == 0 && letter == 'z')
2043 {
2044 fprintf (file, "zero");
2045 return;
2046 }
2047
2048 if (letter == 'U')
2049 {
2050 HOST_WIDE_INT val = INTVAL (op);
2051 val = (val >> 16) & 0xFFFF;
2052 output_addr_const (file, gen_int_mode (val, SImode));
2053 return;
2054 }
2055 /* Else, fall through. */
2056
2057 case CONST:
2058 case LABEL_REF:
2059 case SYMBOL_REF:
2060 case CONST_DOUBLE:
2061 if (letter == 0 || letter == 'z')
2062 {
2063 output_addr_const (file, op);
2064 return;
2065 }
2066 else if (letter == 'H' || letter == 'L')
2067 {
2068 fprintf (file, "%%");
2069 if (GET_CODE (op) == CONST
2070 && GET_CODE (XEXP (op, 0)) == UNSPEC)
2071 {
2072 rtx unspec = XEXP (op, 0);
2073 int unspec_reloc = XINT (unspec, 1);
2074 gcc_assert (nios2_large_offset_p (unspec_reloc));
2075 fprintf (file, "%s_", nios2_unspec_reloc_name (unspec_reloc));
2076 op = XVECEXP (unspec, 0, 0);
2077 }
2078 fprintf (file, letter == 'H' ? "hiadj(" : "lo(");
2079 output_addr_const (file, op);
2080 fprintf (file, ")");
2081 return;
2082 }
2083 break;
2084
2085 case SUBREG:
2086 case MEM:
2087 if (letter == 0)
2088 {
2089 output_address (op);
2090 return;
2091 }
2092 break;
2093
2094 case CODE_LABEL:
2095 if (letter == 0)
2096 {
2097 output_addr_const (file, op);
2098 return;
2099 }
2100 break;
2101
2102 default:
2103 break;
2104 }
2105
2106 output_operand_lossage ("Unsupported operand for code '%c'", letter);
2107 gcc_unreachable ();
2108 }
2109
2110 /* Return true if OP is a reference that can be accessed using GP-relative
2111 addressing. */
2111 static bool
2112 gprel_constant_p (rtx op)
2113 {
2114 if (GET_CODE (op) == SYMBOL_REF
2115 && nios2_symbol_ref_in_small_data_p (op))
2116 return true;
2117 else if (GET_CODE (op) == CONST
2118 && GET_CODE (XEXP (op, 0)) == PLUS)
2119 return gprel_constant_p (XEXP (XEXP (op, 0), 0));
2120
2121 return false;
2122 }
2123
2124 /* Return the name string for a supported unspec reloc offset. */
2125 static const char *
2126 nios2_unspec_reloc_name (int unspec)
2127 {
2128 switch (unspec)
2129 {
2130 case UNSPEC_PIC_SYM:
2131 return "got";
2132 case UNSPEC_PIC_CALL_SYM:
2133 return "call";
2134 case UNSPEC_PIC_GOTOFF_SYM:
2135 return "gotoff";
2136 case UNSPEC_LOAD_TLS_IE:
2137 return "tls_ie";
2138 case UNSPEC_ADD_TLS_LE:
2139 return "tls_le";
2140 case UNSPEC_ADD_TLS_GD:
2141 return "tls_gd";
2142 case UNSPEC_ADD_TLS_LDM:
2143 return "tls_ldm";
2144 case UNSPEC_ADD_TLS_LDO:
2145 return "tls_ldo";
2146 default:
2147 return NULL;
2148 }
2149 }
2150
2151 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2152 static bool
2153 nios2_output_addr_const_extra (FILE *file, rtx op)
2154 {
2155 const char *name;
2156 gcc_assert (GET_CODE (op) == UNSPEC);
2157
2158 /* Support for printing out const unspec relocations. */
2159 name = nios2_unspec_reloc_name (XINT (op, 1));
2160 if (name)
2161 {
2162 fprintf (file, "%%%s(", name);
2163 output_addr_const (file, XVECEXP (op, 0, 0));
2164 fprintf (file, ")");
2165 return true;
2166 }
2167 return false;
2168 }
2169
2170 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
2171 static void
2172 nios2_print_operand_address (FILE *file, rtx op)
2173 {
2174 switch (GET_CODE (op))
2175 {
2176 case CONST:
2177 case CONST_INT:
2178 case LABEL_REF:
2179 case CONST_DOUBLE:
2180 case SYMBOL_REF:
2181 if (gprel_constant_p (op))
2182 {
2183 fprintf (file, "%%gprel(");
2184 output_addr_const (file, op);
2185 fprintf (file, ")(%s)", reg_names[GP_REGNO]);
2186 return;
2187 }
2188
2189 break;
2190
2191 case PLUS:
2192 {
2193 rtx op0 = XEXP (op, 0);
2194 rtx op1 = XEXP (op, 1);
2195
2196 if (REG_P (op0) && CONSTANT_P (op1))
2197 {
2198 output_addr_const (file, op1);
2199 fprintf (file, "(%s)", reg_names[REGNO (op0)]);
2200 return;
2201 }
2202 else if (REG_P (op1) && CONSTANT_P (op0))
2203 {
2204 output_addr_const (file, op0);
2205 fprintf (file, "(%s)", reg_names[REGNO (op1)]);
2206 return;
2207 }
2208 }
2209 break;
2210
2211 case REG:
2212 fprintf (file, "0(%s)", reg_names[REGNO (op)]);
2213 return;
2214
2215 case MEM:
2216 {
2217 rtx base = XEXP (op, 0);
2218 nios2_print_operand_address (file, base);
2219 return;
2220 }
2221 default:
2222 break;
2223 }
2224
2225 fprintf (stderr, "Missing way to print address\n");
2226 debug_rtx (op);
2227 gcc_unreachable ();
2228 }
2229
2230 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2231 static void
2232 nios2_output_dwarf_dtprel (FILE *file, int size, rtx x)
2233 {
2234 gcc_assert (size == 4);
2235 fprintf (file, "\t.4byte\t%%tls_ldo(");
2236 output_addr_const (file, x);
2237 fprintf (file, ")");
2238 }
2239
2240 /* Implement TARGET_ASM_FILE_END. */
2241
2242 static void
2243 nios2_asm_file_end (void)
2244 {
2245 /* The Nios II Linux stack is mapped non-executable by default, so add a
2246 .note.GNU-stack section for switching to executable stacks only when
2247 trampolines are generated. */
2248 if (TARGET_LINUX_ABI && trampolines_created)
2249 file_end_indicate_exec_stack ();
2250 }
2251
2252 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2253 static void
2254 nios2_asm_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2255 {
2256 if (flag_verbose_asm || flag_debug_asm)
2257 {
2258 nios2_compute_frame_layout ();
2259 nios2_dump_frame_layout (file);
2260 }
2261 }
2262
2263 /* Emit assembly of custom FPU instructions. */
2264 const char *
2265 nios2_fpu_insn_asm (enum n2fpu_code code)
2266 {
2267 static char buf[256];
2268 const char *op1, *op2, *op3;
2269 int ln = 256, n = 0;
2270
2271 int N = N2FPU_N (code);
2272 int num_operands = N2FPU (code).num_operands;
2273 const char *insn_name = N2FPU_NAME (code);
2274 tree ftype = nios2_ftype (N2FPU_FTCODE (code));
2275 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
2276 machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
2277
2278 /* Prepare X register for DF input operands. */
2279 if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
2280 n = snprintf (buf, ln, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2281 N2FPU_N (n2fpu_fwrx));
2282
2283 if (src_mode == SFmode)
2284 {
2285 if (dst_mode == VOIDmode)
2286 {
2287 /* The fwry case. */
2288 op1 = op3 = "zero";
2289 op2 = "%0";
2290 num_operands -= 1;
2291 }
2292 else
2293 {
2294 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2295 op2 = "%1";
2296 op3 = (num_operands == 2 ? "zero" : "%2");
2297 }
2298 }
2299 else if (src_mode == DFmode)
2300 {
2301 if (dst_mode == VOIDmode)
2302 {
2303 /* The fwrx case. */
2304 op1 = "zero";
2305 op2 = "%0";
2306 op3 = "%D0";
2307 num_operands -= 1;
2308 }
2309 else
2310 {
2311 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2312 op2 = (num_operands == 2 ? "%1" : "%2");
2313 op3 = (num_operands == 2 ? "%D1" : "%D2");
2314 }
2315 }
2316 else if (src_mode == VOIDmode)
2317 {
2318 /* frdxlo, frdxhi, frdy cases. */
2319 gcc_assert (dst_mode == SFmode);
2320 op1 = "%0";
2321 op2 = op3 = "zero";
2322 }
2323 else if (src_mode == SImode)
2324 {
2325 /* Conversion operators. */
2326 gcc_assert (num_operands == 2);
2327 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2328 op2 = "%1";
2329 op3 = "zero";
2330 }
2331 else
2332 gcc_unreachable ();
2333
2334 /* Main instruction string. */
2335 n += snprintf (buf + n, ln - n, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2336 N, op1, op2, op3, insn_name,
2337 (num_operands >= 2 ? ", %1" : ""),
2338 (num_operands == 3 ? ", %2" : ""));
2339
2340 /* Extraction of Y register for DF results. */
2341 if (dst_mode == DFmode)
2342 snprintf (buf + n, ln - n, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2343 N2FPU_N (n2fpu_frdy));
2344 return buf;
2345 }
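/* Illustrative output, not part of the original sources (the opcode number
   253 is only an example configuration):  for a single-precision add
   enabled with -mcustom-fadds=253, the routine above builds a template
   along the lines of

     custom  253, %0, %1, %2 # fadds %0, %1, %2

   while double-precision operations additionally emit the fwrx prefix and
   frdy suffix custom instructions to transfer the second 32-bit halves of
   the operands and result.  */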
2346
2347 \f
2348
2349 /* Function argument related. */
2350
2351 /* Define where to put the arguments to a function.  Return NULL_RTX to
2352 pass the argument on the stack, or a hard register rtx in which to
2353 pass the argument.
2354
2355 MODE is the argument's machine mode.
2356 TYPE is the data type of the argument (as a tree).
2357 This is null for libcalls where that information may
2358 not be available.
2359 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2360 the preceding args and about the function being called.
2361 NAMED is nonzero if this argument is a named parameter
2362 (otherwise it is an extra parameter matching an ellipsis). */
2363
2364 static rtx
2365 nios2_function_arg (cumulative_args_t cum_v, machine_mode mode,
2366 const_tree type ATTRIBUTE_UNUSED,
2367 bool named ATTRIBUTE_UNUSED)
2368 {
2369 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2370 rtx return_rtx = NULL_RTX;
2371
2372 if (cum->regs_used < NUM_ARG_REGS)
2373 return_rtx = gen_rtx_REG (mode, FIRST_ARG_REGNO + cum->regs_used);
2374
2375 return return_rtx;
2376 }
2377
2378 /* Return the number of bytes, at the beginning of the argument, that must
2379 be put in registers.  Zero means the argument is passed entirely in
2380 registers or entirely in memory. */
2381
2382 static int
2383 nios2_arg_partial_bytes (cumulative_args_t cum_v,
2384 machine_mode mode, tree type ATTRIBUTE_UNUSED,
2385 bool named ATTRIBUTE_UNUSED)
2386 {
2387 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2388 HOST_WIDE_INT param_size;
2389
2390 if (mode == BLKmode)
2391 {
2392 param_size = int_size_in_bytes (type);
2393 gcc_assert (param_size >= 0);
2394 }
2395 else
2396 param_size = GET_MODE_SIZE (mode);
2397
2398 /* Convert to words (round up). */
2399 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
2400
2401 if (cum->regs_used < NUM_ARG_REGS
2402 && cum->regs_used + param_size > NUM_ARG_REGS)
2403 return (NUM_ARG_REGS - cum->regs_used) * UNITS_PER_WORD;
2404
2405 return 0;
2406 }
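/* Worked example, not part of the original sources (assumes the usual
   Nios II convention of four argument registers, r4-r7):  if three
   argument words are already in use and the next argument is a 12-byte
   struct, param_size rounds to 3 words, so the function returns
   1 * UNITS_PER_WORD: the first word of the struct travels in the last
   argument register and the remaining two words go on the stack.  */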
2407
2408 /* Update the data in CUM to advance over an argument of mode MODE
2409 and data type TYPE; TYPE is null for libcalls where that information
2410 may not be available. */
2411
2412 static void
2413 nios2_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2414 const_tree type ATTRIBUTE_UNUSED,
2415 bool named ATTRIBUTE_UNUSED)
2416 {
2417 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2418 HOST_WIDE_INT param_size;
2419
2420 if (mode == BLKmode)
2421 {
2422 param_size = int_size_in_bytes (type);
2423 gcc_assert (param_size >= 0);
2424 }
2425 else
2426 param_size = GET_MODE_SIZE (mode);
2427
2428 /* Convert to words (round up). */
2429 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
2430
2431 if (cum->regs_used + param_size > NUM_ARG_REGS)
2432 cum->regs_used = NUM_ARG_REGS;
2433 else
2434 cum->regs_used += param_size;
2435 }
2436
2437 enum direction
2438 nios2_function_arg_padding (machine_mode mode, const_tree type)
2439 {
2440 /* On little-endian targets, the first byte of every stack argument
2441 is passed in the first byte of the stack slot. */
2442 if (!BYTES_BIG_ENDIAN)
2443 return upward;
2444
2445 /* Otherwise, integral types are padded downward: the last byte of a
2446 stack argument is passed in the last byte of the stack slot. */
2447 if (type != 0
2448 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
2449 : GET_MODE_CLASS (mode) == MODE_INT)
2450 return downward;
2451
2452 /* Arguments smaller than a stack slot are padded downward. */
2453 if (mode != BLKmode)
2454 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY) ? upward : downward;
2455
2456 return ((int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT))
2457 ? upward : downward);
2458 }
2459
2460 enum direction
2461 nios2_block_reg_padding (machine_mode mode, tree type,
2462 int first ATTRIBUTE_UNUSED)
2463 {
2464 return nios2_function_arg_padding (mode, type);
2465 }
2466
2467 /* Emit RTL insns to initialize the variable parts of a trampoline.
2468 FNADDR is an RTX for the address of the function's pure code.
2469 CXT is an RTX for the static chain value for the function.
2470 On Nios II, we handle this by a library call. */
2471 static void
2472 nios2_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2473 {
2474 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2475 rtx ctx_reg = force_reg (Pmode, cxt);
2476 rtx addr = force_reg (Pmode, XEXP (m_tramp, 0));
2477
2478 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
2479 LCT_NORMAL, VOIDmode, 3, addr, Pmode, fnaddr, Pmode,
2480 ctx_reg, Pmode);
2481 }
2482
2483 /* Implement TARGET_FUNCTION_VALUE. */
2484 static rtx
2485 nios2_function_value (const_tree ret_type, const_tree fn ATTRIBUTE_UNUSED,
2486 bool outgoing ATTRIBUTE_UNUSED)
2487 {
2488 return gen_rtx_REG (TYPE_MODE (ret_type), FIRST_RETVAL_REGNO);
2489 }
2490
2491 /* Implement TARGET_LIBCALL_VALUE. */
2492 static rtx
2493 nios2_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
2494 {
2495 return gen_rtx_REG (mode, FIRST_RETVAL_REGNO);
2496 }
2497
2498 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
2499 static bool
2500 nios2_function_value_regno_p (const unsigned int regno)
2501 {
2502 return regno == FIRST_RETVAL_REGNO;
2503 }
2504
2505 /* Implement TARGET_RETURN_IN_MEMORY. */
2506 static bool
2507 nios2_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2508 {
2509 return (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
2510 || int_size_in_bytes (type) == -1);
2511 }
2512
2513 /* TODO: It may be possible to eliminate the copyback and implement
2514 our own va_arg type. */
2515 static void
2516 nios2_setup_incoming_varargs (cumulative_args_t cum_v,
2517 machine_mode mode, tree type,
2518 int *pretend_size, int second_time)
2519 {
2520 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2521 CUMULATIVE_ARGS local_cum;
2522 cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
2523 int regs_to_push;
2524 int pret_size;
2525
2526 local_cum = *cum;
2527 nios2_function_arg_advance (local_cum_v, mode, type, 1);
2528
2529 regs_to_push = NUM_ARG_REGS - local_cum.regs_used;
2530
2531 if (!second_time && regs_to_push > 0)
2532 {
2533 rtx ptr = virtual_incoming_args_rtx;
2534 rtx mem = gen_rtx_MEM (BLKmode, ptr);
2535 emit_insn (gen_blockage ());
2536 move_block_from_reg (local_cum.regs_used + FIRST_ARG_REGNO, mem,
2537 regs_to_push);
2538 emit_insn (gen_blockage ());
2539 }
2540
2541 pret_size = regs_to_push * UNITS_PER_WORD;
2542 if (pret_size)
2543 *pretend_size = pret_size;
2544 }
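/* Worked example, not part of the original sources (again assuming four
   argument registers):  for a prototype like int f (int a, ...), advancing
   over the last named argument leaves regs_to_push == 3, so the three
   remaining argument registers are dumped into the incoming argument area
   and *pretend_size becomes 3 * UNITS_PER_WORD bytes.  */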
2545
2546 \f
2547
2548 /* Init FPU builtins. */
2549 static void
2550 nios2_init_fpu_builtins (int start_code)
2551 {
2552 tree fndecl;
2553 char builtin_name[64] = "__builtin_custom_";
2554 unsigned int i, n = strlen ("__builtin_custom_");
2555
2556 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
2557 {
2558 snprintf (builtin_name + n, sizeof (builtin_name) - n,
2559 "%s", N2FPU_NAME (i));
2560 fndecl =
2561 add_builtin_function (builtin_name, nios2_ftype (N2FPU_FTCODE (i)),
2562 start_code + i, BUILT_IN_MD, NULL, NULL_TREE);
2563 nios2_register_builtin_fndecl (start_code + i, fndecl);
2564 }
2565 }
2566
2567 /* Helper function for expanding FPU builtins. */
2568 static rtx
2569 nios2_expand_fpu_builtin (tree exp, unsigned int code, rtx target)
2570 {
2571 struct expand_operand ops[MAX_RECOG_OPERANDS];
2572 enum insn_code icode = N2FPU_ICODE (code);
2573 int nargs, argno, opno = 0;
2574 int num_operands = N2FPU (code).num_operands;
2575 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
2576 bool has_target_p = (dst_mode != VOIDmode);
2577
2578 if (N2FPU_N (code) < 0)
2579 fatal_error (input_location,
2580 "Cannot call %<__builtin_custom_%s%> without specifying switch"
2581 " %<-mcustom-%s%>", N2FPU_NAME (code), N2FPU_NAME (code));
2582 if (has_target_p)
2583 create_output_operand (&ops[opno++], target, dst_mode);
2584 else
2585 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
2586 num_operands -= 1;
2587 nargs = call_expr_nargs (exp);
2588 for (argno = 0; argno < nargs; argno++)
2589 {
2590 tree arg = CALL_EXPR_ARG (exp, argno);
2591 create_input_operand (&ops[opno++], expand_normal (arg),
2592 TYPE_MODE (TREE_TYPE (arg)));
2593 }
2594 if (!maybe_expand_insn (icode, num_operands, ops))
2595 {
2596 error ("invalid argument to built-in function");
2597 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
2598 }
2599 return has_target_p ? ops[0].value : const0_rtx;
2600 }
2601
2602 /* Nios II has custom instruction built-in functions of the forms:
2603 __builtin_custom_n
2604 __builtin_custom_nX
2605 __builtin_custom_nXX
2606 __builtin_custom_Xn
2607 __builtin_custom_XnX
2608 __builtin_custom_XnXX
2609
2610 where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
2611 Therefore with 0-1 return values, and 0-2 arguments, we have a
2612 total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
2613 */
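/* Usage sketch, not part of the original sources (opcode values and
   variable names are hypothetical):  with this naming scheme a user can
   write, for example,

     int   r = __builtin_custom_inii (5, a, b);   // int result, two int args
     float f = __builtin_custom_fnf (40, x);      // float result, one float arg
     __builtin_custom_nii (7, a, b);              // no result, two int args

   where the first argument is always the custom instruction opcode and
   must be a compile-time constant in the range 0-255.  */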
2614 #define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
2615 static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
2616
2617 static void
2618 nios2_init_custom_builtins (int start_code)
2619 {
2620 tree builtin_ftype, ret_type, fndecl;
2621 char builtin_name[32] = "__builtin_custom_";
2622 int n = strlen ("__builtin_custom_");
2623 int builtin_code = 0;
2624 int lhs, rhs1, rhs2;
2625
2626 struct { tree type; const char *c; } op[4];
2627 /* z */ op[0].c = ""; op[0].type = NULL_TREE;
2628 /* f */ op[1].c = "f"; op[1].type = float_type_node;
2629 /* i */ op[2].c = "i"; op[2].type = integer_type_node;
2630 /* p */ op[3].c = "p"; op[3].type = ptr_type_node;
2631
2632 /* We enumerate through the possible operand types to create all the
2633 __builtin_custom_XnXX function tree types. Note that these may slightly
2634 overlap with the function types created for other fixed builtins. */
2635
2636 for (lhs = 0; lhs < 4; lhs++)
2637 for (rhs1 = 0; rhs1 < 4; rhs1++)
2638 for (rhs2 = 0; rhs2 < 4; rhs2++)
2639 {
2640 if (rhs1 == 0 && rhs2 != 0)
2641 continue;
2642 ret_type = (op[lhs].type ? op[lhs].type : void_type_node);
2643 builtin_ftype
2644 = build_function_type_list (ret_type, integer_type_node,
2645 op[rhs1].type, op[rhs2].type,
2646 NULL_TREE);
2647 snprintf (builtin_name + n, 32 - n, "%sn%s%s",
2648 op[lhs].c, op[rhs1].c, op[rhs2].c);
2649 /* Save copy of parameter string into custom_builtin_name[]. */
2650 strncpy (custom_builtin_name[builtin_code], builtin_name + n, 5);
2651 fndecl =
2652 add_builtin_function (builtin_name, builtin_ftype,
2653 start_code + builtin_code,
2654 BUILT_IN_MD, NULL, NULL_TREE);
2655 nios2_register_builtin_fndecl (start_code + builtin_code, fndecl);
2656 builtin_code += 1;
2657 }
2658 }
2659
2660 /* Helper function for expanding custom builtins. */
2661 static rtx
2662 nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
2663 {
2664 bool has_target_p = (TREE_TYPE (exp) != void_type_node);
2665 machine_mode tmode = VOIDmode;
2666 int nargs, argno;
2667 rtx value, insn, unspec_args[3];
2668 tree arg;
2669
2670 /* XnXX form. */
2671 if (has_target_p)
2672 {
2673 tmode = TYPE_MODE (TREE_TYPE (exp));
2674 if (!target || GET_MODE (target) != tmode
2675 || !REG_P (target))
2676 target = gen_reg_rtx (tmode);
2677 }
2678
2679 nargs = call_expr_nargs (exp);
2680 for (argno = 0; argno < nargs; argno++)
2681 {
2682 arg = CALL_EXPR_ARG (exp, argno);
2683 value = expand_normal (arg);
2684 unspec_args[argno] = value;
2685 if (argno == 0)
2686 {
2687 if (!custom_insn_opcode (value, VOIDmode))
2688 error ("custom instruction opcode must be compile time "
2689 "constant in the range 0-255 for __builtin_custom_%s",
2690 custom_builtin_name[index]);
2691 }
2692 else
2693 /* For other arguments, force into a register. */
2694 unspec_args[argno] = force_reg (TYPE_MODE (TREE_TYPE (arg)),
2695 unspec_args[argno]);
2696 }
2697 /* Fill remaining unspec operands with zero. */
2698 for (; argno < 3; argno++)
2699 unspec_args[argno] = const0_rtx;
2700
2701 insn = (has_target_p
2702 ? gen_rtx_SET (target,
2703 gen_rtx_UNSPEC_VOLATILE (tmode,
2704 gen_rtvec_v (3, unspec_args),
2705 UNSPECV_CUSTOM_XNXX))
2706 : gen_rtx_UNSPEC_VOLATILE (VOIDmode, gen_rtvec_v (3, unspec_args),
2707 UNSPECV_CUSTOM_NXX));
2708 emit_insn (insn);
2709 return has_target_p ? target : const0_rtx;
2710 }
2711
2712
2713 \f
2714
2715 /* Main definition of built-in functions. Nios II has a small number of fixed
2716 builtins, plus a large number of FPU insn builtins, and builtins for
2717 generating custom instructions. */
2718
2719 struct nios2_builtin_desc
2720 {
2721 enum insn_code icode;
2722 enum nios2_ftcode ftype;
2723 const char *name;
2724 };
2725
2726 #define N2_BUILTINS \
2727 N2_BUILTIN_DEF (sync, N2_FTYPE_VOID_VOID) \
2728 N2_BUILTIN_DEF (ldbio, N2_FTYPE_SI_CVPTR) \
2729 N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR) \
2730 N2_BUILTIN_DEF (ldhio, N2_FTYPE_SI_CVPTR) \
2731 N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR) \
2732 N2_BUILTIN_DEF (ldwio, N2_FTYPE_SI_CVPTR) \
2733 N2_BUILTIN_DEF (stbio, N2_FTYPE_VOID_VPTR_SI) \
2734 N2_BUILTIN_DEF (sthio, N2_FTYPE_VOID_VPTR_SI) \
2735 N2_BUILTIN_DEF (stwio, N2_FTYPE_VOID_VPTR_SI) \
2736 N2_BUILTIN_DEF (rdctl, N2_FTYPE_SI_SI) \
2737 N2_BUILTIN_DEF (wrctl, N2_FTYPE_VOID_SI_SI)
2738
2739 enum nios2_builtin_code {
2740 #define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
2741 N2_BUILTINS
2742 #undef N2_BUILTIN_DEF
2743 NUM_FIXED_NIOS2_BUILTINS
2744 };
2745
2746 static const struct nios2_builtin_desc nios2_builtins[] = {
2747 #define N2_BUILTIN_DEF(name, ftype) \
2748 { CODE_FOR_ ## name, ftype, "__builtin_" #name },
2749 N2_BUILTINS
2750 #undef N2_BUILTIN_DEF
2751 };
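/* Usage sketch, not part of the original sources (the address and variable
   are hypothetical):  the ld*io/st*io builtins expand to the
   cache-bypassing load/store instructions, e.g.

     volatile int *mmio = (volatile int *) 0x10001000;
     int v = __builtin_ldwio (mmio);    // ldwio
     __builtin_stwio (mmio, v | 1);     // stwio
     __builtin_sync ();                 // sync

   while __builtin_rdctl and __builtin_wrctl access the control registers
   (see nios2_expand_rdwrctl_builtin below).  */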
2752
2753 /* Start/ends of FPU/custom insn builtin index ranges. */
2754 static unsigned int nios2_fpu_builtin_base;
2755 static unsigned int nios2_custom_builtin_base;
2756 static unsigned int nios2_custom_builtin_end;
2757
2758 /* Implement TARGET_INIT_BUILTINS. */
2759 static void
2760 nios2_init_builtins (void)
2761 {
2762 unsigned int i;
2763
2764 /* Initialize fixed builtins. */
2765 for (i = 0; i < ARRAY_SIZE (nios2_builtins); i++)
2766 {
2767 const struct nios2_builtin_desc *d = &nios2_builtins[i];
2768 tree fndecl =
2769 add_builtin_function (d->name, nios2_ftype (d->ftype), i,
2770 BUILT_IN_MD, NULL, NULL);
2771 nios2_register_builtin_fndecl (i, fndecl);
2772 }
2773
2774 /* Initialize FPU builtins. */
2775 nios2_fpu_builtin_base = ARRAY_SIZE (nios2_builtins);
2776 nios2_init_fpu_builtins (nios2_fpu_builtin_base);
2777
2778 /* Initialize custom insn builtins. */
2779 nios2_custom_builtin_base
2780 = nios2_fpu_builtin_base + ARRAY_SIZE (nios2_fpu_insn);
2781 nios2_custom_builtin_end
2782 = nios2_custom_builtin_base + NUM_CUSTOM_BUILTINS;
2783 nios2_init_custom_builtins (nios2_custom_builtin_base);
2784 }
2785
2786 /* Array of fndecls for TARGET_BUILTIN_DECL. */
2787 #define NIOS2_NUM_BUILTINS \
2788 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
2789 static GTY(()) tree nios2_builtin_decls[NIOS2_NUM_BUILTINS];
2790
2791 static void
2792 nios2_register_builtin_fndecl (unsigned code, tree fndecl)
2793 {
2794 nios2_builtin_decls[code] = fndecl;
2795 }
2796
2797 /* Implement TARGET_BUILTIN_DECL. */
2798 static tree
2799 nios2_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2800 {
2801 gcc_assert (nios2_custom_builtin_end == ARRAY_SIZE (nios2_builtin_decls));
2802
2803 if (code >= nios2_custom_builtin_end)
2804 return error_mark_node;
2805
2806 if (code >= nios2_fpu_builtin_base
2807 && code < nios2_custom_builtin_base
2808 && ! N2FPU_ENABLED_P (code - nios2_fpu_builtin_base))
2809 return error_mark_node;
2810
2811 return nios2_builtin_decls[code];
2812 }
2813
2814 \f
2815 /* Low-level built-in expand routine. */
2816 static rtx
2817 nios2_expand_builtin_insn (const struct nios2_builtin_desc *d, int n,
2818 struct expand_operand *ops, bool has_target_p)
2819 {
2820 if (maybe_expand_insn (d->icode, n, ops))
2821 return has_target_p ? ops[0].value : const0_rtx;
2822 else
2823 {
2824 error ("invalid argument to built-in function %s", d->name);
2825 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
2826 }
2827 }
2828
2829 /* Expand ldio/stio form load-store instruction builtins. */
2830 static rtx
2831 nios2_expand_ldstio_builtin (tree exp, rtx target,
2832 const struct nios2_builtin_desc *d)
2833 {
2834 bool has_target_p;
2835 rtx addr, mem, val;
2836 struct expand_operand ops[MAX_RECOG_OPERANDS];
2837 machine_mode mode = insn_data[d->icode].operand[0].mode;
2838
2839 addr = expand_normal (CALL_EXPR_ARG (exp, 0));
2840 mem = gen_rtx_MEM (mode, addr);
2841
2842 if (insn_data[d->icode].operand[0].allows_mem)
2843 {
2844 /* stxio. */
2845 val = expand_normal (CALL_EXPR_ARG (exp, 1));
2846 if (CONST_INT_P (val))
2847 val = force_reg (mode, gen_int_mode (INTVAL (val), mode));
2848 val = simplify_gen_subreg (mode, val, GET_MODE (val), 0);
2849 create_output_operand (&ops[0], mem, mode);
2850 create_input_operand (&ops[1], val, mode);
2851 has_target_p = false;
2852 }
2853 else
2854 {
2855 /* ldxio. */
2856 create_output_operand (&ops[0], target, mode);
2857 create_input_operand (&ops[1], mem, mode);
2858 has_target_p = true;
2859 }
2860 return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
2861 }
2862
2863 /* Expand rdctl/wrctl builtins. */
2864 static rtx
2865 nios2_expand_rdwrctl_builtin (tree exp, rtx target,
2866 const struct nios2_builtin_desc *d)
2867 {
2868 bool has_target_p = (insn_data[d->icode].operand[0].predicate
2869 == register_operand);
2870 rtx ctlcode = expand_normal (CALL_EXPR_ARG (exp, 0));
2871 struct expand_operand ops[MAX_RECOG_OPERANDS];
2872 if (!rdwrctl_operand (ctlcode, VOIDmode))
2873 {
2874 error ("Control register number must be in range 0-31 for %s",
2875 d->name);
2876 return has_target_p ? gen_reg_rtx (SImode) : const0_rtx;
2877 }
2878 if (has_target_p)
2879 {
2880 create_output_operand (&ops[0], target, SImode);
2881 create_integer_operand (&ops[1], INTVAL (ctlcode));
2882 }
2883 else
2884 {
2885 rtx val = expand_normal (CALL_EXPR_ARG (exp, 1));
2886 create_integer_operand (&ops[0], INTVAL (ctlcode));
2887 create_input_operand (&ops[1], val, SImode);
2888 }
2889 return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
2890 }
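/* Usage sketch, not part of the original sources (control register 0 is
   only an example):  the register selector must be a literal constant in
   the range 0-31, so typical calls look like

     int status = __builtin_rdctl (0);   // roughly: rdctl rX, ctl0
     __builtin_wrctl (0, status & ~1);   // roughly: wrctl ctl0, rX

   and a non-constant or out-of-range selector is rejected by the error
   above.  */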
2891
2892 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
2893 a built-in function, with result going to TARGET if that's convenient
2894 (and in mode MODE if that's convenient).
2895 SUBTARGET may be used as the target for computing one of EXP's operands.
2896 IGNORE is nonzero if the value is to be ignored. */
2897
2898 static rtx
2899 nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
2900 machine_mode mode ATTRIBUTE_UNUSED,
2901 int ignore ATTRIBUTE_UNUSED)
2902 {
2903 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2904 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2905
2906 if (fcode < nios2_fpu_builtin_base)
2907 {
2908 const struct nios2_builtin_desc *d = &nios2_builtins[fcode];
2909
2910 switch (fcode)
2911 {
2912 case NIOS2_BUILTIN_sync:
2913 emit_insn (gen_sync ());
2914 return const0_rtx;
2915
2916 case NIOS2_BUILTIN_ldbio:
2917 case NIOS2_BUILTIN_ldbuio:
2918 case NIOS2_BUILTIN_ldhio:
2919 case NIOS2_BUILTIN_ldhuio:
2920 case NIOS2_BUILTIN_ldwio:
2921 case NIOS2_BUILTIN_stbio:
2922 case NIOS2_BUILTIN_sthio:
2923 case NIOS2_BUILTIN_stwio:
2924 return nios2_expand_ldstio_builtin (exp, target, d);
2925
2926 case NIOS2_BUILTIN_rdctl:
2927 case NIOS2_BUILTIN_wrctl:
2928 return nios2_expand_rdwrctl_builtin (exp, target, d);
2929
2930 default:
2931 gcc_unreachable ();
2932 }
2933 }
2934 else if (fcode < nios2_custom_builtin_base)
2935 /* FPU builtin range. */
2936 return nios2_expand_fpu_builtin (exp, fcode - nios2_fpu_builtin_base,
2937 target);
2938 else if (fcode < nios2_custom_builtin_end)
2939 /* Custom insn builtin range. */
2940 return nios2_expand_custom_builtin (exp, fcode - nios2_custom_builtin_base,
2941 target);
2942 else
2943 gcc_unreachable ();
2944 }
2945
2946 /* Implement TARGET_INIT_LIBFUNCS. */
2947 static void
2948 nios2_init_libfuncs (void)
2949 {
2950 /* For Linux, we have access to kernel support for atomic operations. */
2951 if (TARGET_LINUX_ABI)
2952 init_sync_libfuncs (UNITS_PER_WORD);
2953 }
2954
2955 \f
2956
2957 /* Register a custom code use, and signal an error if a conflict is found. */
2958 static void
2959 nios2_register_custom_code (unsigned int N, enum nios2_ccs_code status,
2960 int index)
2961 {
2962 gcc_assert (N <= 255);
2963
2964 if (status == CCS_FPU)
2965 {
2966 if (custom_code_status[N] == CCS_FPU && index != custom_code_index[N])
2967 {
2968 custom_code_conflict = true;
2969 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
2970 N2FPU_NAME (custom_code_index[N]), N2FPU_NAME (index));
2971 }
2972 else if (custom_code_status[N] == CCS_BUILTIN_CALL)
2973 {
2974 custom_code_conflict = true;
2975 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2976 "%<-mcustom-%s%>", custom_builtin_name[custom_code_index[N]],
2977 N2FPU_NAME (index));
2978 }
2979 }
2980 else if (status == CCS_BUILTIN_CALL)
2981 {
2982 if (custom_code_status[N] == CCS_FPU)
2983 {
2984 custom_code_conflict = true;
2985 error ("call to %<__builtin_custom_%s%> conflicts with switch "
2986 "%<-mcustom-%s%>", custom_builtin_name[index],
2987 N2FPU_NAME (custom_code_index[N]));
2988 }
2989 else
2990 {
2991 /* Note that code conflicts between different __builtin_custom_xnxx
2992 calls are not checked. */
2993 }
2994 }
2995 else
2996 gcc_unreachable ();
2997
2998 custom_code_status[N] = status;
2999 custom_code_index[N] = index;
3000 }
3001
3002 /* Mark a custom code as not in use. */
3003 static void
3004 nios2_deregister_custom_code (unsigned int N)
3005 {
3006 if (N <= 255)
3007 {
3008 custom_code_status[N] = CCS_UNUSED;
3009 custom_code_index[N] = 0;
3010 }
3011 }
3012
3013 /* Target attributes can affect per-function option state, so we need to
3014 save/restore the custom code tracking info using the
3015 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
3016
3017 static void
3018 nios2_option_save (struct cl_target_option *ptr,
3019 struct gcc_options *opts ATTRIBUTE_UNUSED)
3020 {
3021 unsigned int i;
3022 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3023 ptr->saved_fpu_custom_code[i] = N2FPU_N (i);
3024 memcpy (ptr->saved_custom_code_status, custom_code_status,
3025 sizeof (custom_code_status));
3026 memcpy (ptr->saved_custom_code_index, custom_code_index,
3027 sizeof (custom_code_index));
3028 }
3029
3030 static void
3031 nios2_option_restore (struct gcc_options *opts ATTRIBUTE_UNUSED,
3032 struct cl_target_option *ptr)
3033 {
3034 unsigned int i;
3035 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3036 N2FPU_N (i) = ptr->saved_fpu_custom_code[i];
3037 memcpy (custom_code_status, ptr->saved_custom_code_status,
3038 sizeof (custom_code_status));
3039 memcpy (custom_code_index, ptr->saved_custom_code_index,
3040 sizeof (custom_code_index));
3041 }
3042
3043 /* Inner function to process attribute((target(...))): take an argument
3044 and set the current options from it.  If we are given a list, recursively
3045 process each entry in the list. */
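/* Usage sketch, not part of the original sources (the particular option
   strings are only examples):  the strings handled here mirror the
   -mcustom-* command-line switches, so a user might write

     __attribute__ ((target ("custom-fadds=253,no-custom-fsubs")))
     float fast_add (float a, float b) { return a + b; }

   or select a whole FPU configuration with "custom-fpu-cfg=60-2".  */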
3046
3047 static bool
3048 nios2_valid_target_attribute_rec (tree args)
3049 {
3050 if (TREE_CODE (args) == TREE_LIST)
3051 {
3052 bool ret = true;
3053 for (; args; args = TREE_CHAIN (args))
3054 if (TREE_VALUE (args)
3055 && !nios2_valid_target_attribute_rec (TREE_VALUE (args)))
3056 ret = false;
3057 return ret;
3058 }
3059 else if (TREE_CODE (args) == STRING_CST)
3060 {
3061 char *argstr = ASTRDUP (TREE_STRING_POINTER (args));
3062 while (argstr && *argstr != '\0')
3063 {
3064 bool no_opt = false, end_p = false;
3065 char *eq = NULL, *p;
3066 while (ISSPACE (*argstr))
3067 argstr++;
3068 p = argstr;
3069 while (*p != '\0' && *p != ',')
3070 {
3071 if (!eq && *p == '=')
3072 eq = p;
3073 ++p;
3074 }
3075 if (*p == '\0')
3076 end_p = true;
3077 else
3078 *p = '\0';
3079 if (eq) *eq = '\0';
3080
3081 if (!strncmp (argstr, "no-", 3))
3082 {
3083 no_opt = true;
3084 argstr += 3;
3085 }
3086 if (!strncmp (argstr, "custom-fpu-cfg", 14))
3087 {
3088 char *end_eq = p;
3089 if (no_opt)
3090 {
3091 error ("custom-fpu-cfg option does not support %<no-%>");
3092 return false;
3093 }
3094 if (!eq)
3095 {
3096 error ("custom-fpu-cfg option requires configuration"
3097 " argument");
3098 return false;
3099 }
3100 /* Increment and skip whitespace. */
3101 while (ISSPACE (*(++eq))) ;
3102 /* Decrement and skip to before any trailing whitespace. */
3103 while (ISSPACE (*(--end_eq))) ;
3104
3105 nios2_handle_custom_fpu_cfg (eq, end_eq + 1, true);
3106 }
3107 else if (!strncmp (argstr, "custom-", 7))
3108 {
3109 int code = -1;
3110 unsigned int i;
3111 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3112 if (!strncmp (argstr + 7, N2FPU_NAME (i),
3113 strlen (N2FPU_NAME (i))))
3114 {
3115 /* Found insn. */
3116 code = i;
3117 break;
3118 }
3119 if (code >= 0)
3120 {
3121 if (no_opt)
3122 {
3123 if (eq)
3124 {
3125 error ("%<no-custom-%s%> does not accept arguments",
3126 N2FPU_NAME (code));
3127 return false;
3128 }
3129 /* Disable option by setting to -1. */
3130 nios2_deregister_custom_code (N2FPU_N (code));
3131 N2FPU_N (code) = -1;
3132 }
3133 else
3134 {
3135 char *t;
3136 if (eq)
3137 while (ISSPACE (*(++eq))) ;
3138 if (!eq || eq == p)
3139 {
3140 error ("%<custom-%s=%> requires argument",
3141 N2FPU_NAME (code));
3142 return false;
3143 }
3144 for (t = eq; t != p; ++t)
3145 {
3146 if (ISSPACE (*t))
3147 continue;
3148 if (!ISDIGIT (*t))
3149 {
3150 error ("%<custom-%s=%> argument requires "
3151 "numeric digits", N2FPU_NAME (code));
3152 return false;
3153 }
3154 }
3155 /* Set option to argument. */
3156 N2FPU_N (code) = atoi (eq);
3157 nios2_handle_custom_fpu_insn_option (code);
3158 }
3159 }
3160 else
3161 {
3162 error ("%<custom-%s%> is not recognized as an FPU instruction",
3163 argstr + 7);
3164 return false;
3165 }
3166 }
3167 else
3168 {
3169 error ("%<%s%> is unknown", argstr);
3170 return false;
3171 }
3172
3173 if (end_p)
3174 break;
3175 else
3176 argstr = p + 1;
3177 }
3178 return true;
3179 }
3180 else
3181 gcc_unreachable ();
3182 }
3183
3184 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
3185
3186 static tree
3187 nios2_valid_target_attribute_tree (tree args)
3188 {
3189 if (!nios2_valid_target_attribute_rec (args))
3190 return NULL_TREE;
3191 nios2_custom_check_insns ();
3192 return build_target_option_node (&global_options);
3193 }
3194
3195 /* Hook to validate attribute((target("string"))). */
3196
3197 static bool
3198 nios2_valid_target_attribute_p (tree fndecl, tree ARG_UNUSED (name),
3199 tree args, int ARG_UNUSED (flags))
3200 {
3201 struct cl_target_option cur_target;
3202 bool ret = true;
3203 tree old_optimize = build_optimization_node (&global_options);
3204 tree new_target, new_optimize;
3205 tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);
3206
3207 /* If the function changed the optimization levels as well as setting target
3208 options, start with the optimizations specified. */
3209 if (func_optimize && func_optimize != old_optimize)
3210 cl_optimization_restore (&global_options,
3211 TREE_OPTIMIZATION (func_optimize));
3212
3213 /* The target attributes may also change some optimization flags, so update
3214 the optimization options if necessary. */
3215 cl_target_option_save (&cur_target, &global_options);
3216 new_target = nios2_valid_target_attribute_tree (args);
3217 new_optimize = build_optimization_node (&global_options);
3218
3219 if (!new_target)
3220 ret = false;
3221
3222 else if (fndecl)
3223 {
3224 DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;
3225
3226 if (old_optimize != new_optimize)
3227 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
3228 }
3229
3230 cl_target_option_restore (&global_options, &cur_target);
3231
3232 if (old_optimize != new_optimize)
3233 cl_optimization_restore (&global_options,
3234 TREE_OPTIMIZATION (old_optimize));
3235 return ret;
3236 }
3237
3238 /* Remember the last target of nios2_set_current_function. */
3239 static GTY(()) tree nios2_previous_fndecl;
3240
3241 /* Establish appropriate back-end context for processing the function
3242 FNDECL. The argument might be NULL to indicate processing at top
3243 level, outside of any function scope. */
3244 static void
3245 nios2_set_current_function (tree fndecl)
3246 {
3247 tree old_tree = (nios2_previous_fndecl
3248 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl)
3249 : NULL_TREE);
3250
3251 tree new_tree = (fndecl
3252 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
3253 : NULL_TREE);
3254
3255 if (fndecl && fndecl != nios2_previous_fndecl)
3256 {
3257 nios2_previous_fndecl = fndecl;
3258 if (old_tree == new_tree)
3259 ;
3260
3261 else if (new_tree)
3262 {
3263 cl_target_option_restore (&global_options,
3264 TREE_TARGET_OPTION (new_tree));
3265 target_reinit ();
3266 }
3267
3268 else if (old_tree)
3269 {
3270 struct cl_target_option *def
3271 = TREE_TARGET_OPTION (target_option_current_node);
3272
3273 cl_target_option_restore (&global_options, def);
3274 target_reinit ();
3275 }
3276 }
3277 }
3278
3279 /* Hook to validate the current #pragma GCC target and set the FPU custom
3280 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3281 the options. */
3282 static bool
3283 nios2_pragma_target_parse (tree args, tree pop_target)
3284 {
3285 tree cur_tree;
3286 if (! args)
3287 {
3288 cur_tree = ((pop_target)
3289 ? pop_target
3290 : target_option_default_node);
3291 cl_target_option_restore (&global_options,
3292 TREE_TARGET_OPTION (cur_tree));
3293 }
3294 else
3295 {
3296 cur_tree = nios2_valid_target_attribute_tree (args);
3297 if (!cur_tree)
3298 return false;
3299 }
3300
3301 target_option_current_node = cur_tree;
3302 return true;
3303 }
3304
3305 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
3306 We are just using this hook to add some additional error checking to
3307 the default behavior. GCC does not provide a target hook for merging
3308 the target options, and only correctly handles merging empty vs non-empty
3309 option data; see merge_decls() in c-decl.c.
3310 So here we require either that at least one of the decls has empty
3311 target options, or that the target options/data be identical. */
3312 static tree
3313 nios2_merge_decl_attributes (tree olddecl, tree newdecl)
3314 {
3315 tree oldopts = lookup_attribute ("target", DECL_ATTRIBUTES (olddecl));
3316 tree newopts = lookup_attribute ("target", DECL_ATTRIBUTES (newdecl));
3317 if (newopts && oldopts && newopts != oldopts)
3318 {
3319 tree oldtree = DECL_FUNCTION_SPECIFIC_TARGET (olddecl);
3320 tree newtree = DECL_FUNCTION_SPECIFIC_TARGET (newdecl);
3321 if (oldtree && newtree && oldtree != newtree)
3322 {
3323 struct cl_target_option *olddata = TREE_TARGET_OPTION (oldtree);
3324 struct cl_target_option *newdata = TREE_TARGET_OPTION (newtree);
3325 if (olddata != newdata
3326 && memcmp (olddata, newdata, sizeof (struct cl_target_option)))
3327 error ("%qE redeclared with conflicting %qs attributes",
3328 DECL_NAME (newdecl), "target");
3329 }
3330 }
3331 return merge_attributes (DECL_ATTRIBUTES (olddecl),
3332 DECL_ATTRIBUTES (newdecl));
3333 }
3334
3335 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
3336 static void
3337 nios2_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
3338 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
3339 tree function)
3340 {
3341 rtx this_rtx, funexp;
3342 rtx_insn *insn;
3343
3344 /* Pretend to be a post-reload pass while generating rtl. */
3345 reload_completed = 1;
3346
3347 if (flag_pic)
3348 nios2_load_pic_register ();
3349
3350 /* Mark the end of the (empty) prologue. */
3351 emit_note (NOTE_INSN_PROLOGUE_END);
3352
3353 /* Find the "this" pointer. If the function returns a structure,
3354 the structure return pointer is in $5. */
3355 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
3356 this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO + 1);
3357 else
3358 this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
3359
3360 /* Add DELTA to THIS_RTX. */
3361 nios2_emit_add_constant (this_rtx, delta);
3362
3363 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
3364 if (vcall_offset)
3365 {
3366 rtx tmp;
3367
3368 tmp = gen_rtx_REG (Pmode, 2);
3369 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
3370 nios2_emit_add_constant (tmp, vcall_offset);
3371 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
3372 emit_insn (gen_add2_insn (this_rtx, tmp));
3373 }
3374
3375 /* Generate a tail call to the target function. */
3376 if (!TREE_USED (function))
3377 {
3378 assemble_external (function);
3379 TREE_USED (function) = 1;
3380 }
3381 funexp = XEXP (DECL_RTL (function), 0);
3382 /* Function address needs to be constructed under PIC,
3383 provide r2 to use here. */
3384 nios2_adjust_call_address (&funexp, gen_rtx_REG (Pmode, 2));
3385 insn = emit_call_insn (gen_sibcall_internal (funexp, const0_rtx));
3386 SIBLING_CALL_P (insn) = 1;
3387
3388 /* Run just enough of rest_of_compilation to get the insns emitted.
3389 There's not really enough bulk here to make other passes such as
3390 instruction scheduling worth while. Note that use_thunk calls
3391 assemble_start_function and assemble_end_function. */
3392 insn = get_insns ();
3393 shorten_branches (insn);
3394 final_start_function (insn, file, 1);
3395 final (insn, file, 1);
3396 final_end_function ();
3397
3398 /* Stop pretending to be a post-reload pass. */
3399 reload_completed = 0;
3400 }
3401
3402 \f
3403 /* Initialize the GCC target structure. */
3404 #undef TARGET_ASM_FUNCTION_PROLOGUE
3405 #define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
3406
3407 #undef TARGET_IN_SMALL_DATA_P
3408 #define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
3409
3410 #undef TARGET_SECTION_TYPE_FLAGS
3411 #define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
3412
3413 #undef TARGET_INIT_BUILTINS
3414 #define TARGET_INIT_BUILTINS nios2_init_builtins
3415 #undef TARGET_EXPAND_BUILTIN
3416 #define TARGET_EXPAND_BUILTIN nios2_expand_builtin
3417 #undef TARGET_BUILTIN_DECL
3418 #define TARGET_BUILTIN_DECL nios2_builtin_decl
3419
3420 #undef TARGET_INIT_LIBFUNCS
3421 #define TARGET_INIT_LIBFUNCS nios2_init_libfuncs
3422
3423 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3424 #define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
3425
3426 #undef TARGET_CAN_ELIMINATE
3427 #define TARGET_CAN_ELIMINATE nios2_can_eliminate
3428
3429 #undef TARGET_FUNCTION_ARG
3430 #define TARGET_FUNCTION_ARG nios2_function_arg
3431
3432 #undef TARGET_FUNCTION_ARG_ADVANCE
3433 #define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
3434
3435 #undef TARGET_ARG_PARTIAL_BYTES
3436 #define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
3437
3438 #undef TARGET_TRAMPOLINE_INIT
3439 #define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
3440
3441 #undef TARGET_FUNCTION_VALUE
3442 #define TARGET_FUNCTION_VALUE nios2_function_value
3443
3444 #undef TARGET_LIBCALL_VALUE
3445 #define TARGET_LIBCALL_VALUE nios2_libcall_value
3446
3447 #undef TARGET_FUNCTION_VALUE_REGNO_P
3448 #define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
3449
3450 #undef TARGET_RETURN_IN_MEMORY
3451 #define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
3452
3453 #undef TARGET_PROMOTE_PROTOTYPES
3454 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3455
3456 #undef TARGET_SETUP_INCOMING_VARARGS
3457 #define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
3458
3459 #undef TARGET_MUST_PASS_IN_STACK
3460 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
3461
3462 #undef TARGET_LEGITIMATE_CONSTANT_P
3463 #define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
3464
3465 #undef TARGET_LEGITIMIZE_ADDRESS
3466 #define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
3467
3468 #undef TARGET_DELEGITIMIZE_ADDRESS
3469 #define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address
3470
3471 #undef TARGET_LEGITIMATE_ADDRESS_P
3472 #define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
3473
3474 #undef TARGET_PREFERRED_RELOAD_CLASS
3475 #define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
3476
3477 #undef TARGET_RTX_COSTS
3478 #define TARGET_RTX_COSTS nios2_rtx_costs
3479
3480 #undef TARGET_HAVE_TLS
3481 #define TARGET_HAVE_TLS TARGET_LINUX_ABI
3482
3483 #undef TARGET_CANNOT_FORCE_CONST_MEM
3484 #define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
3485
3486 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
3487 #define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
3488
3489 #undef TARGET_PRINT_OPERAND
3490 #define TARGET_PRINT_OPERAND nios2_print_operand
3491
3492 #undef TARGET_PRINT_OPERAND_ADDRESS
3493 #define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
3494
3495 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3496 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra
3497
3498 #undef TARGET_ASM_FILE_END
3499 #define TARGET_ASM_FILE_END nios2_asm_file_end
3500
3501 #undef TARGET_OPTION_OVERRIDE
3502 #define TARGET_OPTION_OVERRIDE nios2_option_override
3503
3504 #undef TARGET_OPTION_SAVE
3505 #define TARGET_OPTION_SAVE nios2_option_save
3506
3507 #undef TARGET_OPTION_RESTORE
3508 #define TARGET_OPTION_RESTORE nios2_option_restore
3509
3510 #undef TARGET_SET_CURRENT_FUNCTION
3511 #define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
3512
3513 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
3514 #define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
3515
3516 #undef TARGET_OPTION_PRAGMA_PARSE
3517 #define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
3518
3519 #undef TARGET_MERGE_DECL_ATTRIBUTES
3520 #define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
3521
3522 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3523 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
3524 hook_bool_const_tree_hwi_hwi_const_tree_true
3525
3526 #undef TARGET_ASM_OUTPUT_MI_THUNK
3527 #define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk
3528
3529 struct gcc_target targetm = TARGET_INITIALIZER;
3530
3531 #include "gt-nios2.h"