1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "cfghooks.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "df.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "regs.h"
34 #include "insn-config.h"
35 #include "conditions.h"
36 #include "output.h"
37 #include "insn-attr.h"
38 #include "flags.h"
39 #include "recog.h"
40 #include "expmed.h"
41 #include "dojump.h"
42 #include "explow.h"
43 #include "calls.h"
44 #include "emit-rtl.h"
45 #include "varasm.h"
46 #include "stmt.h"
47 #include "expr.h"
48 #include "insn-codes.h"
49 #include "optabs.h"
50 #include "cfgrtl.h"
51 #include "cfganal.h"
52 #include "lcm.h"
53 #include "cfgbuild.h"
54 #include "cfgcleanup.h"
55 #include "diagnostic-core.h"
56 #include "toplev.h"
57 #include "target.h"
58 #include "tm_p.h"
59 #include "langhooks.h"
60 #include "debug.h"
61 #include "reload.h"
62 #include "stor-layout.h"
63 #include "builtins.h"
64
65 /* This file should be included last. */
66 #include "target-def.h"
67
68 /* Forward function declarations. */
69 static bool prologue_saved_reg_p (unsigned);
70 static void nios2_load_pic_register (void);
71 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
72 static const char *nios2_unspec_reloc_name (int);
73 static void nios2_register_builtin_fndecl (unsigned, tree);
74 static rtx nios2_ldst_parallel (bool, bool, bool, rtx, int,
75 unsigned HOST_WIDE_INT, bool);
76
77 /* Threshold for data being put into the small data/bss area, instead
78    of the normal data area (references to the small data/bss area take
79    1 instruction and use the global pointer; references to the normal
80    data area take 2 instructions). */
81 unsigned HOST_WIDE_INT nios2_section_threshold = NIOS2_DEFAULT_GVALUE;
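/* As an illustration, with a threshold of 8 (e.g. -G 8) a 4-byte
   global lands in .sdata/.sbss and is reached with a single
   gp-relative ldw/stw, while a larger object goes to the normal data
   section and first needs a %hiadj/%lo pair to form its address.  */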
82
83 struct GTY (()) machine_function
84 {
85 /* Current frame information, to be filled in by nios2_compute_frame_layout
86 with register save masks, and offsets for the current function. */
87
88 /* Mask of registers to save. */
89 unsigned int save_mask;
90 /* Number of bytes that the entire frame takes up. */
91 int total_size;
92 /* Number of bytes that variables take up. */
93 int var_size;
94 /* Number of bytes that outgoing arguments take up. */
95 int args_size;
96 /* Number of bytes needed to store registers in frame. */
97 int save_reg_size;
98 /* Number of bytes used to store callee-saved registers. */
99 int callee_save_reg_size;
100 /* Offset from new stack pointer to store registers. */
101 int save_regs_offset;
102 /* Offset from save_regs_offset to store frame pointer register. */
103 int fp_save_offset;
104 /* != 0 if function has a variable argument list. */
105 int uses_anonymous_args;
106 /* != 0 if frame layout already calculated. */
107 int initialized;
108 };
109
110 /* State to track the assignment of custom codes to FPU/custom builtins. */
111 static enum nios2_ccs_code custom_code_status[256];
112 static int custom_code_index[256];
113 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
114 static bool custom_code_conflict = false;
115
116 \f
117 /* Definition of builtin function types for nios2. */
118
119 #define N2_FTYPES \
120 N2_FTYPE(1, (SF)) \
121 N2_FTYPE(1, (VOID)) \
122 N2_FTYPE(2, (DF, DF)) \
123 N2_FTYPE(3, (DF, DF, DF)) \
124 N2_FTYPE(2, (DF, SF)) \
125 N2_FTYPE(2, (DF, SI)) \
126 N2_FTYPE(2, (DF, UI)) \
127 N2_FTYPE(2, (SF, DF)) \
128 N2_FTYPE(2, (SF, SF)) \
129 N2_FTYPE(3, (SF, SF, SF)) \
130 N2_FTYPE(2, (SF, SI)) \
131 N2_FTYPE(2, (SF, UI)) \
132 N2_FTYPE(2, (SI, CVPTR)) \
133 N2_FTYPE(2, (SI, DF)) \
134 N2_FTYPE(3, (SI, DF, DF)) \
135 N2_FTYPE(2, (SI, SF)) \
136 N2_FTYPE(3, (SI, SF, SF)) \
137 N2_FTYPE(2, (SI, SI)) \
138 N2_FTYPE(2, (UI, CVPTR)) \
139 N2_FTYPE(2, (UI, DF)) \
140 N2_FTYPE(2, (UI, SF)) \
141 N2_FTYPE(2, (VOID, DF)) \
142 N2_FTYPE(2, (VOID, SF)) \
143 N2_FTYPE(3, (VOID, SI, SI)) \
144 N2_FTYPE(3, (VOID, VPTR, SI))
145
146 #define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
147 #define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
148 #define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
149
150 /* Expand ftcode enumeration. */
151 enum nios2_ftcode {
152 #define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
153 N2_FTYPES
154 #undef N2_FTYPE
155 N2_FTYPE_MAX
156 };
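/* To make the token pasting concrete: the table entry
   N2_FTYPE (3, (SF, SF, SF)) expands via N2_FTYPE_OP3 to the
   enumerator N2_FTYPE_SF_SF_SF, for which nios2_ftype below builds
   the function type "float (float, float)".  */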
157
158 /* Return the tree function type, based on the ftcode. */
159 static tree
160 nios2_ftype (enum nios2_ftcode ftcode)
161 {
162 static tree types[(int) N2_FTYPE_MAX];
163
164 tree N2_TYPE_SF = float_type_node;
165 tree N2_TYPE_DF = double_type_node;
166 tree N2_TYPE_SI = integer_type_node;
167 tree N2_TYPE_UI = unsigned_type_node;
168 tree N2_TYPE_VOID = void_type_node;
169
170 static const_tree N2_TYPE_CVPTR, N2_TYPE_VPTR;
171 if (!N2_TYPE_CVPTR)
172 {
173 /* const volatile void *. */
174 N2_TYPE_CVPTR
175 = build_pointer_type (build_qualified_type (void_type_node,
176 (TYPE_QUAL_CONST
177 | TYPE_QUAL_VOLATILE)));
178 /* volatile void *. */
179 N2_TYPE_VPTR
180 = build_pointer_type (build_qualified_type (void_type_node,
181 TYPE_QUAL_VOLATILE));
182 }
183 if (types[(int) ftcode] == NULL_TREE)
184 switch (ftcode)
185 {
186 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
187 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
188 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
189 #define N2_FTYPE(N,ARGS) \
190 case N2_FTYPE_OP ## N ARGS: \
191 types[(int) ftcode] \
192 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
193 break;
194 N2_FTYPES
195 #undef N2_FTYPE
196 default: gcc_unreachable ();
197 }
198 return types[(int) ftcode];
199 }
200
201 \f
202 /* Definition of FPU instruction descriptions. */
203
204 struct nios2_fpu_insn_info
205 {
206 const char *name;
207 int num_operands, *optvar;
208 int opt, no_opt;
209 #define N2F_DF 0x1
210 #define N2F_DFREQ 0x2
211 #define N2F_UNSAFE 0x4
212 #define N2F_FINITE 0x8
213 #define N2F_NO_ERRNO 0x10
214 unsigned int flags;
215 enum insn_code icode;
216 enum nios2_ftcode ftcode;
217 };
218
219 /* Base macro for defining FPU instructions. */
220 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
221 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
222 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
223 N2_FTYPE_OP ## nop args }
224
225 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
226 #define N2FPU_OP2(mode) (mode, mode)
227 #define N2FPU_OP3(mode) (mode, mode, mode)
228 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
229 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
230 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
231 #define N2FPU_INSN_SF(code, nop, flags) \
232 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
233 #define N2FPU_INSN_DF(code, nop, flags) \
234 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
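/* For example, N2FPU_INSN_SF (add, 3, 0) expands through
   N2FPU_INSN_DEF and N2FPU_INSN_DEF_BASE to the initializer
     { "fadds", 3, &nios2_custom_fadds, OPT_mcustom_fadds_,
       OPT_mno_custom_fadds, 0, CODE_FOR_addsf3, N2_FTYPE_SF_SF_SF },
   tying the -mcustom-fadds= option to the addsf3 insn pattern.  */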
235
236 /* Compare instructions, 3 operand FP operation with a SI result. */
237 #define N2FPU_CMP_DEF(code, flags, m, M) \
238 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
239 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
240 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
241 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
242
243 /* The order of definition must be kept consistent with
244    enum n2fpu_code in nios2-opts.h. */
245 struct nios2_fpu_insn_info nios2_fpu_insn[] =
246 {
247 /* Single precision instructions. */
248 N2FPU_INSN_SF (add, 3, 0),
249 N2FPU_INSN_SF (sub, 3, 0),
250 N2FPU_INSN_SF (mul, 3, 0),
251 N2FPU_INSN_SF (div, 3, 0),
252 /* Due to textual difference between min/max and smin/smax. */
253 N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, s, S),
254 N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, s, S),
255 N2FPU_INSN_SF (neg, 2, 0),
256 N2FPU_INSN_SF (abs, 2, 0),
257 N2FPU_INSN_SF (sqrt, 2, 0),
258 N2FPU_INSN_SF (sin, 2, N2F_UNSAFE),
259 N2FPU_INSN_SF (cos, 2, N2F_UNSAFE),
260 N2FPU_INSN_SF (tan, 2, N2F_UNSAFE),
261 N2FPU_INSN_SF (atan, 2, N2F_UNSAFE),
262 N2FPU_INSN_SF (exp, 2, N2F_UNSAFE),
263 N2FPU_INSN_SF (log, 2, N2F_UNSAFE),
264 /* Single precision compares. */
265 N2FPU_CMP_SF (eq), N2FPU_CMP_SF (ne),
266 N2FPU_CMP_SF (lt), N2FPU_CMP_SF (le),
267 N2FPU_CMP_SF (gt), N2FPU_CMP_SF (ge),
268
269 /* Double precision instructions. */
270 N2FPU_INSN_DF (add, 3, 0),
271 N2FPU_INSN_DF (sub, 3, 0),
272 N2FPU_INSN_DF (mul, 3, 0),
273 N2FPU_INSN_DF (div, 3, 0),
274 /* Due to textual difference between min/max and smin/smax. */
275 N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, d, D),
276 N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, d, D),
277 N2FPU_INSN_DF (neg, 2, 0),
278 N2FPU_INSN_DF (abs, 2, 0),
279 N2FPU_INSN_DF (sqrt, 2, 0),
280 N2FPU_INSN_DF (sin, 2, N2F_UNSAFE),
281 N2FPU_INSN_DF (cos, 2, N2F_UNSAFE),
282 N2FPU_INSN_DF (tan, 2, N2F_UNSAFE),
283 N2FPU_INSN_DF (atan, 2, N2F_UNSAFE),
284 N2FPU_INSN_DF (exp, 2, N2F_UNSAFE),
285 N2FPU_INSN_DF (log, 2, N2F_UNSAFE),
286 /* Double precision compares. */
287 N2FPU_CMP_DF (eq), N2FPU_CMP_DF (ne),
288 N2FPU_CMP_DF (lt), N2FPU_CMP_DF (le),
289 N2FPU_CMP_DF (gt), N2FPU_CMP_DF (ge),
290
291 /* Conversion instructions. */
292 N2FPU_INSN_DEF_BASE (floatis, 2, 0, floatsisf2, (SF, SI)),
293 N2FPU_INSN_DEF_BASE (floatus, 2, 0, floatunssisf2, (SF, UI)),
294 N2FPU_INSN_DEF_BASE (floatid, 2, 0, floatsidf2, (DF, SI)),
295 N2FPU_INSN_DEF_BASE (floatud, 2, 0, floatunssidf2, (DF, UI)),
296 N2FPU_INSN_DEF_BASE (round, 2, N2F_NO_ERRNO, lroundsfsi2, (SI, SF)),
297 N2FPU_INSN_DEF_BASE (fixsi, 2, 0, fix_truncsfsi2, (SI, SF)),
298 N2FPU_INSN_DEF_BASE (fixsu, 2, 0, fixuns_truncsfsi2, (UI, SF)),
299 N2FPU_INSN_DEF_BASE (fixdi, 2, 0, fix_truncdfsi2, (SI, DF)),
300 N2FPU_INSN_DEF_BASE (fixdu, 2, 0, fixuns_truncdfsi2, (UI, DF)),
301 N2FPU_INSN_DEF_BASE (fextsd, 2, 0, extendsfdf2, (DF, SF)),
302 N2FPU_INSN_DEF_BASE (ftruncds, 2, 0, truncdfsf2, (SF, DF)),
303
304 /* X, Y access instructions. */
305 N2FPU_INSN_DEF_BASE (fwrx, 2, N2F_DFREQ, nios2_fwrx, (VOID, DF)),
306 N2FPU_INSN_DEF_BASE (fwry, 2, N2F_DFREQ, nios2_fwry, (VOID, SF)),
307 N2FPU_INSN_DEF_BASE (frdxlo, 1, N2F_DFREQ, nios2_frdxlo, (SF)),
308 N2FPU_INSN_DEF_BASE (frdxhi, 1, N2F_DFREQ, nios2_frdxhi, (SF)),
309 N2FPU_INSN_DEF_BASE (frdy, 1, N2F_DFREQ, nios2_frdy, (SF))
310 };
311
312 /* Some macros for ease of access. */
313 #define N2FPU(code) nios2_fpu_insn[(int) code]
314 #define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
315 #define N2FPU_N(code) (*N2FPU(code).optvar)
316 #define N2FPU_NAME(code) (N2FPU(code).name)
317 #define N2FPU_ICODE(code) (N2FPU(code).icode)
318 #define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
319 #define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
320 #define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
321 #define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
322 #define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
323 #define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
324
325 /* Same as above, but for cases where using only the op part is shorter. */
326 #define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
327 #define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
328 #define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
329
330 /* Export the FPU insn enabled predicate to nios2.md. */
331 bool
332 nios2_fpu_insn_enabled (enum n2fpu_code code)
333 {
334 return N2FPU_ENABLED_P (code);
335 }
336
337 /* Return true if COND comparison for mode MODE is enabled under current
338 settings. */
339
340 static bool
341 nios2_fpu_compare_enabled (enum rtx_code cond, machine_mode mode)
342 {
343 if (mode == SFmode)
344 switch (cond)
345 {
346 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqs);
347 case NE: return N2FPU_OP_ENABLED_P (fcmpnes);
348 case GT: return N2FPU_OP_ENABLED_P (fcmpgts);
349 case GE: return N2FPU_OP_ENABLED_P (fcmpges);
350 case LT: return N2FPU_OP_ENABLED_P (fcmplts);
351 case LE: return N2FPU_OP_ENABLED_P (fcmples);
352 default: break;
353 }
354 else if (mode == DFmode)
355 switch (cond)
356 {
357 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqd);
358 case NE: return N2FPU_OP_ENABLED_P (fcmpned);
359 case GT: return N2FPU_OP_ENABLED_P (fcmpgtd);
360 case GE: return N2FPU_OP_ENABLED_P (fcmpged);
361 case LT: return N2FPU_OP_ENABLED_P (fcmpltd);
362 case LE: return N2FPU_OP_ENABLED_P (fcmpled);
363 default: break;
364 }
365 return false;
366 }
367
368 /* Stack layout and calling conventions. */
369
370 #define NIOS2_STACK_ALIGN(LOC) \
371 (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
372 & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
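/* E.g. with the usual PREFERRED_STACK_BOUNDARY of 32 bits (4 bytes),
   NIOS2_STACK_ALIGN (10) computes (10 + 3) & ~3 == 12, rounding a
   10-byte request up to the next 4-byte boundary.  */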
373
374 /* Return the bytes needed to compute the frame pointer from the current
375 stack pointer. */
376 static int
377 nios2_compute_frame_layout (void)
378 {
379 unsigned int regno;
380 unsigned int save_mask = 0;
381 int total_size;
382 int var_size;
383 int out_args_size;
384 int save_reg_size;
385 int callee_save_reg_size;
386
387 if (cfun->machine->initialized)
388 return cfun->machine->total_size;
389
390 /* Calculate space needed for gp registers. */
391 save_reg_size = 0;
392 for (regno = 0; regno <= LAST_GP_REG; regno++)
393 if (prologue_saved_reg_p (regno))
394 {
395 save_mask |= 1 << regno;
396 save_reg_size += 4;
397 }
398
399   /* If we are saving any callee-saved register, then assume
400      push.n/pop.n should be used.  Make sure RA is saved, and that
401      registers from r16 upward form a contiguous saved range.  */
402 if (TARGET_HAS_CDX && save_reg_size != 0)
403 {
404 if ((save_mask & (1 << RA_REGNO)) == 0)
405 {
406 save_mask |= 1 << RA_REGNO;
407 save_reg_size += 4;
408 }
409
410 for (regno = 23; regno >= 16; regno--)
411 if ((save_mask & (1 << regno)) != 0)
412 {
413 	  /* Starting from the highest-numbered callee-saved
414 	     register that is used, make sure all regs down
415 	     to r16 are saved, to maintain a contiguous range
416 	     for push.n/pop.n.  */
417 unsigned int i;
418 for (i = regno - 1; i >= 16; i--)
419 if ((save_mask & (1 << i)) == 0)
420 {
421 save_mask |= 1 << i;
422 save_reg_size += 4;
423 }
424 break;
425 }
426 }
427
428 callee_save_reg_size = save_reg_size;
429
430 /* If we call eh_return, we need to save the EH data registers. */
431 if (crtl->calls_eh_return)
432 {
433 unsigned i;
434 unsigned r;
435
436 for (i = 0; (r = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
437 if (!(save_mask & (1 << r)))
438 {
439 save_mask |= 1 << r;
440 save_reg_size += 4;
441 }
442 }
443
444 cfun->machine->fp_save_offset = 0;
445 if (save_mask & (1 << HARD_FRAME_POINTER_REGNUM))
446 {
447 int fp_save_offset = 0;
448 for (regno = 0; regno < HARD_FRAME_POINTER_REGNUM; regno++)
449 if (save_mask & (1 << regno))
450 fp_save_offset += 4;
451
452 cfun->machine->fp_save_offset = fp_save_offset;
453 }
454
455 var_size = NIOS2_STACK_ALIGN (get_frame_size ());
456 out_args_size = NIOS2_STACK_ALIGN (crtl->outgoing_args_size);
457 total_size = var_size + out_args_size;
458
459 save_reg_size = NIOS2_STACK_ALIGN (save_reg_size);
460 total_size += save_reg_size;
461 total_size += NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
462
463 /* Save other computed information. */
464 cfun->machine->save_mask = save_mask;
465 cfun->machine->total_size = total_size;
466 cfun->machine->var_size = var_size;
467 cfun->machine->args_size = out_args_size;
468 cfun->machine->save_reg_size = save_reg_size;
469 cfun->machine->callee_save_reg_size = callee_save_reg_size;
470 cfun->machine->initialized = reload_completed;
471 cfun->machine->save_regs_offset = out_args_size + var_size;
472
473 return total_size;
474 }
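/* As a sketch, the frame computed above looks like this (higher
   addresses at the top):

	incoming stack arguments (caller's frame)
	pretend (anonymous) arguments, if any
	register save area		<-- SP + save_regs_offset
	local variables (var_size)
	outgoing arguments (args_size)	<-- stack pointer after prologue

   total_size covers everything from the outgoing arguments up to and
   including the pretend arguments.  */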
475
476 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
477 prologue/epilogue expand routines. */
478 static void
479 save_reg (int regno, unsigned offset)
480 {
481 rtx reg = gen_rtx_REG (SImode, regno);
482 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
483 rtx_insn *insn = emit_move_insn (gen_frame_mem (Pmode, addr), reg);
484 RTX_FRAME_RELATED_P (insn) = 1;
485 }
486
487 static void
488 restore_reg (int regno, unsigned offset)
489 {
490 rtx reg = gen_rtx_REG (SImode, regno);
491 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
492 rtx_insn *insn = emit_move_insn (reg, gen_frame_mem (Pmode, addr));
493 /* Tag epilogue unwind note. */
494 add_reg_note (insn, REG_CFA_RESTORE, reg);
495 RTX_FRAME_RELATED_P (insn) = 1;
496 }
497
498 /* This routine tests for the base register update SET in load/store
499 multiple RTL insns, used in pop_operation_p and ldstwm_operation_p. */
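/* For example, this matches the parallel element
     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 16)))
   and returns the sp register in *BASE_REG and the const_int 16
   in *OFFSET.  */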
500 static bool
501 base_reg_adjustment_p (rtx set, rtx *base_reg, rtx *offset)
502 {
503 if (GET_CODE (set) == SET
504 && REG_P (SET_DEST (set))
505 && GET_CODE (SET_SRC (set)) == PLUS
506 && REG_P (XEXP (SET_SRC (set), 0))
507 && rtx_equal_p (SET_DEST (set), XEXP (SET_SRC (set), 0))
508 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
509 {
510 *base_reg = XEXP (SET_SRC (set), 0);
511 *offset = XEXP (SET_SRC (set), 1);
512 return true;
513 }
514 return false;
515 }
516
517 /* Do the CFA-note bookkeeping for push/pop prologue/epilogue instructions. */
518 static void
519 nios2_create_cfa_notes (rtx_insn *insn, bool epilogue_p)
520 {
521 int i = 0;
522 rtx base_reg, offset, elt, pat = PATTERN (insn);
523 if (epilogue_p)
524 {
525 elt = XVECEXP (pat, 0, 0);
526 if (GET_CODE (elt) == RETURN)
527 i++;
528 elt = XVECEXP (pat, 0, i);
529 if (base_reg_adjustment_p (elt, &base_reg, &offset))
530 {
531 add_reg_note (insn, REG_CFA_ADJUST_CFA, copy_rtx (elt));
532 i++;
533 }
534 for (; i < XVECLEN (pat, 0); i++)
535 {
536 elt = SET_DEST (XVECEXP (pat, 0, i));
537 gcc_assert (REG_P (elt));
538 add_reg_note (insn, REG_CFA_RESTORE, elt);
539 }
540 }
541 else
542 {
543 /* Tag each of the prologue sets. */
544 for (i = 0; i < XVECLEN (pat, 0); i++)
545 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
546 }
547 }
548
549 /* Temp regno used inside prologue/epilogue. */
550 #define TEMP_REG_NUM 8
551
552 /* Emit conditional trap for checking stack limit. SIZE is the number of
553 additional bytes required.
554
555 GDB prologue analysis depends on this generating a direct comparison
556 to the SP register, so the adjustment to add SIZE needs to be done on
557 the other operand to the comparison. Use TEMP_REG_NUM as a temporary,
558 if necessary. */
559 static void
560 nios2_emit_stack_limit_check (int size)
561 {
562 rtx sum;
563
564 if (GET_CODE (stack_limit_rtx) == SYMBOL_REF)
565 {
566 /* This generates a %hiadj/%lo pair with the constant size
567 add handled by the relocations. */
568 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
569 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
570 }
571 else if (!REG_P (stack_limit_rtx))
572     sorry ("unknown form for stack limit expression");
573 else if (size == 0)
574 sum = stack_limit_rtx;
575 else if (SMALL_INT (size))
576 {
577 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
578 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
579 }
580 else
581 {
582 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
583 emit_move_insn (sum, gen_int_mode (size, Pmode));
584 emit_insn (gen_add2_insn (sum, stack_limit_rtx));
585 }
586
587 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode, stack_pointer_rtx, sum),
588 stack_pointer_rtx, sum, GEN_INT (3)));
589 }
590
591 static rtx_insn *
592 nios2_emit_add_constant (rtx reg, HOST_WIDE_INT immed)
593 {
594 rtx_insn *insn;
595 if (SMALL_INT (immed))
596 insn = emit_insn (gen_add2_insn (reg, gen_int_mode (immed, Pmode)));
597 else
598 {
599 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
600 emit_move_insn (tmp, gen_int_mode (immed, Pmode));
601 insn = emit_insn (gen_add2_insn (reg, tmp));
602 }
603 return insn;
604 }
605
606 static rtx_insn *
607 nios2_adjust_stack (int sp_adjust, bool epilogue_p)
608 {
609 enum reg_note note_kind = REG_NOTE_MAX;
610 rtx_insn *insn = NULL;
611 if (sp_adjust)
612 {
613 if (SMALL_INT (sp_adjust))
614 insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
615 gen_int_mode (sp_adjust, Pmode)));
616 else
617 {
618 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
619 emit_move_insn (tmp, gen_int_mode (sp_adjust, Pmode));
620 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
621 /* Attach a note indicating what happened. */
622 if (!epilogue_p)
623 note_kind = REG_FRAME_RELATED_EXPR;
624 }
625 if (epilogue_p)
626 note_kind = REG_CFA_ADJUST_CFA;
627 if (note_kind != REG_NOTE_MAX)
628 {
629 rtx cfa_adj = gen_rtx_SET (stack_pointer_rtx,
630 plus_constant (Pmode, stack_pointer_rtx,
631 sp_adjust));
632 add_reg_note (insn, note_kind, cfa_adj);
633 }
634 RTX_FRAME_RELATED_P (insn) = 1;
635 }
636 return insn;
637 }
638
639 void
640 nios2_expand_prologue (void)
641 {
642 unsigned int regno;
643 int total_frame_size, save_offset;
644 int sp_offset; /* offset from base_reg to final stack value. */
645 int save_regs_base; /* offset from base_reg to register save area. */
646 rtx_insn *insn;
647
648 total_frame_size = nios2_compute_frame_layout ();
649
650 if (flag_stack_usage_info)
651 current_function_static_stack_size = total_frame_size;
652
653 /* When R2 CDX push.n/stwm is available, arrange for stack frame to be built
654 using them. */
655 if (TARGET_HAS_CDX
656 && (cfun->machine->save_reg_size != 0
657 || cfun->machine->uses_anonymous_args))
658 {
659 unsigned int regmask = cfun->machine->save_mask;
660 unsigned int callee_save_regs = regmask & 0xffff0000;
661 unsigned int caller_save_regs = regmask & 0x0000ffff;
662 int push_immed = 0;
663 int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
664 rtx stack_mem =
665 gen_frame_mem (SImode, plus_constant (Pmode, stack_pointer_rtx, -4));
666
667 /* Check that there is room for the entire stack frame before doing
668 any SP adjustments or pushes. */
669 if (crtl->limit_stack)
670 nios2_emit_stack_limit_check (total_frame_size);
671
672 if (pretend_args_size)
673 {
674 if (cfun->machine->uses_anonymous_args)
675 {
676 /* Emit a stwm to push copy of argument registers onto
677 the stack for va_arg processing. */
678 unsigned int r, mask = 0, n = pretend_args_size / 4;
679 for (r = LAST_ARG_REGNO - n + 1; r <= LAST_ARG_REGNO; r++)
680 mask |= (1 << r);
681 insn = emit_insn (nios2_ldst_parallel
682 (false, false, false, stack_mem,
683 -pretend_args_size, mask, false));
684 /* Tag first SP adjustment as frame-related. */
685 RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 0)) = 1;
686 RTX_FRAME_RELATED_P (insn) = 1;
687 }
688 else
689 nios2_adjust_stack (-pretend_args_size, false);
690 }
691 if (callee_save_regs)
692 {
693 /* Emit a push.n to save registers and optionally allocate
694 push_immed extra bytes on the stack. */
695 int sp_adjust;
696 if (caller_save_regs)
697 /* Can't allocate extra stack space yet. */
698 push_immed = 0;
699 else if (cfun->machine->save_regs_offset <= 60)
700 /* Stack adjustment fits entirely in the push.n. */
701 push_immed = cfun->machine->save_regs_offset;
702 else if (frame_pointer_needed
703 && cfun->machine->fp_save_offset == 0)
704 /* Deferring the entire stack adjustment until later
705 allows us to use a mov.n instead of a 32-bit addi
706 instruction to set the frame pointer. */
707 push_immed = 0;
708 else
709 /* Splitting the stack adjustment between the push.n
710 and an explicit adjustment makes it more likely that
711 we can use spdeci.n for the explicit part. */
712 push_immed = 60;
713 sp_adjust = -(cfun->machine->callee_save_reg_size + push_immed);
714 insn = emit_insn (nios2_ldst_parallel (false, false, false,
715 stack_mem, sp_adjust,
716 callee_save_regs, false));
717 nios2_create_cfa_notes (insn, false);
718 RTX_FRAME_RELATED_P (insn) = 1;
719 }
720
721 if (caller_save_regs)
722 {
723 /* Emit a stwm to save the EH data regs, r4-r7. */
724 int caller_save_size = (cfun->machine->save_reg_size
725 - cfun->machine->callee_save_reg_size);
726 gcc_assert ((caller_save_regs & ~0xf0) == 0);
727 insn = emit_insn (nios2_ldst_parallel
728 (false, false, false, stack_mem,
729 -caller_save_size, caller_save_regs, false));
730 nios2_create_cfa_notes (insn, false);
731 RTX_FRAME_RELATED_P (insn) = 1;
732 }
733
734 save_regs_base = push_immed;
735 sp_offset = -(cfun->machine->save_regs_offset - push_immed);
736 }
737 /* The non-CDX cases decrement the stack pointer, to prepare for individual
738 register saves to the stack. */
739 else if (!SMALL_INT (total_frame_size))
740 {
741       /* We need an intermediate point; this will point at the spill block. */
742 nios2_adjust_stack (cfun->machine->save_regs_offset - total_frame_size,
743 false);
744 save_regs_base = 0;
745 sp_offset = -cfun->machine->save_regs_offset;
746 if (crtl->limit_stack)
747 nios2_emit_stack_limit_check (cfun->machine->save_regs_offset);
748 }
749 else if (total_frame_size)
750 {
751 nios2_adjust_stack (-total_frame_size, false);
752 save_regs_base = cfun->machine->save_regs_offset;
753 sp_offset = 0;
754 if (crtl->limit_stack)
755 nios2_emit_stack_limit_check (0);
756 }
757 else
758 save_regs_base = sp_offset = 0;
759
760 /* Save the registers individually in the non-CDX case. */
761 if (!TARGET_HAS_CDX)
762 {
763 save_offset = save_regs_base + cfun->machine->save_reg_size;
764
765 for (regno = LAST_GP_REG; regno > 0; regno--)
766 if (cfun->machine->save_mask & (1 << regno))
767 {
768 save_offset -= 4;
769 save_reg (regno, save_offset);
770 }
771 }
772
773 /* Set the hard frame pointer. */
774 if (frame_pointer_needed)
775 {
776 int fp_save_offset = save_regs_base + cfun->machine->fp_save_offset;
777 insn =
778 (fp_save_offset == 0
779 ? emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx)
780 : emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
781 stack_pointer_rtx,
782 gen_int_mode (fp_save_offset, Pmode))));
783 RTX_FRAME_RELATED_P (insn) = 1;
784 }
785
786 /* Allocate sp_offset more bytes in the stack frame. */
787 nios2_adjust_stack (sp_offset, false);
788
789 /* Load the PIC register if needed. */
790 if (crtl->uses_pic_offset_table)
791 nios2_load_pic_register ();
792
793 /* If we are profiling, make sure no instructions are scheduled before
794 the call to mcount. */
795 if (crtl->profile)
796 emit_insn (gen_blockage ());
797 }
798
799 void
800 nios2_expand_epilogue (bool sibcall_p)
801 {
802 rtx_insn *insn;
803 rtx cfa_adj;
804 int total_frame_size;
805 int sp_adjust, save_offset;
806 unsigned int regno;
807
808 if (!sibcall_p && nios2_can_use_return_insn ())
809 {
810 emit_jump_insn (gen_return ());
811 return;
812 }
813
814 emit_insn (gen_blockage ());
815
816 total_frame_size = nios2_compute_frame_layout ();
817 if (frame_pointer_needed)
818 {
819 /* Recover the stack pointer. */
820 insn =
821 (cfun->machine->fp_save_offset == 0
822 ? emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx)
823 : emit_insn (gen_add3_insn
824 (stack_pointer_rtx, hard_frame_pointer_rtx,
825 gen_int_mode (-cfun->machine->fp_save_offset, Pmode))));
826 cfa_adj = plus_constant (Pmode, stack_pointer_rtx,
827 (total_frame_size
828 - cfun->machine->save_regs_offset));
829 add_reg_note (insn, REG_CFA_DEF_CFA, cfa_adj);
830 RTX_FRAME_RELATED_P (insn) = 1;
831
832 save_offset = 0;
833 sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
834 }
835 else if (!SMALL_INT (total_frame_size))
836 {
837 nios2_adjust_stack (cfun->machine->save_regs_offset, true);
838 save_offset = 0;
839 sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
840 }
841 else
842 {
843 save_offset = cfun->machine->save_regs_offset;
844 sp_adjust = total_frame_size;
845 }
846
847 if (!TARGET_HAS_CDX)
848 {
849 /* Generate individual register restores. */
850 save_offset += cfun->machine->save_reg_size;
851
852 for (regno = LAST_GP_REG; regno > 0; regno--)
853 if (cfun->machine->save_mask & (1 << regno))
854 {
855 save_offset -= 4;
856 restore_reg (regno, save_offset);
857 }
858 nios2_adjust_stack (sp_adjust, true);
859 }
860 else if (cfun->machine->save_reg_size == 0)
861 {
862 /* Nothing to restore, just recover the stack position. */
863 nios2_adjust_stack (sp_adjust, true);
864 }
865 else
866 {
867 /* Emit CDX pop.n/ldwm to restore registers and optionally return. */
868 unsigned int regmask = cfun->machine->save_mask;
869 unsigned int callee_save_regs = regmask & 0xffff0000;
870 unsigned int caller_save_regs = regmask & 0x0000ffff;
871 int callee_save_size = cfun->machine->callee_save_reg_size;
872 int caller_save_size = cfun->machine->save_reg_size - callee_save_size;
873 int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
874 bool ret_p = (!pretend_args_size && !crtl->calls_eh_return
875 && !sibcall_p);
876
877 if (!ret_p || caller_save_size > 0)
878 sp_adjust = save_offset;
879 else
880 sp_adjust = (save_offset > 60 ? save_offset - 60 : 0);
881
882 save_offset -= sp_adjust;
883
884 nios2_adjust_stack (sp_adjust, true);
885
886 if (caller_save_regs)
887 {
888 /* Emit a ldwm to restore EH data regs. */
889 rtx stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
890 insn = emit_insn (nios2_ldst_parallel
891 (true, true, true, stack_mem,
892 caller_save_size, caller_save_regs, false));
893 RTX_FRAME_RELATED_P (insn) = 1;
894 nios2_create_cfa_notes (insn, true);
895 }
896
897 if (callee_save_regs)
898 {
899 int sp_adjust = save_offset + callee_save_size;
900 rtx stack_mem;
901 if (ret_p)
902 {
903 /* Emit a pop.n to restore regs and return. */
904 stack_mem =
905 gen_frame_mem (SImode,
906 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
907 gen_int_mode (sp_adjust - 4,
908 Pmode)));
909 insn =
910 emit_jump_insn (nios2_ldst_parallel (true, false, false,
911 stack_mem, sp_adjust,
912 callee_save_regs, ret_p));
913 RTX_FRAME_RELATED_P (insn) = 1;
914 /* No need to attach CFA notes since we cannot step over
915 a return. */
916 return;
917 }
918 else
919 {
920 /* If no return, we have to use the ldwm form. */
921 stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
922 insn =
923 emit_insn (nios2_ldst_parallel (true, true, true,
924 stack_mem, sp_adjust,
925 callee_save_regs, ret_p));
926 RTX_FRAME_RELATED_P (insn) = 1;
927 nios2_create_cfa_notes (insn, true);
928 }
929 }
930
931 if (pretend_args_size)
932 nios2_adjust_stack (pretend_args_size, true);
933 }
934
935 /* Add in the __builtin_eh_return stack adjustment. */
936 if (crtl->calls_eh_return)
937 emit_insn (gen_add2_insn (stack_pointer_rtx, EH_RETURN_STACKADJ_RTX));
938
939 if (!sibcall_p)
940 emit_jump_insn (gen_simple_return ());
941 }
942
943 bool
944 nios2_expand_return (void)
945 {
946 /* If CDX is available, generate a pop.n instruction to do both
947 the stack pop and return. */
948 if (TARGET_HAS_CDX)
949 {
950 int total_frame_size = nios2_compute_frame_layout ();
951 int sp_adjust = (cfun->machine->save_regs_offset
952 + cfun->machine->callee_save_reg_size);
953 gcc_assert (sp_adjust == total_frame_size);
954 if (sp_adjust != 0)
955 {
956 rtx mem =
957 gen_frame_mem (SImode,
958 plus_constant (Pmode, stack_pointer_rtx,
959 sp_adjust - 4, false));
960 rtx_insn *insn =
961 emit_jump_insn (nios2_ldst_parallel (true, false, false,
962 mem, sp_adjust,
963 cfun->machine->save_mask,
964 true));
965 RTX_FRAME_RELATED_P (insn) = 1;
966 /* No need to create CFA notes since we can't step over
967 a return. */
968 return true;
969 }
970 }
971 return false;
972 }
973
974 /* Implement RETURN_ADDR_RTX.  Note that we do not support moving
975    back to a previous frame. */
976 rtx
977 nios2_get_return_address (int count)
978 {
979 if (count != 0)
980 return const0_rtx;
981
982 return get_hard_reg_initial_val (Pmode, RA_REGNO);
983 }
984
985 /* Emit code to change the current function's return address to
986 ADDRESS. SCRATCH is available as a scratch register, if needed.
987 ADDRESS and SCRATCH are both word-mode GPRs. */
988 void
989 nios2_set_return_address (rtx address, rtx scratch)
990 {
991 nios2_compute_frame_layout ();
992 if (cfun->machine->save_mask & (1 << RA_REGNO))
993 {
994 unsigned offset = cfun->machine->save_reg_size - 4;
995 rtx base;
996
997 if (frame_pointer_needed)
998 base = hard_frame_pointer_rtx;
999 else
1000 {
1001 base = stack_pointer_rtx;
1002 offset += cfun->machine->save_regs_offset;
1003
1004 if (!SMALL_INT (offset))
1005 {
1006 emit_move_insn (scratch, gen_int_mode (offset, Pmode));
1007 emit_insn (gen_add2_insn (scratch, base));
1008 base = scratch;
1009 offset = 0;
1010 }
1011 }
1012 if (offset)
1013 base = plus_constant (Pmode, base, offset);
1014 emit_move_insn (gen_rtx_MEM (Pmode, base), address);
1015 }
1016 else
1017 emit_move_insn (gen_rtx_REG (Pmode, RA_REGNO), address);
1018 }
1019
1020 /* Implement FUNCTION_PROFILER macro. */
1021 void
1022 nios2_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
1023 {
1024 fprintf (file, "\tmov\tr8, ra\n");
1025 if (flag_pic == 1)
1026 {
1027 fprintf (file, "\tnextpc\tr2\n");
1028 fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1029 fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1030 fprintf (file, "\tadd\tr2, r2, r3\n");
1031 fprintf (file, "\tldw\tr2, %%call(_mcount)(r2)\n");
1032 fprintf (file, "\tcallr\tr2\n");
1033 }
1034 else if (flag_pic == 2)
1035 {
1036 fprintf (file, "\tnextpc\tr2\n");
1037 fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1038 fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1039 fprintf (file, "\tadd\tr2, r2, r3\n");
1040 fprintf (file, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
1041 fprintf (file, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
1042 fprintf (file, "\tadd\tr3, r2, r3\n");
1043 fprintf (file, "\tldw\tr2, 0(r3)\n");
1044 fprintf (file, "\tcallr\tr2\n");
1045 }
1046 else
1047 fprintf (file, "\tcall\t_mcount\n");
1048 fprintf (file, "\tmov\tra, r8\n");
1049 }
1050
1051 /* Dump stack layout. */
1052 static void
1053 nios2_dump_frame_layout (FILE *file)
1054 {
1055 fprintf (file, "\t%s Current Frame Info\n", ASM_COMMENT_START);
1056 fprintf (file, "\t%s total_size = %d\n", ASM_COMMENT_START,
1057 cfun->machine->total_size);
1058 fprintf (file, "\t%s var_size = %d\n", ASM_COMMENT_START,
1059 cfun->machine->var_size);
1060 fprintf (file, "\t%s args_size = %d\n", ASM_COMMENT_START,
1061 cfun->machine->args_size);
1062 fprintf (file, "\t%s save_reg_size = %d\n", ASM_COMMENT_START,
1063 cfun->machine->save_reg_size);
1064 fprintf (file, "\t%s initialized = %d\n", ASM_COMMENT_START,
1065 cfun->machine->initialized);
1066 fprintf (file, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START,
1067 cfun->machine->save_regs_offset);
1068 fprintf (file, "\t%s is_leaf = %d\n", ASM_COMMENT_START,
1069 crtl->is_leaf);
1070 fprintf (file, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START,
1071 frame_pointer_needed);
1072 fprintf (file, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START,
1073 crtl->args.pretend_args_size);
1074 }
1075
1076 /* Return true if REGNO should be saved in the prologue. */
1077 static bool
1078 prologue_saved_reg_p (unsigned regno)
1079 {
1080 gcc_assert (GP_REG_P (regno));
1081
1082 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
1083 return true;
1084
1085 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
1086 return true;
1087
1088 if (regno == PIC_OFFSET_TABLE_REGNUM && crtl->uses_pic_offset_table)
1089 return true;
1090
1091 if (regno == RA_REGNO && df_regs_ever_live_p (RA_REGNO))
1092 return true;
1093
1094 return false;
1095 }
1096
1097 /* Implement TARGET_CAN_ELIMINATE. */
1098 static bool
1099 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1100 {
1101 if (to == STACK_POINTER_REGNUM)
1102 return !frame_pointer_needed;
1103 return true;
1104 }
1105
1106 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
1107 int
1108 nios2_initial_elimination_offset (int from, int to)
1109 {
1110 int offset;
1111
1112 nios2_compute_frame_layout ();
1113
1114 /* Set OFFSET to the offset from the stack pointer. */
1115 switch (from)
1116 {
1117 case FRAME_POINTER_REGNUM:
1118 offset = cfun->machine->args_size;
1119 break;
1120
1121 case ARG_POINTER_REGNUM:
1122 offset = cfun->machine->total_size;
1123 offset -= crtl->args.pretend_args_size;
1124 break;
1125
1126 default:
1127 gcc_unreachable ();
1128 }
1129
1130 /* If we are asked for the frame pointer offset, then adjust OFFSET
1131 by the offset from the frame pointer to the stack pointer. */
1132 if (to == HARD_FRAME_POINTER_REGNUM)
1133 offset -= (cfun->machine->save_regs_offset
1134 + cfun->machine->fp_save_offset);
1135
1136 return offset;
1137 }
1138
1139 /* Return nonzero if this function is known to have a null epilogue.
1140 This allows the optimizer to omit jumps to jumps if no stack
1141 was created. */
1142 int
1143 nios2_can_use_return_insn (void)
1144 {
1145 int total_frame_size;
1146
1147 if (!reload_completed || crtl->profile)
1148 return 0;
1149
1150 total_frame_size = nios2_compute_frame_layout ();
1151
1152 /* If CDX is available, check if we can return using a
1153 single pop.n instruction. */
1154 if (TARGET_HAS_CDX
1155 && !frame_pointer_needed
1156 && cfun->machine->save_regs_offset <= 60
1157 && (cfun->machine->save_mask & 0x80000000) != 0
1158 && (cfun->machine->save_mask & 0xffff) == 0
1159 && crtl->args.pretend_args_size == 0)
1160 return true;
1161
1162 return total_frame_size == 0;
1163 }
1164
1165 \f
1166 /* Check and signal some warnings/errors on FPU insn options. */
1167 static void
1168 nios2_custom_check_insns (void)
1169 {
1170 unsigned int i, j;
1171 bool errors = false;
1172
1173 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1174 if (N2FPU_ENABLED_P (i) && N2FPU_DOUBLE_P (i))
1175 {
1176 for (j = 0; j < ARRAY_SIZE (nios2_fpu_insn); j++)
1177 if (N2FPU_DOUBLE_REQUIRED_P (j) && ! N2FPU_ENABLED_P (j))
1178 {
1179 error ("switch %<-mcustom-%s%> is required for double "
1180 "precision floating point", N2FPU_NAME (j));
1181 errors = true;
1182 }
1183 break;
1184 }
1185
1186 /* Warn if the user has enabled custom instructions for certain exotic
1187    operations that won't get used without -funsafe-math-optimizations.
1188    See expand_builtin () in builtins.c. */
1189 if (!flag_unsafe_math_optimizations)
1190 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1191 if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
1192 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1193 "-funsafe-math-optimizations is specified", N2FPU_NAME (i));
1194
1195 /* Warn if the user is trying to use -mcustom-fmins et al., which won't
1196    get used without -ffinite-math-only.  See fold_builtin_fmin_fmax ()
1197    in builtins.c. */
1198 if (!flag_finite_math_only)
1199 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1200 if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
1201 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1202 "-ffinite-math-only is specified", N2FPU_NAME (i));
1203
1204 /* Warn if the user is trying to use a custom rounding instruction
1205 that won't get used without -fno-math-errno. See
1206 expand_builtin_int_roundingfn_2 () in builtins.c. */
1207 if (flag_errno_math)
1208 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1209 if (N2FPU_ENABLED_P (i) && N2FPU_NO_ERRNO_P (i))
1210 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1211 "-fno-math-errno is specified", N2FPU_NAME (i));
1212
1213 if (errors || custom_code_conflict)
1214 fatal_error (input_location,
1215 "conflicting use of -mcustom switches, target attributes, "
1216 "and/or __builtin_custom_ functions");
1217 }
1218
1219 static void
1220 nios2_set_fpu_custom_code (enum n2fpu_code code, int n, bool override_p)
1221 {
1222 if (override_p || N2FPU_N (code) == -1)
1223 N2FPU_N (code) = n;
1224 nios2_register_custom_code (n, CCS_FPU, (int) code);
1225 }
1226
1227 /* Type to represent a standard FPU config. */
1228 struct nios2_fpu_config
1229 {
1230 const char *name;
1231 bool set_sp_constants;
1232 int code[n2fpu_code_num];
1233 };
1234
1235 #define NIOS2_FPU_CONFIG_NUM 3
1236 static struct nios2_fpu_config custom_fpu_config[NIOS2_FPU_CONFIG_NUM];
1237
1238 static void
1239 nios2_init_fpu_configs (void)
1240 {
1241 struct nios2_fpu_config* cfg;
1242 int i = 0;
1243 #define NEXT_FPU_CONFIG \
1244 do { \
1245 cfg = &custom_fpu_config[i++]; \
1246 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
1247 } while (0)
1248
1249 NEXT_FPU_CONFIG;
1250 cfg->name = "60-1";
1251 cfg->set_sp_constants = true;
1252 cfg->code[n2fpu_fmuls] = 252;
1253 cfg->code[n2fpu_fadds] = 253;
1254 cfg->code[n2fpu_fsubs] = 254;
1255
1256 NEXT_FPU_CONFIG;
1257 cfg->name = "60-2";
1258 cfg->set_sp_constants = true;
1259 cfg->code[n2fpu_fmuls] = 252;
1260 cfg->code[n2fpu_fadds] = 253;
1261 cfg->code[n2fpu_fsubs] = 254;
1262 cfg->code[n2fpu_fdivs] = 255;
1263
1264 NEXT_FPU_CONFIG;
1265 cfg->name = "72-3";
1266 cfg->set_sp_constants = true;
1267 cfg->code[n2fpu_floatus] = 243;
1268 cfg->code[n2fpu_fixsi] = 244;
1269 cfg->code[n2fpu_floatis] = 245;
1270 cfg->code[n2fpu_fcmpgts] = 246;
1271 cfg->code[n2fpu_fcmples] = 249;
1272 cfg->code[n2fpu_fcmpeqs] = 250;
1273 cfg->code[n2fpu_fcmpnes] = 251;
1274 cfg->code[n2fpu_fmuls] = 252;
1275 cfg->code[n2fpu_fadds] = 253;
1276 cfg->code[n2fpu_fsubs] = 254;
1277 cfg->code[n2fpu_fdivs] = 255;
1278
1279 #undef NEXT_FPU_CONFIG
1280 gcc_assert (i == NIOS2_FPU_CONFIG_NUM);
1281 }
1282
1283 static struct nios2_fpu_config *
1284 nios2_match_custom_fpu_cfg (const char *cfgname, const char *endp)
1285 {
1286 int i;
1287 for (i = 0; i < NIOS2_FPU_CONFIG_NUM; i++)
1288 {
1289 bool match = !(endp != NULL
1290 ? strncmp (custom_fpu_config[i].name, cfgname,
1291 endp - cfgname)
1292 : strcmp (custom_fpu_config[i].name, cfgname));
1293 if (match)
1294 return &custom_fpu_config[i];
1295 }
1296 return NULL;
1297 }
1298
1299 /* Use CFGNAME to look up the FPU config; ENDP, if non-NULL, marks the end
1300    of the string.  OVERRIDE is true if config codes should overwrite state. */
1301 static void
1302 nios2_handle_custom_fpu_cfg (const char *cfgname, const char *endp,
1303 bool override)
1304 {
1305 struct nios2_fpu_config *cfg = nios2_match_custom_fpu_cfg (cfgname, endp);
1306 if (cfg)
1307 {
1308 unsigned int i;
1309 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1310 if (cfg->code[i] >= 0)
1311 nios2_set_fpu_custom_code ((enum n2fpu_code) i, cfg->code[i],
1312 override);
1313 if (cfg->set_sp_constants)
1314 flag_single_precision_constant = 1;
1315 }
1316 else
1317 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
1318 "value %<%s%>", cfgname);
1319
1320 /* Guard against errors in the standard configurations. */
1321 nios2_custom_check_insns ();
1322 }
1323
1324 /* Check individual FPU insn options, and register custom code. */
1325 static void
1326 nios2_handle_custom_fpu_insn_option (int fpu_insn_index)
1327 {
1328 int param = N2FPU_N (fpu_insn_index);
1329
1330 if (0 <= param && param <= 255)
1331 nios2_register_custom_code (param, CCS_FPU, fpu_insn_index);
1332
1333 /* Valid values are 0-255, but also allow -1 so that the
1334 -mno-custom-<opt> switches work. */
1335 else if (param != -1)
1336 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1337 N2FPU_NAME (fpu_insn_index), param);
1338 }
1339
1340 /* Allocate a chunk of memory for per-function machine-dependent data. */
1341 static struct machine_function *
1342 nios2_init_machine_status (void)
1343 {
1344 return ggc_cleared_alloc<machine_function> ();
1345 }
1346
1347 /* Implement TARGET_OPTION_OVERRIDE. */
1348 static void
1349 nios2_option_override (void)
1350 {
1351 unsigned int i;
1352
1353 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1354 SUBTARGET_OVERRIDE_OPTIONS;
1355 #endif
1356
1357 /* Check for unsupported options. */
1358 if (flag_pic && !TARGET_LINUX_ABI)
1359 sorry ("position-independent code requires the Linux ABI");
1360 if (flag_pic && stack_limit_rtx
1361 && GET_CODE (stack_limit_rtx) == SYMBOL_REF)
1362 sorry ("PIC support for -fstack-limit-symbol");
1363
1364 /* Function to allocate machine-dependent function status. */
1365 init_machine_status = &nios2_init_machine_status;
1366
1367 nios2_section_threshold
1368 = (global_options_set.x_g_switch_value
1369 ? g_switch_value : NIOS2_DEFAULT_GVALUE);
1370
1371 if (nios2_gpopt_option == gpopt_unspecified)
1372 {
1373 /* Default to -mgpopt unless -fpic or -fPIC. */
1374 if (flag_pic)
1375 nios2_gpopt_option = gpopt_none;
1376 else
1377 nios2_gpopt_option = gpopt_local;
1378 }
1379
1380 /* If we don't have mul, we don't have mulx either! */
1381 if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
1382 target_flags &= ~MASK_HAS_MULX;
1383
1384 /* Optional BMX and CDX instructions only make sense for R2. */
1385 if (!TARGET_ARCH_R2)
1386 {
1387 if (TARGET_HAS_BMX)
1388 error ("BMX instructions are only supported with R2 architecture");
1389 if (TARGET_HAS_CDX)
1390 error ("CDX instructions are only supported with R2 architecture");
1391 }
1392
1393 /* R2 is little-endian only. */
1394 if (TARGET_ARCH_R2 && TARGET_BIG_ENDIAN)
1395 error ("R2 architecture is little-endian only");
1396
1397 /* Initialize default FPU configurations. */
1398 nios2_init_fpu_configs ();
1399
1400 /* Set up default handling for floating point custom instructions.
1401
1402 Putting things in this order means that the -mcustom-fpu-cfg=
1403 switch will always be overridden by individual -mcustom-fadds=
1404 switches, regardless of the order in which they were specified
1405 on the command line.
1406
1407 This behavior of prioritization of individual -mcustom-<insn>=
1408 options before the -mcustom-fpu-cfg= switch is maintained for
1409 compatibility. */
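/* For example, a command line with both -mcustom-fpu-cfg=60-1 and
   -mcustom-fmuls=250 leaves fmuls at code 250 rather than the
   config's 252: the option machinery has already set the per-insn
   variable, and the config below is applied with override false.  */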
1410 if (nios2_custom_fpu_cfg_string && *nios2_custom_fpu_cfg_string)
1411 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string, NULL, false);
1412
1413 /* Handle options for individual FPU insns. */
1414 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1415 nios2_handle_custom_fpu_insn_option (i);
1416
1417 nios2_custom_check_insns ();
1418
1419 /* Save the initial options in case the user does function specific
1420 options. */
1421 target_option_default_node = target_option_current_node
1422 = build_target_option_node (&global_options);
1423 }
1424
1425 \f
1426 /* Return true if CST is a constant within range of movi/movui/movhi. */
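/* (Assuming the usual nios2.h definitions, this means a signed or
   unsigned 16-bit immediate, or a value whose low 16 bits are zero,
   which movhi can materialize directly.)  */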
1427 static bool
1428 nios2_simple_const_p (const_rtx cst)
1429 {
1430 HOST_WIDE_INT val = INTVAL (cst);
1431 return SMALL_INT (val) || SMALL_INT_UNSIGNED (val) || UPPER16_INT (val);
1432 }
1433
1434 /* Compute a (partial) cost for rtx X. Return true if the complete
1435 cost has been computed, and false if subexpressions should be
1436 scanned. In either case, *TOTAL contains the cost result. */
1437 static bool
1438 nios2_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
1439 int outer_code ATTRIBUTE_UNUSED,
1440 int opno ATTRIBUTE_UNUSED,
1441 int *total, bool speed ATTRIBUTE_UNUSED)
1442 {
1443 int code = GET_CODE (x);
1444
1445 switch (code)
1446 {
1447 case CONST_INT:
1448 if (INTVAL (x) == 0)
1449 {
1450 *total = COSTS_N_INSNS (0);
1451 return true;
1452 }
1453 else if (nios2_simple_const_p (x))
1454 {
1455 *total = COSTS_N_INSNS (2);
1456 return true;
1457 }
1458 else
1459 {
1460 *total = COSTS_N_INSNS (4);
1461 return true;
1462 }
1463
1464 case LABEL_REF:
1465 case SYMBOL_REF:
1466 case CONST:
1467 case CONST_DOUBLE:
1468 {
1469 *total = COSTS_N_INSNS (4);
1470 return true;
1471 }
1472
1473 case AND:
1474 {
1475 /* Recognize 'nor' insn pattern. */
1476 if (GET_CODE (XEXP (x, 0)) == NOT
1477 && GET_CODE (XEXP (x, 1)) == NOT)
1478 {
1479 *total = COSTS_N_INSNS (1);
1480 return true;
1481 }
1482 return false;
1483 }
1484
1485 case MULT:
1486 {
1487 *total = COSTS_N_INSNS (1);
1488 return false;
1489 }
1490 case SIGN_EXTEND:
1491 {
1492 *total = COSTS_N_INSNS (3);
1493 return false;
1494 }
1495 case ZERO_EXTEND:
1496 {
1497 *total = COSTS_N_INSNS (1);
1498 return false;
1499 }
1500
1501 case ZERO_EXTRACT:
1502 if (TARGET_HAS_BMX)
1503 {
1504 *total = COSTS_N_INSNS (1);
1505 return true;
1506 }
1507
1508 default:
1509 return false;
1510 }
1511 }
1512
1513 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1514 static reg_class_t
1515 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t regclass)
1516 {
1517 return regclass == NO_REGS ? GENERAL_REGS : regclass;
1518 }
1519
1520 /* Emit a call to __tls_get_addr.  TI is the argument to this function.
1521    The RTX for the location holding the call's return value is
1522    returned. */
1523 static GTY(()) rtx nios2_tls_symbol;
1524
1525 static rtx
1526 nios2_call_tls_get_addr (rtx ti)
1527 {
1528 rtx arg = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
1529 rtx ret = gen_rtx_REG (Pmode, FIRST_RETVAL_REGNO);
1530 rtx fn;
1531 rtx_insn *insn;
1532
1533 if (!nios2_tls_symbol)
1534 nios2_tls_symbol = init_one_libfunc ("__tls_get_addr");
1535
1536 emit_move_insn (arg, ti);
1537 fn = gen_rtx_MEM (QImode, nios2_tls_symbol);
1538 insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
1539 RTL_CONST_CALL_P (insn) = 1;
1540 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
1541 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);
1542
1543 return ret;
1544 }
1545
1546 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1547 static bool
1548 nios2_large_offset_p (int unspec)
1549 {
1550 gcc_assert (nios2_unspec_reloc_name (unspec) != NULL);
1551
1552 if (flag_pic == 2
1553     /* FIXME: TLS GOT offset relocations will eventually also get this
1554        treatment, after binutils support for those is also completed. */
1555 && (unspec == UNSPEC_PIC_SYM || unspec == UNSPEC_PIC_CALL_SYM))
1556 return true;
1557
1558 /* 'gotoff' offsets are always hiadj/lo. */
1559 if (unspec == UNSPEC_PIC_GOTOFF_SYM)
1560 return true;
1561
1562 return false;
1563 }
1564
1565 /* Return true for conforming unspec relocations. Also used in
1566 constraints.md and predicates.md. */
1567 bool
1568 nios2_unspec_reloc_p (rtx op)
1569 {
1570 return (GET_CODE (op) == CONST
1571 && GET_CODE (XEXP (op, 0)) == UNSPEC
1572 && ! nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1573 }
1574
1575 static bool
1576 nios2_large_unspec_reloc_p (rtx op)
1577 {
1578 return (GET_CODE (op) == CONST
1579 && GET_CODE (XEXP (op, 0)) == UNSPEC
1580 && nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1581 }
1582
1583 /* Helper to generate unspec constant. */
1584 static rtx
1585 nios2_unspec_offset (rtx loc, int unspec)
1586 {
1587 return gen_rtx_CONST (Pmode, gen_rtx_UNSPEC (Pmode, gen_rtvec (1, loc),
1588 unspec));
1589 }
1590
1591 /* Generate GOT pointer based address with large offset. */
1592 static rtx
1593 nios2_large_got_address (rtx offset, rtx tmp)
1594 {
1595 if (!tmp)
1596 tmp = gen_reg_rtx (Pmode);
1597 emit_move_insn (tmp, offset);
1598 return gen_rtx_PLUS (Pmode, tmp, pic_offset_table_rtx);
1599 }
1600
1601 /* Generate a GOT pointer based address. */
1602 static rtx
1603 nios2_got_address (rtx loc, int unspec)
1604 {
1605 rtx offset = nios2_unspec_offset (loc, unspec);
1606 crtl->uses_pic_offset_table = 1;
1607
1608 if (nios2_large_offset_p (unspec))
1609 return force_reg (Pmode, nios2_large_got_address (offset, NULL_RTX));
1610
1611 return gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
1612 }
1613
1614 /* Generate the code to access LOC, a thread-local SYMBOL_REF.  The
1615    return value is a valid address expression for the access (a REG,
1616    or the PLUS of a register and an offset). */
1617 static rtx
1618 nios2_legitimize_tls_address (rtx loc)
1619 {
1620 rtx tmp, mem, tp;
1621 enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);
1622
1623 switch (model)
1624 {
1625 case TLS_MODEL_GLOBAL_DYNAMIC:
1626 tmp = gen_reg_rtx (Pmode);
1627 emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_GD));
1628 return nios2_call_tls_get_addr (tmp);
1629
1630 case TLS_MODEL_LOCAL_DYNAMIC:
1631 tmp = gen_reg_rtx (Pmode);
1632 emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_LDM));
1633 return gen_rtx_PLUS (Pmode, nios2_call_tls_get_addr (tmp),
1634 nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LDO));
1635
1636 case TLS_MODEL_INITIAL_EXEC:
1637 tmp = gen_reg_rtx (Pmode);
1638 mem = gen_const_mem (Pmode, nios2_got_address (loc, UNSPEC_LOAD_TLS_IE));
1639 emit_move_insn (tmp, mem);
1640 tp = gen_rtx_REG (Pmode, TP_REGNO);
1641 return gen_rtx_PLUS (Pmode, tp, tmp);
1642
1643 case TLS_MODEL_LOCAL_EXEC:
1644 tp = gen_rtx_REG (Pmode, TP_REGNO);
1645 return gen_rtx_PLUS (Pmode, tp,
1646 nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LE));
1647 default:
1648 gcc_unreachable ();
1649 }
1650 }
1651
1652 /* Divide Support
1653
1654 If -O3 is used, we want to output a table lookup for
1655 divides between small numbers (both num and den >= 0
1656 and < 0x10). The overhead of this method in the worst
1657 case is 40 bytes in the text section (10 insns) and
1658 256 bytes in the data section. Additional divides do
1659 not incur additional penalties in the data section.
1660
1661    Code speed is improved for small divides by about 5x
1662    when using this method in the worst case (~9 cycles
1663    vs ~45).  And in the worst case, divides not within the
1664    table are penalized by about 10% (~5 cycles vs ~45).
1665 However in the typical case the penalty is not as bad
1666 because doing the long divide in only 45 cycles is
1667 quite optimistic.
1668
1669    ??? It would be nice to have some benchmarks other
1670    than Dhrystone to back this up.
1671
1672 This bit of expansion is to create this instruction
1673 sequence as rtl.
1674 or $8, $4, $5
1675 slli $9, $4, 4
1676 cmpgeui $3, $8, 16
1677 beq $3, $0, .L3
1678 or $10, $9, $5
1679 add $12, $11, divide_table
1680 ldbu $2, 0($12)
1681 br .L1
1682 .L3:
1683 call slow_div
1684 .L1:
1685 # continue here with result in $2
1686
1687 ??? Ideally I would like the libcall block to contain all
1688 of this code, but I don't know how to do that. What it
1689 means is that if the divide can be eliminated, it may not
1690 completely disappear.
1691
1692 ??? The __divsi3_table label should ideally be moved out
1693 of this block and into a global. If it is placed into the
1694 sdata section we can save even more cycles by doing things
1695 gp relative. */
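/* As a worked example of the fast path: for 9 / 3 both operands are
   below 16, the lookup index is (9 << 4) | 3 == 0x93 == 147, and the
   byte at __divsi3_table[147] holds the quotient, 3.  */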
1696 void
1697 nios2_emit_expensive_div (rtx *operands, machine_mode mode)
1698 {
1699 rtx or_result, shift_left_result;
1700 rtx lookup_value;
1701 rtx_code_label *lab1, *lab3;
1702 rtx_insn *insns;
1703 rtx libfunc;
1704 rtx final_result;
1705 rtx_insn *tmp;
1706 rtx table;
1707
1708 /* It may look a little generic, but only SImode is supported for now. */
1709 gcc_assert (mode == SImode);
1710 libfunc = optab_libfunc (sdiv_optab, SImode);
1711
1712 lab1 = gen_label_rtx ();
1713 lab3 = gen_label_rtx ();
1714
1715 or_result = expand_simple_binop (SImode, IOR,
1716 operands[1], operands[2],
1717 0, 0, OPTAB_LIB_WIDEN);
1718
1719 emit_cmp_and_jump_insns (or_result, GEN_INT (15), GTU, 0,
1720 GET_MODE (or_result), 0, lab3);
1721 JUMP_LABEL (get_last_insn ()) = lab3;
1722
1723 shift_left_result = expand_simple_binop (SImode, ASHIFT,
1724 operands[1], GEN_INT (4),
1725 0, 0, OPTAB_LIB_WIDEN);
1726
1727 lookup_value = expand_simple_binop (SImode, IOR,
1728 shift_left_result, operands[2],
1729 0, 0, OPTAB_LIB_WIDEN);
1730 table = gen_rtx_PLUS (SImode, lookup_value,
1731 gen_rtx_SYMBOL_REF (SImode, "__divsi3_table"));
1732 convert_move (operands[0], gen_rtx_MEM (QImode, table), 1);
1733
1734 tmp = emit_jump_insn (gen_jump (lab1));
1735 JUMP_LABEL (tmp) = lab1;
1736 emit_barrier ();
1737
1738 emit_label (lab3);
1739 LABEL_NUSES (lab3) = 1;
1740
1741 start_sequence ();
1742 final_result = emit_library_call_value (libfunc, NULL_RTX,
1743 LCT_CONST, SImode, 2,
1744 operands[1], SImode,
1745 operands[2], SImode);
1746
1747 insns = get_insns ();
1748 end_sequence ();
1749 emit_libcall_block (insns, operands[0], final_result,
1750 gen_rtx_DIV (SImode, operands[1], operands[2]));
1751
1752 emit_label (lab1);
1753 LABEL_NUSES (lab1) = 1;
1754 }
1755
1756 \f
1757 /* Branches and compares. */
1758
1759 /* Return in *ALT_CODE and *ALT_OP an alternate equivalent constant
1760    comparison, e.g. turning >= 1 into > 0. */
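/* Further examples of the rewrite: (x >= 1) becomes (x > 0), and an
   unsigned (x > 7) becomes (x >= 8); adjusting the constant by one
   while flipping between strict and non-strict forms preserves the
   comparison's meaning.  */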
1761 static void
1762 nios2_alternate_compare_const (enum rtx_code code, rtx op,
1763 enum rtx_code *alt_code, rtx *alt_op,
1764 machine_mode mode)
1765 {
1766 HOST_WIDE_INT opval = INTVAL (op);
1767 enum rtx_code scode = signed_condition (code);
1768 bool dec_p = (scode == LT || scode == GE);
1769
1770 if (code == EQ || code == NE)
1771 {
1772 *alt_code = code;
1773 *alt_op = op;
1774 return;
1775 }
1776
1777 *alt_op = (dec_p
1778 ? gen_int_mode (opval - 1, mode)
1779 : gen_int_mode (opval + 1, mode));
1780
1781 /* The required conversion between [>,>=] and [<,<=] is captured
1782 by a reverse + swap of condition codes. */
1783 *alt_code = reverse_condition (swap_condition (code));
1784
1785 {
1786 /* Test if the incremented/decremented value crosses the over/underflow
1787 boundary. Supposedly, such boundary cases should already be transformed
1788 into always-true/false or EQ conditions, so use an assertion here. */
1789 unsigned HOST_WIDE_INT alt_opval = INTVAL (*alt_op);
1790 if (code == scode)
1791 alt_opval ^= ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1));
1792 alt_opval &= GET_MODE_MASK (mode);
1793 gcc_assert (dec_p ? alt_opval != GET_MODE_MASK (mode) : alt_opval != 0);
1794 }
1795 }
1796
1797 /* Return true if the constant comparison is supported by nios2. */
1798 static bool
1799 nios2_valid_compare_const_p (enum rtx_code code, rtx op)
1800 {
1801 switch (code)
1802 {
1803 case EQ: case NE: case GE: case LT:
1804 return SMALL_INT (INTVAL (op));
1805 case GEU: case LTU:
1806 return SMALL_INT_UNSIGNED (INTVAL (op));
1807 default:
1808 return false;
1809 }
1810 }
1811
1812 /* Check whether the FPU comparison in *CMP, *OP1, and *OP2 can be supported
1813 in the current configuration, performing modifications if MODIFY_P is true.
1814 Return true if the FPU compare can be done. */
1815
1816 bool
1817 nios2_validate_fpu_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
1818 bool modify_p)
1819 {
1820 bool rev_p = false;
1821 enum rtx_code code = GET_CODE (*cmp);
1822
1823 if (!nios2_fpu_compare_enabled (code, mode))
1824 {
1825 code = swap_condition (code);
1826 if (nios2_fpu_compare_enabled (code, mode))
1827 rev_p = true;
1828 else
1829 return false;
1830 }
1831
1832 if (modify_p)
1833 {
1834 if (rev_p)
1835 {
1836 rtx tmp = *op1;
1837 *op1 = *op2;
1838 *op2 = tmp;
1839 }
1840 *op1 = force_reg (mode, *op1);
1841 *op2 = force_reg (mode, *op2);
1842 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
1843 }
1844 return true;
1845 }
1846
1847 /* Check and modify the comparison in *CMP, *OP1, and *OP2 into a form
1848 that nios2 supports. Return true on success. */
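/* As a sketch of the cases handled below (illustrative): "x > 100" is
   not directly encodable, but the alternate form "x >= 101" is, and
   can be emitted as cmpgei; a register comparison "r1 <= r2" is
   rebuilt by swapping operands into "r2 >= r1". */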
1849 bool
1850 nios2_validate_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
1851 {
1852 enum rtx_code code = GET_CODE (*cmp);
1853 enum rtx_code alt_code;
1854 rtx alt_op2;
1855
1856 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1857 return nios2_validate_fpu_compare (mode, cmp, op1, op2, true);
1858
1859 if (!reg_or_0_operand (*op2, mode))
1860 {
1861 /* Create alternate constant compare. */
1862 nios2_alternate_compare_const (code, *op2, &alt_code, &alt_op2, mode);
1863
1864 /* If the alternate op2 is zero, we can use it directly, possibly
1865 swapping the compare code. */
1866 if (alt_op2 == const0_rtx)
1867 {
1868 code = alt_code;
1869 *op2 = alt_op2;
1870 goto check_rebuild_cmp;
1871 }
1872
1873 /* Check if either constant compare can be used. */
1874 if (nios2_valid_compare_const_p (code, *op2))
1875 return true;
1876 else if (nios2_valid_compare_const_p (alt_code, alt_op2))
1877 {
1878 code = alt_code;
1879 *op2 = alt_op2;
1880 goto rebuild_cmp;
1881 }
1882
1883 /* We have to force op2 into a register now. Try to pick one
1884 with a lower cost. */
1885 if (! nios2_simple_const_p (*op2)
1886 && nios2_simple_const_p (alt_op2))
1887 {
1888 code = alt_code;
1889 *op2 = alt_op2;
1890 }
1891 *op2 = force_reg (SImode, *op2);
1892 }
1893 check_rebuild_cmp:
1894 if (code == GT || code == GTU || code == LE || code == LEU)
1895 {
1896 rtx t = *op1; *op1 = *op2; *op2 = t;
1897 code = swap_condition (code);
1898 }
1899 rebuild_cmp:
1900 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
1901 return true;
1902 }
1903
1904
1905 /* Addressing Modes. */
1906
1907 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1908 static bool
1909 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1910 {
1911 rtx base, offset;
1912 split_const (x, &base, &offset);
1913 return GET_CODE (base) != SYMBOL_REF || !SYMBOL_REF_TLS_MODEL (base);
1914 }
1915
1916 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1917 static bool
1918 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1919 {
1920 return !nios2_legitimate_constant_p (mode, x);
1921 }
1922
1923 /* Return true if register REGNO is a valid base register.
1924 STRICT_P is true if REG_OK_STRICT is in effect. */
1925
1926 bool
1927 nios2_regno_ok_for_base_p (int regno, bool strict_p)
1928 {
1929 if (!HARD_REGISTER_NUM_P (regno))
1930 {
1931 if (!strict_p)
1932 return true;
1933
1934 if (!reg_renumber)
1935 return false;
1936
1937 regno = reg_renumber[regno];
1938 }
1939
1940 /* The fake registers will be eliminated to either the stack or
1941 hard frame pointer, both of which are usually valid base registers.
1942 Reload deals with the cases where the eliminated form isn't valid. */
1943 return (GP_REG_P (regno)
1944 || regno == FRAME_POINTER_REGNUM
1945 || regno == ARG_POINTER_REGNUM);
1946 }
1947
1948 /* Return true if OFFSET is permitted in a load/store address expression.
1949 Normally any 16-bit value is permitted, but on R2 if we may be emitting
1950 the IO forms of these instructions we must restrict the offset to fit
1951 in a 12-bit field instead. */
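/* For example (illustrative): an offset of 2048 is fine for a plain
   ldw, which has a 16-bit displacement field, but it does not fit the
   12-bit field of ldwio, so it must be rejected when the io forms may
   be used. */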
1952
1953 static bool
1954 nios2_valid_addr_offset_p (rtx offset)
1955 {
1956 return (CONST_INT_P (offset)
1957 && ((TARGET_ARCH_R2 && (TARGET_BYPASS_CACHE
1958 || TARGET_BYPASS_CACHE_VOLATILE))
1959 ? SMALL_INT12 (INTVAL (offset))
1960 : SMALL_INT (INTVAL (offset))));
1961 }
1962
1963 /* Return true if the address expression formed by BASE + OFFSET is
1964 valid. */
1965 static bool
1966 nios2_valid_addr_expr_p (rtx base, rtx offset, bool strict_p)
1967 {
1968 if (!strict_p && GET_CODE (base) == SUBREG)
1969 base = SUBREG_REG (base);
1970 return (REG_P (base)
1971 && nios2_regno_ok_for_base_p (REGNO (base), strict_p)
1972 && (offset == NULL_RTX
1973 || nios2_valid_addr_offset_p (offset)
1974 || nios2_unspec_reloc_p (offset)));
1975 }
1976
1977 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1978 static bool
1979 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
1980 rtx operand, bool strict_p)
1981 {
1982 switch (GET_CODE (operand))
1983 {
1984 /* Direct. */
1985 case SYMBOL_REF:
1986 if (SYMBOL_REF_TLS_MODEL (operand))
1987 return false;
1988
1989 /* Else, fall through. */
1990 case CONST:
1991 if (gprel_constant_p (operand))
1992 return true;
1993
1994 /* Else, fall through. */
1995 case LABEL_REF:
1996 case CONST_INT:
1997 case CONST_DOUBLE:
1998 return false;
1999
2000 /* Register indirect. */
2001 case REG:
2002 return nios2_regno_ok_for_base_p (REGNO (operand), strict_p);
2003
2004 /* Register indirect with displacement. */
2005 case PLUS:
2006 {
2007 rtx op0 = XEXP (operand, 0);
2008 rtx op1 = XEXP (operand, 1);
2009
2010 return (nios2_valid_addr_expr_p (op0, op1, strict_p)
2011 || nios2_valid_addr_expr_p (op1, op0, strict_p));
2012 }
2013
2014 default:
2015 break;
2016 }
2017 return false;
2018 }
2019
2020 /* Return true if SECTION is a small section name. */
2021 static bool
2022 nios2_small_section_name_p (const char *section)
2023 {
2024 return (strcmp (section, ".sbss") == 0
2025 || strncmp (section, ".sbss.", 6) == 0
2026 || strcmp (section, ".sdata") == 0
2027 || strncmp (section, ".sdata.", 7) == 0);
2028 }
2029
2030 /* Return true if EXP should be placed in the small data section. */
2031 static bool
2032 nios2_in_small_data_p (const_tree exp)
2033 {
2034 /* We want to merge strings, so we never consider them small data. */
2035 if (TREE_CODE (exp) == STRING_CST)
2036 return false;
2037
2038 if (TREE_CODE (exp) == VAR_DECL)
2039 {
2040 if (DECL_SECTION_NAME (exp))
2041 {
2042 const char *section = DECL_SECTION_NAME (exp);
2043 if (nios2_small_section_name_p (section))
2044 return true;
2045 }
2046 else
2047 {
2048 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
2049
2050 /* If this is an incomplete type with size 0, then we can't put it
2051 in sdata because it might be too big when completed. */
2052 if (size > 0
2053 && (unsigned HOST_WIDE_INT) size <= nios2_section_threshold)
2054 return true;
2055 }
2056 }
2057
2058 return false;
2059 }
2060
2061 /* Return true if SYMBOL_REF SYM is in the small data section. */
2062
2063 static bool
2064 nios2_symbol_ref_in_small_data_p (rtx sym)
2065 {
2066 tree decl;
2067
2068 gcc_assert (GET_CODE (sym) == SYMBOL_REF);
2069 decl = SYMBOL_REF_DECL (sym);
2070
2071 /* TLS variables are not accessed through the GP. */
2072 if (SYMBOL_REF_TLS_MODEL (sym) != 0)
2073 return false;
2074
2075 /* On Nios II R2, there is no GP-relative relocation that can be
2076 used with "io" instructions. So, if we are implicitly generating
2077 those instructions, we cannot emit GP-relative accesses. */
2078 if (TARGET_ARCH_R2
2079 && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
2080 return false;
2081
2082 /* If the user has explicitly placed the symbol in a small data section
2083 via an attribute, generate gp-relative addressing even if the symbol
2084 is external, weak, or larger than we'd automatically put in the
2085 small data section. OTOH, if the symbol is located in some
2086 non-small-data section, we can't use gp-relative accesses on it
2087 unless the user has requested gpopt_data or gpopt_all. */
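/* For illustration, a symbol X in small data is reachable with one
   gp-relative instruction:
       ldw   r2, %gprel(x)(gp)
   while an ordinary data symbol needs a two-instruction sequence
   such as:
       movhi r2, %hiadj(x)
       ldw   r2, %lo(x)(r2)  */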
2088
2089 switch (nios2_gpopt_option)
2090 {
2091 case gpopt_none:
2092 /* Don't generate a gp-relative addressing mode if that's been
2093 disabled. */
2094 return false;
2095
2096 case gpopt_local:
2097 /* Use GP-relative addressing for small data symbols that are
2098 not external or weak, plus any symbols that have explicitly
2099 been placed in a small data section. */
2100 if (decl && DECL_SECTION_NAME (decl))
2101 return nios2_small_section_name_p (DECL_SECTION_NAME (decl));
2102 return (SYMBOL_REF_SMALL_P (sym)
2103 && !SYMBOL_REF_EXTERNAL_P (sym)
2104 && !(decl && DECL_WEAK (decl)));
2105
2106 case gpopt_global:
2107 /* Use GP-relative addressing for small data symbols, even if
2108 they are external or weak. Note that SYMBOL_REF_SMALL_P
2109 is also true of symbols that have explicitly been placed
2110 in a small data section. */
2111 return SYMBOL_REF_SMALL_P (sym);
2112
2113 case gpopt_data:
2114 /* Use GP-relative addressing for all data symbols regardless
2115 of the object size, but not for code symbols. This option
2116 is equivalent to the user asserting that the entire data
2117 section is accessible from the GP. */
2118 return !SYMBOL_REF_FUNCTION_P (sym);
2119
2120 case gpopt_all:
2121 /* Use GP-relative addressing for everything, including code.
2122 Effectively, the user has asserted that the entire program
2123 fits within the 64K range of the GP offset. */
2124 return true;
2125
2126 default:
2127 /* We shouldn't get here. */
2128 return false;
2129 }
2130 }
2131
2132 /* Implement TARGET_SECTION_TYPE_FLAGS. */
2133
2134 static unsigned int
2135 nios2_section_type_flags (tree decl, const char *name, int reloc)
2136 {
2137 unsigned int flags;
2138
2139 flags = default_section_type_flags (decl, name, reloc);
2140
2141 if (nios2_small_section_name_p (name))
2142 flags |= SECTION_SMALL;
2143
2144 return flags;
2145 }
2146
2147 /* Return true if SYMBOL_REF X binds locally. */
2148
2149 static bool
2150 nios2_symbol_binds_local_p (const_rtx x)
2151 {
2152 return (SYMBOL_REF_DECL (x)
2153 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
2154 : SYMBOL_REF_LOCAL_P (x));
2155 }
2156
2157 /* Position independent code related. */
2158
2159 /* Emit code to load the PIC register. */
2160 static void
2161 nios2_load_pic_register (void)
2162 {
2163 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
2164
2165 emit_insn (gen_load_got_register (pic_offset_table_rtx, tmp));
2166 emit_insn (gen_add3_insn (pic_offset_table_rtx, pic_offset_table_rtx, tmp));
2167 }
2168
2169 /* Generate a PIC address as a MEM rtx. */
2170 static rtx
2171 nios2_load_pic_address (rtx sym, int unspec, rtx tmp)
2172 {
2173 if (flag_pic == 2
2174 && GET_CODE (sym) == SYMBOL_REF
2175 && nios2_symbol_binds_local_p (sym))
2176 /* Under -fPIC, generate a GOTOFF address for local symbols. */
2177 {
2178 rtx offset = nios2_unspec_offset (sym, UNSPEC_PIC_GOTOFF_SYM);
2179 crtl->uses_pic_offset_table = 1;
2180 return nios2_large_got_address (offset, tmp);
2181 }
2182
2183 return gen_const_mem (Pmode, nios2_got_address (sym, unspec));
2184 }
2185
2186 /* Nonzero if the constant value X is a legitimate general operand
2187 when generating PIC code. It is given that flag_pic is on and
2188 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2189 bool
2190 nios2_legitimate_pic_operand_p (rtx x)
2191 {
2192 if (nios2_large_unspec_reloc_p (x))
2193 return true;
2194
2195 return ! (GET_CODE (x) == SYMBOL_REF
2196 || GET_CODE (x) == LABEL_REF || GET_CODE (x) == CONST);
2197 }
2198
2199 /* Return TRUE if X is a thread-local symbol. */
2200 static bool
2201 nios2_tls_symbol_p (rtx x)
2202 {
2203 return (targetm.have_tls && GET_CODE (x) == SYMBOL_REF
2204 && SYMBOL_REF_TLS_MODEL (x) != 0);
2205 }
2206
2207 /* Legitimize addresses that are CONSTANT_P expressions. */
2208 static rtx
2209 nios2_legitimize_constant_address (rtx addr)
2210 {
2211 rtx base, offset;
2212 split_const (addr, &base, &offset);
2213
2214 if (nios2_tls_symbol_p (base))
2215 base = nios2_legitimize_tls_address (base);
2216 else if (flag_pic)
2217 base = nios2_load_pic_address (base, UNSPEC_PIC_SYM, NULL_RTX);
2218 else
2219 return addr;
2220
2221 if (offset != const0_rtx)
2222 {
2223 gcc_assert (can_create_pseudo_p ());
2224 return gen_rtx_PLUS (Pmode, force_reg (Pmode, base),
2225 (CONST_INT_P (offset)
2226 ? (SMALL_INT (INTVAL (offset))
2227 ? offset : force_reg (Pmode, offset))
2228 : offset));
2229 }
2230 return base;
2231 }
2232
2233 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
2234 static rtx
2235 nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2236 machine_mode mode ATTRIBUTE_UNUSED)
2237 {
2238 if (CONSTANT_P (x))
2239 return nios2_legitimize_constant_address (x);
2240
2241 /* For the TLS LE (Local Exec) model, the compiler may try to
2242 combine constant offsets with unspec relocs, creating address RTXs
2243 looking like this:
2244 (plus:SI (reg:SI 23 r23)
2245 (const:SI
2246 (plus:SI
2247 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
2248 (const_int 48 [0x30]))))
2249
2250 This usually happens when 'var' is a thread-local struct variable,
2251 and accessing a field of 'var' introduces the addend.
2252
2253 We typically want this combining, so transform the above into this
2254 form, which is allowed:
2255 (plus:SI (reg:SI 23 r23)
2256 (const:SI
2257 (unspec:SI
2258 [(const:SI
2259 (plus:SI (symbol_ref:SI ("var"))
2260 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
2261
2262 This is output as '%tls_le(var+48)(r23)' in assembly. */
2263 if (GET_CODE (x) == PLUS
2264 && GET_CODE (XEXP (x, 0)) == REG
2265 && GET_CODE (XEXP (x, 1)) == CONST)
2266 {
2267 rtx unspec, offset, reg = XEXP (x, 0);
2268 split_const (XEXP (x, 1), &unspec, &offset);
2269 if (GET_CODE (unspec) == UNSPEC
2270 && !nios2_large_offset_p (XINT (unspec, 1))
2271 && offset != const0_rtx)
2272 {
2273 unspec = copy_rtx (unspec);
2274 XVECEXP (unspec, 0, 0)
2275 = plus_constant (Pmode, XVECEXP (unspec, 0, 0), INTVAL (offset));
2276 x = gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
2277 }
2278 }
2279
2280 return x;
2281 }
2282
2283 static rtx
2284 nios2_delegitimize_address (rtx x)
2285 {
2286 x = delegitimize_mem_from_attrs (x);
2287
2288 if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
2289 {
2290 switch (XINT (XEXP (x, 0), 1))
2291 {
2292 case UNSPEC_PIC_SYM:
2293 case UNSPEC_PIC_CALL_SYM:
2294 case UNSPEC_PIC_GOTOFF_SYM:
2295 case UNSPEC_ADD_TLS_GD:
2296 case UNSPEC_ADD_TLS_LDM:
2297 case UNSPEC_LOAD_TLS_IE:
2298 case UNSPEC_ADD_TLS_LE:
2299 x = XVECEXP (XEXP (x, 0), 0, 0);
2300 gcc_assert (CONSTANT_P (x));
2301 break;
2302 }
2303 }
2304 return x;
2305 }
2306
2307 /* Main expander function for RTL moves. */
2308 bool
2309 nios2_emit_move_sequence (rtx *operands, machine_mode mode)
2310 {
2311 rtx to = operands[0];
2312 rtx from = operands[1];
2313
2314 if (!register_operand (to, mode) && !reg_or_0_operand (from, mode))
2315 {
2316 gcc_assert (can_create_pseudo_p ());
2317 from = copy_to_mode_reg (mode, from);
2318 }
2319
2320 if (CONSTANT_P (from))
2321 {
2322 if (CONST_INT_P (from))
2323 {
2324 if (!SMALL_INT (INTVAL (from))
2325 && !SMALL_INT_UNSIGNED (INTVAL (from))
2326 && !UPPER16_INT (INTVAL (from)))
2327 {
2328 HOST_WIDE_INT high = (INTVAL (from) + 0x8000) & ~0xffff;
2329 HOST_WIDE_INT low = INTVAL (from) & 0xffff;
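	      /* A worked example with an illustrative value: for
	         0x12348765, high = 0x12350000 and low = 0x8765; the
	         move below loads the rounded-up high part, and adding
	         the sign-extended low part (-0x789b) restores the
	         exact constant. */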
2330 emit_move_insn (to, gen_int_mode (high, SImode));
2331 emit_insn (gen_add2_insn (to, gen_int_mode (low, HImode)));
2332 set_unique_reg_note (get_last_insn (), REG_EQUAL,
2333 copy_rtx (from));
2334 return true;
2335 }
2336 }
2337 else if (!gprel_constant_p (from))
2338 {
2339 if (!nios2_large_unspec_reloc_p (from))
2340 from = nios2_legitimize_constant_address (from);
2341 if (CONSTANT_P (from))
2342 {
2343 emit_insn (gen_rtx_SET (to, gen_rtx_HIGH (Pmode, from)));
2344 emit_insn (gen_rtx_SET (to, gen_rtx_LO_SUM (Pmode, to, from)));
2345 set_unique_reg_note (get_last_insn (), REG_EQUAL,
2346 copy_rtx (operands[1]));
2347 return true;
2348 }
2349 }
2350 }
2351
2352 operands[0] = to;
2353 operands[1] = from;
2354 return false;
2355 }
2356
2357 /* The function with address *CALL_OP is being called. If the address
2358 needs to be loaded from the GOT, emit the instruction to do so and
2359 update *CALL_OP to point to the rtx for the loaded value.
2360 If REG != NULL_RTX, it is used as the target/scratch register in the
2361 GOT address calculation. */
2362 void
2363 nios2_adjust_call_address (rtx *call_op, rtx reg)
2364 {
2365 if (MEM_P (*call_op))
2366 call_op = &XEXP (*call_op, 0);
2367
2368 rtx addr = *call_op;
2369 if (flag_pic && CONSTANT_P (addr))
2370 {
2371 rtx tmp = reg;
2372 if (!reg)
2373 reg = gen_reg_rtx (Pmode);
2374 addr = nios2_load_pic_address (addr, UNSPEC_PIC_CALL_SYM, tmp);
2375 emit_insn (gen_rtx_SET (reg, addr));
2376 *call_op = reg;
2377 }
2378 }
2379
2380 \f
2381 /* Output assembly language related definitions. */
2382
2383 /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
2384 static bool
2385 nios2_print_operand_punct_valid_p (unsigned char code)
2386 {
2387 return (code == '.' || code == '!');
2388 }
2389
2390
2391 /* Print the operand OP to file stream FILE modified by LETTER.
2392 LETTER can be one of:
2393
2394 i: print i/hi/ui suffixes (used for mov instruction variants),
2395 when OP is the appropriate immediate operand.
2396
2397 u: like 'i', except without "ui" suffix case (used for cmpgeu/cmpltu)
2398
2399 o: print "io" if OP needs volatile access (due to TARGET_BYPASS_CACHE
2400 or TARGET_BYPASS_CACHE_VOLATILE).
2401
2402 x: print i/hi/ci/chi suffixes for the and instruction,
2403 when OP is the appropriate immediate operand.
2404
2405 z: prints a register or immediate operand in assembly
2406 instructions. Outputs const0_rtx as the 'zero' register
2407 instead of '0'.
2408
2409 y: same as 'z', but specifically for logical instructions,
2410 where the processing of immediates is slightly different.
2411
2412 H: for %hiadj
2413 L: for %lo
2414 D: for the upper 32-bits of a 64-bit double value
2415 R: prints reverse condition.
2416 A: prints (reg) operand for ld[s]ex and st[s]ex.
2417
2418 .: print .n suffix for 16-bit instructions.
2419 !: print r.n suffix for 16-bit instructions. Used for jmpr.n.
2420 */
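/* For example (illustrative; the template name is assumed): with a
   pattern like "mov%i1\t%0, %z1", an immediate of 5 prints the "i"
   suffix (movi), 0x50000 prints "hi" (movhi), and 0x8000 prints "ui"
   (movui); a const0_rtx operand under %z prints as the "zero"
   register. */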
2421 static void
2422 nios2_print_operand (FILE *file, rtx op, int letter)
2423 {
2424
2425 /* First take care of the format letters that just insert a string
2426 into the output stream. */
2427 switch (letter)
2428 {
2429 case '.':
2430 if (current_output_insn && get_attr_length (current_output_insn) == 2)
2431 fprintf (file, ".n");
2432 return;
2433
2434 case '!':
2435 if (current_output_insn && get_attr_length (current_output_insn) == 2)
2436 fprintf (file, "r.n");
2437 return;
2438
2439 case 'x':
2440 if (CONST_INT_P (op))
2441 {
2442 HOST_WIDE_INT val = INTVAL (op);
2443 HOST_WIDE_INT low = val & 0xffff;
2444 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2445
2446 if (val != 0)
2447 {
2448 if (high != 0)
2449 {
2450 if (low != 0)
2451 {
2452 gcc_assert (TARGET_ARCH_R2);
2453 if (high == 0xffff)
2454 fprintf (file, "c");
2455 else if (low == 0xffff)
2456 fprintf (file, "ch");
2457 else
2458 gcc_unreachable ();
2459 }
2460 else
2461 fprintf (file, "h");
2462 }
2463 fprintf (file, "i");
2464 }
2465 }
2466 return;
2467
2468 case 'u':
2469 case 'i':
2470 if (CONST_INT_P (op))
2471 {
2472 HOST_WIDE_INT val = INTVAL (op);
2473 HOST_WIDE_INT low = val & 0xffff;
2474 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2475 if (val != 0)
2476 {
2477 if (low == 0 && high != 0)
2478 fprintf (file, "h");
2479 else if (high == 0 && (low & 0x8000) != 0 && letter != 'u')
2480 fprintf (file, "u");
2481 }
2482 }
2483 if (CONSTANT_P (op) && op != const0_rtx)
2484 fprintf (file, "i");
2485 return;
2486
2487 case 'o':
2488 if (GET_CODE (op) == MEM
2489 && ((MEM_VOLATILE_P (op) && TARGET_BYPASS_CACHE_VOLATILE)
2490 || TARGET_BYPASS_CACHE))
2491 {
2492 gcc_assert (current_output_insn
2493 && get_attr_length (current_output_insn) == 4);
2494 fprintf (file, "io");
2495 }
2496 return;
2497
2498 default:
2499 break;
2500 }
2501
2502 /* Handle comparison operator names. */
2503 if (comparison_operator (op, VOIDmode))
2504 {
2505 enum rtx_code cond = GET_CODE (op);
2506 if (letter == 0)
2507 {
2508 fprintf (file, "%s", GET_RTX_NAME (cond));
2509 return;
2510 }
2511 if (letter == 'R')
2512 {
2513 fprintf (file, "%s", GET_RTX_NAME (reverse_condition (cond)));
2514 return;
2515 }
2516 }
2517
2518 /* Now handle the cases where we actually need to format an operand. */
2519 switch (GET_CODE (op))
2520 {
2521 case REG:
2522 if (letter == 0 || letter == 'z' || letter == 'y')
2523 {
2524 fprintf (file, "%s", reg_names[REGNO (op)]);
2525 return;
2526 }
2527 else if (letter == 'D')
2528 {
2529 fprintf (file, "%s", reg_names[REGNO (op)+1]);
2530 return;
2531 }
2532 break;
2533
2534 case CONST_INT:
2535 {
2536 rtx int_rtx = op;
2537 HOST_WIDE_INT val = INTVAL (int_rtx);
2538 HOST_WIDE_INT low = val & 0xffff;
2539 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2540
2541 if (letter == 'y')
2542 {
2543 if (val == 0)
2544 fprintf (file, "zero");
2545 else
2546 {
2547 if (high != 0)
2548 {
2549 if (low != 0)
2550 {
2551 gcc_assert (TARGET_ARCH_R2);
2552 if (high == 0xffff)
2553 /* andci. */
2554 int_rtx = gen_int_mode (low, SImode);
2555 else if (low == 0xffff)
2556 /* andchi. */
2557 int_rtx = gen_int_mode (high, SImode);
2558 else
2559 gcc_unreachable ();
2560 }
2561 else
2562 /* andhi. */
2563 int_rtx = gen_int_mode (high, SImode);
2564 }
2565 else
2566 /* andi. */
2567 int_rtx = gen_int_mode (low, SImode);
2568 output_addr_const (file, int_rtx);
2569 }
2570 return;
2571 }
2572 else if (letter == 'z')
2573 {
2574 if (val == 0)
2575 fprintf (file, "zero");
2576 else
2577 {
2578 if (low == 0 && high != 0)
2579 int_rtx = gen_int_mode (high, SImode);
2580 else if (low != 0)
2581 {
2582 gcc_assert (high == 0 || high == 0xffff);
2583 int_rtx = gen_int_mode (low, high == 0 ? SImode : HImode);
2584 }
2585 else
2586 gcc_unreachable ();
2587 output_addr_const (file, int_rtx);
2588 }
2589 return;
2590 }
2591 }
2592
2593 /* Else, fall through. */
2594
2595 case CONST:
2596 case LABEL_REF:
2597 case SYMBOL_REF:
2598 case CONST_DOUBLE:
2599 if (letter == 0 || letter == 'z')
2600 {
2601 output_addr_const (file, op);
2602 return;
2603 }
2604 else if (letter == 'H' || letter == 'L')
2605 {
2606 fprintf (file, "%%");
2607 if (GET_CODE (op) == CONST
2608 && GET_CODE (XEXP (op, 0)) == UNSPEC)
2609 {
2610 rtx unspec = XEXP (op, 0);
2611 int unspec_reloc = XINT (unspec, 1);
2612 gcc_assert (nios2_large_offset_p (unspec_reloc));
2613 fprintf (file, "%s_", nios2_unspec_reloc_name (unspec_reloc));
2614 op = XVECEXP (unspec, 0, 0);
2615 }
2616 fprintf (file, letter == 'H' ? "hiadj(" : "lo(");
2617 output_addr_const (file, op);
2618 fprintf (file, ")");
2619 return;
2620 }
2621 break;
2622
2623 case SUBREG:
2624 case MEM:
2625 if (letter == 'A')
2626 {
2627 /* Address of '(reg)' form, with no index. */
2628 fprintf (file, "(%s)", reg_names[REGNO (XEXP (op, 0))]);
2629 return;
2630 }
2631 if (letter == 0)
2632 {
2633 output_address (op);
2634 return;
2635 }
2636 break;
2637
2638 case CODE_LABEL:
2639 if (letter == 0)
2640 {
2641 output_addr_const (file, op);
2642 return;
2643 }
2644 break;
2645
2646 default:
2647 break;
2648 }
2649
2650 output_operand_lossage ("unsupported operand for code '%c'", letter);
2651 gcc_unreachable ();
2652 }
2653
2654 /* Return true if this is a GP-relative accessible reference. */
2655 bool
2656 gprel_constant_p (rtx op)
2657 {
2658 if (GET_CODE (op) == SYMBOL_REF
2659 && nios2_symbol_ref_in_small_data_p (op))
2660 return true;
2661 else if (GET_CODE (op) == CONST
2662 && GET_CODE (XEXP (op, 0)) == PLUS)
2663 return gprel_constant_p (XEXP (XEXP (op, 0), 0));
2664
2665 return false;
2666 }
2667
2668 /* Return the name string for a supported unspec reloc offset. */
2669 static const char *
2670 nios2_unspec_reloc_name (int unspec)
2671 {
2672 switch (unspec)
2673 {
2674 case UNSPEC_PIC_SYM:
2675 return "got";
2676 case UNSPEC_PIC_CALL_SYM:
2677 return "call";
2678 case UNSPEC_PIC_GOTOFF_SYM:
2679 return "gotoff";
2680 case UNSPEC_LOAD_TLS_IE:
2681 return "tls_ie";
2682 case UNSPEC_ADD_TLS_LE:
2683 return "tls_le";
2684 case UNSPEC_ADD_TLS_GD:
2685 return "tls_gd";
2686 case UNSPEC_ADD_TLS_LDM:
2687 return "tls_ldm";
2688 case UNSPEC_ADD_TLS_LDO:
2689 return "tls_ldo";
2690 default:
2691 return NULL;
2692 }
2693 }
2694
2695 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2696 static bool
2697 nios2_output_addr_const_extra (FILE *file, rtx op)
2698 {
2699 const char *name;
2700 gcc_assert (GET_CODE (op) == UNSPEC);
2701
2702 /* Support for printing out const unspec relocations. */
2703 name = nios2_unspec_reloc_name (XINT (op, 1));
2704 if (name)
2705 {
2706 fprintf (file, "%%%s(", name);
2707 output_addr_const (file, XVECEXP (op, 0, 0));
2708 fprintf (file, ")");
2709 return true;
2710 }
2711 return false;
2712 }
2713
2714 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
2715 static void
2716 nios2_print_operand_address (FILE *file, rtx op)
2717 {
2718 switch (GET_CODE (op))
2719 {
2720 case CONST:
2721 case CONST_INT:
2722 case LABEL_REF:
2723 case CONST_DOUBLE:
2724 case SYMBOL_REF:
2725 if (gprel_constant_p (op))
2726 {
2727 fprintf (file, "%%gprel(");
2728 output_addr_const (file, op);
2729 fprintf (file, ")(%s)", reg_names[GP_REGNO]);
2730 return;
2731 }
2732
2733 break;
2734
2735 case PLUS:
2736 {
2737 rtx op0 = XEXP (op, 0);
2738 rtx op1 = XEXP (op, 1);
2739
2740 if (REG_P (op0) && CONSTANT_P (op1))
2741 {
2742 output_addr_const (file, op1);
2743 fprintf (file, "(%s)", reg_names[REGNO (op0)]);
2744 return;
2745 }
2746 else if (REG_P (op1) && CONSTANT_P (op0))
2747 {
2748 output_addr_const (file, op0);
2749 fprintf (file, "(%s)", reg_names[REGNO (op1)]);
2750 return;
2751 }
2752 }
2753 break;
2754
2755 case REG:
2756 fprintf (file, "0(%s)", reg_names[REGNO (op)]);
2757 return;
2758
2759 case MEM:
2760 {
2761 rtx base = XEXP (op, 0);
2762 nios2_print_operand_address (file, base);
2763 return;
2764 }
2765 default:
2766 break;
2767 }
2768
2769 fprintf (stderr, "Missing way to print address\n");
2770 debug_rtx (op);
2771 gcc_unreachable ();
2772 }
2773
2774 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
2775 static void
2776 nios2_output_dwarf_dtprel (FILE *file, int size, rtx x)
2777 {
2778 gcc_assert (size == 4);
2779 fprintf (file, "\t.4byte\t%%tls_ldo(");
2780 output_addr_const (file, x);
2781 fprintf (file, ")");
2782 }
2783
2784 /* Implement TARGET_ASM_FILE_END. */
2785
2786 static void
2787 nios2_asm_file_end (void)
2788 {
2789 /* The Nios II Linux stack is mapped non-executable by default, so add a
2790 .note.GNU-stack section for switching to executable stacks only when
2791 trampolines are generated. */
2792 if (TARGET_LINUX_ABI && trampolines_created)
2793 file_end_indicate_exec_stack ();
2794 }
2795
2796 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
2797 static void
2798 nios2_asm_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2799 {
2800 if (flag_verbose_asm || flag_debug_asm)
2801 {
2802 nios2_compute_frame_layout ();
2803 nios2_dump_frame_layout (file);
2804 }
2805 }
2806
2807 /* Emit assembly of custom FPU instructions. */
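/* For example, assuming -mcustom-fadds=253 (253 is just an example
   code), the SFmode add comes out as:
       custom 253, r2, r4, r5 # fadds r2, r4, r5
   DFmode operations additionally wrap the main instruction with
   fwrx/frdy transfers for the upper 32-bit halves, as built below. */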
2808 const char *
2809 nios2_fpu_insn_asm (enum n2fpu_code code)
2810 {
2811 static char buf[256];
2812 const char *op1, *op2, *op3;
2813 int ln = 256, n = 0;
2814
2815 int N = N2FPU_N (code);
2816 int num_operands = N2FPU (code).num_operands;
2817 const char *insn_name = N2FPU_NAME (code);
2818 tree ftype = nios2_ftype (N2FPU_FTCODE (code));
2819 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
2820 machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
2821
2822 /* Prepare X register for DF input operands. */
2823 if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
2824 n = snprintf (buf, ln, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2825 N2FPU_N (n2fpu_fwrx));
2826
2827 if (src_mode == SFmode)
2828 {
2829 if (dst_mode == VOIDmode)
2830 {
2831 /* The fwry case. */
2832 op1 = op3 = "zero";
2833 op2 = "%0";
2834 num_operands -= 1;
2835 }
2836 else
2837 {
2838 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2839 op2 = "%1";
2840 op3 = (num_operands == 2 ? "zero" : "%2");
2841 }
2842 }
2843 else if (src_mode == DFmode)
2844 {
2845 if (dst_mode == VOIDmode)
2846 {
2847 /* The fwrx case. */
2848 op1 = "zero";
2849 op2 = "%0";
2850 op3 = "%D0";
2851 num_operands -= 1;
2852 }
2853 else
2854 {
2855 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2856 op2 = (num_operands == 2 ? "%1" : "%2");
2857 op3 = (num_operands == 2 ? "%D1" : "%D2");
2858 }
2859 }
2860 else if (src_mode == VOIDmode)
2861 {
2862 /* frdxlo, frdxhi, frdy cases. */
2863 gcc_assert (dst_mode == SFmode);
2864 op1 = "%0";
2865 op2 = op3 = "zero";
2866 }
2867 else if (src_mode == SImode)
2868 {
2869 /* Conversion operators. */
2870 gcc_assert (num_operands == 2);
2871 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2872 op2 = "%1";
2873 op3 = "zero";
2874 }
2875 else
2876 gcc_unreachable ();
2877
2878 /* Main instruction string. */
2879 n += snprintf (buf + n, ln - n, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2880 N, op1, op2, op3, insn_name,
2881 (num_operands >= 2 ? ", %1" : ""),
2882 (num_operands == 3 ? ", %2" : ""));
2883
2884 /* Extraction of Y register for DF results. */
2885 if (dst_mode == DFmode)
2886 snprintf (buf + n, ln - n, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2887 N2FPU_N (n2fpu_frdy));
2888 return buf;
2889 }
2890
2891 \f
2892
2893 /* Function argument related. */
2894
2895 /* Define where to put the arguments to a function. Value is zero to
2896 push the argument on the stack, or a hard register in which to
2897 store the argument.
2898
2899 MODE is the argument's machine mode.
2900 TYPE is the data type of the argument (as a tree).
2901 This is null for libcalls where that information may
2902 not be available.
2903 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2904 the preceding args and about the function being called.
2905 NAMED is nonzero if this argument is a named parameter
2906 (otherwise it is an extra parameter matching an ellipsis). */
2907
2908 static rtx
2909 nios2_function_arg (cumulative_args_t cum_v, machine_mode mode,
2910 const_tree type ATTRIBUTE_UNUSED,
2911 bool named ATTRIBUTE_UNUSED)
2912 {
2913 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2914 rtx return_rtx = NULL_RTX;
2915
2916 if (cum->regs_used < NUM_ARG_REGS)
2917 return_rtx = gen_rtx_REG (mode, FIRST_ARG_REGNO + cum->regs_used);
2918
2919 return return_rtx;
2920 }
2921
2922 /* Return number of bytes, at the beginning of the argument, that must be
2923 put in registers; 0 if the argument is entirely in registers or entirely
2924 in memory. */
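/* For example (illustrative, with four argument registers r4-r7):
   if two argument words are already in use, a 16-byte struct has its
   first 8 bytes passed in r6/r7 (so this returns 8) and the remaining
   8 bytes passed on the stack. */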
2925
2926 static int
2927 nios2_arg_partial_bytes (cumulative_args_t cum_v,
2928 machine_mode mode, tree type ATTRIBUTE_UNUSED,
2929 bool named ATTRIBUTE_UNUSED)
2930 {
2931 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2932 HOST_WIDE_INT param_size;
2933
2934 if (mode == BLKmode)
2935 {
2936 param_size = int_size_in_bytes (type);
2937 gcc_assert (param_size >= 0);
2938 }
2939 else
2940 param_size = GET_MODE_SIZE (mode);
2941
2942 /* Convert to words (round up). */
2943 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
2944
2945 if (cum->regs_used < NUM_ARG_REGS
2946 && cum->regs_used + param_size > NUM_ARG_REGS)
2947 return (NUM_ARG_REGS - cum->regs_used) * UNITS_PER_WORD;
2948
2949 return 0;
2950 }
2951
2952 /* Update the data in CUM to advance over an argument of mode MODE
2953 and data type TYPE; TYPE is null for libcalls where that information
2954 may not be available. */
2955
2956 static void
2957 nios2_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2958 const_tree type ATTRIBUTE_UNUSED,
2959 bool named ATTRIBUTE_UNUSED)
2960 {
2961 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2962 HOST_WIDE_INT param_size;
2963
2964 if (mode == BLKmode)
2965 {
2966 param_size = int_size_in_bytes (type);
2967 gcc_assert (param_size >= 0);
2968 }
2969 else
2970 param_size = GET_MODE_SIZE (mode);
2971
2972 /* Convert to words (round up). */
2973 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
2974
2975 if (cum->regs_used + param_size > NUM_ARG_REGS)
2976 cum->regs_used = NUM_ARG_REGS;
2977 else
2978 cum->regs_used += param_size;
2979 }
2980
2981 enum direction
2982 nios2_function_arg_padding (machine_mode mode, const_tree type)
2983 {
2984 /* On little-endian targets, the first byte of every stack argument
2985 is passed in the first byte of the stack slot. */
2986 if (!BYTES_BIG_ENDIAN)
2987 return upward;
2988
2989 /* Otherwise, integral types are padded downward: the last byte of a
2990 stack argument is passed in the last byte of the stack slot. */
2991 if (type != 0
2992 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
2993 : GET_MODE_CLASS (mode) == MODE_INT)
2994 return downward;
2995
2996 /* Arguments smaller than a stack slot are padded downward. */
2997 if (mode != BLKmode)
2998 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY) ? upward : downward;
2999
3000 return ((int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT))
3001 ? upward : downward);
3002 }
3003
3004 enum direction
3005 nios2_block_reg_padding (machine_mode mode, tree type,
3006 int first ATTRIBUTE_UNUSED)
3007 {
3008 return nios2_function_arg_padding (mode, type);
3009 }
3010
3011 /* Emit RTL insns to initialize the variable parts of a trampoline.
3012 FNADDR is an RTX for the address of the function's pure code.
3013 CXT is an RTX for the static chain value for the function.
3014 On Nios II, we handle this by a library call. */
3015 static void
3016 nios2_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
3017 {
3018 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3019 rtx ctx_reg = force_reg (Pmode, cxt);
3020 rtx addr = force_reg (Pmode, XEXP (m_tramp, 0));
3021
3022 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
3023 LCT_NORMAL, VOIDmode, 3, addr, Pmode, fnaddr, Pmode,
3024 ctx_reg, Pmode);
3025 }
3026
3027 /* Implement TARGET_FUNCTION_VALUE. */
3028 static rtx
3029 nios2_function_value (const_tree ret_type, const_tree fn ATTRIBUTE_UNUSED,
3030 bool outgoing ATTRIBUTE_UNUSED)
3031 {
3032 return gen_rtx_REG (TYPE_MODE (ret_type), FIRST_RETVAL_REGNO);
3033 }
3034
3035 /* Implement TARGET_LIBCALL_VALUE. */
3036 static rtx
3037 nios2_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
3038 {
3039 return gen_rtx_REG (mode, FIRST_RETVAL_REGNO);
3040 }
3041
3042 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
3043 static bool
3044 nios2_function_value_regno_p (const unsigned int regno)
3045 {
3046 return regno == FIRST_RETVAL_REGNO;
3047 }
3048
3049 /* Implement TARGET_RETURN_IN_MEMORY. */
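/* For example (illustrative): an 8-byte struct fits in the two return
   registers, while a 12-byte struct, or one whose size is not known at
   compile time, is returned in memory. */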
3050 static bool
3051 nios2_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3052 {
3053 return (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3054 || int_size_in_bytes (type) == -1);
3055 }
3056
3057 /* TODO: It may be possible to eliminate the copyback and implement
3058 our own va_arg type. */
3059 static void
3060 nios2_setup_incoming_varargs (cumulative_args_t cum_v,
3061 machine_mode mode, tree type,
3062 int *pretend_size, int second_time)
3063 {
3064 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3065 CUMULATIVE_ARGS local_cum;
3066 cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
3067 int regs_to_push;
3068 int pret_size;
3069
3070 cfun->machine->uses_anonymous_args = 1;
3071 local_cum = *cum;
3072 nios2_function_arg_advance (local_cum_v, mode, type, true);
3073
3074 regs_to_push = NUM_ARG_REGS - local_cum.regs_used;
3075
3076 /* If we can use CDX stwm to push the arguments on the stack,
3077 nios2_expand_prologue will do that instead. */
3078 if (!TARGET_HAS_CDX && !second_time && regs_to_push > 0)
3079 {
3080 rtx ptr = virtual_incoming_args_rtx;
3081 rtx mem = gen_rtx_MEM (BLKmode, ptr);
3082 emit_insn (gen_blockage ());
3083 move_block_from_reg (local_cum.regs_used + FIRST_ARG_REGNO, mem,
3084 regs_to_push);
3085 emit_insn (gen_blockage ());
3086 }
3087
3088 pret_size = regs_to_push * UNITS_PER_WORD;
3089 if (pret_size)
3090 *pretend_size = pret_size;
3091 }
3092
3093 \f
3094
3095 /* Init FPU builtins. */
3096 static void
3097 nios2_init_fpu_builtins (int start_code)
3098 {
3099 tree fndecl;
3100 char builtin_name[64] = "__builtin_custom_";
3101 unsigned int i, n = strlen ("__builtin_custom_");
3102
3103 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3104 {
3105 snprintf (builtin_name + n, sizeof (builtin_name) - n,
3106 "%s", N2FPU_NAME (i));
3107 fndecl =
3108 add_builtin_function (builtin_name, nios2_ftype (N2FPU_FTCODE (i)),
3109 start_code + i, BUILT_IN_MD, NULL, NULL_TREE);
3110 nios2_register_builtin_fndecl (start_code + i, fndecl);
3111 }
3112 }
3113
3114 /* Helper function for expanding FPU builtins. */
3115 static rtx
3116 nios2_expand_fpu_builtin (tree exp, unsigned int code, rtx target)
3117 {
3118 struct expand_operand ops[MAX_RECOG_OPERANDS];
3119 enum insn_code icode = N2FPU_ICODE (code);
3120 int nargs, argno, opno = 0;
3121 int num_operands = N2FPU (code).num_operands;
3122 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
3123 bool has_target_p = (dst_mode != VOIDmode);
3124
3125 if (N2FPU_N (code) < 0)
3126 fatal_error (input_location,
3127 "Cannot call %<__builtin_custom_%s%> without specifying switch"
3128 " %<-mcustom-%s%>", N2FPU_NAME (code), N2FPU_NAME (code));
3129 if (has_target_p)
3130 create_output_operand (&ops[opno++], target, dst_mode);
3131 else
3132 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
3133 num_operands -= 1;
3134 nargs = call_expr_nargs (exp);
3135 for (argno = 0; argno < nargs; argno++)
3136 {
3137 tree arg = CALL_EXPR_ARG (exp, argno);
3138 create_input_operand (&ops[opno++], expand_normal (arg),
3139 TYPE_MODE (TREE_TYPE (arg)));
3140 }
3141 if (!maybe_expand_insn (icode, num_operands, ops))
3142 {
3143 error ("invalid argument to built-in function");
3144 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
3145 }
3146 return has_target_p ? ops[0].value : const0_rtx;
3147 }
3148
3149 /* Nios II has custom instruction built-in functions of the forms:
3150 __builtin_custom_n
3151 __builtin_custom_nX
3152 __builtin_custom_nXX
3153 __builtin_custom_Xn
3154 __builtin_custom_XnX
3155 __builtin_custom_XnXX
3156
3157 where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
3158 Therefore with 0-1 return values, and 0-2 arguments, we have a
3159 total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
3160 */
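/* Usage sketch (the opcode values are illustrative):
     int y = __builtin_custom_ini (200, x);   int result, int arg
     __builtin_custom_nf (201, f);            no result, float arg  */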
3161 #define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
3162 static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
3163
3164 static void
3165 nios2_init_custom_builtins (int start_code)
3166 {
3167 tree builtin_ftype, ret_type, fndecl;
3168 char builtin_name[32] = "__builtin_custom_";
3169 int n = strlen ("__builtin_custom_");
3170 int builtin_code = 0;
3171 int lhs, rhs1, rhs2;
3172
3173 struct { tree type; const char *c; } op[4];
3174 /* z */ op[0].c = ""; op[0].type = NULL_TREE;
3175 /* f */ op[1].c = "f"; op[1].type = float_type_node;
3176 /* i */ op[2].c = "i"; op[2].type = integer_type_node;
3177 /* p */ op[3].c = "p"; op[3].type = ptr_type_node;
3178
3179 /* We enumerate through the possible operand types to create all the
3180 __builtin_custom_XnXX function tree types. Note that these may slightly
3181 overlap with the function types created for other fixed builtins. */
3182
3183 for (lhs = 0; lhs < 4; lhs++)
3184 for (rhs1 = 0; rhs1 < 4; rhs1++)
3185 for (rhs2 = 0; rhs2 < 4; rhs2++)
3186 {
3187 if (rhs1 == 0 && rhs2 != 0)
3188 continue;
3189 ret_type = (op[lhs].type ? op[lhs].type : void_type_node);
3190 builtin_ftype
3191 = build_function_type_list (ret_type, integer_type_node,
3192 op[rhs1].type, op[rhs2].type,
3193 NULL_TREE);
3194 snprintf (builtin_name + n, 32 - n, "%sn%s%s",
3195 op[lhs].c, op[rhs1].c, op[rhs2].c);
3196 /* Save copy of parameter string into custom_builtin_name[]. */
3197 strncpy (custom_builtin_name[builtin_code], builtin_name + n, 5);
3198 fndecl =
3199 add_builtin_function (builtin_name, builtin_ftype,
3200 start_code + builtin_code,
3201 BUILT_IN_MD, NULL, NULL_TREE);
3202 nios2_register_builtin_fndecl (start_code + builtin_code, fndecl);
3203 builtin_code += 1;
3204 }
3205 }
3206
3207 /* Helper function for expanding custom builtins. */
3208 static rtx
3209 nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
3210 {
3211 bool has_target_p = (TREE_TYPE (exp) != void_type_node);
3212 machine_mode tmode = VOIDmode;
3213 int nargs, argno;
3214 rtx value, insn, unspec_args[3];
3215 tree arg;
3216
3217 /* XnXX form. */
3218 if (has_target_p)
3219 {
3220 tmode = TYPE_MODE (TREE_TYPE (exp));
3221 if (!target || GET_MODE (target) != tmode
3222 || !REG_P (target))
3223 target = gen_reg_rtx (tmode);
3224 }
3225
3226 nargs = call_expr_nargs (exp);
3227 for (argno = 0; argno < nargs; argno++)
3228 {
3229 arg = CALL_EXPR_ARG (exp, argno);
3230 value = expand_normal (arg);
3231 unspec_args[argno] = value;
3232 if (argno == 0)
3233 {
3234 if (!custom_insn_opcode (value, VOIDmode))
3235 error ("custom instruction opcode must be compile time "
3236 "constant in the range 0-255 for __builtin_custom_%s",
3237 custom_builtin_name[index]);
3238 }
3239 else
3240 /* For other arguments, force into a register. */
3241 unspec_args[argno] = force_reg (TYPE_MODE (TREE_TYPE (arg)),
3242 unspec_args[argno]);
3243 }
3244 /* Fill remaining unspec operands with zero. */
3245 for (; argno < 3; argno++)
3246 unspec_args[argno] = const0_rtx;
3247
3248 insn = (has_target_p
3249 ? gen_rtx_SET (target,
3250 gen_rtx_UNSPEC_VOLATILE (tmode,
3251 gen_rtvec_v (3, unspec_args),
3252 UNSPECV_CUSTOM_XNXX))
3253 : gen_rtx_UNSPEC_VOLATILE (VOIDmode, gen_rtvec_v (3, unspec_args),
3254 UNSPECV_CUSTOM_NXX));
3255 emit_insn (insn);
3256 return has_target_p ? target : const0_rtx;
3257 }
3258
3259
3260 \f
3261
3262 /* Main definition of built-in functions. Nios II has a small number of fixed
3263 builtins, plus a large number of FPU insn builtins, and builtins for
3264 generating custom instructions. */
3265
3266 struct nios2_builtin_desc
3267 {
3268 enum insn_code icode;
3269 enum nios2_ftcode ftype;
3270 const char *name;
3271 };
3272
3273 #define N2_BUILTINS \
3274 N2_BUILTIN_DEF (sync, N2_FTYPE_VOID_VOID) \
3275 N2_BUILTIN_DEF (ldbio, N2_FTYPE_SI_CVPTR) \
3276 N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR) \
3277 N2_BUILTIN_DEF (ldhio, N2_FTYPE_SI_CVPTR) \
3278 N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR) \
3279 N2_BUILTIN_DEF (ldwio, N2_FTYPE_SI_CVPTR) \
3280 N2_BUILTIN_DEF (stbio, N2_FTYPE_VOID_VPTR_SI) \
3281 N2_BUILTIN_DEF (sthio, N2_FTYPE_VOID_VPTR_SI) \
3282 N2_BUILTIN_DEF (stwio, N2_FTYPE_VOID_VPTR_SI) \
3283 N2_BUILTIN_DEF (rdctl, N2_FTYPE_SI_SI) \
3284 N2_BUILTIN_DEF (wrctl, N2_FTYPE_VOID_SI_SI)
3285
3286 enum nios2_builtin_code {
3287 #define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
3288 N2_BUILTINS
3289 #undef N2_BUILTIN_DEF
3290 NUM_FIXED_NIOS2_BUILTINS
3291 };
3292
3293 static const struct nios2_builtin_desc nios2_builtins[] = {
3294 #define N2_BUILTIN_DEF(name, ftype) \
3295 { CODE_FOR_ ## name, ftype, "__builtin_" #name },
3296 N2_BUILTINS
3297 #undef N2_BUILTIN_DEF
3298 };
3299
3300 /* Start/ends of FPU/custom insn builtin index ranges. */
3301 static unsigned int nios2_fpu_builtin_base;
3302 static unsigned int nios2_custom_builtin_base;
3303 static unsigned int nios2_custom_builtin_end;
3304
3305 /* Implement TARGET_INIT_BUILTINS. */
3306 static void
3307 nios2_init_builtins (void)
3308 {
3309 unsigned int i;
3310
3311 /* Initialize fixed builtins. */
3312 for (i = 0; i < ARRAY_SIZE (nios2_builtins); i++)
3313 {
3314 const struct nios2_builtin_desc *d = &nios2_builtins[i];
3315 tree fndecl =
3316 add_builtin_function (d->name, nios2_ftype (d->ftype), i,
3317 BUILT_IN_MD, NULL, NULL);
3318 nios2_register_builtin_fndecl (i, fndecl);
3319 }
3320
3321 /* Initialize FPU builtins. */
3322 nios2_fpu_builtin_base = ARRAY_SIZE (nios2_builtins);
3323 nios2_init_fpu_builtins (nios2_fpu_builtin_base);
3324
3325 /* Initialize custom insn builtins. */
3326 nios2_custom_builtin_base
3327 = nios2_fpu_builtin_base + ARRAY_SIZE (nios2_fpu_insn);
3328 nios2_custom_builtin_end
3329 = nios2_custom_builtin_base + NUM_CUSTOM_BUILTINS;
3330 nios2_init_custom_builtins (nios2_custom_builtin_base);
3331 }
3332
3333 /* Array of fndecls for TARGET_BUILTIN_DECL. */
3334 #define NIOS2_NUM_BUILTINS \
3335 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
3336 static GTY(()) tree nios2_builtin_decls[NIOS2_NUM_BUILTINS];
3337
3338 static void
3339 nios2_register_builtin_fndecl (unsigned code, tree fndecl)
3340 {
3341 nios2_builtin_decls[code] = fndecl;
3342 }
3343
3344 /* Implement TARGET_BUILTIN_DECL. */
3345 static tree
3346 nios2_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3347 {
3348 gcc_assert (nios2_custom_builtin_end == ARRAY_SIZE (nios2_builtin_decls));
3349
3350 if (code >= nios2_custom_builtin_end)
3351 return error_mark_node;
3352
3353 if (code >= nios2_fpu_builtin_base
3354 && code < nios2_custom_builtin_base
3355 && ! N2FPU_ENABLED_P (code - nios2_fpu_builtin_base))
3356 return error_mark_node;
3357
3358 return nios2_builtin_decls[code];
3359 }
3360
3361 \f
3362 /* Low-level built-in expand routine. */
3363 static rtx
3364 nios2_expand_builtin_insn (const struct nios2_builtin_desc *d, int n,
3365 struct expand_operand *ops, bool has_target_p)
3366 {
3367 if (maybe_expand_insn (d->icode, n, ops))
3368 return has_target_p ? ops[0].value : const0_rtx;
3369 else
3370 {
3371 error ("invalid argument to built-in function %s", d->name);
3372 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
3373 }
3374 }
3375
3376 /* Expand ldio/stio form load-store instruction builtins. */
3377 static rtx
3378 nios2_expand_ldstio_builtin (tree exp, rtx target,
3379 const struct nios2_builtin_desc *d)
3380 {
3381 bool has_target_p;
3382 rtx addr, mem, val;
3383 struct expand_operand ops[MAX_RECOG_OPERANDS];
3384 machine_mode mode = insn_data[d->icode].operand[0].mode;
3385
3386 addr = expand_normal (CALL_EXPR_ARG (exp, 0));
3387 mem = gen_rtx_MEM (mode, addr);
3388
3389 if (insn_data[d->icode].operand[0].allows_mem)
3390 {
3391 /* stxio. */
3392 val = expand_normal (CALL_EXPR_ARG (exp, 1));
3393 if (CONST_INT_P (val))
3394 val = force_reg (mode, gen_int_mode (INTVAL (val), mode));
3395 val = simplify_gen_subreg (mode, val, GET_MODE (val), 0);
3396 create_output_operand (&ops[0], mem, mode);
3397 create_input_operand (&ops[1], val, mode);
3398 has_target_p = false;
3399 }
3400 else
3401 {
3402 /* ldxio. */
3403 create_output_operand (&ops[0], target, mode);
3404 create_input_operand (&ops[1], mem, mode);
3405 has_target_p = true;
3406 }
3407 return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
3408 }
3409
3410 /* Expand rdctl/wrctl builtins. */
3411 static rtx
3412 nios2_expand_rdwrctl_builtin (tree exp, rtx target,
3413 const struct nios2_builtin_desc *d)
3414 {
3415 bool has_target_p = (insn_data[d->icode].operand[0].predicate
3416 == register_operand);
3417 rtx ctlcode = expand_normal (CALL_EXPR_ARG (exp, 0));
3418 struct expand_operand ops[MAX_RECOG_OPERANDS];
3419 if (!rdwrctl_operand (ctlcode, VOIDmode))
3420 {
3421 error ("Control register number must be in range 0-31 for %s",
3422 d->name);
3423 return has_target_p ? gen_reg_rtx (SImode) : const0_rtx;
3424 }
3425 if (has_target_p)
3426 {
3427 create_output_operand (&ops[0], target, SImode);
3428 create_integer_operand (&ops[1], INTVAL (ctlcode));
3429 }
3430 else
3431 {
3432 rtx val = expand_normal (CALL_EXPR_ARG (exp, 1));
3433 create_integer_operand (&ops[0], INTVAL (ctlcode));
3434 create_input_operand (&ops[1], val, SImode);
3435 }
3436 return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
3437 }
3438
3439 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
3440 a built-in function, with result going to TARGET if that's convenient
3441 (and in mode MODE if that's convenient).
3442 SUBTARGET may be used as the target for computing one of EXP's operands.
3443 IGNORE is nonzero if the value is to be ignored. */
3444
3445 static rtx
3446 nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
3447 machine_mode mode ATTRIBUTE_UNUSED,
3448 int ignore ATTRIBUTE_UNUSED)
3449 {
3450 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3451 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3452
3453 if (fcode < nios2_fpu_builtin_base)
3454 {
3455 const struct nios2_builtin_desc *d = &nios2_builtins[fcode];
3456
3457 switch (fcode)
3458 {
3459 case NIOS2_BUILTIN_sync:
3460 emit_insn (gen_sync ());
3461 return const0_rtx;
3462
3463 case NIOS2_BUILTIN_ldbio:
3464 case NIOS2_BUILTIN_ldbuio:
3465 case NIOS2_BUILTIN_ldhio:
3466 case NIOS2_BUILTIN_ldhuio:
3467 case NIOS2_BUILTIN_ldwio:
3468 case NIOS2_BUILTIN_stbio:
3469 case NIOS2_BUILTIN_sthio:
3470 case NIOS2_BUILTIN_stwio:
3471 return nios2_expand_ldstio_builtin (exp, target, d);
3472
3473 case NIOS2_BUILTIN_rdctl:
3474 case NIOS2_BUILTIN_wrctl:
3475 return nios2_expand_rdwrctl_builtin (exp, target, d);
3476
3477 default:
3478 gcc_unreachable ();
3479 }
3480 }
3481 else if (fcode < nios2_custom_builtin_base)
3482 /* FPU builtin range. */
3483 return nios2_expand_fpu_builtin (exp, fcode - nios2_fpu_builtin_base,
3484 target);
3485 else if (fcode < nios2_custom_builtin_end)
3486 /* Custom insn builtin range. */
3487 return nios2_expand_custom_builtin (exp, fcode - nios2_custom_builtin_base,
3488 target);
3489 else
3490 gcc_unreachable ();
3491 }
3492
3493 /* Implement TARGET_INIT_LIBFUNCS. */
3494 static void
3495 nios2_init_libfuncs (void)
3496 {
3497 /* For Linux, we have access to kernel support for atomic operations. */
3498 if (TARGET_LINUX_ABI)
3499 init_sync_libfuncs (UNITS_PER_WORD);
3500 }
3501
3502 \f
3503
3504 /* Register a custom code use, and signal error if a conflict was found. */
3505 static void
3506 nios2_register_custom_code (unsigned int N, enum nios2_ccs_code status,
3507 int index)
3508 {
3509 gcc_assert (N <= 255);
3510
3511 if (status == CCS_FPU)
3512 {
3513 if (custom_code_status[N] == CCS_FPU && index != custom_code_index[N])
3514 {
3515 custom_code_conflict = true;
3516 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
3517 N2FPU_NAME (custom_code_index[N]), N2FPU_NAME (index));
3518 }
3519 else if (custom_code_status[N] == CCS_BUILTIN_CALL)
3520 {
3521 custom_code_conflict = true;
3522 error ("call to %<__builtin_custom_%s%> conflicts with switch "
3523 "%<-mcustom-%s%>", custom_builtin_name[custom_code_index[N]],
3524 N2FPU_NAME (index));
3525 }
3526 }
3527 else if (status == CCS_BUILTIN_CALL)
3528 {
3529 if (custom_code_status[N] == CCS_FPU)
3530 {
3531 custom_code_conflict = true;
3532 error ("call to %<__builtin_custom_%s%> conflicts with switch "
3533 "%<-mcustom-%s%>", custom_builtin_name[index],
3534 N2FPU_NAME (custom_code_index[N]));
3535 }
3536 else
3537 {
3538 /* Note that code conflicts between different __builtin_custom_xnxx
3539 calls are not checked. */
3540 }
3541 }
3542 else
3543 gcc_unreachable ();
3544
3545 custom_code_status[N] = status;
3546 custom_code_index[N] = index;
3547 }
3548
3549 /* Mark a custom code as not in use. */
3550 static void
3551 nios2_deregister_custom_code (unsigned int N)
3552 {
3553 if (N <= 255)
3554 {
3555 custom_code_status[N] = CCS_UNUSED;
3556 custom_code_index[N] = 0;
3557 }
3558 }
3559
3560 /* Target attributes can affect per-function option state, so we need to
3561 save/restore the custom code tracking info using the
3562 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
3563
3564 static void
3565 nios2_option_save (struct cl_target_option *ptr,
3566 struct gcc_options *opts ATTRIBUTE_UNUSED)
3567 {
3568 unsigned int i;
3569 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3570 ptr->saved_fpu_custom_code[i] = N2FPU_N (i);
3571 memcpy (ptr->saved_custom_code_status, custom_code_status,
3572 sizeof (custom_code_status));
3573 memcpy (ptr->saved_custom_code_index, custom_code_index,
3574 sizeof (custom_code_index));
3575 }
3576
3577 static void
3578 nios2_option_restore (struct gcc_options *opts ATTRIBUTE_UNUSED,
3579 struct cl_target_option *ptr)
3580 {
3581 unsigned int i;
3582 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3583 N2FPU_N (i) = ptr->saved_fpu_custom_code[i];
3584 memcpy (custom_code_status, ptr->saved_custom_code_status,
3585 sizeof (custom_code_status));
3586 memcpy (custom_code_index, ptr->saved_custom_code_index,
3587 sizeof (custom_code_index));
3588 }
3589
3590 /* Inner function to process the attribute((target(...))), take an argument and
3591 set the current options from the argument. If we have a list, recursively
3592 go over the list. */
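/* Usage sketch (the function name and code value are illustrative):
     void fast_mult (void)
       __attribute__ ((target ("custom-fmuls=252,no-custom-fdivs")));
   enables the fmuls custom instruction with code 252 and disables any
   fdivs custom instruction for this function only. */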
3593
3594 static bool
3595 nios2_valid_target_attribute_rec (tree args)
3596 {
3597 if (TREE_CODE (args) == TREE_LIST)
3598 {
3599 bool ret = true;
3600 for (; args; args = TREE_CHAIN (args))
3601 if (TREE_VALUE (args)
3602 && !nios2_valid_target_attribute_rec (TREE_VALUE (args)))
3603 ret = false;
3604 return ret;
3605 }
3606 else if (TREE_CODE (args) == STRING_CST)
3607 {
3608 char *argstr = ASTRDUP (TREE_STRING_POINTER (args));
3609 while (argstr && *argstr != '\0')
3610 {
3611 bool no_opt = false, end_p = false;
3612 char *eq = NULL, *p;
3613 while (ISSPACE (*argstr))
3614 argstr++;
3615 p = argstr;
3616 while (*p != '\0' && *p != ',')
3617 {
3618 if (!eq && *p == '=')
3619 eq = p;
3620 ++p;
3621 }
3622 if (*p == '\0')
3623 end_p = true;
3624 else
3625 *p = '\0';
3626 if (eq) *eq = '\0';
3627
3628 if (!strncmp (argstr, "no-", 3))
3629 {
3630 no_opt = true;
3631 argstr += 3;
3632 }
3633 if (!strncmp (argstr, "custom-fpu-cfg", 14))
3634 {
3635 char *end_eq = p;
3636 if (no_opt)
3637 {
3638 error ("custom-fpu-cfg option does not support %<no-%>");
3639 return false;
3640 }
3641 if (!eq)
3642 {
3643 error ("custom-fpu-cfg option requires configuration"
3644 " argument");
3645 return false;
3646 }
3647 /* Increment and skip whitespace. */
3648 while (ISSPACE (*(++eq))) ;
3649 /* Decrement and skip to before any trailing whitespace. */
3650 while (ISSPACE (*(--end_eq))) ;
3651
3652 nios2_handle_custom_fpu_cfg (eq, end_eq + 1, true);
3653 }
3654 else if (!strncmp (argstr, "custom-", 7))
3655 {
3656 int code = -1;
3657 unsigned int i;
3658 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3659 if (!strncmp (argstr + 7, N2FPU_NAME (i),
3660 strlen (N2FPU_NAME (i))))
3661 {
3662 /* Found insn. */
3663 code = i;
3664 break;
3665 }
3666 if (code >= 0)
3667 {
3668 if (no_opt)
3669 {
3670 if (eq)
3671 {
3672 error ("%<no-custom-%s%> does not accept arguments",
3673 N2FPU_NAME (code));
3674 return false;
3675 }
3676 /* Disable option by setting to -1. */
3677 nios2_deregister_custom_code (N2FPU_N (code));
3678 N2FPU_N (code) = -1;
3679 }
3680 else
3681 {
3682 char *t;
3683 if (eq)
3684 while (ISSPACE (*(++eq))) ;
3685 if (!eq || eq == p)
3686 {
3687 error ("%<custom-%s=%> requires argument",
3688 N2FPU_NAME (code));
3689 return false;
3690 }
3691 for (t = eq; t != p; ++t)
3692 {
3693 if (ISSPACE (*t))
3694 continue;
3695 if (!ISDIGIT (*t))
3696 {
3697 error ("`custom-%s=' argument requires "
3698 "numeric digits", N2FPU_NAME (code));
3699 return false;
3700 }
3701 }
3702 /* Set option to argument. */
3703 N2FPU_N (code) = atoi (eq);
3704 nios2_handle_custom_fpu_insn_option (code);
3705 }
3706 }
3707 else
3708 {
3709 error ("%<custom-%s=%> is not recognised as FPU instruction",
3710 argstr + 7);
3711 return false;
3712 }
3713 }
3714 else
3715 {
3716 error ("%<%s%> is unknown", argstr);
3717 return false;
3718 }
3719
3720 if (end_p)
3721 break;
3722 else
3723 argstr = p + 1;
3724 }
3725 return true;
3726 }
3727 else
3728 gcc_unreachable ();
3729 }
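/* Editor's illustrative sketch, not part of the original source: the
   kinds of strings the recursive parser above accepts. The custom
   instruction numbers are arbitrary examples.

     void fast_path (void)
       __attribute__ ((target ("custom-fmuls=252,no-custom-fdivs")));

     void fpu_cfg_path (void)
       __attribute__ ((target ("custom-fpu-cfg=60-1")));

   Each comma-separated entry is handled by one iteration of the
   STRING_CST loop: a "no-" prefix is stripped first, then the entry
   is matched against "custom-fpu-cfg" or a "custom-<insn>" name,
   with "=" introducing the numeric or configuration argument.  */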
3730
3731 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
3732
3733 static tree
3734 nios2_valid_target_attribute_tree (tree args)
3735 {
3736 if (!nios2_valid_target_attribute_rec (args))
3737 return NULL_TREE;
3738 nios2_custom_check_insns ();
3739 return build_target_option_node (&global_options);
3740 }
3741
3742 /* Hook to validate attribute((target("string"))). */
3743
3744 static bool
3745 nios2_valid_target_attribute_p (tree fndecl, tree ARG_UNUSED (name),
3746 tree args, int ARG_UNUSED (flags))
3747 {
3748 struct cl_target_option cur_target;
3749 bool ret = true;
3750 tree old_optimize = build_optimization_node (&global_options);
3751 tree new_target, new_optimize;
3752 tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);
3753
3754 /* If the function changed the optimization levels as well as setting target
3755 options, start with the optimizations specified. */
3756 if (func_optimize && func_optimize != old_optimize)
3757 cl_optimization_restore (&global_options,
3758 TREE_OPTIMIZATION (func_optimize));
3759
3760 /* The target attributes may also change some optimization flags, so update
3761 the optimization options if necessary. */
3762 cl_target_option_save (&cur_target, &global_options);
3763 new_target = nios2_valid_target_attribute_tree (args);
3764 new_optimize = build_optimization_node (&global_options);
3765
3766 if (!new_target)
3767 ret = false;
3768
3769 else if (fndecl)
3770 {
3771 DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;
3772
3773 if (old_optimize != new_optimize)
3774 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
3775 }
3776
3777 cl_target_option_restore (&global_options, &cur_target);
3778
3779 if (old_optimize != new_optimize)
3780 cl_optimization_restore (&global_options,
3781 TREE_OPTIMIZATION (old_optimize));
3782 return ret;
3783 }
3784
3785 /* Remember the fndecl last processed by nios2_set_current_function. */
3786 static GTY(()) tree nios2_previous_fndecl;
3787
3788 /* Establish appropriate back-end context for processing the function
3789 FNDECL. The argument might be NULL to indicate processing at top
3790 level, outside of any function scope. */
3791 static void
3792 nios2_set_current_function (tree fndecl)
3793 {
3794 tree old_tree = (nios2_previous_fndecl
3795 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl)
3796 : NULL_TREE);
3797
3798 tree new_tree = (fndecl
3799 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
3800 : NULL_TREE);
3801
3802 if (fndecl && fndecl != nios2_previous_fndecl)
3803 {
3804 nios2_previous_fndecl = fndecl;
3805 if (old_tree == new_tree)
3806 ;
3807
3808 else if (new_tree)
3809 {
3810 cl_target_option_restore (&global_options,
3811 TREE_TARGET_OPTION (new_tree));
3812 target_reinit ();
3813 }
3814
3815 else if (old_tree)
3816 {
3817 struct cl_target_option *def
3818 = TREE_TARGET_OPTION (target_option_current_node);
3819
3820 cl_target_option_restore (&global_options, def);
3821 target_reinit ();
3822 }
3823 }
3824 }
3825
3826 /* Hook to validate the current #pragma GCC target and set the FPU custom
3827 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3828 the options. */
3829 static bool
3830 nios2_pragma_target_parse (tree args, tree pop_target)
3831 {
3832 tree cur_tree;
3833 if (! args)
3834 {
3835 cur_tree = ((pop_target)
3836 ? pop_target
3837 : target_option_default_node);
3838 cl_target_option_restore (&global_options,
3839 TREE_TARGET_OPTION (cur_tree));
3840 }
3841 else
3842 {
3843 cur_tree = nios2_valid_target_attribute_tree (args);
3844 if (!cur_tree)
3845 return false;
3846 }
3847
3848 target_option_current_node = cur_tree;
3849 return true;
3850 }
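/* Editor's illustrative sketch, not part of the original source: the
   pragma forms this hook handles. The custom instruction number is an
   arbitrary example.

     #pragma GCC push_options
     #pragma GCC target ("custom-fmuls=252")
     float scale (float a, float b) { return a * b; }
     #pragma GCC pop_options

   The "target" pragma reaches this hook with ARGS set; "pop_options"
   reaches it with ARGS == NULL and POP_TARGET holding the saved
   option node to restore.  */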
3851
3852 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
3853 We are just using this hook to add some additional error checking to
3854 the default behavior. GCC does not provide a target hook for merging
3855 the target options, and only correctly handles merging empty vs non-empty
3856 option data; see merge_decls() in c-decl.c.
3857 So here we require either that at least one of the decls has empty
3858 target options, or that the target options/data be identical. */
3859 static tree
3860 nios2_merge_decl_attributes (tree olddecl, tree newdecl)
3861 {
3862 tree oldopts = lookup_attribute ("target", DECL_ATTRIBUTES (olddecl));
3863 tree newopts = lookup_attribute ("target", DECL_ATTRIBUTES (newdecl));
3864 if (newopts && oldopts && newopts != oldopts)
3865 {
3866 tree oldtree = DECL_FUNCTION_SPECIFIC_TARGET (olddecl);
3867 tree newtree = DECL_FUNCTION_SPECIFIC_TARGET (newdecl);
3868 if (oldtree && newtree && oldtree != newtree)
3869 {
3870 struct cl_target_option *olddata = TREE_TARGET_OPTION (oldtree);
3871 struct cl_target_option *newdata = TREE_TARGET_OPTION (newtree);
3872 if (olddata != newdata
3873 && memcmp (olddata, newdata, sizeof (struct cl_target_option)))
3874 error ("%qE redeclared with conflicting %qs attributes",
3875 DECL_NAME (newdecl), "target");
3876 }
3877 }
3878 return merge_attributes (DECL_ATTRIBUTES (olddecl),
3879 DECL_ATTRIBUTES (newdecl));
3880 }
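/* Editor's illustrative sketch, not part of the original source: a
   redeclaration rejected by the check above, because both decls carry
   non-empty but different target option data:

     extern void f (void) __attribute__ ((target ("custom-fmuls=252")));
     extern void f (void) __attribute__ ((target ("custom-fmuls=253")));

   This produces "'f' redeclared with conflicting 'target' attributes".  */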
3881
3882 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
3883 static void
3884 nios2_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
3885 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
3886 tree function)
3887 {
3888 rtx this_rtx, funexp;
3889 rtx_insn *insn;
3890
3891 /* Pretend to be a post-reload pass while generating rtl. */
3892 reload_completed = 1;
3893
3894 if (flag_pic)
3895 nios2_load_pic_register ();
3896
3897 /* Mark the end of the (empty) prologue. */
3898 emit_note (NOTE_INSN_PROLOGUE_END);
3899
3900 /* Find the "this" pointer. If the function returns a structure,
3901 the hidden structure return pointer occupies $4, so "this" is in $5. */
3902 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
3903 this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO + 1);
3904 else
3905 this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
3906
3907 /* Add DELTA to THIS_RTX. */
3908 nios2_emit_add_constant (this_rtx, delta);
3909
3910 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
3911 if (vcall_offset)
3912 {
3913 rtx tmp;
3914
3915 tmp = gen_rtx_REG (Pmode, 2);
3916 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
3917 nios2_emit_add_constant (tmp, vcall_offset);
3918 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
3919 emit_insn (gen_add2_insn (this_rtx, tmp));
3920 }
3921
3922 /* Generate a tail call to the target function. */
3923 if (!TREE_USED (function))
3924 {
3925 assemble_external (function);
3926 TREE_USED (function) = 1;
3927 }
3928 funexp = XEXP (DECL_RTL (function), 0);
3929 /* Under PIC the function address must be constructed at run time;
3930 provide r2 as a scratch register for this. */
3931 nios2_adjust_call_address (&funexp, gen_rtx_REG (Pmode, 2));
3932 insn = emit_call_insn (gen_sibcall_internal (funexp, const0_rtx));
3933 SIBLING_CALL_P (insn) = 1;
3934
3935 /* Run just enough of rest_of_compilation to get the insns emitted.
3936 There's not really enough bulk here to make other passes such as
3937 instruction scheduling worth while. Note that use_thunk calls
3938 assemble_start_function and assemble_end_function. */
3939 insn = get_insns ();
3940 shorten_branches (insn);
3941 final_start_function (insn, file, 1);
3942 final (insn, file, 1);
3943 final_end_function ();
3944
3945 /* Stop pretending to be a post-reload pass. */
3946 reload_completed = 0;
3947 }
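/* Editor's illustrative sketch, not part of the original source: for
   DELTA == 8 and VCALL_OFFSET == 12 (hypothetical values), the thunk
   emitted above is roughly equivalent to:

     addi  r4, r4, 8        # this += DELTA
     ldw   r2, 0(r4)        # r2 = vtable pointer, *this
     ldw   r2, 12(r2)       # r2 = *(vtable + VCALL_OFFSET)
     add   r4, r4, r2       # this += r2
     jmpi  f                # tail call (non-PIC case)

   with "this" in r5 instead of r4 when the function returns an
   aggregate in memory.  */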
3948
3949
3950 /* Utility function to break a memory address into
3951 base register + constant offset. Return false if something
3952 unexpected is seen. */
3953 static bool
3954 split_mem_address (rtx addr, rtx *base_reg, rtx *offset)
3955 {
3956 if (REG_P (addr))
3957 {
3958 *base_reg = addr;
3959 *offset = const0_rtx;
3960 return true;
3961 }
3962 else if (GET_CODE (addr) == PLUS)
3963 {
3964 *base_reg = XEXP (addr, 0);
3965 *offset = XEXP (addr, 1);
3966 return true;
3967 }
3968 return false;
3969 }
3970
3971 /* Splits out the operands of an ALU insn into *LHS, *RHS1, and *RHS2; *RHS2 is not written for unary ops, so callers may pass NULL. */
3972 static void
3973 split_alu_insn (rtx_insn *insn, rtx *lhs, rtx *rhs1, rtx *rhs2)
3974 {
3975 rtx pat = PATTERN (insn);
3976 gcc_assert (GET_CODE (pat) == SET);
3977 *lhs = SET_DEST (pat);
3978 *rhs1 = XEXP (SET_SRC (pat), 0);
3979 if (GET_RTX_CLASS (GET_CODE (SET_SRC (pat))) != RTX_UNARY)
3980 *rhs2 = XEXP (SET_SRC (pat), 1);
3981 return;
3982 }
3983
3984 /* Returns true if OP is a REG and assigned a CDX reg. */
3985 static bool
3986 cdxreg (rtx op)
3987 {
3988 return REG_P (op) && (!reload_completed || CDX_REG_P (REGNO (op)));
3989 }
3990
3991 /* Returns true if OP is within range of CDX addi.n immediates. */
3992 static bool
3993 cdx_add_immed (rtx op)
3994 {
3995 if (CONST_INT_P (op))
3996 {
3997 HOST_WIDE_INT ival = INTVAL (op);
3998 return ival <= 128 && ival > 0 && (ival & (ival - 1)) == 0;
3999 }
4000 return false;
4001 }
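/* That is, the accepted addi.n/subi.n immediates are exactly the
   eight powers of two from 1 to 128. */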
4002
4003 /* Returns true if OP is within range of CDX andi.n immediates. */
4004 static bool
4005 cdx_and_immed (rtx op)
4006 {
4007 if (CONST_INT_P (op))
4008 {
4009 HOST_WIDE_INT ival = INTVAL (op);
4010 return (ival == 1 || ival == 2 || ival == 3 || ival == 4
4011 || ival == 8 || ival == 0xf || ival == 0x10
4012 || ival == 0x1f || ival == 0x20 || ival == 0x3f
4013 || ival == 0x7f || ival == 0x80 || ival == 0xff
4014 || ival == 0x7ff || ival == 0xff00
4015 || ival == 0xffff);
4016 }
4017 return false;
4018 }
4019
4020 /* Returns true if OP is within range of CDX movi.n immediates. */
4021 static bool
4022 cdx_mov_immed (rtx op)
4023 {
4024 if (CONST_INT_P (op))
4025 {
4026 HOST_WIDE_INT ival = INTVAL (op);
4027 return ((ival >= 0 && ival <= 124)
4028 || ival == 0xff || ival == -2 || ival == -1);
4029 }
4030 return false;
4031 }
4032
4033 /* Returns true if OP is within range of CDX slli.n/srli.n immediates. */
4034 static bool
4035 cdx_shift_immed (rtx op)
4036 {
4037 if (CONST_INT_P (op))
4038 {
4039 HOST_WIDE_INT ival = INTVAL (op);
4040 return (ival == 1 || ival == 2 || ival == 3 || ival == 8
4041 || ival == 12 || ival == 16 || ival == 24
4042 || ival == 31);
4043 }
4044 return false;
4045 }
4046
4047
4048
4049 /* Classification of different kinds of add instructions. */
4050 enum nios2_add_insn_kind {
4051 nios2_add_n_kind,
4052 nios2_addi_n_kind,
4053 nios2_subi_n_kind,
4054 nios2_spaddi_n_kind,
4055 nios2_spinci_n_kind,
4056 nios2_spdeci_n_kind,
4057 nios2_add_kind,
4058 nios2_addi_kind
4059 };
4060
4061 static const char *nios2_add_insn_names[] = {
4062 "add.n", "addi.n", "subi.n", "spaddi.n", "spinci.n", "spdeci.n",
4063 "add", "addi" };
4064 static bool nios2_add_insn_narrow[] = {
4065 true, true, true, true, true, true,
4066 false, false};
4067
4068 /* Function to classify kinds of add instruction patterns. */
4069 static enum nios2_add_insn_kind
4070 nios2_add_insn_classify (rtx_insn *insn ATTRIBUTE_UNUSED,
4071 rtx lhs, rtx rhs1, rtx rhs2)
4072 {
4073 if (TARGET_HAS_CDX)
4074 {
4075 if (cdxreg (lhs) && cdxreg (rhs1))
4076 {
4077 if (cdxreg (rhs2))
4078 return nios2_add_n_kind;
4079 if (CONST_INT_P (rhs2))
4080 {
4081 HOST_WIDE_INT ival = INTVAL (rhs2);
4082 if (ival > 0 && cdx_add_immed (rhs2))
4083 return nios2_addi_n_kind;
4084 if (ival < 0 && cdx_add_immed (GEN_INT (-ival)))
4085 return nios2_subi_n_kind;
4086 }
4087 }
4088 else if (rhs1 == stack_pointer_rtx
4089 && CONST_INT_P (rhs2))
4090 {
4091 HOST_WIDE_INT imm7 = INTVAL (rhs2) >> 2;
4092 HOST_WIDE_INT rem = INTVAL (rhs2) & 3;
4093 if (rem == 0 && (imm7 & ~0x7f) == 0)
4094 {
4095 if (cdxreg (lhs))
4096 return nios2_spaddi_n_kind;
4097 if (lhs == stack_pointer_rtx)
4098 return nios2_spinci_n_kind;
4099 }
4100 imm7 = -INTVAL(rhs2) >> 2;
4101 rem = -INTVAL (rhs2) & 3;
4102 if (lhs == stack_pointer_rtx
4103 && rem == 0 && (imm7 & ~0x7f) == 0)
4104 return nios2_spdeci_n_kind;
4105 }
4106 }
4107 return ((REG_P (rhs2) || rhs2 == const0_rtx)
4108 ? nios2_add_kind : nios2_addi_kind);
4109 }
4110
4111 /* Emit assembly language for the different kinds of add instructions. */
4112 const char*
4113 nios2_add_insn_asm (rtx_insn *insn, rtx *operands)
4114 {
4115 static char buf[256];
4116 int ln = 256;
4117 enum nios2_add_insn_kind kind
4118 = nios2_add_insn_classify (insn, operands[0], operands[1], operands[2]);
4119 if (kind == nios2_subi_n_kind)
4120 snprintf (buf, ln, "subi.n\t%%0, %%1, %d", (int) -INTVAL (operands[2]));
4121 else if (kind == nios2_spaddi_n_kind)
4122 snprintf (buf, ln, "spaddi.n\t%%0, %%2");
4123 else if (kind == nios2_spinci_n_kind)
4124 snprintf (buf, ln, "spinci.n\t%%2");
4125 else if (kind == nios2_spdeci_n_kind)
4126 snprintf (buf, ln, "spdeci.n\t%d", (int) -INTVAL (operands[2]));
4127 else
4128 snprintf (buf, ln, "%s\t%%0, %%1, %%z2", nios2_add_insn_names[(int)kind]);
4129 return buf;
4130 }
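/* Editor's illustrative sketch, not part of the original source: with
   CDX enabled, (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -16)))
   classifies as nios2_spdeci_n_kind and emits "spdeci.n 16", whereas
   (set (reg:SI r10) (plus:SI (reg:SI r3) (const_int 1))) falls back to
   nios2_addi_kind and emits "addi r10, r3, 1", since r10 has no CDX
   3-bit register encoding.  */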
4131
4132 /* This routine, which the default "length" attribute computation is
4133 based on, encapsulates information about all the cases where CDX
4134 provides a narrow 2-byte instruction form. */
4135 bool
4136 nios2_cdx_narrow_form_p (rtx_insn *insn)
4137 {
4138 rtx pat, lhs, rhs1, rhs2;
4139 enum attr_type type;
4140 if (!TARGET_HAS_CDX)
4141 return false;
4142 type = get_attr_type (insn);
4143 pat = PATTERN (insn);
4144 gcc_assert (reload_completed);
4145 switch (type)
4146 {
4147 case TYPE_CONTROL:
4148 if (GET_CODE (pat) == SIMPLE_RETURN)
4149 return true;
4150 if (GET_CODE (pat) == PARALLEL)
4151 pat = XVECEXP (pat, 0, 0);
4152 if (GET_CODE (pat) == SET)
4153 pat = SET_SRC (pat);
4154 if (GET_CODE (pat) == IF_THEN_ELSE)
4155 {
4156 /* Conditional branch patterns; for these we
4157 only check the comparison to find beqz.n/bnez.n cases.
4158 For the 'nios2_cbranch' pattern, we cannot also check
4159 the branch range here; that is done by the md pattern's
4160 "length" attribute computation. */
4161 rtx cmp = XEXP (pat, 0);
4162 return ((GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
4163 && cdxreg (XEXP (cmp, 0))
4164 && XEXP (cmp, 1) == const0_rtx);
4165 }
4166 if (GET_CODE (pat) == TRAP_IF)
4167 /* trap.n is always usable. */
4168 return true;
4169 if (GET_CODE (pat) == CALL)
4170 pat = XEXP (XEXP (pat, 0), 0);
4171 if (REG_P (pat))
4172 /* Control instructions taking a register operand are indirect
4173 jumps and calls. The CDX instructions have a 5-bit register
4174 field so any reg is valid. */
4175 return true;
4176 else
4177 {
4178 gcc_assert (!insn_variable_length_p (insn));
4179 return false;
4180 }
4181 case TYPE_ADD:
4182 {
4183 enum nios2_add_insn_kind kind;
4184 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4185 kind = nios2_add_insn_classify (insn, lhs, rhs1, rhs2);
4186 return nios2_add_insn_narrow[(int)kind];
4187 }
4188 case TYPE_LD:
4189 {
4190 bool ret;
4191 HOST_WIDE_INT offset, rem = 0;
4192 rtx addr, reg = SET_DEST (pat), mem = SET_SRC (pat);
4193 if (GET_CODE (mem) == SIGN_EXTEND)
4194 /* No CDX form for sign-extended load. */
4195 return false;
4196 if (GET_CODE (mem) == ZERO_EXTEND)
4197 /* The load alternatives in the zero_extend* patterns. */
4198 mem = XEXP (mem, 0);
4199 if (MEM_P (mem))
4200 {
4201 /* ldxio. */
4202 if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
4203 || TARGET_BYPASS_CACHE)
4204 return false;
4205 addr = XEXP (mem, 0);
4206 /* GP-based references are never narrow. */
4207 if (gprel_constant_p (addr))
4208 return false;
4209 ret = split_mem_address (addr, &rhs1, &rhs2);
4210 gcc_assert (ret);
4211 }
4212 else
4213 return false;
4214
4215 offset = INTVAL (rhs2);
4216 if (GET_MODE (mem) == SImode)
4217 {
4218 rem = offset & 3;
4219 offset >>= 2;
4220 /* ldwsp.n case. */
4221 if (rtx_equal_p (rhs1, stack_pointer_rtx)
4222 && rem == 0 && (offset & ~0x1f) == 0)
4223 return true;
4224 }
4225 else if (GET_MODE (mem) == HImode)
4226 {
4227 rem = offset & 1;
4228 offset >>= 1;
4229 }
4230 /* ldbu.n, ldhu.n, ldw.n cases. */
4231 return (cdxreg (reg) && cdxreg (rhs1)
4232 && rem == 0 && (offset & ~0xf) == 0);
4233 }
4234 case TYPE_ST:
4235 if (GET_CODE (pat) == PARALLEL)
4236 /* stex, stsex. */
4237 return false;
4238 else
4239 {
4240 bool ret;
4241 HOST_WIDE_INT offset, rem = 0;
4242 rtx addr, reg = SET_SRC (pat), mem = SET_DEST (pat);
4243 if (!MEM_P (mem))
4244 return false;
4245 /* stxio. */
4246 if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
4247 || TARGET_BYPASS_CACHE)
4248 return false;
4249 addr = XEXP (mem, 0);
4250 /* GP-based references are never narrow. */
4251 if (gprel_constant_p (addr))
4252 return false;
4253 ret = split_mem_address (addr, &rhs1, &rhs2);
4254 gcc_assert (ret);
4255 offset = INTVAL (rhs2);
4256 if (GET_MODE (mem) == SImode)
4257 {
4258 rem = offset & 3;
4259 offset >>= 2;
4260 /* stwsp.n case. */
4261 if (rtx_equal_p (rhs1, stack_pointer_rtx)
4262 && rem == 0 && (offset & ~0x1f) == 0)
4263 return true;
4264 /* stwz.n case. */
4265 else if (reg == const0_rtx && cdxreg (rhs1)
4266 && rem == 0 && (offset & ~0x3f) == 0)
4267 return true;
4268 }
4269 else if (GET_MODE (mem) == HImode)
4270 {
4271 rem = offset & 1;
4272 offset >>= 1;
4273 }
4274 else
4275 {
4276 gcc_assert (GET_MODE (mem) == QImode);
4277 /* stbz.n case. */
4278 if (reg == const0_rtx && cdxreg (rhs1)
4279 && (offset & ~0x3f) == 0)
4280 return true;
4281 }
4282
4283 /* stbu.n, sthu.n, stw.n cases. */
4284 return (cdxreg (reg) && cdxreg (rhs1)
4285 && rem == 0 && (offset & ~0xf) == 0);
4286 }
4287 case TYPE_MOV:
4288 lhs = SET_DEST (pat);
4289 rhs1 = SET_SRC (pat);
4290 if (CONST_INT_P (rhs1))
4291 return (cdxreg (lhs) && cdx_mov_immed (rhs1));
4292 gcc_assert (REG_P (lhs) && REG_P (rhs1));
4293 return true;
4294
4295 case TYPE_AND:
4296 /* Some zero_extend* alternatives are and insns. */
4297 if (GET_CODE (SET_SRC (pat)) == ZERO_EXTEND)
4298 return (cdxreg (SET_DEST (pat))
4299 && cdxreg (XEXP (SET_SRC (pat), 0)));
4300 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4301 if (CONST_INT_P (rhs2))
4302 return (cdxreg (lhs) && cdxreg (rhs1) && cdx_and_immed (rhs2));
4303 return (cdxreg (lhs) && cdxreg (rhs2)
4304 && (!reload_completed || rtx_equal_p (lhs, rhs1)));
4305
4306 case TYPE_OR:
4307 case TYPE_XOR:
4308 /* Note the two-address limitation for CDX form. */
4309 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4310 return (cdxreg (lhs) && cdxreg (rhs2)
4311 && (!reload_completed || rtx_equal_p (lhs, rhs1)));
4312
4313 case TYPE_SUB:
4314 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4315 return (cdxreg (lhs) && cdxreg (rhs1) && cdxreg (rhs2));
4316
4317 case TYPE_NEG:
4318 case TYPE_NOT:
4319 split_alu_insn (insn, &lhs, &rhs1, NULL);
4320 return (cdxreg (lhs) && cdxreg (rhs1));
4321
4322 case TYPE_SLL:
4323 case TYPE_SRL:
4324 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4325 return (cdxreg (lhs)
4326 && ((cdxreg (rhs1) && cdx_shift_immed (rhs2))
4327 || (cdxreg (rhs2)
4328 && (!reload_completed || rtx_equal_p (lhs, rhs1)))));
4329 case TYPE_NOP:
4330 case TYPE_PUSH:
4331 case TYPE_POP:
4332 return true;
4333 default:
4334 break;
4335 }
4336 return false;
4337 }
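/* Editor's illustrative sketch, not part of the original source: in
   the TYPE_LD case above, "ldw r3, 8(r4)" qualifies for the 2-byte
   ldw.n encoding (CDX registers, word-aligned offset in 0..60), while
   "ldw r3, 100(r4)" does not, because 100 exceeds the scaled 4-bit
   offset field, so it stays a 4-byte ldw.  */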
4338
4339 /* Main function to implement the pop_operation predicate that
4340 checks pop.n insn pattern integrity. The CDX pop.n patterns mostly
4341 hardcode the restored registers, so the main checking is for the
4342 SP offsets. */
4343 bool
4344 pop_operation_p (rtx op)
4345 {
4346 int i;
4347 HOST_WIDE_INT last_offset = -1, len = XVECLEN (op, 0);
4348 rtx base_reg, offset;
4349
4350 if (len < 3 /* At least has a return, SP-update, and RA restore. */
4351 || GET_CODE (XVECEXP (op, 0, 0)) != RETURN
4352 || !base_reg_adjustment_p (XVECEXP (op, 0, 1), &base_reg, &offset)
4353 || !rtx_equal_p (base_reg, stack_pointer_rtx)
4354 || !CONST_INT_P (offset)
4355 || (INTVAL (offset) & 3) != 0)
4356 return false;
4357
4358 for (i = len - 1; i > 1; i--)
4359 {
4360 rtx set = XVECEXP (op, 0, i);
4361 rtx curr_base_reg, curr_offset;
4362
4363 if (GET_CODE (set) != SET || !MEM_P (SET_SRC (set))
4364 || !split_mem_address (XEXP (SET_SRC (set), 0),
4365 &curr_base_reg, &curr_offset)
4366 || !rtx_equal_p (base_reg, curr_base_reg)
4367 || !CONST_INT_P (curr_offset))
4368 return false;
4369 if (i == len - 1)
4370 {
4371 last_offset = INTVAL (curr_offset);
4372 if ((last_offset & 3) != 0 || last_offset > 60)
4373 return false;
4374 }
4375 else
4376 {
4377 last_offset += 4;
4378 if (INTVAL (curr_offset) != last_offset)
4379 return false;
4380 }
4381 }
4382 if (last_offset < 0 || last_offset + 4 != INTVAL (offset))
4383 return false;
4384
4385 return true;
4386 }
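/* Editor's illustrative sketch, not part of the original source: a
   PARALLEL accepted by pop_operation_p, restoring r16, r17, and ra
   and popping 12 bytes of stack:

     (parallel [(return)
                (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 12)))
                (set (reg:SI ra) (mem:SI (plus:SI (reg:SI sp) (const_int 8))))
                (set (reg:SI r17) (mem:SI (plus:SI (reg:SI sp) (const_int 4))))
                (set (reg:SI r16) (mem:SI (reg:SI sp)))])

   Offsets ascend by 4 from the last element up to element 2, and the
   highest restore offset plus 4 equals the SP adjustment.  */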
4387
4388
4389 /* Masks of registers that are valid for CDX ldwm/stwm instructions.
4390 The instruction can encode subsets drawn from either R2-R13 or
4391 R14-R23 + FP + RA. */
4392 #define CDX_LDSTWM_VALID_REGS_0 0x00003ffc
4393 #define CDX_LDSTWM_VALID_REGS_1 0x90ffc000
4394
4395 static bool
4396 nios2_ldstwm_regset_p (unsigned int regno, unsigned int *regset)
4397 {
4398 if (*regset == 0)
4399 {
4400 if (CDX_LDSTWM_VALID_REGS_0 & (1 << regno))
4401 *regset = CDX_LDSTWM_VALID_REGS_0;
4402 else if (CDX_LDSTWM_VALID_REGS_1 & (1 << regno))
4403 *regset = CDX_LDSTWM_VALID_REGS_1;
4404 else
4405 return false;
4406 return true;
4407 }
4408 else
4409 return (*regset & (1 << regno)) != 0;
4410 }
4411
4412 /* Main function to implement ldwm_operation/stwm_operation
4413 predicates that check ldwm/stwm insn pattern integrity. */
4414 bool
4415 ldstwm_operation_p (rtx op, bool load_p)
4416 {
4417 int start, i, end = XVECLEN (op, 0) - 1, last_regno = -1;
4418 unsigned int regset = 0;
4419 rtx base_reg, offset;
4420 rtx first_elt = XVECEXP (op, 0, 0);
4421 bool inc_p = true;
4422 bool wb_p = base_reg_adjustment_p (first_elt, &base_reg, &offset);
4423 if (GET_CODE (XVECEXP (op, 0, end)) == RETURN)
4424 end--;
4425 start = wb_p ? 1 : 0;
4426 for (i = start; i <= end; i++)
4427 {
4428 int regno;
4429 rtx reg, mem, elt = XVECEXP (op, 0, i);
4430 /* Return early if not a SET at all. */
4431 if (GET_CODE (elt) != SET)
4432 return false;
4433 reg = load_p ? SET_DEST (elt) : SET_SRC (elt);
4434 mem = load_p ? SET_SRC (elt) : SET_DEST (elt);
4435 if (!REG_P (reg) || !MEM_P (mem))
4436 return false;
4437 regno = REGNO (reg);
4438 if (!nios2_ldstwm_regset_p (regno, &regset))
4439 return false;
4440 /* If no writeback to determine direction, use offset of first MEM. */
4441 if (wb_p)
4442 inc_p = INTVAL (offset) > 0;
4443 else if (i == start)
4444 {
4445 rtx first_base, first_offset;
4446 if (!split_mem_address (XEXP (mem, 0),
4447 &first_base, &first_offset))
4448 return false;
4449 base_reg = first_base;
4450 inc_p = INTVAL (first_offset) >= 0;
4451 }
4452 /* Ensure that no load overwrites the base register. */
4453 if (load_p && regno == (int) REGNO (base_reg))
4454 return false;
4455 /* Check for register order inc/dec integrity. */
4456 if (last_regno >= 0)
4457 {
4458 if (inc_p && last_regno >= regno)
4459 return false;
4460 if (!inc_p && last_regno <= regno)
4461 return false;
4462 }
4463 last_regno = regno;
4464 }
4465 return true;
4466 }
4467
4468 /* Helper for nios2_ldst_parallel, for generating a parallel vector
4469 SET element. */
4470 static rtx
4471 gen_ldst (bool load_p, int regno, rtx base_mem, int offset)
4472 {
4473 rtx reg = gen_rtx_REG (SImode, regno);
4474 rtx mem = adjust_address_nv (base_mem, SImode, offset);
4475 return gen_rtx_SET (load_p ? reg : mem,
4476 load_p ? mem : reg);
4477 }
4478
4479 /* A general routine for creating the body RTL pattern of
4480 ldwm/stwm/push.n/pop.n insns.
4481 LOAD_P: true/false for load/store direction.
4482 REG_INC_P: whether registers are incrementing/decrementing in the
4483 *RTL vector* (not necessarily the order defined in the ISA specification).
4484 OFFSET_INC_P: Same as REG_INC_P, but for the memory offset order.
4485 BASE_MEM: starting MEM.
4486 BASE_UPDATE: amount to update base register; zero means no writeback.
4487 REGMASK: register mask to load/store.
4488 RET_P: true to tag a (return) element at the end of the vector.
4489
4490 Note that this routine does not do any checking. It's the job of the
4491 caller to do the right thing, and the insn patterns to do the
4492 safeguarding. */
4493 static rtx
4494 nios2_ldst_parallel (bool load_p, bool reg_inc_p, bool offset_inc_p,
4495 rtx base_mem, int base_update,
4496 unsigned HOST_WIDE_INT regmask, bool ret_p)
4497 {
4498 rtvec p;
4499 int regno, b = 0, i = 0, n = 0, len = popcount_hwi (regmask);
4500 if (ret_p) len++, i++, b++;
4501 if (base_update != 0) len++, i++;
4502 p = rtvec_alloc (len);
4503 for (regno = (reg_inc_p ? 0 : 31);
4504 regno != (reg_inc_p ? 32 : -1);
4505 regno += (reg_inc_p ? 1 : -1))
4506 if ((regmask & (1 << regno)) != 0)
4507 {
4508 int offset = (offset_inc_p ? 4 : -4) * n++;
4509 RTVEC_ELT (p, i++) = gen_ldst (load_p, regno, base_mem, offset);
4510 }
4511 if (ret_p)
4512 RTVEC_ELT (p, 0) = ret_rtx;
4513 if (base_update != 0)
4514 {
4515 rtx reg, offset;
4516 if (!split_mem_address (XEXP (base_mem, 0), &reg, &offset))
4517 gcc_unreachable ();
4518 RTVEC_ELT (p, b) =
4519 gen_rtx_SET (reg, plus_constant (Pmode, reg, base_update));
4520 }
4521 return gen_rtx_PARALLEL (VOIDmode, p);
4522 }
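/* Editor's illustrative sketch, not part of the original source,
   assuming the RA_REGNO macro used elsewhere in this port: the pop.n
   PARALLEL shown after pop_operation_p above could be built with

     nios2_ldst_parallel (true, false, false,
                          gen_frame_mem (SImode,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        8)),
                          12, (1 << 16) | (1 << 17) | (1 << RA_REGNO),
                          true);

   i.e. loads with registers and offsets both descending from ra at
   sp+8, a +12 writeback of sp, and a (return) tagged at the front.  */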
4523
4524 /* CDX ldwm/stwm peephole optimization pattern related routines. */
4525
4526 /* Data structure and sorting function for ldwm/stwm peephole optimizers. */
4527 struct ldstwm_operand
4528 {
4529 int offset; /* Offset from base register. */
4530 rtx reg; /* Register to store at this offset. */
4531 rtx mem; /* Original mem. */
4532 bool bad; /* True if this load/store can't be combined. */
4533 bool rewrite; /* True if we should rewrite using scratch. */
4534 };
4535
4536 static int
4537 compare_ldstwm_operands (const void *arg1, const void *arg2)
4538 {
4539 const struct ldstwm_operand *op1 = (const struct ldstwm_operand *) arg1;
4540 const struct ldstwm_operand *op2 = (const struct ldstwm_operand *) arg2;
4541 if (op1->bad)
4542 return op2->bad ? 0 : 1;
4543 else if (op2->bad)
4544 return -1;
4545 else
4546 return op1->offset - op2->offset;
4547 }
4548
4549 /* Helper function: return true if a load/store using REGNO with address
4550 BASEREG and offset OFFSET meets the constraints for a 2-byte CDX ldw.n,
4551 stw.n, ldwsp.n, or stwsp.n instruction. */
4552 static bool
4553 can_use_cdx_ldstw (int regno, int basereg, int offset)
4554 {
4555 if (CDX_REG_P (regno) && CDX_REG_P (basereg)
4556 && (offset & 0x3) == 0 && 0 <= offset && offset < 0x40)
4557 return true;
4558 else if (basereg == SP_REGNO
4559 && offset >= 0 && offset < 0x80 && (offset & 0x3) == 0)
4560 return true;
4561 return false;
4562 }
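/* For example, regno 3 with basereg 4 and offset 16 qualifies for
   ldw.n/stw.n, and any regno with the SP base and offset 64 qualifies
   for ldwsp.n/stwsp.n; offset 100 with a non-SP base exceeds the
   scaled 4-bit offset field and is rejected.  */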
4563
4564 /* This function is called from peephole2 optimizers to try to merge
4565 a series of individual loads and stores into a ldwm or stwm. It
4566 can also rewrite the addresses inside the individual loads and
4567 stores to use a common base in a scratch register with smaller
4568 offsets, if that allows them to use CDX ldw.n or stw.n
4569 instructions instead of 4-byte loads or stores.
4570 N is the number of insns we are trying to merge. SCRATCH is non-null
4571 if there is a scratch register available. The OPERANDS array contains
4572 the N REG operands followed by their N corresponding MEM operands. */
4573 bool
4574 gen_ldstwm_peep (bool load_p, int n, rtx scratch, rtx *operands)
4575 {
4576 /* CDX ldwm/stwm instructions allow a maximum of 12 registers to be
4577 specified. */
4578 #define MAX_LDSTWM_OPS 12
4579 struct ldstwm_operand sort[MAX_LDSTWM_OPS];
4580 int basereg = -1;
4581 int baseoffset;
4582 int i, m, lastoffset, lastreg;
4583 unsigned int regmask = 0, usemask = 0, regset;
4584 bool needscratch;
4585 int newbasereg;
4586 int nbytes;
4587
4588 if (!TARGET_HAS_CDX)
4589 return false;
4590 if (n < 2 || n > MAX_LDSTWM_OPS)
4591 return false;
4592
4593 /* Check all the operands for validity and initialize the sort array.
4594 The places where we return false here are all situations that aren't
4595 expected to ever happen -- invalid patterns, invalid registers, etc. */
4596 for (i = 0; i < n; i++)
4597 {
4598 rtx base, offset;
4599 rtx reg = operands[i];
4600 rtx mem = operands[i + n];
4601 int r, o, regno;
4602 bool bad = false;
4603
4604 if (!REG_P (reg) || !MEM_P (mem))
4605 return false;
4606
4607 regno = REGNO (reg);
4608 if (regno > 31)
4609 return false;
4610 if (load_p && (regmask & (1 << regno)) != 0)
4611 return false;
4612 regmask |= 1 << regno;
4613
4614 if (!split_mem_address (XEXP (mem, 0), &base, &offset))
4615 return false;
4616 r = REGNO (base);
4617 o = INTVAL (offset);
4618
4619 if (basereg == -1)
4620 basereg = r;
4621 else if (r != basereg)
4622 bad = true;
4623 usemask |= 1 << r;
4624
4625 sort[i].bad = bad;
4626 sort[i].rewrite = false;
4627 sort[i].offset = o;
4628 sort[i].reg = reg;
4629 sort[i].mem = mem;
4630 }
4631
4632 /* If we are doing a series of register loads, we can't safely reorder
4633 them if any of the regs used in addr expressions are also being set. */
4634 if (load_p && (regmask & usemask))
4635 return false;
4636
4637 /* Sort the array by increasing mem offset order, then check that
4638 offsets are valid and register order matches mem order. At the
4639 end of this loop, m is the number of loads/stores we will try to
4640 combine; the rest are leftovers. */
4641 qsort (sort, n, sizeof (struct ldstwm_operand), compare_ldstwm_operands);
4642
4643 baseoffset = sort[0].offset;
4644 needscratch = baseoffset != 0;
4645 if (needscratch && !scratch)
4646 return false;
4647
4648 lastreg = regmask = regset = 0;
4649 lastoffset = baseoffset;
4650 for (m = 0; m < n && !sort[m].bad; m++)
4651 {
4652 int thisreg = REGNO (sort[m].reg);
4653 if (sort[m].offset != lastoffset
4654 || (m > 0 && lastreg >= thisreg)
4655 || !nios2_ldstwm_regset_p (thisreg, &regset))
4656 break;
4657 lastoffset += 4;
4658 lastreg = thisreg;
4659 regmask |= (1 << thisreg);
4660 }
4661
4662 /* For loads, make sure we are not overwriting the scratch reg.
4663 The peephole2 pattern isn't supposed to match unless the register is
4664 unused all the way through, so this isn't supposed to happen anyway. */
4665 if (load_p
4666 && needscratch
4667 && ((1 << REGNO (scratch)) & regmask) != 0)
4668 return false;
4669 newbasereg = needscratch ? (int) REGNO (scratch) : basereg;
4670
4671 /* We may be able to combine only the first m of the n total loads/stores
4672 into a single instruction. If m < 2, there's no point in emitting
4673 a ldwm/stwm at all, but we might be able to do further optimizations
4674 if we have a scratch. We will count the instruction lengths of the
4675 old and new patterns and store the savings in nbytes. */
4676 if (m < 2)
4677 {
4678 if (!needscratch)
4679 return false;
4680 m = 0;
4681 nbytes = 0;
4682 }
4683 else
4684 nbytes = -4; /* Size of ldwm/stwm. */
4685 if (needscratch)
4686 {
4687 int bo = baseoffset > 0 ? baseoffset : -baseoffset;
4688 if (CDX_REG_P (newbasereg)
4689 && CDX_REG_P (basereg)
4690 && bo <= 128 && bo > 0 && (bo & (bo - 1)) == 0)
4691 nbytes -= 2; /* Size of addi.n/subi.n. */
4692 else
4693 nbytes -= 4; /* Size of non-CDX addi. */
4694 }
4695
4696 /* Count the size of the input load/store instructions being replaced. */
4697 for (i = 0; i < m; i++)
4698 if (can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset))
4699 nbytes += 2;
4700 else
4701 nbytes += 4;
4702
4703 /* We may also be able to save a bit if we can rewrite non-CDX
4704 load/stores that can't be combined into the ldwm/stwm into CDX
4705 load/stores using the scratch reg. For example, this might happen
4706 if baseoffset is large, by bringing the offsets in the load/store
4707 instructions into the range that fits in a CDX instruction. */
4708 if (needscratch && CDX_REG_P (newbasereg))
4709 for (i = m; i < n && !sort[i].bad; i++)
4710 if (!can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset)
4711 && can_use_cdx_ldstw (REGNO (sort[i].reg), newbasereg,
4712 sort[i].offset - baseoffset))
4713 {
4714 sort[i].rewrite = true;
4715 nbytes += 2;
4716 }
4717
4718 /* Are we good to go? */
4719 if (nbytes <= 0)
4720 return false;
4721
4722 /* Emit the scratch load. */
4723 if (needscratch)
4724 emit_insn (gen_rtx_SET (scratch, XEXP (sort[0].mem, 0)));
4725
4726 /* Emit the ldwm/stwm insn. */
4727 if (m > 0)
4728 {
4729 rtvec p = rtvec_alloc (m);
4730 for (i = 0; i < m; i++)
4731 {
4732 int offset = sort[i].offset;
4733 rtx mem, reg = sort[i].reg;
4734 rtx base_reg = gen_rtx_REG (Pmode, newbasereg);
4735 if (needscratch)
4736 offset -= baseoffset;
4737 mem = gen_rtx_MEM (SImode, plus_constant (Pmode, base_reg, offset));
4738 if (load_p)
4739 RTVEC_ELT (p, i) = gen_rtx_SET (reg, mem);
4740 else
4741 RTVEC_ELT (p, i) = gen_rtx_SET (mem, reg);
4742 }
4743 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4744 }
4745
4746 /* Emit any leftover load/stores as individual instructions, doing
4747 the previously-noted rewrites to use the scratch reg. */
4748 for (i = m; i < n; i++)
4749 {
4750 rtx reg = sort[i].reg;
4751 rtx mem = sort[i].mem;
4752 if (sort[i].rewrite)
4753 {
4754 int offset = sort[i].offset - baseoffset;
4755 mem = gen_rtx_MEM (SImode, plus_constant (Pmode, scratch, offset));
4756 }
4757 if (load_p)
4758 emit_move_insn (reg, mem);
4759 else
4760 emit_move_insn (mem, reg);
4761 }
4762 return true;
4763 }
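/* Editor's illustrative sketch, not part of the original source (the
   ldwm operand syntax is approximate): given a scratch register r8
   and three loads whose offsets are too large for the 2-byte forms,

     ldw r16, 100(r4)
     ldw r17, 104(r4)
     ldw r18, 108(r4)

   the routine above rewrites them as

     addi r8, r4, 100
     ldwm {r16-r18}, (r8)

   saving 4 bytes: 12 bytes of ldw versus a 4-byte addi plus a 4-byte
   ldwm.  */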
4764
4765 /* Implement TARGET_MACHINE_DEPENDENT_REORG:
4766 We use this hook when emitting CDX code to enforce the 4-byte
4767 alignment requirement for labels that are used as the targets of
4768 jmpi instructions. CDX code can otherwise contain a mix of 16-bit
4769 and 32-bit instructions aligned on any 16-bit boundary, but functions
4770 and jmpi labels have to be 32-bit aligned because of the way the address
4771 is encoded in the instruction. */
4772
4773 static unsigned char *label_align;
4774 static int min_labelno, max_labelno;
4775
4776 static void
4777 nios2_reorg (void)
4778 {
4779 bool changed = true;
4780 rtx_insn *insn;
4781
4782 if (!TARGET_HAS_CDX)
4783 return;
4784
4785 /* Initialize the data structures. */
4786 if (label_align)
4787 free (label_align);
4788 max_labelno = max_label_num ();
4789 min_labelno = get_first_label_num ();
4790 label_align = XCNEWVEC (unsigned char, max_labelno - min_labelno + 1);
4791
4792 /* Iterate on inserting alignment and adjusting branch lengths until
4793 no more changes. */
4794 while (changed)
4795 {
4796 changed = false;
4797 shorten_branches (get_insns ());
4798
4799 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
4800 if (JUMP_P (insn) && insn_variable_length_p (insn))
4801 {
4802 rtx label = JUMP_LABEL (insn);
4803 /* We rely on the fact that, in the machine description, all
4804 cases where 'jmpi' does the actual branch have a computed
4805 length of 6 or 8. Lengths of 4 and below are all
4806 PC-relative 'br' branches without the jump-alignment
4807 problem. */
4808 if (label && LABEL_P (label) && get_attr_length (insn) > 4)
4809 {
4810 int index = CODE_LABEL_NUMBER (label) - min_labelno;
4811 if (label_align[index] != 2)
4812 {
4813 label_align[index] = 2;
4814 changed = true;
4815 }
4816 }
4817 }
4818 }
4819 }
4820
4821 /* Implement LABEL_ALIGN, using the information gathered in nios2_reorg. */
4822 int
4823 nios2_label_align (rtx label)
4824 {
4825 int n = CODE_LABEL_NUMBER (label);
4826
4827 if (label_align && n >= min_labelno && n <= max_labelno)
4828 return MAX (label_align[n - min_labelno], align_labels_log);
4829 return align_labels_log;
4830 }
4831
4832 /* Implement ADJUST_REG_ALLOC_ORDER. We use the default ordering
4833 for R1 and non-CDX R2 code; for CDX we tweak things to prefer
4834 the registers that can be used as operands to instructions that
4835 have 3-bit register fields. */
4836 void
4837 nios2_adjust_reg_alloc_order (void)
4838 {
4839 const int cdx_reg_alloc_order[] =
4840 {
4841 /* Call-clobbered GPRs within CDX 3-bit encoded range. */
4842 2, 3, 4, 5, 6, 7,
4843 /* Call-saved GPRs within CDX 3-bit encoded range. */
4844 16, 17,
4845 /* Other call-clobbered GPRs. */
4846 8, 9, 10, 11, 12, 13, 14, 15,
4847 /* Other call-saved GPRs. RA placed first since it is always saved. */
4848 31, 18, 19, 20, 21, 22, 23, 28,
4849 /* Fixed GPRs, not used by the register allocator. */
4850 0, 1, 24, 25, 26, 27, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39
4851 };
4852
4853 if (TARGET_HAS_CDX)
4854 memcpy (reg_alloc_order, cdx_reg_alloc_order,
4855 sizeof (int) * FIRST_PSEUDO_REGISTER);
4856 }
4857
4858 \f
4859 /* Initialize the GCC target structure. */
4860 #undef TARGET_ASM_FUNCTION_PROLOGUE
4861 #define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
4862
4863 #undef TARGET_IN_SMALL_DATA_P
4864 #define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
4865
4866 #undef TARGET_SECTION_TYPE_FLAGS
4867 #define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
4868
4869 #undef TARGET_INIT_BUILTINS
4870 #define TARGET_INIT_BUILTINS nios2_init_builtins
4871 #undef TARGET_EXPAND_BUILTIN
4872 #define TARGET_EXPAND_BUILTIN nios2_expand_builtin
4873 #undef TARGET_BUILTIN_DECL
4874 #define TARGET_BUILTIN_DECL nios2_builtin_decl
4875
4876 #undef TARGET_INIT_LIBFUNCS
4877 #define TARGET_INIT_LIBFUNCS nios2_init_libfuncs
4878
4879 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
4880 #define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
4881
4882 #undef TARGET_CAN_ELIMINATE
4883 #define TARGET_CAN_ELIMINATE nios2_can_eliminate
4884
4885 #undef TARGET_FUNCTION_ARG
4886 #define TARGET_FUNCTION_ARG nios2_function_arg
4887
4888 #undef TARGET_FUNCTION_ARG_ADVANCE
4889 #define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
4890
4891 #undef TARGET_ARG_PARTIAL_BYTES
4892 #define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
4893
4894 #undef TARGET_TRAMPOLINE_INIT
4895 #define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
4896
4897 #undef TARGET_FUNCTION_VALUE
4898 #define TARGET_FUNCTION_VALUE nios2_function_value
4899
4900 #undef TARGET_LIBCALL_VALUE
4901 #define TARGET_LIBCALL_VALUE nios2_libcall_value
4902
4903 #undef TARGET_FUNCTION_VALUE_REGNO_P
4904 #define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
4905
4906 #undef TARGET_RETURN_IN_MEMORY
4907 #define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
4908
4909 #undef TARGET_PROMOTE_PROTOTYPES
4910 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
4911
4912 #undef TARGET_SETUP_INCOMING_VARARGS
4913 #define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
4914
4915 #undef TARGET_MUST_PASS_IN_STACK
4916 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
4917
4918 #undef TARGET_LEGITIMATE_CONSTANT_P
4919 #define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
4920
4921 #undef TARGET_LEGITIMIZE_ADDRESS
4922 #define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
4923
4924 #undef TARGET_DELEGITIMIZE_ADDRESS
4925 #define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address
4926
4927 #undef TARGET_LEGITIMATE_ADDRESS_P
4928 #define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
4929
4930 #undef TARGET_PREFERRED_RELOAD_CLASS
4931 #define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
4932
4933 #undef TARGET_RTX_COSTS
4934 #define TARGET_RTX_COSTS nios2_rtx_costs
4935
4936 #undef TARGET_HAVE_TLS
4937 #define TARGET_HAVE_TLS TARGET_LINUX_ABI
4938
4939 #undef TARGET_CANNOT_FORCE_CONST_MEM
4940 #define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
4941
4942 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
4943 #define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
4944
4945 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
4946 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P nios2_print_operand_punct_valid_p
4947
4948 #undef TARGET_PRINT_OPERAND
4949 #define TARGET_PRINT_OPERAND nios2_print_operand
4950
4951 #undef TARGET_PRINT_OPERAND_ADDRESS
4952 #define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
4953
4954 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
4955 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra
4956
4957 #undef TARGET_ASM_FILE_END
4958 #define TARGET_ASM_FILE_END nios2_asm_file_end
4959
4960 #undef TARGET_OPTION_OVERRIDE
4961 #define TARGET_OPTION_OVERRIDE nios2_option_override
4962
4963 #undef TARGET_OPTION_SAVE
4964 #define TARGET_OPTION_SAVE nios2_option_save
4965
4966 #undef TARGET_OPTION_RESTORE
4967 #define TARGET_OPTION_RESTORE nios2_option_restore
4968
4969 #undef TARGET_SET_CURRENT_FUNCTION
4970 #define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
4971
4972 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
4973 #define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
4974
4975 #undef TARGET_OPTION_PRAGMA_PARSE
4976 #define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
4977
4978 #undef TARGET_MERGE_DECL_ATTRIBUTES
4979 #define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
4980
4981 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
4982 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
4983 hook_bool_const_tree_hwi_hwi_const_tree_true
4984
4985 #undef TARGET_ASM_OUTPUT_MI_THUNK
4986 #define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk
4987
4988 #undef TARGET_MACHINE_DEPENDENT_REORG
4989 #define TARGET_MACHINE_DEPENDENT_REORG nios2_reorg
4990
4991 struct gcc_target targetm = TARGET_INITIALIZER;
4992
4993 #include "gt-nios2.h"