1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2017 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "stringpool.h"
31 #include "attribs.h"
32 #include "df.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "optabs.h"
36 #include "regs.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "diagnostic-core.h"
40 #include "output.h"
41 #include "insn-attr.h"
42 #include "flags.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "varasm.h"
46 #include "expr.h"
47 #include "toplev.h"
48 #include "langhooks.h"
49 #include "stor-layout.h"
50 #include "builtins.h"
51 #include "tree-pass.h"
52 #include "xregex.h"
53
54 /* This file should be included last. */
55 #include "target-def.h"
56
57 /* Forward function declarations. */
58 static bool nios2_symbolic_constant_p (rtx);
59 static bool prologue_saved_reg_p (unsigned);
60 static void nios2_load_pic_register (void);
61 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
62 static const char *nios2_unspec_reloc_name (int);
63 static void nios2_register_builtin_fndecl (unsigned, tree);
64 static rtx nios2_ldst_parallel (bool, bool, bool, rtx, int,
65 unsigned HOST_WIDE_INT, bool);
66 static int nios2_address_cost (rtx, machine_mode, addr_space_t, bool);
67
68 /* Threshold for data being put into the small data/bss area, instead
69     of the normal data area (references to the small data/bss area take
70     1 instruction and use the global pointer; references to the normal
71     data area take 2 instructions).  */
72 unsigned HOST_WIDE_INT nios2_section_threshold = NIOS2_DEFAULT_GVALUE;
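/* Illustrative example (not part of the original source): with the
   default threshold (assuming NIOS2_DEFAULT_GVALUE is 8, i.e. -G 8),
   a 4-byte "int x;" is placed in the small data area and accessed
   with a single gp-relative instruction:
	ldw	r2, %gprel(x)(gp)
   while a 100-byte array lands in the normal data area and needs the
   address materialized first:
	movhi	r3, %hiadj(y)
	addi	r3, r3, %lo(y)
	ldw	r2, 0(r3)  */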
73
74 struct GTY (()) machine_function
75 {
76 /* Current frame information, to be filled in by nios2_compute_frame_layout
77 with register save masks, and offsets for the current function. */
78
79 /* Mask of registers to save. */
80 unsigned int save_mask;
81 /* Number of bytes that the entire frame takes up. */
82 int total_size;
83 /* Number of bytes that variables take up. */
84 int var_size;
85 /* Number of bytes that outgoing arguments take up. */
86 int args_size;
87 /* Number of bytes needed to store registers in frame. */
88 int save_reg_size;
89 /* Number of bytes used to store callee-saved registers. */
90 int callee_save_reg_size;
91 /* Offset from new stack pointer to store registers. */
92 int save_regs_offset;
93 /* Offset from save_regs_offset to store frame pointer register. */
94 int fp_save_offset;
95 /* != 0 if function has a variable argument list. */
96 int uses_anonymous_args;
97 /* != 0 if frame layout already calculated. */
98 int initialized;
99 };
100
101 /* State to track the assignment of custom codes to FPU/custom builtins. */
102 static enum nios2_ccs_code custom_code_status[256];
103 static int custom_code_index[256];
104 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
105 static bool custom_code_conflict = false;
106
107 /* State for command-line options. */
108 regex_t nios2_gprel_sec_regex;
109 regex_t nios2_r0rel_sec_regex;
110
111 \f
112 /* Definition of builtin function types for nios2. */
113
114 #define N2_FTYPES \
115 N2_FTYPE(1, (SF)) \
116 N2_FTYPE(1, (VOID)) \
117 N2_FTYPE(2, (DF, DF)) \
118 N2_FTYPE(3, (DF, DF, DF)) \
119 N2_FTYPE(2, (DF, SF)) \
120 N2_FTYPE(2, (DF, SI)) \
121 N2_FTYPE(2, (DF, UI)) \
122 N2_FTYPE(2, (SF, DF)) \
123 N2_FTYPE(2, (SF, SF)) \
124 N2_FTYPE(3, (SF, SF, SF)) \
125 N2_FTYPE(2, (SF, SI)) \
126 N2_FTYPE(2, (SF, UI)) \
127 N2_FTYPE(2, (SI, CVPTR)) \
128 N2_FTYPE(2, (SI, DF)) \
129 N2_FTYPE(3, (SI, DF, DF)) \
130 N2_FTYPE(2, (SI, SF)) \
131 N2_FTYPE(3, (SI, SF, SF)) \
132 N2_FTYPE(2, (SI, SI)) \
133 N2_FTYPE(3, (SI, SI, SI)) \
134 N2_FTYPE(3, (SI, VPTR, SI)) \
135 N2_FTYPE(2, (UI, CVPTR)) \
136 N2_FTYPE(2, (UI, DF)) \
137 N2_FTYPE(2, (UI, SF)) \
138 N2_FTYPE(2, (VOID, DF)) \
139 N2_FTYPE(2, (VOID, SF)) \
140 N2_FTYPE(2, (VOID, SI)) \
141 N2_FTYPE(3, (VOID, SI, SI)) \
142 N2_FTYPE(2, (VOID, VPTR)) \
143 N2_FTYPE(3, (VOID, VPTR, SI))
144
145 #define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
146 #define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
147 #define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
148
149 /* Expand ftcode enumeration. */
150 enum nios2_ftcode {
151 #define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
152 N2_FTYPES
153 #undef N2_FTYPE
154 N2_FTYPE_MAX
155 };
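/* For illustration (not part of the original source): one expansion of
   the machinery above.  N2_FTYPE(2, (SF, SI)) becomes
   N2_FTYPE_OP2 (SF, SI), which token-pastes to the enumerator
   N2_FTYPE_SF_SI; nios2_ftype () below then builds the corresponding
   "float f (int)" type via
   build_function_type_list (float_type_node, integer_type_node,
			     NULL_TREE).  */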
156
157 /* Return the tree function type, based on the ftcode. */
158 static tree
159 nios2_ftype (enum nios2_ftcode ftcode)
160 {
161 static tree types[(int) N2_FTYPE_MAX];
162
163 tree N2_TYPE_SF = float_type_node;
164 tree N2_TYPE_DF = double_type_node;
165 tree N2_TYPE_SI = integer_type_node;
166 tree N2_TYPE_UI = unsigned_type_node;
167 tree N2_TYPE_VOID = void_type_node;
168
169 static const_tree N2_TYPE_CVPTR, N2_TYPE_VPTR;
170 if (!N2_TYPE_CVPTR)
171 {
172 /* const volatile void *. */
173 N2_TYPE_CVPTR
174 = build_pointer_type (build_qualified_type (void_type_node,
175 (TYPE_QUAL_CONST
176 | TYPE_QUAL_VOLATILE)));
177 /* volatile void *. */
178 N2_TYPE_VPTR
179 = build_pointer_type (build_qualified_type (void_type_node,
180 TYPE_QUAL_VOLATILE));
181 }
182 if (types[(int) ftcode] == NULL_TREE)
183 switch (ftcode)
184 {
185 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
186 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
187 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
188 #define N2_FTYPE(N,ARGS) \
189 case N2_FTYPE_OP ## N ARGS: \
190 types[(int) ftcode] \
191 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
192 break;
193 N2_FTYPES
194 #undef N2_FTYPE
195 default: gcc_unreachable ();
196 }
197 return types[(int) ftcode];
198 }
199
200 \f
201 /* Definition of FPU instruction descriptions. */
202
203 struct nios2_fpu_insn_info
204 {
205 const char *name;
206 int num_operands, *optvar;
207 int opt, no_opt;
208 #define N2F_DF 0x1
209 #define N2F_DFREQ 0x2
210 #define N2F_UNSAFE 0x4
211 #define N2F_FINITE 0x8
212 #define N2F_NO_ERRNO 0x10
213 unsigned int flags;
214 enum insn_code icode;
215 enum nios2_ftcode ftcode;
216 };
217
218 /* Base macro for defining FPU instructions. */
219 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
220 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
221 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
222 N2_FTYPE_OP ## nop args }
223
224 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
225 #define N2FPU_OP2(mode) (mode, mode)
226 #define N2FPU_OP3(mode) (mode, mode, mode)
227 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
228 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
229 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
230 #define N2FPU_INSN_SF(code, nop, flags) \
231 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
232 #define N2FPU_INSN_DF(code, nop, flags) \
233 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
234
235 /* Compare instructions, 3 operand FP operation with a SI result. */
236 #define N2FPU_CMP_DEF(code, flags, m, M) \
237 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
238 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
239 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
240 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
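/* For illustration (not part of the original source): the first table
   entry below, N2FPU_INSN_SF (add, 3, 0), expands through
   N2FPU_INSN_DEF and N2FPU_INSN_DEF_BASE to roughly
     { "fadds", 3, &nios2_custom_fadds, OPT_mcustom_fadds_,
       OPT_mno_custom_fadds, 0, CODE_FOR_addsf3, N2_FTYPE_SF_SF_SF }
   so the option variable, insn code, and builtin function type for the
   single-precision add are all derived from one macro invocation.  */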
241
242 /* The order of definition must be kept consistent with
243    enum n2fpu_code in nios2-opts.h.  */
244 struct nios2_fpu_insn_info nios2_fpu_insn[] =
245 {
246 /* Single precision instructions. */
247 N2FPU_INSN_SF (add, 3, 0),
248 N2FPU_INSN_SF (sub, 3, 0),
249 N2FPU_INSN_SF (mul, 3, 0),
250 N2FPU_INSN_SF (div, 3, 0),
251 /* Due to textual difference between min/max and smin/smax. */
252 N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, s, S),
253 N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, s, S),
254 N2FPU_INSN_SF (neg, 2, 0),
255 N2FPU_INSN_SF (abs, 2, 0),
256 N2FPU_INSN_SF (sqrt, 2, 0),
257 N2FPU_INSN_SF (sin, 2, N2F_UNSAFE),
258 N2FPU_INSN_SF (cos, 2, N2F_UNSAFE),
259 N2FPU_INSN_SF (tan, 2, N2F_UNSAFE),
260 N2FPU_INSN_SF (atan, 2, N2F_UNSAFE),
261 N2FPU_INSN_SF (exp, 2, N2F_UNSAFE),
262 N2FPU_INSN_SF (log, 2, N2F_UNSAFE),
263 /* Single precision compares. */
264 N2FPU_CMP_SF (eq), N2FPU_CMP_SF (ne),
265 N2FPU_CMP_SF (lt), N2FPU_CMP_SF (le),
266 N2FPU_CMP_SF (gt), N2FPU_CMP_SF (ge),
267
268 /* Double precision instructions. */
269 N2FPU_INSN_DF (add, 3, 0),
270 N2FPU_INSN_DF (sub, 3, 0),
271 N2FPU_INSN_DF (mul, 3, 0),
272 N2FPU_INSN_DF (div, 3, 0),
273 /* Due to textual difference between min/max and smin/smax. */
274 N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, d, D),
275 N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, d, D),
276 N2FPU_INSN_DF (neg, 2, 0),
277 N2FPU_INSN_DF (abs, 2, 0),
278 N2FPU_INSN_DF (sqrt, 2, 0),
279 N2FPU_INSN_DF (sin, 2, N2F_UNSAFE),
280 N2FPU_INSN_DF (cos, 2, N2F_UNSAFE),
281 N2FPU_INSN_DF (tan, 2, N2F_UNSAFE),
282 N2FPU_INSN_DF (atan, 2, N2F_UNSAFE),
283 N2FPU_INSN_DF (exp, 2, N2F_UNSAFE),
284 N2FPU_INSN_DF (log, 2, N2F_UNSAFE),
285 /* Double precision compares. */
286 N2FPU_CMP_DF (eq), N2FPU_CMP_DF (ne),
287 N2FPU_CMP_DF (lt), N2FPU_CMP_DF (le),
288 N2FPU_CMP_DF (gt), N2FPU_CMP_DF (ge),
289
290 /* Conversion instructions. */
291 N2FPU_INSN_DEF_BASE (floatis, 2, 0, floatsisf2, (SF, SI)),
292 N2FPU_INSN_DEF_BASE (floatus, 2, 0, floatunssisf2, (SF, UI)),
293 N2FPU_INSN_DEF_BASE (floatid, 2, 0, floatsidf2, (DF, SI)),
294 N2FPU_INSN_DEF_BASE (floatud, 2, 0, floatunssidf2, (DF, UI)),
295 N2FPU_INSN_DEF_BASE (round, 2, N2F_NO_ERRNO, lroundsfsi2, (SI, SF)),
296 N2FPU_INSN_DEF_BASE (fixsi, 2, 0, fix_truncsfsi2, (SI, SF)),
297 N2FPU_INSN_DEF_BASE (fixsu, 2, 0, fixuns_truncsfsi2, (UI, SF)),
298 N2FPU_INSN_DEF_BASE (fixdi, 2, 0, fix_truncdfsi2, (SI, DF)),
299 N2FPU_INSN_DEF_BASE (fixdu, 2, 0, fixuns_truncdfsi2, (UI, DF)),
300 N2FPU_INSN_DEF_BASE (fextsd, 2, 0, extendsfdf2, (DF, SF)),
301 N2FPU_INSN_DEF_BASE (ftruncds, 2, 0, truncdfsf2, (SF, DF)),
302
303 /* X, Y access instructions. */
304 N2FPU_INSN_DEF_BASE (fwrx, 2, N2F_DFREQ, nios2_fwrx, (VOID, DF)),
305 N2FPU_INSN_DEF_BASE (fwry, 2, N2F_DFREQ, nios2_fwry, (VOID, SF)),
306 N2FPU_INSN_DEF_BASE (frdxlo, 1, N2F_DFREQ, nios2_frdxlo, (SF)),
307 N2FPU_INSN_DEF_BASE (frdxhi, 1, N2F_DFREQ, nios2_frdxhi, (SF)),
308 N2FPU_INSN_DEF_BASE (frdy, 1, N2F_DFREQ, nios2_frdy, (SF))
309 };
310
311 /* Some macros for ease of access. */
312 #define N2FPU(code) nios2_fpu_insn[(int) code]
313 #define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
314 #define N2FPU_N(code) (*N2FPU(code).optvar)
315 #define N2FPU_NAME(code) (N2FPU(code).name)
316 #define N2FPU_ICODE(code) (N2FPU(code).icode)
317 #define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
318 #define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
319 #define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
320 #define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
321 #define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
322 #define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
323
324 /* Same as above, but for cases where using only the op part is shorter. */
325 #define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
326 #define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
327 #define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
328
329 /* Export the FPU insn enabled predicate to nios2.md. */
330 bool
331 nios2_fpu_insn_enabled (enum n2fpu_code code)
332 {
333 return N2FPU_ENABLED_P (code);
334 }
335
336 /* Return true if COND comparison for mode MODE is enabled under current
337 settings. */
338
339 static bool
340 nios2_fpu_compare_enabled (enum rtx_code cond, machine_mode mode)
341 {
342 if (mode == SFmode)
343 switch (cond)
344 {
345 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqs);
346 case NE: return N2FPU_OP_ENABLED_P (fcmpnes);
347 case GT: return N2FPU_OP_ENABLED_P (fcmpgts);
348 case GE: return N2FPU_OP_ENABLED_P (fcmpges);
349 case LT: return N2FPU_OP_ENABLED_P (fcmplts);
350 case LE: return N2FPU_OP_ENABLED_P (fcmples);
351 default: break;
352 }
353 else if (mode == DFmode)
354 switch (cond)
355 {
356 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqd);
357 case NE: return N2FPU_OP_ENABLED_P (fcmpned);
358 case GT: return N2FPU_OP_ENABLED_P (fcmpgtd);
359 case GE: return N2FPU_OP_ENABLED_P (fcmpged);
360 case LT: return N2FPU_OP_ENABLED_P (fcmpltd);
361 case LE: return N2FPU_OP_ENABLED_P (fcmpled);
362 default: break;
363 }
364 return false;
365 }
366
367 /* Stack layout and calling conventions. */
368
369 #define NIOS2_STACK_ALIGN(LOC) \
370 (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
371 & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
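/* Worked example (not part of the original source), assuming the usual
   32-bit PREFERRED_STACK_BOUNDARY for this target, so the alignment
   unit is 4 bytes:
     NIOS2_STACK_ALIGN (0)  == 0
     NIOS2_STACK_ALIGN (5)  == (5 + 3) & ~3 == 8
     NIOS2_STACK_ALIGN (12) == 12
   i.e. byte counts are rounded up to the next multiple of 4.  */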
372
373 /* Compute the frame layout, filling in cfun->machine with register save
374    masks and offsets, and return the total frame size in bytes.  */
375 static int
376 nios2_compute_frame_layout (void)
377 {
378 unsigned int regno;
379 unsigned int save_mask = 0;
380 int total_size;
381 int var_size;
382 int out_args_size;
383 int save_reg_size;
384 int callee_save_reg_size;
385
386 if (cfun->machine->initialized)
387 return cfun->machine->total_size;
388
389 /* Calculate space needed for gp registers. */
390 save_reg_size = 0;
391 for (regno = 0; regno <= LAST_GP_REG; regno++)
392 if (prologue_saved_reg_p (regno))
393 {
394 save_mask |= 1 << regno;
395 save_reg_size += 4;
396 }
397
398   /* If we are saving any callee-save register, then assume
399      push.n/pop.n should be used.  Make sure RA is saved, and that
400      a contiguous range of registers starting at r16 is saved.  */
401 if (TARGET_HAS_CDX && save_reg_size != 0)
402 {
403 if ((save_mask & (1 << RA_REGNO)) == 0)
404 {
405 save_mask |= 1 << RA_REGNO;
406 save_reg_size += 4;
407 }
408
409 for (regno = 23; regno >= 16; regno--)
410 if ((save_mask & (1 << regno)) != 0)
411 {
412	  /* Starting from the highest-numbered callee-saved
413	     register that is used, make sure all registers down
414	     to r16 are saved, to maintain a contiguous range
415	     for push.n/pop.n.  */
416 unsigned int i;
417 for (i = regno - 1; i >= 16; i--)
418 if ((save_mask & (1 << i)) == 0)
419 {
420 save_mask |= 1 << i;
421 save_reg_size += 4;
422 }
423 break;
424 }
425 }
426
427 callee_save_reg_size = save_reg_size;
428
429 /* If we call eh_return, we need to save the EH data registers. */
430 if (crtl->calls_eh_return)
431 {
432 unsigned i;
433 unsigned r;
434
435 for (i = 0; (r = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
436 if (!(save_mask & (1 << r)))
437 {
438 save_mask |= 1 << r;
439 save_reg_size += 4;
440 }
441 }
442
443 cfun->machine->fp_save_offset = 0;
444 if (save_mask & (1 << HARD_FRAME_POINTER_REGNUM))
445 {
446 int fp_save_offset = 0;
447 for (regno = 0; regno < HARD_FRAME_POINTER_REGNUM; regno++)
448 if (save_mask & (1 << regno))
449 fp_save_offset += 4;
450
451 cfun->machine->fp_save_offset = fp_save_offset;
452 }
453
454 var_size = NIOS2_STACK_ALIGN (get_frame_size ());
455 out_args_size = NIOS2_STACK_ALIGN (crtl->outgoing_args_size);
456 total_size = var_size + out_args_size;
457
458 save_reg_size = NIOS2_STACK_ALIGN (save_reg_size);
459 total_size += save_reg_size;
460 total_size += NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
461
462 /* Save other computed information. */
463 cfun->machine->save_mask = save_mask;
464 cfun->machine->total_size = total_size;
465 cfun->machine->var_size = var_size;
466 cfun->machine->args_size = out_args_size;
467 cfun->machine->save_reg_size = save_reg_size;
468 cfun->machine->callee_save_reg_size = callee_save_reg_size;
469 cfun->machine->initialized = reload_completed;
470 cfun->machine->save_regs_offset = out_args_size + var_size;
471
472 return total_size;
473 }
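/* Sketch (not part of the original source) of the frame layout
   computed above, as offsets from the final stack pointer:

	high addresses
	+--------------------------+ <- sp + total_size (the old sp)
	| pretend args (varargs)   |
	+--------------------------+ <- save_regs_offset + save_reg_size
	| saved registers          |
	+--------------------------+ <- save_regs_offset
	| local variables          |    (= args_size + var_size)
	+--------------------------+ <- args_size
	| outgoing arguments       |
	+--------------------------+ <- sp
	low addresses  */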
474
475 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
476 prologue/epilogue expand routines. */
477 static void
478 save_reg (int regno, unsigned offset)
479 {
480 rtx reg = gen_rtx_REG (SImode, regno);
481 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
482 rtx_insn *insn = emit_move_insn (gen_frame_mem (Pmode, addr), reg);
483 RTX_FRAME_RELATED_P (insn) = 1;
484 }
485
486 static void
487 restore_reg (int regno, unsigned offset)
488 {
489 rtx reg = gen_rtx_REG (SImode, regno);
490 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
491 rtx_insn *insn = emit_move_insn (reg, gen_frame_mem (Pmode, addr));
492 /* Tag epilogue unwind note. */
493 add_reg_note (insn, REG_CFA_RESTORE, reg);
494 RTX_FRAME_RELATED_P (insn) = 1;
495 }
496
497 /* This routine tests for the base register update SET in load/store
498 multiple RTL insns, used in pop_operation_p and ldstwm_operation_p. */
499 static bool
500 base_reg_adjustment_p (rtx set, rtx *base_reg, rtx *offset)
501 {
502 if (GET_CODE (set) == SET
503 && REG_P (SET_DEST (set))
504 && GET_CODE (SET_SRC (set)) == PLUS
505 && REG_P (XEXP (SET_SRC (set), 0))
506 && rtx_equal_p (SET_DEST (set), XEXP (SET_SRC (set), 0))
507 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
508 {
509 *base_reg = XEXP (SET_SRC (set), 0);
510 *offset = XEXP (SET_SRC (set), 1);
511 return true;
512 }
513 return false;
514 }
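/* For illustration (not part of the original source):
   base_reg_adjustment_p matches a SET of the form
     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -8)))
   returning the stack pointer REG in *BASE_REG and the
   (const_int -8) in *OFFSET.  */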
515
516 /* Do the CFA-note bookkeeping for push/pop prologue/epilogue instructions.  */
517 static void
518 nios2_create_cfa_notes (rtx_insn *insn, bool epilogue_p)
519 {
520 int i = 0;
521 rtx base_reg, offset, elt, pat = PATTERN (insn);
522 if (epilogue_p)
523 {
524 elt = XVECEXP (pat, 0, 0);
525 if (GET_CODE (elt) == RETURN)
526 i++;
527 elt = XVECEXP (pat, 0, i);
528 if (base_reg_adjustment_p (elt, &base_reg, &offset))
529 {
530 add_reg_note (insn, REG_CFA_ADJUST_CFA, copy_rtx (elt));
531 i++;
532 }
533 for (; i < XVECLEN (pat, 0); i++)
534 {
535 elt = SET_DEST (XVECEXP (pat, 0, i));
536 gcc_assert (REG_P (elt));
537 add_reg_note (insn, REG_CFA_RESTORE, elt);
538 }
539 }
540 else
541 {
542 /* Tag each of the prologue sets. */
543 for (i = 0; i < XVECLEN (pat, 0); i++)
544 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
545 }
546 }
547
548 /* Temp regno used inside prologue/epilogue. */
549 #define TEMP_REG_NUM 8
550
551 /* Emit conditional trap for checking stack limit. SIZE is the number of
552 additional bytes required.
553
554 GDB prologue analysis depends on this generating a direct comparison
555 to the SP register, so the adjustment to add SIZE needs to be done on
556 the other operand to the comparison. Use TEMP_REG_NUM as a temporary,
557 if necessary. */
558 static void
559 nios2_emit_stack_limit_check (int size)
560 {
561 rtx sum = NULL_RTX;
562
563 if (GET_CODE (stack_limit_rtx) == SYMBOL_REF)
564 {
565 /* This generates a %hiadj/%lo pair with the constant size
566 add handled by the relocations. */
567 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
568 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
569 }
570 else if (!REG_P (stack_limit_rtx))
571 sorry ("Unknown form for stack limit expression");
572 else if (size == 0)
573 sum = stack_limit_rtx;
574 else if (SMALL_INT (size))
575 {
576 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
577 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
578 }
579 else
580 {
581 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
582 emit_move_insn (sum, gen_int_mode (size, Pmode));
583 emit_insn (gen_add2_insn (sum, stack_limit_rtx));
584 }
585
586 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode, stack_pointer_rtx, sum),
587 stack_pointer_rtx, sum, GEN_INT (3)));
588 }
589
590 static rtx_insn *
591 nios2_emit_add_constant (rtx reg, HOST_WIDE_INT immed)
592 {
593 rtx_insn *insn;
594 if (SMALL_INT (immed))
595 insn = emit_insn (gen_add2_insn (reg, gen_int_mode (immed, Pmode)));
596 else
597 {
598 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
599 emit_move_insn (tmp, gen_int_mode (immed, Pmode));
600 insn = emit_insn (gen_add2_insn (reg, tmp));
601 }
602 return insn;
603 }
604
605 static rtx_insn *
606 nios2_adjust_stack (int sp_adjust, bool epilogue_p)
607 {
608 enum reg_note note_kind = REG_NOTE_MAX;
609 rtx_insn *insn = NULL;
610 if (sp_adjust)
611 {
612 if (SMALL_INT (sp_adjust))
613 insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
614 gen_int_mode (sp_adjust, Pmode)));
615 else
616 {
617 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
618 emit_move_insn (tmp, gen_int_mode (sp_adjust, Pmode));
619 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
620 /* Attach a note indicating what happened. */
621 if (!epilogue_p)
622 note_kind = REG_FRAME_RELATED_EXPR;
623 }
624 if (epilogue_p)
625 note_kind = REG_CFA_ADJUST_CFA;
626 if (note_kind != REG_NOTE_MAX)
627 {
628 rtx cfa_adj = gen_rtx_SET (stack_pointer_rtx,
629 plus_constant (Pmode, stack_pointer_rtx,
630 sp_adjust));
631 add_reg_note (insn, note_kind, cfa_adj);
632 }
633 RTX_FRAME_RELATED_P (insn) = 1;
634 }
635 return insn;
636 }
637
638 void
639 nios2_expand_prologue (void)
640 {
641 unsigned int regno;
642 int total_frame_size, save_offset;
643 int sp_offset; /* offset from base_reg to final stack value. */
644 int save_regs_base; /* offset from base_reg to register save area. */
645 rtx_insn *insn;
646
647 total_frame_size = nios2_compute_frame_layout ();
648
649 if (flag_stack_usage_info)
650 current_function_static_stack_size = total_frame_size;
651
652 /* When R2 CDX push.n/stwm is available, arrange for stack frame to be built
653 using them. */
654 if (TARGET_HAS_CDX
655 && (cfun->machine->save_reg_size != 0
656 || cfun->machine->uses_anonymous_args))
657 {
658 unsigned int regmask = cfun->machine->save_mask;
659 unsigned int callee_save_regs = regmask & 0xffff0000;
660 unsigned int caller_save_regs = regmask & 0x0000ffff;
661 int push_immed = 0;
662 int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
663 rtx stack_mem =
664 gen_frame_mem (SImode, plus_constant (Pmode, stack_pointer_rtx, -4));
665
666 /* Check that there is room for the entire stack frame before doing
667 any SP adjustments or pushes. */
668 if (crtl->limit_stack)
669 nios2_emit_stack_limit_check (total_frame_size);
670
671 if (pretend_args_size)
672 {
673 if (cfun->machine->uses_anonymous_args)
674 {
675 /* Emit a stwm to push copy of argument registers onto
676 the stack for va_arg processing. */
677 unsigned int r, mask = 0, n = pretend_args_size / 4;
678 for (r = LAST_ARG_REGNO - n + 1; r <= LAST_ARG_REGNO; r++)
679 mask |= (1 << r);
680 insn = emit_insn (nios2_ldst_parallel
681 (false, false, false, stack_mem,
682 -pretend_args_size, mask, false));
683 /* Tag first SP adjustment as frame-related. */
684 RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 0)) = 1;
685 RTX_FRAME_RELATED_P (insn) = 1;
686 }
687 else
688 nios2_adjust_stack (-pretend_args_size, false);
689 }
690 if (callee_save_regs)
691 {
692 /* Emit a push.n to save registers and optionally allocate
693 push_immed extra bytes on the stack. */
694 int sp_adjust;
695 if (caller_save_regs)
696 /* Can't allocate extra stack space yet. */
697 push_immed = 0;
698 else if (cfun->machine->save_regs_offset <= 60)
699 /* Stack adjustment fits entirely in the push.n. */
700 push_immed = cfun->machine->save_regs_offset;
701 else if (frame_pointer_needed
702 && cfun->machine->fp_save_offset == 0)
703 /* Deferring the entire stack adjustment until later
704 allows us to use a mov.n instead of a 32-bit addi
705 instruction to set the frame pointer. */
706 push_immed = 0;
707 else
708 /* Splitting the stack adjustment between the push.n
709 and an explicit adjustment makes it more likely that
710 we can use spdeci.n for the explicit part. */
711 push_immed = 60;
712 sp_adjust = -(cfun->machine->callee_save_reg_size + push_immed);
713 insn = emit_insn (nios2_ldst_parallel (false, false, false,
714 stack_mem, sp_adjust,
715 callee_save_regs, false));
716 nios2_create_cfa_notes (insn, false);
717 RTX_FRAME_RELATED_P (insn) = 1;
718 }
719
720 if (caller_save_regs)
721 {
722 /* Emit a stwm to save the EH data regs, r4-r7. */
723 int caller_save_size = (cfun->machine->save_reg_size
724 - cfun->machine->callee_save_reg_size);
725 gcc_assert ((caller_save_regs & ~0xf0) == 0);
726 insn = emit_insn (nios2_ldst_parallel
727 (false, false, false, stack_mem,
728 -caller_save_size, caller_save_regs, false));
729 nios2_create_cfa_notes (insn, false);
730 RTX_FRAME_RELATED_P (insn) = 1;
731 }
732
733 save_regs_base = push_immed;
734 sp_offset = -(cfun->machine->save_regs_offset - push_immed);
735 }
736   /* The non-CDX cases decrement the stack pointer to prepare for
737      individual register saves to the stack.  */
738 else if (!SMALL_INT (total_frame_size))
739 {
740       /* We need an intermediate point; this will point at the spill block.  */
741 nios2_adjust_stack (cfun->machine->save_regs_offset - total_frame_size,
742 false);
743 save_regs_base = 0;
744 sp_offset = -cfun->machine->save_regs_offset;
745 if (crtl->limit_stack)
746 nios2_emit_stack_limit_check (cfun->machine->save_regs_offset);
747 }
748 else if (total_frame_size)
749 {
750 nios2_adjust_stack (-total_frame_size, false);
751 save_regs_base = cfun->machine->save_regs_offset;
752 sp_offset = 0;
753 if (crtl->limit_stack)
754 nios2_emit_stack_limit_check (0);
755 }
756 else
757 save_regs_base = sp_offset = 0;
758
759 /* Save the registers individually in the non-CDX case. */
760 if (!TARGET_HAS_CDX)
761 {
762 save_offset = save_regs_base + cfun->machine->save_reg_size;
763
764 for (regno = LAST_GP_REG; regno > 0; regno--)
765 if (cfun->machine->save_mask & (1 << regno))
766 {
767 save_offset -= 4;
768 save_reg (regno, save_offset);
769 }
770 }
771
772 /* Set the hard frame pointer. */
773 if (frame_pointer_needed)
774 {
775 int fp_save_offset = save_regs_base + cfun->machine->fp_save_offset;
776 insn =
777 (fp_save_offset == 0
778 ? emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx)
779 : emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
780 stack_pointer_rtx,
781 gen_int_mode (fp_save_offset, Pmode))));
782 RTX_FRAME_RELATED_P (insn) = 1;
783 }
784
785 /* Allocate sp_offset more bytes in the stack frame. */
786 nios2_adjust_stack (sp_offset, false);
787
788 /* Load the PIC register if needed. */
789 if (crtl->uses_pic_offset_table)
790 nios2_load_pic_register ();
791
792 /* If we are profiling, make sure no instructions are scheduled before
793 the call to mcount. */
794 if (crtl->profile)
795 emit_insn (gen_blockage ());
796 }
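/* Illustrative sketch (not part of the original source): for a small
   non-CDX frame with no locals that saves only ra and fp, the
   expansion above emits roughly
	addi	sp, sp, -8
	stw	ra, 4(sp)
	stw	fp, 0(sp)
	mov	fp, sp
   with the exact offsets supplied by nios2_compute_frame_layout.  */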
797
798 void
799 nios2_expand_epilogue (bool sibcall_p)
800 {
801 rtx_insn *insn;
802 rtx cfa_adj;
803 int total_frame_size;
804 int sp_adjust, save_offset;
805 unsigned int regno;
806
807 if (!sibcall_p && nios2_can_use_return_insn ())
808 {
809 emit_jump_insn (gen_return ());
810 return;
811 }
812
813 emit_insn (gen_blockage ());
814
815 total_frame_size = nios2_compute_frame_layout ();
816 if (frame_pointer_needed)
817 {
818 /* Recover the stack pointer. */
819 insn =
820 (cfun->machine->fp_save_offset == 0
821 ? emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx)
822 : emit_insn (gen_add3_insn
823 (stack_pointer_rtx, hard_frame_pointer_rtx,
824 gen_int_mode (-cfun->machine->fp_save_offset, Pmode))));
825 cfa_adj = plus_constant (Pmode, stack_pointer_rtx,
826 (total_frame_size
827 - cfun->machine->save_regs_offset));
828 add_reg_note (insn, REG_CFA_DEF_CFA, cfa_adj);
829 RTX_FRAME_RELATED_P (insn) = 1;
830
831 save_offset = 0;
832 sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
833 }
834 else if (!SMALL_INT (total_frame_size))
835 {
836 nios2_adjust_stack (cfun->machine->save_regs_offset, true);
837 save_offset = 0;
838 sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
839 }
840 else
841 {
842 save_offset = cfun->machine->save_regs_offset;
843 sp_adjust = total_frame_size;
844 }
845
846 if (!TARGET_HAS_CDX)
847 {
848 /* Generate individual register restores. */
849 save_offset += cfun->machine->save_reg_size;
850
851 for (regno = LAST_GP_REG; regno > 0; regno--)
852 if (cfun->machine->save_mask & (1 << regno))
853 {
854 save_offset -= 4;
855 restore_reg (regno, save_offset);
856 }
857 nios2_adjust_stack (sp_adjust, true);
858 }
859 else if (cfun->machine->save_reg_size == 0)
860 {
861 /* Nothing to restore, just recover the stack position. */
862 nios2_adjust_stack (sp_adjust, true);
863 }
864 else
865 {
866 /* Emit CDX pop.n/ldwm to restore registers and optionally return. */
867 unsigned int regmask = cfun->machine->save_mask;
868 unsigned int callee_save_regs = regmask & 0xffff0000;
869 unsigned int caller_save_regs = regmask & 0x0000ffff;
870 int callee_save_size = cfun->machine->callee_save_reg_size;
871 int caller_save_size = cfun->machine->save_reg_size - callee_save_size;
872 int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
873 bool ret_p = (!pretend_args_size && !crtl->calls_eh_return
874 && !sibcall_p);
875
876 if (!ret_p || caller_save_size > 0)
877 sp_adjust = save_offset;
878 else
879 sp_adjust = (save_offset > 60 ? save_offset - 60 : 0);
880
881 save_offset -= sp_adjust;
882
883 nios2_adjust_stack (sp_adjust, true);
884
885 if (caller_save_regs)
886 {
887 /* Emit a ldwm to restore EH data regs. */
888 rtx stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
889 insn = emit_insn (nios2_ldst_parallel
890 (true, true, true, stack_mem,
891 caller_save_size, caller_save_regs, false));
892 RTX_FRAME_RELATED_P (insn) = 1;
893 nios2_create_cfa_notes (insn, true);
894 }
895
896 if (callee_save_regs)
897 {
898 int sp_adjust = save_offset + callee_save_size;
899 rtx stack_mem;
900 if (ret_p)
901 {
902 /* Emit a pop.n to restore regs and return. */
903 stack_mem =
904 gen_frame_mem (SImode,
905 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
906 gen_int_mode (sp_adjust - 4,
907 Pmode)));
908 insn =
909 emit_jump_insn (nios2_ldst_parallel (true, false, false,
910 stack_mem, sp_adjust,
911 callee_save_regs, ret_p));
912 RTX_FRAME_RELATED_P (insn) = 1;
913 /* No need to attach CFA notes since we cannot step over
914 a return. */
915 return;
916 }
917 else
918 {
919 /* If no return, we have to use the ldwm form. */
920 stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
921 insn =
922 emit_insn (nios2_ldst_parallel (true, true, true,
923 stack_mem, sp_adjust,
924 callee_save_regs, ret_p));
925 RTX_FRAME_RELATED_P (insn) = 1;
926 nios2_create_cfa_notes (insn, true);
927 }
928 }
929
930 if (pretend_args_size)
931 nios2_adjust_stack (pretend_args_size, true);
932 }
933
934 /* Add in the __builtin_eh_return stack adjustment. */
935 if (crtl->calls_eh_return)
936 emit_insn (gen_add2_insn (stack_pointer_rtx, EH_RETURN_STACKADJ_RTX));
937
938 if (!sibcall_p)
939 emit_jump_insn (gen_simple_return ());
940 }
941
942 bool
943 nios2_expand_return (void)
944 {
945 /* If CDX is available, generate a pop.n instruction to do both
946 the stack pop and return. */
947 if (TARGET_HAS_CDX)
948 {
949 int total_frame_size = nios2_compute_frame_layout ();
950 int sp_adjust = (cfun->machine->save_regs_offset
951 + cfun->machine->callee_save_reg_size);
952 gcc_assert (sp_adjust == total_frame_size);
953 if (sp_adjust != 0)
954 {
955 rtx mem =
956 gen_frame_mem (SImode,
957 plus_constant (Pmode, stack_pointer_rtx,
958 sp_adjust - 4, false));
959 rtx_insn *insn =
960 emit_jump_insn (nios2_ldst_parallel (true, false, false,
961 mem, sp_adjust,
962 cfun->machine->save_mask,
963 true));
964 RTX_FRAME_RELATED_P (insn) = 1;
965 /* No need to create CFA notes since we can't step over
966 a return. */
967 return true;
968 }
969 }
970 return false;
971 }
972
973 /* Implement RETURN_ADDR_RTX.  Note that we do not support moving
974    back to a previous frame.  */
975 rtx
976 nios2_get_return_address (int count)
977 {
978 if (count != 0)
979 return const0_rtx;
980
981 return get_hard_reg_initial_val (Pmode, RA_REGNO);
982 }
983
984 /* Emit code to change the current function's return address to
985 ADDRESS. SCRATCH is available as a scratch register, if needed.
986 ADDRESS and SCRATCH are both word-mode GPRs. */
987 void
988 nios2_set_return_address (rtx address, rtx scratch)
989 {
990 nios2_compute_frame_layout ();
991 if (cfun->machine->save_mask & (1 << RA_REGNO))
992 {
993 unsigned offset = cfun->machine->save_reg_size - 4;
994 rtx base;
995
996 if (frame_pointer_needed)
997 base = hard_frame_pointer_rtx;
998 else
999 {
1000 base = stack_pointer_rtx;
1001 offset += cfun->machine->save_regs_offset;
1002
1003 if (!SMALL_INT (offset))
1004 {
1005 emit_move_insn (scratch, gen_int_mode (offset, Pmode));
1006 emit_insn (gen_add2_insn (scratch, base));
1007 base = scratch;
1008 offset = 0;
1009 }
1010 }
1011 if (offset)
1012 base = plus_constant (Pmode, base, offset);
1013 emit_move_insn (gen_rtx_MEM (Pmode, base), address);
1014 }
1015 else
1016 emit_move_insn (gen_rtx_REG (Pmode, RA_REGNO), address);
1017 }
1018
1019 /* Implement FUNCTION_PROFILER macro. */
1020 void
1021 nios2_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
1022 {
1023 fprintf (file, "\tmov\tr8, ra\n");
1024 if (flag_pic == 1)
1025 {
1026 fprintf (file, "\tnextpc\tr2\n");
1027 fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1028 fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1029 fprintf (file, "\tadd\tr2, r2, r3\n");
1030 fprintf (file, "\tldw\tr2, %%call(_mcount)(r2)\n");
1031 fprintf (file, "\tcallr\tr2\n");
1032 }
1033 else if (flag_pic == 2)
1034 {
1035 fprintf (file, "\tnextpc\tr2\n");
1036 fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1037 fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1038 fprintf (file, "\tadd\tr2, r2, r3\n");
1039 fprintf (file, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
1040 fprintf (file, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
1041 fprintf (file, "\tadd\tr3, r2, r3\n");
1042 fprintf (file, "\tldw\tr2, 0(r3)\n");
1043 fprintf (file, "\tcallr\tr2\n");
1044 }
1045 else
1046 fprintf (file, "\tcall\t_mcount\n");
1047 fprintf (file, "\tmov\tra, r8\n");
1048 }
1049
1050 /* Dump stack layout. */
1051 static void
1052 nios2_dump_frame_layout (FILE *file)
1053 {
1054 fprintf (file, "\t%s Current Frame Info\n", ASM_COMMENT_START);
1055 fprintf (file, "\t%s total_size = %d\n", ASM_COMMENT_START,
1056 cfun->machine->total_size);
1057 fprintf (file, "\t%s var_size = %d\n", ASM_COMMENT_START,
1058 cfun->machine->var_size);
1059 fprintf (file, "\t%s args_size = %d\n", ASM_COMMENT_START,
1060 cfun->machine->args_size);
1061 fprintf (file, "\t%s save_reg_size = %d\n", ASM_COMMENT_START,
1062 cfun->machine->save_reg_size);
1063 fprintf (file, "\t%s initialized = %d\n", ASM_COMMENT_START,
1064 cfun->machine->initialized);
1065 fprintf (file, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START,
1066 cfun->machine->save_regs_offset);
1067 fprintf (file, "\t%s is_leaf = %d\n", ASM_COMMENT_START,
1068 crtl->is_leaf);
1069 fprintf (file, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START,
1070 frame_pointer_needed);
1071 fprintf (file, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START,
1072 crtl->args.pretend_args_size);
1073 }
1074
1075 /* Return true if REGNO should be saved in the prologue. */
1076 static bool
1077 prologue_saved_reg_p (unsigned regno)
1078 {
1079 gcc_assert (GP_REG_P (regno));
1080
1081 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
1082 return true;
1083
1084 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
1085 return true;
1086
1087 if (regno == PIC_OFFSET_TABLE_REGNUM && crtl->uses_pic_offset_table)
1088 return true;
1089
1090 if (regno == RA_REGNO && df_regs_ever_live_p (RA_REGNO))
1091 return true;
1092
1093 return false;
1094 }
1095
1096 /* Implement TARGET_CAN_ELIMINATE. */
1097 static bool
1098 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1099 {
1100 if (to == STACK_POINTER_REGNUM)
1101 return !frame_pointer_needed;
1102 return true;
1103 }
1104
1105 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
1106 int
1107 nios2_initial_elimination_offset (int from, int to)
1108 {
1109 int offset;
1110
1111 nios2_compute_frame_layout ();
1112
1113 /* Set OFFSET to the offset from the stack pointer. */
1114 switch (from)
1115 {
1116 case FRAME_POINTER_REGNUM:
1117 /* This is the high end of the local variable storage, not the
1118 hard frame pointer. */
1119 offset = cfun->machine->args_size + cfun->machine->var_size;
1120 break;
1121
1122 case ARG_POINTER_REGNUM:
1123 offset = cfun->machine->total_size;
1124 offset -= crtl->args.pretend_args_size;
1125 break;
1126
1127 default:
1128 gcc_unreachable ();
1129 }
1130
1131 /* If we are asked for the frame pointer offset, then adjust OFFSET
1132 by the offset from the frame pointer to the stack pointer. */
1133 if (to == HARD_FRAME_POINTER_REGNUM)
1134 offset -= (cfun->machine->save_regs_offset
1135 + cfun->machine->fp_save_offset);
1136
1137 return offset;
1138 }
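/* Worked example (not part of the original source): with
   args_size == 16, var_size == 8, save_reg_size == 8 (so
   save_regs_offset == 24 and total_size == 32), fp_save_offset == 0,
   and no pretend args: eliminating FRAME_POINTER_REGNUM to
   STACK_POINTER_REGNUM yields 16 + 8 == 24, and eliminating
   ARG_POINTER_REGNUM to HARD_FRAME_POINTER_REGNUM yields
   32 - (24 + 0) == 8.  */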
1139
1140 /* Return nonzero if this function is known to have a null epilogue.
1141 This allows the optimizer to omit jumps to jumps if no stack
1142 was created. */
1143 int
1144 nios2_can_use_return_insn (void)
1145 {
1146 int total_frame_size;
1147
1148 if (!reload_completed || crtl->profile)
1149 return 0;
1150
1151 total_frame_size = nios2_compute_frame_layout ();
1152
1153 /* If CDX is available, check if we can return using a
1154 single pop.n instruction. */
1155 if (TARGET_HAS_CDX
1156 && !frame_pointer_needed
1157 && cfun->machine->save_regs_offset <= 60
1158 && (cfun->machine->save_mask & 0x80000000) != 0
1159 && (cfun->machine->save_mask & 0xffff) == 0
1160 && crtl->args.pretend_args_size == 0)
1161 return true;
1162
1163 return total_frame_size == 0;
1164 }
1165
1166 \f
1167 /* Check and signal some warnings/errors on FPU insn options. */
1168 static void
1169 nios2_custom_check_insns (void)
1170 {
1171 unsigned int i, j;
1172 bool errors = false;
1173
1174 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1175 if (N2FPU_ENABLED_P (i) && N2FPU_DOUBLE_P (i))
1176 {
1177 for (j = 0; j < ARRAY_SIZE (nios2_fpu_insn); j++)
1178 if (N2FPU_DOUBLE_REQUIRED_P (j) && ! N2FPU_ENABLED_P (j))
1179 {
1180 error ("switch %<-mcustom-%s%> is required for double "
1181 "precision floating point", N2FPU_NAME (j));
1182 errors = true;
1183 }
1184 break;
1185 }
1186
1187 /* Warn if the user has certain exotic operations that won't get used
1188 without -funsafe-math-optimizations. See expand_builtin () in
1189 builtins.c. */
1190 if (!flag_unsafe_math_optimizations)
1191 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1192 if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
1193 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1194 "-funsafe-math-optimizations is specified", N2FPU_NAME (i));
1195
1196   /* Warn if the user is trying to use -mcustom-fmins et al., which won't
1197 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
1198 in builtins.c. */
1199 if (!flag_finite_math_only)
1200 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1201 if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
1202 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1203 "-ffinite-math-only is specified", N2FPU_NAME (i));
1204
1205 /* Warn if the user is trying to use a custom rounding instruction
1206 that won't get used without -fno-math-errno. See
1207 expand_builtin_int_roundingfn_2 () in builtins.c. */
1208 if (flag_errno_math)
1209 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1210 if (N2FPU_ENABLED_P (i) && N2FPU_NO_ERRNO_P (i))
1211 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1212 "-fno-math-errno is specified", N2FPU_NAME (i));
1213
1214 if (errors || custom_code_conflict)
1215 fatal_error (input_location,
1216 "conflicting use of -mcustom switches, target attributes, "
1217 "and/or __builtin_custom_ functions");
1218 }
1219
1220 static void
1221 nios2_set_fpu_custom_code (enum n2fpu_code code, int n, bool override_p)
1222 {
1223 if (override_p || N2FPU_N (code) == -1)
1224 N2FPU_N (code) = n;
1225 nios2_register_custom_code (n, CCS_FPU, (int) code);
1226 }
1227
1228 /* Type to represent a standard FPU config. */
1229 struct nios2_fpu_config
1230 {
1231 const char *name;
1232 bool set_sp_constants;
1233 int code[n2fpu_code_num];
1234 };
1235
1236 #define NIOS2_FPU_CONFIG_NUM 3
1237 static struct nios2_fpu_config custom_fpu_config[NIOS2_FPU_CONFIG_NUM];
1238
1239 static void
1240 nios2_init_fpu_configs (void)
1241 {
1242 struct nios2_fpu_config* cfg;
1243 int i = 0;
1244 #define NEXT_FPU_CONFIG \
1245 do { \
1246 cfg = &custom_fpu_config[i++]; \
1247 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
1248 } while (0)
1249
1250 NEXT_FPU_CONFIG;
1251 cfg->name = "60-1";
1252 cfg->set_sp_constants = true;
1253 cfg->code[n2fpu_fmuls] = 252;
1254 cfg->code[n2fpu_fadds] = 253;
1255 cfg->code[n2fpu_fsubs] = 254;
1256
1257 NEXT_FPU_CONFIG;
1258 cfg->name = "60-2";
1259 cfg->set_sp_constants = true;
1260 cfg->code[n2fpu_fmuls] = 252;
1261 cfg->code[n2fpu_fadds] = 253;
1262 cfg->code[n2fpu_fsubs] = 254;
1263 cfg->code[n2fpu_fdivs] = 255;
1264
1265 NEXT_FPU_CONFIG;
1266 cfg->name = "72-3";
1267 cfg->set_sp_constants = true;
1268 cfg->code[n2fpu_floatus] = 243;
1269 cfg->code[n2fpu_fixsi] = 244;
1270 cfg->code[n2fpu_floatis] = 245;
1271 cfg->code[n2fpu_fcmpgts] = 246;
1272 cfg->code[n2fpu_fcmples] = 249;
1273 cfg->code[n2fpu_fcmpeqs] = 250;
1274 cfg->code[n2fpu_fcmpnes] = 251;
1275 cfg->code[n2fpu_fmuls] = 252;
1276 cfg->code[n2fpu_fadds] = 253;
1277 cfg->code[n2fpu_fsubs] = 254;
1278 cfg->code[n2fpu_fdivs] = 255;
1279
1280 #undef NEXT_FPU_CONFIG
1281 gcc_assert (i == NIOS2_FPU_CONFIG_NUM);
1282 }
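/* For illustration (not part of the original source): given the table
   above, -mcustom-fpu-cfg=60-1 behaves like specifying
     -mcustom-fmuls=252 -mcustom-fadds=253 -mcustom-fsubs=254
     -fsingle-precision-constant
   except that explicit individual -mcustom-<insn>= switches still take
   precedence (see nios2_option_override below).  */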
1283
1284 static struct nios2_fpu_config *
1285 nios2_match_custom_fpu_cfg (const char *cfgname, const char *endp)
1286 {
1287 int i;
1288 for (i = 0; i < NIOS2_FPU_CONFIG_NUM; i++)
1289 {
1290 bool match = !(endp != NULL
1291 ? strncmp (custom_fpu_config[i].name, cfgname,
1292 endp - cfgname)
1293 : strcmp (custom_fpu_config[i].name, cfgname));
1294 if (match)
1295 return &custom_fpu_config[i];
1296 }
1297 return NULL;
1298 }
1299
1300 /* Look up the FPU config named CFGNAME; ENDP, if non-NULL, marks its end.
1301    OVERRIDE is true if loaded config codes should overwrite current state.  */
1302 static void
1303 nios2_handle_custom_fpu_cfg (const char *cfgname, const char *endp,
1304 bool override)
1305 {
1306 struct nios2_fpu_config *cfg = nios2_match_custom_fpu_cfg (cfgname, endp);
1307 if (cfg)
1308 {
1309 unsigned int i;
1310 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1311 if (cfg->code[i] >= 0)
1312 nios2_set_fpu_custom_code ((enum n2fpu_code) i, cfg->code[i],
1313 override);
1314 if (cfg->set_sp_constants)
1315 flag_single_precision_constant = 1;
1316 }
1317 else
1318 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
1319 "value %<%s%>", cfgname);
1320
1321 /* Guard against errors in the standard configurations. */
1322 nios2_custom_check_insns ();
1323 }
1324
1325 /* Check individual FPU insn options, and register custom code. */
1326 static void
1327 nios2_handle_custom_fpu_insn_option (int fpu_insn_index)
1328 {
1329 int param = N2FPU_N (fpu_insn_index);
1330
1331 if (0 <= param && param <= 255)
1332 nios2_register_custom_code (param, CCS_FPU, fpu_insn_index);
1333
1334 /* Valid values are 0-255, but also allow -1 so that the
1335 -mno-custom-<opt> switches work. */
1336 else if (param != -1)
1337 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1338 N2FPU_NAME (fpu_insn_index), param);
1339 }
1340
1341 /* Allocate a chunk of memory for per-function machine-dependent data. */
1342 static struct machine_function *
1343 nios2_init_machine_status (void)
1344 {
1345 return ggc_cleared_alloc<machine_function> ();
1346 }
1347
1348 /* Implement TARGET_OPTION_OVERRIDE. */
1349 static void
1350 nios2_option_override (void)
1351 {
1352 unsigned int i;
1353
1354 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1355 SUBTARGET_OVERRIDE_OPTIONS;
1356 #endif
1357
1358 /* Check for unsupported options. */
1359 if (flag_pic && !TARGET_LINUX_ABI)
1360 sorry ("position-independent code requires the Linux ABI");
1361 if (flag_pic && stack_limit_rtx
1362 && GET_CODE (stack_limit_rtx) == SYMBOL_REF)
1363 sorry ("PIC support for -fstack-limit-symbol");
1364
1365 /* Function to allocate machine-dependent function status. */
1366 init_machine_status = &nios2_init_machine_status;
1367
1368 nios2_section_threshold
1369 = (global_options_set.x_g_switch_value
1370 ? g_switch_value : NIOS2_DEFAULT_GVALUE);
1371
1372 if (nios2_gpopt_option == gpopt_unspecified)
1373 {
1374 /* Default to -mgpopt unless -fpic or -fPIC. */
1375 if (flag_pic)
1376 nios2_gpopt_option = gpopt_none;
1377 else
1378 nios2_gpopt_option = gpopt_local;
1379 }
1380
1381 /* GP-relative and r0-relative addressing don't make sense for PIC. */
1382 if (flag_pic)
1383 {
1384 if (nios2_gpopt_option != gpopt_none)
1385 error ("-mgpopt not supported with PIC.");
1386 if (nios2_gprel_sec)
1387 error ("-mgprel-sec= not supported with PIC.");
1388 if (nios2_r0rel_sec)
1389 error ("-mr0rel-sec= not supported with PIC.");
1390 }
1391
1392   /* Process -mgprel-sec= and -mr0rel-sec=.  */
1393 if (nios2_gprel_sec)
1394 {
1395 if (regcomp (&nios2_gprel_sec_regex, nios2_gprel_sec,
1396 REG_EXTENDED | REG_NOSUB))
1397 error ("-mgprel-sec= argument is not a valid regular expression.");
1398 }
1399 if (nios2_r0rel_sec)
1400 {
1401 if (regcomp (&nios2_r0rel_sec_regex, nios2_r0rel_sec,
1402 REG_EXTENDED | REG_NOSUB))
1403 error ("-mr0rel-sec= argument is not a valid regular expression.");
1404 }
1405
1406 /* If we don't have mul, we don't have mulx either! */
1407 if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
1408 target_flags &= ~MASK_HAS_MULX;
1409
1410 /* Optional BMX and CDX instructions only make sense for R2. */
1411 if (!TARGET_ARCH_R2)
1412 {
1413 if (TARGET_HAS_BMX)
1414 error ("BMX instructions are only supported with R2 architecture");
1415 if (TARGET_HAS_CDX)
1416 error ("CDX instructions are only supported with R2 architecture");
1417 }
1418
1419 /* R2 is little-endian only. */
1420 if (TARGET_ARCH_R2 && TARGET_BIG_ENDIAN)
1421 error ("R2 architecture is little-endian only");
1422
1423 /* Initialize default FPU configurations. */
1424 nios2_init_fpu_configs ();
1425
1426 /* Set up default handling for floating point custom instructions.
1427
1428 Putting things in this order means that the -mcustom-fpu-cfg=
1429 switch will always be overridden by individual -mcustom-fadds=
1430 switches, regardless of the order in which they were specified
1431 on the command line.
1432
1433 This behavior of prioritization of individual -mcustom-<insn>=
1434 options before the -mcustom-fpu-cfg= switch is maintained for
1435 compatibility. */
1436 if (nios2_custom_fpu_cfg_string && *nios2_custom_fpu_cfg_string)
1437 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string, NULL, false);
1438
1439 /* Handle options for individual FPU insns. */
1440 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1441 nios2_handle_custom_fpu_insn_option (i);
1442
1443 nios2_custom_check_insns ();
1444
1445   /* Save the initial options in case the user specifies function-specific
1446      options later.  */
1447 target_option_default_node = target_option_current_node
1448 = build_target_option_node (&global_options);
1449 }
1450
1451 \f
1452 /* Return true if CST is a constant within range of movi/movui/movhi. */
1453 static bool
1454 nios2_simple_const_p (const_rtx cst)
1455 {
1456 if (!CONST_INT_P (cst))
1457 return false;
1458 HOST_WIDE_INT val = INTVAL (cst);
1459 return SMALL_INT (val) || SMALL_INT_UNSIGNED (val) || UPPER16_INT (val);
1460 }
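/* For illustration (not part of the original source), assuming the
   usual definitions of these macros in nios2.h: SMALL_INT accepts
   signed 16-bit values (movi), SMALL_INT_UNSIGNED accepts unsigned
   16-bit values (movui), and UPPER16_INT accepts values with the low
   16 bits clear (movhi).  So 0x7fff, 0xffff, and 0x12340000 are all
   "simple", while 0x12345678 needs a two-instruction high + lo_sum
   sequence.  */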
1461
1462 /* Compute a (partial) cost for rtx X. Return true if the complete
1463 cost has been computed, and false if subexpressions should be
1464 scanned. In either case, *TOTAL contains the cost result. */
1465 static bool
1466 nios2_rtx_costs (rtx x, machine_mode mode,
1467 int outer_code,
1468 int opno,
1469 int *total, bool speed)
1470 {
1471 int code = GET_CODE (x);
1472
1473 switch (code)
1474 {
1475 case CONST_INT:
1476 if (INTVAL (x) == 0 || nios2_simple_const_p (x))
1477 {
1478 *total = COSTS_N_INSNS (0);
1479 return true;
1480 }
1481 else
1482 {
1483 /* High + lo_sum. */
1484 *total = COSTS_N_INSNS (1);
1485 return true;
1486 }
1487
1488 case LABEL_REF:
1489 case SYMBOL_REF:
1490 case CONST:
1491 case CONST_DOUBLE:
1492 if (gprel_constant_p (x) || r0rel_constant_p (x))
1493 {
1494 *total = COSTS_N_INSNS (1);
1495 return true;
1496 }
1497 else
1498 {
1499 /* High + lo_sum. */
1500 *total = COSTS_N_INSNS (1);
1501 return true;
1502 }
1503
1504 case HIGH:
1505 {
1506 /* This is essentially a constant. */
1507 *total = COSTS_N_INSNS (0);
1508 return true;
1509 }
1510
1511 case LO_SUM:
1512 {
1513 *total = COSTS_N_INSNS (0);
1514 return true;
1515 }
1516
1517 case AND:
1518 {
1519 /* Recognize 'nor' insn pattern. */
1520 if (GET_CODE (XEXP (x, 0)) == NOT
1521 && GET_CODE (XEXP (x, 1)) == NOT)
1522 {
1523 *total = COSTS_N_INSNS (1);
1524 return true;
1525 }
1526 return false;
1527 }
1528
1529 /* For insns that have an execution latency (3 cycles), don't
1530 penalize by the full amount since we can often schedule
1531 to avoid it. */
1532 case MULT:
1533 {
1534 if (!TARGET_HAS_MUL)
1535 *total = COSTS_N_INSNS (5); /* Guess? */
1536 else if (speed)
1537 *total = COSTS_N_INSNS (2); /* Latency adjustment. */
1538 else
1539 *total = COSTS_N_INSNS (1);
1540 return false;
1541 }
1542
1543 case DIV:
1544 {
1545 if (!TARGET_HAS_DIV)
1546 *total = COSTS_N_INSNS (5); /* Guess? */
1547 else if (speed)
1548 *total = COSTS_N_INSNS (2); /* Latency adjustment. */
1549 else
1550 *total = COSTS_N_INSNS (1);
1551 return false;
1552 }
1553
1554 case ASHIFT:
1555 case ASHIFTRT:
1556 case LSHIFTRT:
1557 case ROTATE:
1558 {
1559 if (!speed)
1560 *total = COSTS_N_INSNS (1);
1561 else
1562 *total = COSTS_N_INSNS (2); /* Latency adjustment. */
1563 return false;
1564 }
1565
1566 case ZERO_EXTRACT:
1567 if (TARGET_HAS_BMX)
1568 {
1569 *total = COSTS_N_INSNS (1);
1570 return true;
1571 }
1572 return false;
1573
1574 case SIGN_EXTEND:
1575 {
1576 if (MEM_P (XEXP (x, 0)))
1577 *total = COSTS_N_INSNS (1);
1578 else
1579 *total = COSTS_N_INSNS (3);
1580 return false;
1581 }
1582
1583 case MEM:
1584 {
1585 rtx addr = XEXP (x, 0);
1586
1587 /* Account for cost of different addressing modes. */
1588 *total = nios2_address_cost (addr, mode, ADDR_SPACE_GENERIC, speed);
1589
1590 if (outer_code == SET && opno == 0)
1591 /* Stores execute in 1 cycle accounted for by
1592 the outer SET. */
1593 ;
1594 else if (outer_code == SET || outer_code == SIGN_EXTEND
1595 || outer_code == ZERO_EXTEND)
1596 /* Latency adjustment. */
1597 {
1598 if (speed)
1599 *total += COSTS_N_INSNS (1);
1600 }
1601 else
1602 /* This is going to have to be split into a load. */
1603 *total += COSTS_N_INSNS (speed ? 2 : 1);
1604 return true;
1605 }
1606
1607 default:
1608 return false;
1609 }
1610 }
1611
1612 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1613 static reg_class_t
1614 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t regclass)
1615 {
1616 return regclass == NO_REGS ? GENERAL_REGS : regclass;
1617 }
1618
1619 /* Emit a call to __tls_get_addr.  TI is the argument to this function.
1620    The call sequence is emitted directly; the function returns an RTX
1621    for the return-value location.  */
1622 static GTY(()) rtx nios2_tls_symbol;
1623
1624 static rtx
1625 nios2_call_tls_get_addr (rtx ti)
1626 {
1627 rtx arg = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
1628 rtx ret = gen_rtx_REG (Pmode, FIRST_RETVAL_REGNO);
1629 rtx fn;
1630 rtx_insn *insn;
1631
1632 if (!nios2_tls_symbol)
1633 nios2_tls_symbol = init_one_libfunc ("__tls_get_addr");
1634
1635 emit_move_insn (arg, ti);
1636 fn = gen_rtx_MEM (QImode, nios2_tls_symbol);
1637 insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
1638 RTL_CONST_CALL_P (insn) = 1;
1639 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
1640 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);
1641
1642 return ret;
1643 }
1644
1645 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1646 static bool
1647 nios2_large_offset_p (int unspec)
1648 {
1649 gcc_assert (nios2_unspec_reloc_name (unspec) != NULL);
1650
1651 if (flag_pic == 2
1652 /* FIXME: TLS GOT offset relocations will eventually also get this
1653 treatment, after binutils support for those are also completed. */
1654 && (unspec == UNSPEC_PIC_SYM || unspec == UNSPEC_PIC_CALL_SYM))
1655 return true;
1656
1657 /* 'gotoff' offsets are always hiadj/lo. */
1658 if (unspec == UNSPEC_PIC_GOTOFF_SYM)
1659 return true;
1660
1661 return false;
1662 }
1663
1664 /* Return true for conforming unspec relocations. Also used in
1665 constraints.md and predicates.md. */
1666 bool
1667 nios2_unspec_reloc_p (rtx op)
1668 {
1669 return (GET_CODE (op) == CONST
1670 && GET_CODE (XEXP (op, 0)) == UNSPEC
1671 && ! nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1672 }
1673
1674 static bool
1675 nios2_large_unspec_reloc_p (rtx op)
1676 {
1677 return (GET_CODE (op) == CONST
1678 && GET_CODE (XEXP (op, 0)) == UNSPEC
1679 && nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1680 }
1681
1682 /* Helper to generate unspec constant. */
1683 static rtx
1684 nios2_unspec_offset (rtx loc, int unspec)
1685 {
1686 return gen_rtx_CONST (Pmode, gen_rtx_UNSPEC (Pmode, gen_rtvec (1, loc),
1687 unspec));
1688 }
1689
1690 /* Generate GOT pointer based address with large offset. */
1691 static rtx
1692 nios2_large_got_address (rtx offset, rtx tmp)
1693 {
1694 if (!tmp)
1695 tmp = gen_reg_rtx (Pmode);
1696 emit_move_insn (tmp, offset);
1697 return gen_rtx_PLUS (Pmode, tmp, pic_offset_table_rtx);
1698 }
1699
1700 /* Generate a GOT pointer based address. */
1701 static rtx
1702 nios2_got_address (rtx loc, int unspec)
1703 {
1704 rtx offset = nios2_unspec_offset (loc, unspec);
1705 crtl->uses_pic_offset_table = 1;
1706
1707 if (nios2_large_offset_p (unspec))
1708 return force_reg (Pmode, nios2_large_got_address (offset, NULL_RTX));
1709
1710 return gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
1711 }
1712
1713 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1714 return value will be a valid address and move_operand (either a REG
1715 or a LO_SUM). */
1716 static rtx
1717 nios2_legitimize_tls_address (rtx loc)
1718 {
1719 rtx tmp, mem, tp;
1720 enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);
1721
1722 switch (model)
1723 {
1724 case TLS_MODEL_GLOBAL_DYNAMIC:
1725 tmp = gen_reg_rtx (Pmode);
1726 emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_GD));
1727 return nios2_call_tls_get_addr (tmp);
1728
1729 case TLS_MODEL_LOCAL_DYNAMIC:
1730 tmp = gen_reg_rtx (Pmode);
1731 emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_LDM));
1732 return gen_rtx_PLUS (Pmode, nios2_call_tls_get_addr (tmp),
1733 nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LDO));
1734
1735 case TLS_MODEL_INITIAL_EXEC:
1736 tmp = gen_reg_rtx (Pmode);
1737 mem = gen_const_mem (Pmode, nios2_got_address (loc, UNSPEC_LOAD_TLS_IE));
1738 emit_move_insn (tmp, mem);
1739 tp = gen_rtx_REG (Pmode, TP_REGNO);
1740 return gen_rtx_PLUS (Pmode, tp, tmp);
1741
1742 case TLS_MODEL_LOCAL_EXEC:
1743 tp = gen_rtx_REG (Pmode, TP_REGNO);
1744 return gen_rtx_PLUS (Pmode, tp,
1745 nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LE));
1746 default:
1747 gcc_unreachable ();
1748 }
1749 }
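/* Illustrative example (not part of the original source): for
   "__thread int x;", TLS_MODEL_LOCAL_EXEC computes &x as the thread
   pointer plus a link-time constant offset, while
   TLS_MODEL_GLOBAL_DYNAMIC materializes the GOT-based argument and
   calls __tls_get_addr, per the cases above.  */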
1750
1751 /* Divide Support
1752
1753 If -O3 is used, we want to output a table lookup for
1754 divides between small numbers (both num and den >= 0
1755 and < 0x10). The overhead of this method in the worst
1756 case is 40 bytes in the text section (10 insns) and
1757 256 bytes in the data section. Additional divides do
1758 not incur additional penalties in the data section.
1759
1760 Code speed is improved for small divides by about 5x
1761    when using this method in the worst case (~9 cycles
1762 vs ~45). And in the worst case divides not within the
1763 table are penalized by about 10% (~5 cycles vs ~45).
1764 However in the typical case the penalty is not as bad
1765 because doing the long divide in only 45 cycles is
1766 quite optimistic.
1767
1768 ??? would be nice to have some benchmarks other
1769 than Dhrystone to back this up.
1770
1771 This bit of expansion is to create this instruction
1772 sequence as rtl.
1773 or $8, $4, $5
1774 slli $9, $4, 4
1775 cmpgeui $3, $8, 16
1776 beq $3, $0, .L3
1777 or $10, $9, $5
1778 add $12, $10, divide_table
1779 ldbu $2, 0($12)
1780 br .L1
1781 .L3:
1782 call slow_div
1783 .L1:
1784 # continue here with result in $2
1785
1786 ??? Ideally I would like the libcall block to contain all
1787 of this code, but I don't know how to do that. The
1788 consequence is that if the divide can be eliminated, it
1789 may not completely disappear.
1790
1791 ??? The __divsi3_table label should ideally be moved out
1792 of this block and into a global. If it is placed into the
1793 sdata section we can save even more cycles by doing things
1794 gp relative. */
1795 void
1796 nios2_emit_expensive_div (rtx *operands, machine_mode mode)
1797 {
1798 rtx or_result, shift_left_result;
1799 rtx lookup_value;
1800 rtx_code_label *lab1, *lab3;
1801 rtx_insn *insns;
1802 rtx libfunc;
1803 rtx final_result;
1804 rtx_insn *tmp;
1805 rtx table;
1806
1807 /* It may look a little generic, but only SImode is supported for now. */
1808 gcc_assert (mode == SImode);
1809 libfunc = optab_libfunc (sdiv_optab, SImode);
1810
1811 lab1 = gen_label_rtx ();
1812 lab3 = gen_label_rtx ();
1813
1814 or_result = expand_simple_binop (SImode, IOR,
1815 operands[1], operands[2],
1816 0, 0, OPTAB_LIB_WIDEN);
1817
1818 emit_cmp_and_jump_insns (or_result, GEN_INT (15), GTU, 0,
1819 GET_MODE (or_result), 0, lab3);
1820 JUMP_LABEL (get_last_insn ()) = lab3;
1821
1822 shift_left_result = expand_simple_binop (SImode, ASHIFT,
1823 operands[1], GEN_INT (4),
1824 0, 0, OPTAB_LIB_WIDEN);
1825
1826 lookup_value = expand_simple_binop (SImode, IOR,
1827 shift_left_result, operands[2],
1828 0, 0, OPTAB_LIB_WIDEN);
1829 table = gen_rtx_PLUS (SImode, lookup_value,
1830 gen_rtx_SYMBOL_REF (SImode, "__divsi3_table"));
1831 convert_move (operands[0], gen_rtx_MEM (QImode, table), 1);
1832
1833 tmp = emit_jump_insn (gen_jump (lab1));
1834 JUMP_LABEL (tmp) = lab1;
1835 emit_barrier ();
1836
1837 emit_label (lab3);
1838 LABEL_NUSES (lab3) = 1;
1839
1840 start_sequence ();
1841 final_result = emit_library_call_value (libfunc, NULL_RTX,
1842 LCT_CONST, SImode,
1843 operands[1], SImode,
1844 operands[2], SImode);
1845
1846 insns = get_insns ();
1847 end_sequence ();
1848 emit_libcall_block (insns, operands[0], final_result,
1849 gen_rtx_DIV (SImode, operands[1], operands[2]));
1850
1851 emit_label (lab1);
1852 LABEL_NUSES (lab1) = 1;
1853 }
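/* For illustration only: the __divsi3_table referenced above could be
   laid out in the runtime library roughly as (hypothetical C, not part
   of this file):

     const unsigned char __divsi3_table[256] =
       { ... };   // entry [(num << 4) | den] holds num / den for
                  // 0 <= num, den < 16

   The den == 0 column is meaningless since division by zero is
   undefined.  This layout matches the QImode load from
   (lookup_value + __divsi3_table) emitted by the expander above.  */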
1854
1855 \f
1856 /* Branches and compares. */
1857
1858 /* Return, in *ALT_CODE and *ALT_OP, an alternate but equivalent constant
1859 comparison; e.g., x >= 1 becomes x > 0, and x < 5 becomes x <= 4. */
1860 static void
1861 nios2_alternate_compare_const (enum rtx_code code, rtx op,
1862 enum rtx_code *alt_code, rtx *alt_op,
1863 machine_mode mode)
1864 {
1865 gcc_assert (CONST_INT_P (op));
1866
1867 HOST_WIDE_INT opval = INTVAL (op);
1868 enum rtx_code scode = signed_condition (code);
1869 bool dec_p = (scode == LT || scode == GE);
1870
1871 if (code == EQ || code == NE)
1872 {
1873 *alt_code = code;
1874 *alt_op = op;
1875 return;
1876 }
1877
1878 *alt_op = (dec_p
1879 ? gen_int_mode (opval - 1, mode)
1880 : gen_int_mode (opval + 1, mode));
1881
1882 /* The required conversion between [>,>=] and [<,<=] is captured
1883 by a reverse + swap of condition codes. */
1884 *alt_code = reverse_condition (swap_condition (code));
1885
1886 {
1887 /* Test if the incremented/decremented value crosses the over/underflow
1888 boundary. Supposedly, such boundary cases should already be transformed
1889 into always-true/false or EQ conditions, so use an assertion here. */
1890 unsigned HOST_WIDE_INT alt_opval = INTVAL (*alt_op);
1891 if (code == scode)
1892 alt_opval ^= (1 << (GET_MODE_BITSIZE (mode) - 1));
1893 alt_opval &= GET_MODE_MASK (mode);
1894 gcc_assert (dec_p ? alt_opval != GET_MODE_MASK (mode) : alt_opval != 0);
1895 }
1896 }
1897
1898 /* Return true if the constant comparison is supported by nios2. */
1899 static bool
1900 nios2_valid_compare_const_p (enum rtx_code code, rtx op)
1901 {
1902 gcc_assert (CONST_INT_P (op));
1903 switch (code)
1904 {
1905 case EQ: case NE: case GE: case LT:
1906 return SMALL_INT (INTVAL (op));
1907 case GEU: case LTU:
1908 return SMALL_INT_UNSIGNED (INTVAL (op));
1909 default:
1910 return false;
1911 }
1912 }
1913
1914 /* Check whether the FPU comparison in *CMP, *OP1, and *OP2 can be supported
1915 in the current configuration. Perform modifications if MODIFY_P is true.
1916 Return true if the FPU compare can be done. */
1917
1918 bool
1919 nios2_validate_fpu_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
1920 bool modify_p)
1921 {
1922 bool rev_p = false;
1923 enum rtx_code code = GET_CODE (*cmp);
1924
1925 if (!nios2_fpu_compare_enabled (code, mode))
1926 {
1927 code = swap_condition (code);
1928 if (nios2_fpu_compare_enabled (code, mode))
1929 rev_p = true;
1930 else
1931 return false;
1932 }
1933
1934 if (modify_p)
1935 {
1936 if (rev_p)
1937 {
1938 rtx tmp = *op1;
1939 *op1 = *op2;
1940 *op2 = tmp;
1941 }
1942 *op1 = force_reg (mode, *op1);
1943 *op2 = force_reg (mode, *op2);
1944 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
1945 }
1946 return true;
1947 }
1948
1949 /* Check and modify the comparison in *CMP, *OP1, and *OP2 into a form
1950 that nios2 supports. Return true on success. */
1951 bool
1952 nios2_validate_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
1953 {
1954 enum rtx_code code = GET_CODE (*cmp);
1955 enum rtx_code alt_code;
1956 rtx alt_op2;
1957
1958 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1959 return nios2_validate_fpu_compare (mode, cmp, op1, op2, true);
1960
1961 if (CONST_INT_P (*op2) && *op2 != const0_rtx)
1962 {
1963 /* Create alternate constant compare. */
1964 nios2_alternate_compare_const (code, *op2, &alt_code, &alt_op2, mode);
1965
1966 /* If the alternate op2 is zero (0), we can use it directly, possibly
1967 swapping the compare code. */
1968 if (alt_op2 == const0_rtx)
1969 {
1970 code = alt_code;
1971 *op2 = alt_op2;
1972 goto check_rebuild_cmp;
1973 }
1974
1975 /* Check if either constant compare can be used. */
1976 if (nios2_valid_compare_const_p (code, *op2))
1977 return true;
1978 else if (nios2_valid_compare_const_p (alt_code, alt_op2))
1979 {
1980 code = alt_code;
1981 *op2 = alt_op2;
1982 goto rebuild_cmp;
1983 }
1984
1985 /* We have to force op2 into a register now. Try to pick one
1986 with a lower cost. */
1987 if (! nios2_simple_const_p (*op2)
1988 && nios2_simple_const_p (alt_op2))
1989 {
1990 code = alt_code;
1991 *op2 = alt_op2;
1992 }
1993 *op2 = force_reg (mode, *op2);
1994 }
1995 else if (!reg_or_0_operand (*op2, mode))
1996 *op2 = force_reg (mode, *op2);
1997
1998 check_rebuild_cmp:
1999 if (code == GT || code == GTU || code == LE || code == LEU)
2000 {
2001 rtx t = *op1; *op1 = *op2; *op2 = t;
2002 code = swap_condition (code);
2003 }
2004 rebuild_cmp:
2005 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
2006 return true;
2007 }
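/* An illustrative example of the rewriting above (register numbers
   arbitrary): "x > 15" has no direct nios2 instruction, but the
   alternate-constant transformation turns it into the equivalent
   "x >= 16", which SMALL_INT accepts and which emits as

     cmpgei r2, r4, 16    # r2 = (r4 >= 16)

   Constants that fit neither the original nor the alternate form are
   forced into a register and compared register-to-register instead.  */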
2008
2009
2010 /* Addressing modes and constants. */
2011
2012 /* Symbol references and other 32-bit constants are split into
2013 high/lo_sum pairs during the split1 pass. After that, they are not
2014 considered legitimate addresses.
2015 This function returns true if we are in a pre-split context where these
2016 constants are allowed. */
2017 static bool
2018 nios2_large_constant_allowed (void)
2019 {
2020 /* The reload_completed check is for the benefit of
2021 nios2_asm_output_mi_thunk and perhaps other places that try to
2022 emulate a post-reload pass. */
2023 return !(cfun->curr_properties & PROP_rtl_split_insns) && !reload_completed;
2024 }
2025
2026 /* Return true if X is a constant expression with a reference to an
2027 "ordinary" symbol; not GOT-relative, not GP-relative, not TLS. */
2028 static bool
2029 nios2_symbolic_constant_p (rtx x)
2030 {
2031 rtx base, offset;
2032
2033 if (flag_pic)
2034 return false;
2035 if (GET_CODE (x) == LABEL_REF)
2036 return true;
2037 else if (CONSTANT_P (x))
2038 {
2039 split_const (x, &base, &offset);
2040 return (SYMBOL_REF_P (base)
2041 && !SYMBOL_REF_TLS_MODEL (base)
2042 && !gprel_constant_p (base)
2043 && !r0rel_constant_p (base)
2044 && SMALL_INT (INTVAL (offset)));
2045 }
2046 return false;
2047 }
2048
2049 /* Return true if X is an expression of the form
2050 (PLUS reg large_constant). */
2051 static bool
2052 nios2_plus_large_constant_p (rtx x)
2053 {
2054 return (GET_CODE (x) == PLUS
2055 && REG_P (XEXP (x, 0))
2056 && nios2_large_constant_p (XEXP (x, 1)));
2057 }
2058
2059 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
2060 static bool
2061 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2062 {
2063 rtx base, offset;
2064 split_const (x, &base, &offset);
2065 return GET_CODE (base) != SYMBOL_REF || !SYMBOL_REF_TLS_MODEL (base);
2066 }
2067
2068 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
2069 static bool
2070 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2071 {
2072 return !nios2_legitimate_constant_p (mode, x);
2073 }
2074
2075 /* Return true if register REGNO is a valid base register.
2076 STRICT_P is true if REG_OK_STRICT is in effect. */
2077
2078 bool
2079 nios2_regno_ok_for_base_p (int regno, bool strict_p)
2080 {
2081 if (!HARD_REGISTER_NUM_P (regno))
2082 {
2083 if (!strict_p)
2084 return true;
2085
2086 if (!reg_renumber)
2087 return false;
2088
2089 regno = reg_renumber[regno];
2090 }
2091
2092 /* The fake registers will be eliminated to either the stack or
2093 hard frame pointer, both of which are usually valid base registers.
2094 Reload deals with the cases where the eliminated form isn't valid. */
2095 return (GP_REG_P (regno)
2096 || regno == FRAME_POINTER_REGNUM
2097 || regno == ARG_POINTER_REGNUM);
2098 }
2099
2100 /* Return true if OFFSET is permitted in a load/store address expression.
2101 Normally any 16-bit value is permitted, but on R2, if we may be emitting
2102 the IO forms of these instructions, we must restrict the offset to fit
2103 in a 12-bit field instead. */
2104
2105 static bool
2106 nios2_valid_addr_offset_p (rtx offset)
2107 {
2108 return (CONST_INT_P (offset)
2109 && ((TARGET_ARCH_R2 && (TARGET_BYPASS_CACHE
2110 || TARGET_BYPASS_CACHE_VOLATILE))
2111 ? SMALL_INT12 (INTVAL (offset))
2112 : SMALL_INT (INTVAL (offset))));
2113 }
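/* Concretely: SMALL_INT accepts the signed 16-bit range -32768..32767,
   while SMALL_INT12 accepts only -2048..2047, matching the smaller
   immediate field of the R2 io-variant encodings such as ldwio/stwio.  */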
2114
2115 /* Return true if the address expression formed by BASE + OFFSET is
2116 valid. */
2117 static bool
2118 nios2_valid_addr_expr_p (rtx base, rtx offset, bool strict_p)
2119 {
2120 if (!strict_p && GET_CODE (base) == SUBREG)
2121 base = SUBREG_REG (base);
2122 return (REG_P (base)
2123 && nios2_regno_ok_for_base_p (REGNO (base), strict_p)
2124 && (offset == NULL_RTX
2125 || nios2_valid_addr_offset_p (offset)
2126 || (nios2_large_constant_allowed ()
2127 && nios2_symbolic_constant_p (offset))
2128 || nios2_unspec_reloc_p (offset)));
2129 }
2130
2131 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2132 static bool
2133 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
2134 rtx operand, bool strict_p)
2135 {
2136 switch (GET_CODE (operand))
2137 {
2138 /* Direct. */
2139 case SYMBOL_REF:
2140 if (SYMBOL_REF_TLS_MODEL (operand))
2141 return false;
2142
2143 /* Else, fall through. */
2144 case CONST:
2145 if (gprel_constant_p (operand) || r0rel_constant_p (operand))
2146 return true;
2147
2148 /* Else, fall through. */
2149 case LABEL_REF:
2150 if (nios2_large_constant_allowed ()
2151 && nios2_symbolic_constant_p (operand))
2152 return true;
2153 return false;
2154
2155 case CONST_INT:
2156 if (r0rel_constant_p (operand))
2157 return true;
2158 return nios2_large_constant_allowed ();
2159
2160 case CONST_DOUBLE:
2161 return false;
2162
2163 /* Register indirect. */
2164 case REG:
2165 return nios2_regno_ok_for_base_p (REGNO (operand), strict_p);
2166
2167 /* Register indirect with displacement. */
2168 case PLUS:
2169 {
2170 rtx op0 = XEXP (operand, 0);
2171 rtx op1 = XEXP (operand, 1);
2172
2173 if (nios2_valid_addr_expr_p (op0, op1, strict_p)
2174 || nios2_valid_addr_expr_p (op1, op0, strict_p))
2175 return true;
2176 }
2177 break;
2178
2179 /* %lo(constant)(reg)
2180 This requires a 16-bit relocation and isn't valid with R2
2181 io-variant load/stores. */
2182 case LO_SUM:
2183 if (TARGET_ARCH_R2
2184 && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
2185 return false;
2186 else
2187 {
2188 rtx op0 = XEXP (operand, 0);
2189 rtx op1 = XEXP (operand, 1);
2190
2191 return (REG_P (op0)
2192 && nios2_regno_ok_for_base_p (REGNO (op0), strict_p)
2193 && nios2_large_constant_p (op1));
2194 }
2195
2196 default:
2197 break;
2198 }
2199 return false;
2200 }
2201
2202 /* Implement TARGET_ADDRESS_COST.
2203 Experimentation has shown that we get better code by penalizing
2204 the (plus reg symbolic_constant) and (plus reg (const ...)) forms
2205 but giving (plus reg symbol_ref) address modes the same cost as those
2206 that don't require splitting. Also, from a theoretical point of view:
2207 - This is in line with the recommendation in the GCC internals
2208 documentation to make address forms involving multiple
2209 registers more expensive than single-register forms.
2210 - OTOH it still encourages fwprop1 to propagate constants into
2211 address expressions more aggressively.
2212 - We should discourage splitting (symbol + offset) into hi/lo pairs
2213 to allow CSE'ing the symbol when it's used with more than one offset,
2214 but not so heavily as to avoid this addressing mode at all. */
2215 static int
2216 nios2_address_cost (rtx address,
2217 machine_mode mode ATTRIBUTE_UNUSED,
2218 addr_space_t as ATTRIBUTE_UNUSED,
2219 bool speed ATTRIBUTE_UNUSED)
2220 {
2221 if (nios2_plus_large_constant_p (address))
2222 return COSTS_N_INSNS (1);
2223 if (nios2_large_constant_p (address))
2224 {
2225 if (GET_CODE (address) == CONST)
2226 return COSTS_N_INSNS (1);
2227 else
2228 return COSTS_N_INSNS (0);
2229 }
2230 return COSTS_N_INSNS (0);
2231 }
2232
2233 /* Return true if X is a MEM whose address expression involves a large (32-bit)
2234 constant. */
2235 bool
2236 nios2_large_constant_memory_operand_p (rtx x)
2237 {
2238 rtx addr;
2239
2240 if (GET_CODE (x) != MEM)
2241 return false;
2242 addr = XEXP (x, 0);
2243
2244 return (nios2_large_constant_p (addr)
2245 || nios2_plus_large_constant_p (addr));
2246 }
2247
2248
2249 /* Return true if X is something that needs to be split into a
2250 high/lo_sum pair. */
2251 bool
2252 nios2_large_constant_p (rtx x)
2253 {
2254 return (nios2_symbolic_constant_p (x)
2255 || nios2_large_unspec_reloc_p (x)
2256 || (CONST_INT_P (x) && !SMALL_INT (INTVAL (x))));
2257 }
2258
2259 /* Given an RTX X that satisfies nios2_large_constant_p, split it into
2260 high and lo_sum parts using TEMP as a scratch register. Emit the high
2261 instruction and return the lo_sum expression.
2262 Also handle special cases involving constant integers. */
2263 rtx
2264 nios2_split_large_constant (rtx x, rtx temp)
2265 {
2266 if (CONST_INT_P (x))
2267 {
2268 HOST_WIDE_INT val = INTVAL (x);
2269 if (SMALL_INT (val))
2270 return x;
2271 else if (SMALL_INT_UNSIGNED (val) || UPPER16_INT (val))
2272 {
2273 emit_move_insn (temp, x);
2274 return temp;
2275 }
2276 else
2277 {
2278 HOST_WIDE_INT high = (val + 0x8000) & ~0xffff;
2279 HOST_WIDE_INT low = val - high;
2280 emit_move_insn (temp, gen_int_mode (high, Pmode));
2281 return gen_rtx_PLUS (Pmode, temp, gen_int_mode (low, Pmode));
2282 }
2283 }
2284
2285 emit_insn (gen_rtx_SET (temp, gen_rtx_HIGH (Pmode, copy_rtx (x))));
2286 return gen_rtx_LO_SUM (Pmode, temp, copy_rtx (x));
2287 }
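/* A hedged sketch of the result (register numbers arbitrary): for a
   symbolic constant, the emitted HIGH and the returned LO_SUM print as

     movhi  r2, %hiadj(sym)
     ...    %lo(sym)(r2)        # the LO_SUM, used e.g. as an address

   For an integer such as 0x12345678 that fits no single-instruction
   form, TEMP is loaded with the rounded high part 0x12340000 and the
   returned PLUS carries the remaining 0x5678.  */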
2288
2289 /* Split an RTX of the form
2290 (plus op0 op1)
2291 where op1 is a large constant into
2292 (set temp (high op1))
2293 (set temp (plus op0 temp))
2294 (lo_sum temp op1)
2295 returning the lo_sum expression as the value. */
2296 static rtx
2297 nios2_split_plus_large_constant (rtx op0, rtx op1)
2298 {
2299 rtx temp = gen_reg_rtx (Pmode);
2300 op0 = force_reg (Pmode, op0);
2301
2302 emit_insn (gen_rtx_SET (temp, gen_rtx_HIGH (Pmode, copy_rtx (op1))));
2303 emit_insn (gen_rtx_SET (temp, gen_rtx_PLUS (Pmode, op0, temp)));
2304 return gen_rtx_LO_SUM (Pmode, temp, copy_rtx (op1));
2305 }
2306
2307 /* Given a MEM OP with an address that includes a splittable symbol or
2308 other large constant, emit some instructions to do the split and
2309 return a new MEM. */
2310 rtx
2311 nios2_split_large_constant_memory_operand (rtx op)
2312 {
2313 rtx addr = XEXP (op, 0);
2314
2315 if (nios2_large_constant_p (addr))
2316 addr = nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
2317 else if (nios2_plus_large_constant_p (addr))
2318 addr = nios2_split_plus_large_constant (XEXP (addr, 0), XEXP (addr, 1));
2319 else
2320 gcc_unreachable ();
2321 return replace_equiv_address (op, addr, false);
2322 }
2323
2324 /* Return true if SECTION is a small section name. */
2325 static bool
2326 nios2_small_section_name_p (const char *section)
2327 {
2328 return (strcmp (section, ".sbss") == 0
2329 || strncmp (section, ".sbss.", 6) == 0
2330 || strcmp (section, ".sdata") == 0
2331 || strncmp (section, ".sdata.", 7) == 0
2332 || (nios2_gprel_sec
2333 && regexec (&nios2_gprel_sec_regex, section, 0, NULL, 0) == 0));
2334 }
2335
2336 /* Return true if SECTION is a r0-relative section name. */
2337 static bool
2338 nios2_r0rel_section_name_p (const char *section)
2339 {
2340 return (nios2_r0rel_sec
2341 && regexec (&nios2_r0rel_sec_regex, section, 0, NULL, 0) == 0);
2342 }
2343
2344 /* Return true if EXP should be placed in the small data section. */
2345 static bool
2346 nios2_in_small_data_p (const_tree exp)
2347 {
2348 /* We want to merge strings, so we never consider them small data. */
2349 if (TREE_CODE (exp) == STRING_CST)
2350 return false;
2351
2352 if (TREE_CODE (exp) == VAR_DECL)
2353 {
2354 if (DECL_SECTION_NAME (exp))
2355 {
2356 const char *section = DECL_SECTION_NAME (exp);
2357 if (nios2_small_section_name_p (section))
2358 return true;
2359 }
2360 else
2361 {
2362 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
2363
2364 /* If this is an incomplete type with size 0, then we can't put it
2365 in sdata because it might be too big when completed. */
2366 if (size > 0
2367 && (unsigned HOST_WIDE_INT) size <= nios2_section_threshold)
2368 return true;
2369 }
2370 }
2371
2372 return false;
2373 }
2374
2375 /* Return true if SYM is in the small data section. */
2376
2377 static bool
2378 nios2_symbol_ref_in_small_data_p (rtx sym)
2379 {
2380 tree decl;
2381
2382 gcc_assert (GET_CODE (sym) == SYMBOL_REF);
2383 decl = SYMBOL_REF_DECL (sym);
2384
2385 /* TLS variables are not accessed through the GP. */
2386 if (SYMBOL_REF_TLS_MODEL (sym) != 0)
2387 return false;
2388
2389 /* On Nios II R2, there is no GP-relative relocation that can be
2390 used with "io" instructions. So, if we are implicitly generating
2391 those instructions, we cannot emit GP-relative accesses. */
2392 if (TARGET_ARCH_R2
2393 && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
2394 return false;
2395
2396 /* If the user has explicitly placed the symbol in a small data section
2397 via an attribute, generate gp-relative addressing even if the symbol
2398 is external, weak, or larger than we'd automatically put in the
2399 small data section. OTOH, if the symbol is located in some
2400 non-small-data section, we can't use gp-relative accesses on it
2401 unless the user has requested gpopt_data or gpopt_all. */
2402
2403 switch (nios2_gpopt_option)
2404 {
2405 case gpopt_none:
2406 /* Don't generate a gp-relative addressing mode if that's been
2407 disabled. */
2408 return false;
2409
2410 case gpopt_local:
2411 /* Use GP-relative addressing for small data symbols that are
2412 not external or weak or uninitialized common, plus any symbols
2413 that have explicitly been placed in a small data section. */
2414 if (decl && DECL_SECTION_NAME (decl))
2415 return nios2_small_section_name_p (DECL_SECTION_NAME (decl));
2416 return (SYMBOL_REF_SMALL_P (sym)
2417 && !SYMBOL_REF_EXTERNAL_P (sym)
2418 && !(decl && DECL_WEAK (decl))
2419 && !(decl && DECL_COMMON (decl)
2420 && (DECL_INITIAL (decl) == NULL
2421 || (!in_lto_p
2422 && DECL_INITIAL (decl) == error_mark_node))));
2423
2424 case gpopt_global:
2425 /* Use GP-relative addressing for small data symbols, even if
2426 they are external or weak. Note that SYMBOL_REF_SMALL_P
2427 is also true of symbols that have explicitly been placed
2428 in a small data section. */
2429 return SYMBOL_REF_SMALL_P (sym);
2430
2431 case gpopt_data:
2432 /* Use GP-relative addressing for all data symbols regardless
2433 of the object size, but not for code symbols. This option
2434 is equivalent to the user asserting that the entire data
2435 section is accessible from the GP. */
2436 return !SYMBOL_REF_FUNCTION_P (sym);
2437
2438 case gpopt_all:
2439 /* Use GP-relative addressing for everything, including code.
2440 Effectively, the user has asserted that the entire program
2441 fits within the 64K range of the GP offset. */
2442 return true;
2443
2444 default:
2445 /* We shouldn't get here. */
2446 return false;
2447 }
2448 }
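/* As a hedged illustration: when this predicate accepts a symbol, a read
   of it can be emitted as a single GP-relative instruction,

     ldw r2, %gprel(small_var)(gp)

   whereas a general 32-bit address needs an extra instruction to
   materialize the %hiadj high part first.  */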
2449
2450 /* Likewise for r0-relative addressing. */
2451 static bool
2452 nios2_symbol_ref_in_r0rel_data_p (rtx sym)
2453 {
2454 tree decl;
2455
2456 gcc_assert (GET_CODE (sym) == SYMBOL_REF);
2457 decl = SYMBOL_REF_DECL (sym);
2458
2459 /* TLS variables are not accessed through r0. */
2460 if (SYMBOL_REF_TLS_MODEL (sym) != 0)
2461 return false;
2462
2463 /* On Nios II R2, there is no r0-relative relocation that can be
2464 used with "io" instructions. So, if we are implicitly generating
2465 those instructions, we cannot emit r0-relative accesses. */
2466 if (TARGET_ARCH_R2
2467 && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
2468 return false;
2469
2470 /* If the user has explicitly placed the symbol in an r0rel section
2471 via an attribute, generate r0-relative addressing. */
2472 if (decl && DECL_SECTION_NAME (decl))
2473 return nios2_r0rel_section_name_p (DECL_SECTION_NAME (decl));
2474 return false;
2475 }
2476
2477 /* Implement TARGET_SECTION_TYPE_FLAGS. */
2478
2479 static unsigned int
2480 nios2_section_type_flags (tree decl, const char *name, int reloc)
2481 {
2482 unsigned int flags;
2483
2484 flags = default_section_type_flags (decl, name, reloc);
2485
2486 if (nios2_small_section_name_p (name))
2487 flags |= SECTION_SMALL;
2488
2489 return flags;
2490 }
2491
2492 /* Return true if SYMBOL_REF X binds locally. */
2493
2494 static bool
2495 nios2_symbol_binds_local_p (const_rtx x)
2496 {
2497 return (SYMBOL_REF_DECL (x)
2498 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
2499 : SYMBOL_REF_LOCAL_P (x));
2500 }
2501
2502 /* Position independent code related. */
2503
2504 /* Emit code to load the PIC register. */
2505 static void
2506 nios2_load_pic_register (void)
2507 {
2508 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
2509
2510 emit_insn (gen_load_got_register (pic_offset_table_rtx, tmp));
2511 emit_insn (gen_add3_insn (pic_offset_table_rtx, pic_offset_table_rtx, tmp));
2512 }
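/* Roughly (an illustrative sketch, assuming the usual _gp_got linker
   symbol; the exact text comes from the load_got_register pattern in
   nios2.md), the two insns above expand to a PC-relative setup like:

     nextpc r22
   1:
     movhi  r2, %hiadj(_gp_got - 1b)
     addi   r2, r2, %lo(_gp_got - 1b)
     add    r22, r22, r2

   leaving the GOT address in the PIC register.  */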
2513
2514 /* Generate a PIC address as a MEM rtx. */
2515 static rtx
2516 nios2_load_pic_address (rtx sym, int unspec, rtx tmp)
2517 {
2518 if (flag_pic == 2
2519 && GET_CODE (sym) == SYMBOL_REF
2520 && nios2_symbol_binds_local_p (sym))
2521 /* Under -fPIC, generate a GOTOFF address for local symbols. */
2522 {
2523 rtx offset = nios2_unspec_offset (sym, UNSPEC_PIC_GOTOFF_SYM);
2524 crtl->uses_pic_offset_table = 1;
2525 return nios2_large_got_address (offset, tmp);
2526 }
2527
2528 return gen_const_mem (Pmode, nios2_got_address (sym, unspec));
2529 }
2530
2531 /* Nonzero if the constant value X is a legitimate general operand
2532 when generating PIC code. It is given that flag_pic is on and
2533 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2534 bool
2535 nios2_legitimate_pic_operand_p (rtx x)
2536 {
2537 if (nios2_large_unspec_reloc_p (x))
2538 return true;
2539
2540 return ! (GET_CODE (x) == SYMBOL_REF
2541 || GET_CODE (x) == LABEL_REF || GET_CODE (x) == CONST);
2542 }
2543
2544 /* Return TRUE if X is a thread-local symbol. */
2545 static bool
2546 nios2_tls_symbol_p (rtx x)
2547 {
2548 return (targetm.have_tls && GET_CODE (x) == SYMBOL_REF
2549 && SYMBOL_REF_TLS_MODEL (x) != 0);
2550 }
2551
2552 /* Legitimize addresses that are CONSTANT_P expressions. */
2553 static rtx
2554 nios2_legitimize_constant_address (rtx addr)
2555 {
2556 rtx base, offset;
2557 split_const (addr, &base, &offset);
2558
2559 if (nios2_tls_symbol_p (base))
2560 base = nios2_legitimize_tls_address (base);
2561 else if (flag_pic)
2562 base = nios2_load_pic_address (base, UNSPEC_PIC_SYM, NULL_RTX);
2563 else if (!nios2_large_constant_allowed ()
2564 && nios2_symbolic_constant_p (addr))
2565 return nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
2566 else if (CONST_INT_P (addr))
2567 {
2568 HOST_WIDE_INT val = INTVAL (addr);
2569 if (SMALL_INT (val))
2570 /* Use r0-relative addressing. */
2571 return addr;
2572 else if (!nios2_large_constant_allowed ())
2573 /* Split into high/lo pair. */
2574 return nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
2575 }
2576 else
2577 return addr;
2578
2579 if (offset != const0_rtx)
2580 {
2581 gcc_assert (can_create_pseudo_p ());
2582 return gen_rtx_PLUS (Pmode, force_reg (Pmode, base),
2583 (CONST_INT_P (offset)
2584 ? (SMALL_INT (INTVAL (offset))
2585 ? offset : force_reg (Pmode, offset))
2586 : offset));
2587 }
2588 return base;
2589 }
2590
2591 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
2592 static rtx
2593 nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2594 machine_mode mode ATTRIBUTE_UNUSED)
2595 {
2596 rtx op0, op1;
2597
2598 if (CONSTANT_P (x))
2599 return nios2_legitimize_constant_address (x);
2600
2601 /* Remaining cases all involve something + a constant. */
2602 if (GET_CODE (x) != PLUS)
2603 return x;
2604
2605 op0 = XEXP (x, 0);
2606 op1 = XEXP (x, 1);
2607
2608 /* Target-independent code turns (exp + constant) into plain
2609 register indirect. Although subsequent optimization passes will
2610 eventually sort that out, ivopts uses the unoptimized form for
2611 computing its cost model, so we get better results by generating
2612 the correct form from the start. */
2613 if (nios2_valid_addr_offset_p (op1))
2614 return gen_rtx_PLUS (Pmode, force_reg (Pmode, op0), copy_rtx (op1));
2615
2616 /* We may need to split symbolic constants now. */
2617 else if (nios2_symbolic_constant_p (op1))
2618 {
2619 if (nios2_large_constant_allowed ())
2620 return gen_rtx_PLUS (Pmode, force_reg (Pmode, op0), copy_rtx (op1));
2621 else
2622 return nios2_split_plus_large_constant (op0, op1);
2623 }
2624
2625 /* For the TLS LE (Local Exec) model, the compiler may try to
2626 combine constant offsets with unspec relocs, creating address RTXs
2627 looking like this:
2628 (plus:SI (reg:SI 23 r23)
2629 (const:SI
2630 (plus:SI
2631 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
2632 (const_int 48 [0x30]))))
2633
2634 This usually happens when 'var' is a thread-local struct variable,
2635 and accessing a field of 'var' introduces the addend.
2636
2637 We typically want this combining, so transform the above into this
2638 form, which is allowed:
2639 (plus:SI (reg:SI 23 r23)
2640 (const:SI
2641 (unspec:SI
2642 [(const:SI
2643 (plus:SI (symbol_ref:SI ("var"))
2644 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
2645
2646 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
2647 else if (GET_CODE (op1) == CONST)
2648 {
2649 rtx unspec, offset;
2650 split_const (op1, &unspec, &offset);
2651 if (GET_CODE (unspec) == UNSPEC
2652 && !nios2_large_offset_p (XINT (unspec, 1))
2653 && offset != const0_rtx)
2654 {
2655 rtx reg = force_reg (Pmode, op0);
2656 unspec = copy_rtx (unspec);
2657 XVECEXP (unspec, 0, 0)
2658 = plus_constant (Pmode, XVECEXP (unspec, 0, 0), INTVAL (offset));
2659 return gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
2660 }
2661 }
2662
2663 return x;
2664 }
2665
2666 static rtx
2667 nios2_delegitimize_address (rtx x)
2668 {
2669 x = delegitimize_mem_from_attrs (x);
2670
2671 if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
2672 {
2673 switch (XINT (XEXP (x, 0), 1))
2674 {
2675 case UNSPEC_PIC_SYM:
2676 case UNSPEC_PIC_CALL_SYM:
2677 case UNSPEC_PIC_GOTOFF_SYM:
2678 case UNSPEC_ADD_TLS_GD:
2679 case UNSPEC_ADD_TLS_LDM:
2680 case UNSPEC_LOAD_TLS_IE:
2681 case UNSPEC_ADD_TLS_LE:
2682 x = XVECEXP (XEXP (x, 0), 0, 0);
2683 gcc_assert (CONSTANT_P (x));
2684 break;
2685 }
2686 }
2687 return x;
2688 }
2689
2690 /* Main expander function for RTL moves. */
2691 bool
2692 nios2_emit_move_sequence (rtx *operands, machine_mode mode)
2693 {
2694 rtx to = operands[0];
2695 rtx from = operands[1];
2696
2697 if (!register_operand (to, mode) && !reg_or_0_operand (from, mode))
2698 {
2699 gcc_assert (can_create_pseudo_p ());
2700 from = copy_to_mode_reg (mode, from);
2701 }
2702
2703 if (CONSTANT_P (from))
2704 {
2705 if (CONST_INT_P (from))
2706 {
2707 if (!SMALL_INT (INTVAL (from))
2708 && !SMALL_INT_UNSIGNED (INTVAL (from))
2709 && !UPPER16_INT (INTVAL (from)))
2710 {
2711 HOST_WIDE_INT high = (INTVAL (from) + 0x8000) & ~0xffff;
2712 HOST_WIDE_INT low = INTVAL (from) & 0xffff;
2713 emit_move_insn (to, gen_int_mode (high, SImode));
2714 emit_insn (gen_add2_insn (to, gen_int_mode (low, HImode)));
2715 set_unique_reg_note (get_last_insn (), REG_EQUAL,
2716 copy_rtx (from));
2717 return true;
2718 }
2719 }
2720 else if (gprel_constant_p (from) || r0rel_constant_p (from))
2721 /* Handled directly by movsi_internal as gp + offset
2722 or r0 + offset. */
2723 ;
2724 else if (nios2_large_constant_p (from))
2725 /* This case covers either a regular symbol reference or an UNSPEC
2726 representing a 32-bit offset. We split the former
2727 only conditionally and the latter always. */
2728 {
2729 if (!nios2_large_constant_allowed ()
2730 || nios2_large_unspec_reloc_p (from))
2731 {
2732 rtx lo = nios2_split_large_constant (from, to);
2733 emit_insn (gen_rtx_SET (to, lo));
2734 set_unique_reg_note (get_last_insn (), REG_EQUAL,
2735 copy_rtx (operands[1]));
2736 return true;
2737 }
2738 }
2739 else
2740 /* This is a TLS or PIC symbol. */
2741 {
2742 from = nios2_legitimize_constant_address (from);
2743 if (CONSTANT_P (from))
2744 {
2745 emit_insn (gen_rtx_SET (to,
2746 gen_rtx_HIGH (Pmode, copy_rtx (from))));
2747 emit_insn (gen_rtx_SET (to, gen_rtx_LO_SUM (Pmode, to, from)));
2748 set_unique_reg_note (get_last_insn (), REG_EQUAL,
2749 copy_rtx (operands[1]));
2750 return true;
2751 }
2752 }
2753 }
2754
2755 operands[0] = to;
2756 operands[1] = from;
2757 return false;
2758 }
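/* A worked example of the CONST_INT splitting above (register numbers
   arbitrary): moving 0x12345678, which satisfies none of SMALL_INT,
   SMALL_INT_UNSIGNED, or UPPER16_INT, becomes roughly

     movhi  r2, 0x1234      # loads the rounded high part 0x12340000
     addi   r2, r2, 0x5678  # adds the remaining low part

   with a REG_EQUAL note recording the original constant for later
   passes.  */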
2759
2760 /* The function with address *CALL_OP is being called. If the address
2761 needs to be loaded from the GOT, emit the instruction to do so and
2762 update *CALL_OP to point to the rtx for the loaded value.
2763 If REG != NULL_RTX, it is used as the target/scratch register in the
2764 GOT address calculation. */
2765 void
2766 nios2_adjust_call_address (rtx *call_op, rtx reg)
2767 {
2768 if (MEM_P (*call_op))
2769 call_op = &XEXP (*call_op, 0);
2770
2771 rtx addr = *call_op;
2772 if (flag_pic && CONSTANT_P (addr))
2773 {
2774 rtx tmp = reg ? reg : NULL_RTX;
2775 if (!reg)
2776 reg = gen_reg_rtx (Pmode);
2777 addr = nios2_load_pic_address (addr, UNSPEC_PIC_CALL_SYM, tmp);
2778 emit_insn (gen_rtx_SET (reg, addr));
2779 *call_op = reg;
2780 }
2781 }
2782
2783 \f
2784 /* Output assembly language related definitions. */
2785
2786 /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
2787 static bool
2788 nios2_print_operand_punct_valid_p (unsigned char code)
2789 {
2790 return (code == '.' || code == '!');
2791 }
2792
2793
2794 /* Print the operand OP to file stream FILE modified by LETTER.
2795 LETTER can be one of:
2796
2797 i: print i/hi/ui suffixes (used for mov instruction variants),
2798 when OP is the appropriate immediate operand.
2799
2800 u: like 'i', except without "ui" suffix case (used for cmpgeu/cmpltu)
2801
2802 o: print "io" if OP needs volatile access (due to TARGET_BYPASS_CACHE
2803 or TARGET_BYPASS_CACHE_VOLATILE).
2804
2805 x: print i/hi/ci/chi suffixes for the and instruction,
2806 when OP is the appropriate immediate operand.
2807
2808 z: prints the third register immediate operand in assembly
2809 instructions. Outputs const0_rtx as the 'zero' register
2810 instead of '0'.
2811
2812 y: same as 'z', but specifically for logical instructions,
2813 where the processing of immediates is slightly different.
2814
2815 H: for %hiadj
2816 L: for %lo
2817 D: for the upper 32-bits of a 64-bit double value
2818 R: prints reverse condition.
2819 A: prints (reg) operand for ld[s]ex and st[s]ex.
2820
2821 .: print .n suffix for 16-bit instructions.
2822 !: print r.n suffix for 16-bit instructions. Used for jmpr.n.
2823 */
2824 static void
2825 nios2_print_operand (FILE *file, rtx op, int letter)
2826 {
2827
2828 /* First take care of the format letters that just insert a string
2829 into the output stream. */
2830 switch (letter)
2831 {
2832 case '.':
2833 if (current_output_insn && get_attr_length (current_output_insn) == 2)
2834 fprintf (file, ".n");
2835 return;
2836
2837 case '!':
2838 if (current_output_insn && get_attr_length (current_output_insn) == 2)
2839 fprintf (file, "r.n");
2840 return;
2841
2842 case 'x':
2843 if (CONST_INT_P (op))
2844 {
2845 HOST_WIDE_INT val = INTVAL (op);
2846 HOST_WIDE_INT low = val & 0xffff;
2847 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2848
2849 if (val != 0)
2850 {
2851 if (high != 0)
2852 {
2853 if (low != 0)
2854 {
2855 gcc_assert (TARGET_ARCH_R2);
2856 if (high == 0xffff)
2857 fprintf (file, "c");
2858 else if (low == 0xffff)
2859 fprintf (file, "ch");
2860 else
2861 gcc_unreachable ();
2862 }
2863 else
2864 fprintf (file, "h");
2865 }
2866 fprintf (file, "i");
2867 }
2868 }
2869 return;
2870
2871 case 'u':
2872 case 'i':
2873 if (CONST_INT_P (op))
2874 {
2875 HOST_WIDE_INT val = INTVAL (op);
2876 HOST_WIDE_INT low = val & 0xffff;
2877 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2878 if (val != 0)
2879 {
2880 if (low == 0 && high != 0)
2881 fprintf (file, "h");
2882 else if (high == 0 && (low & 0x8000) != 0 && letter != 'u')
2883 fprintf (file, "u");
2884 }
2885 }
2886 if (CONSTANT_P (op) && op != const0_rtx)
2887 fprintf (file, "i");
2888 return;
2889
2890 case 'o':
2891 if (GET_CODE (op) == MEM
2892 && ((MEM_VOLATILE_P (op) && TARGET_BYPASS_CACHE_VOLATILE)
2893 || TARGET_BYPASS_CACHE))
2894 {
2895 gcc_assert (current_output_insn
2896 && get_attr_length (current_output_insn) == 4);
2897 fprintf (file, "io");
2898 }
2899 return;
2900
2901 default:
2902 break;
2903 }
2904
2905 /* Handle comparison operator names. */
2906 if (comparison_operator (op, VOIDmode))
2907 {
2908 enum rtx_code cond = GET_CODE (op);
2909 if (letter == 0)
2910 {
2911 fprintf (file, "%s", GET_RTX_NAME (cond));
2912 return;
2913 }
2914 if (letter == 'R')
2915 {
2916 fprintf (file, "%s", GET_RTX_NAME (reverse_condition (cond)));
2917 return;
2918 }
2919 }
2920
2921 /* Now handle the cases where we actually need to format an operand. */
2922 switch (GET_CODE (op))
2923 {
2924 case REG:
2925 if (letter == 0 || letter == 'z' || letter == 'y')
2926 {
2927 fprintf (file, "%s", reg_names[REGNO (op)]);
2928 return;
2929 }
2930 else if (letter == 'D')
2931 {
2932 fprintf (file, "%s", reg_names[REGNO (op)+1]);
2933 return;
2934 }
2935 break;
2936
2937 case CONST_INT:
2938 {
2939 rtx int_rtx = op;
2940 HOST_WIDE_INT val = INTVAL (int_rtx);
2941 HOST_WIDE_INT low = val & 0xffff;
2942 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2943
2944 if (letter == 'y')
2945 {
2946 if (val == 0)
2947 fprintf (file, "zero");
2948 else
2949 {
2950 if (high != 0)
2951 {
2952 if (low != 0)
2953 {
2954 gcc_assert (TARGET_ARCH_R2);
2955 if (high == 0xffff)
2956 /* andci. */
2957 int_rtx = gen_int_mode (low, SImode);
2958 else if (low == 0xffff)
2959 /* andchi. */
2960 int_rtx = gen_int_mode (high, SImode);
2961 else
2962 gcc_unreachable ();
2963 }
2964 else
2965 /* andhi. */
2966 int_rtx = gen_int_mode (high, SImode);
2967 }
2968 else
2969 /* andi. */
2970 int_rtx = gen_int_mode (low, SImode);
2971 output_addr_const (file, int_rtx);
2972 }
2973 return;
2974 }
2975 else if (letter == 'z')
2976 {
2977 if (val == 0)
2978 fprintf (file, "zero");
2979 else
2980 {
2981 if (low == 0 && high != 0)
2982 int_rtx = gen_int_mode (high, SImode);
2983 else if (low != 0)
2984 {
2985 gcc_assert (high == 0 || high == 0xffff);
2986 int_rtx = gen_int_mode (low, high == 0 ? SImode : HImode);
2987 }
2988 else
2989 gcc_unreachable ();
2990 output_addr_const (file, int_rtx);
2991 }
2992 return;
2993 }
2994 }
2995
2996 /* Else, fall through. */
2997
2998 case CONST:
2999 case LABEL_REF:
3000 case SYMBOL_REF:
3001 case CONST_DOUBLE:
3002 if (letter == 0 || letter == 'z')
3003 {
3004 output_addr_const (file, op);
3005 return;
3006 }
3007 else if (letter == 'H' || letter == 'L')
3008 {
3009 fprintf (file, "%%");
3010 if (GET_CODE (op) == CONST
3011 && GET_CODE (XEXP (op, 0)) == UNSPEC)
3012 {
3013 rtx unspec = XEXP (op, 0);
3014 int unspec_reloc = XINT (unspec, 1);
3015 gcc_assert (nios2_large_offset_p (unspec_reloc));
3016 fprintf (file, "%s_", nios2_unspec_reloc_name (unspec_reloc));
3017 op = XVECEXP (unspec, 0, 0);
3018 }
3019 fprintf (file, letter == 'H' ? "hiadj(" : "lo(");
3020 output_addr_const (file, op);
3021 fprintf (file, ")");
3022 return;
3023 }
3024 break;
3025
3026 case SUBREG:
3027 case MEM:
3028 if (letter == 'A')
3029 {
3030 /* Address of '(reg)' form, with no index. */
3031 fprintf (file, "(%s)", reg_names[REGNO (XEXP (op, 0))]);
3032 return;
3033 }
3034 if (letter == 0)
3035 {
3036 output_address (VOIDmode, op);
3037 return;
3038 }
3039 break;
3040
3041 case CODE_LABEL:
3042 if (letter == 0)
3043 {
3044 output_addr_const (file, op);
3045 return;
3046 }
3047 break;
3048
3049 default:
3050 break;
3051 }
3052
3053 debug_rtx (op);
3054 output_operand_lossage ("unsupported operand for code '%c'", letter);
3055 gcc_unreachable ();
3056 }
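/* Illustrative use of the 'H'/'L' codes from an insn template (a sketch,
   not quoted from nios2.md): a template like

     "movhi\t%0, %H1\n\taddi\t%0, %0, %L1"

   prints, for a plain SYMBOL_REF operand,

     movhi r2, %hiadj(sym)
     addi  r2, r2, %lo(sym)

   and for a large-offset UNSPEC the reloc name is prefixed, e.g. a
   GOTOFF operand prints as %gotoff_hiadj(sym)/%gotoff_lo(sym).  */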
3057
3058 /* Return true if this is a GP-relative accessible reference. */
3059 bool
3060 gprel_constant_p (rtx op)
3061 {
3062 if (GET_CODE (op) == SYMBOL_REF
3063 && nios2_symbol_ref_in_small_data_p (op))
3064 return true;
3065 else if (GET_CODE (op) == CONST
3066 && GET_CODE (XEXP (op, 0)) == PLUS)
3067 return gprel_constant_p (XEXP (XEXP (op, 0), 0));
3068
3069 return false;
3070 }
3071
3072 /* Likewise if this is a zero-relative accessible reference. */
3073 bool
3074 r0rel_constant_p (rtx op)
3075 {
3076 if (GET_CODE (op) == SYMBOL_REF
3077 && nios2_symbol_ref_in_r0rel_data_p (op))
3078 return true;
3079 else if (GET_CODE (op) == CONST
3080 && GET_CODE (XEXP (op, 0)) == PLUS)
3081 return r0rel_constant_p (XEXP (XEXP (op, 0), 0));
3082 else if (GET_CODE (op) == CONST_INT
3083 && SMALL_INT (INTVAL (op)))
3084 return true;
3085
3086 return false;
3087 }
3088
3089 /* Return the name string for a supported unspec reloc offset. */
3090 static const char *
3091 nios2_unspec_reloc_name (int unspec)
3092 {
3093 switch (unspec)
3094 {
3095 case UNSPEC_PIC_SYM:
3096 return "got";
3097 case UNSPEC_PIC_CALL_SYM:
3098 return "call";
3099 case UNSPEC_PIC_GOTOFF_SYM:
3100 return "gotoff";
3101 case UNSPEC_LOAD_TLS_IE:
3102 return "tls_ie";
3103 case UNSPEC_ADD_TLS_LE:
3104 return "tls_le";
3105 case UNSPEC_ADD_TLS_GD:
3106 return "tls_gd";
3107 case UNSPEC_ADD_TLS_LDM:
3108 return "tls_ldm";
3109 case UNSPEC_ADD_TLS_LDO:
3110 return "tls_ldo";
3111 default:
3112 return NULL;
3113 }
3114 }
3115
3116 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
3117 static bool
3118 nios2_output_addr_const_extra (FILE *file, rtx op)
3119 {
3120 const char *name;
3121 gcc_assert (GET_CODE (op) == UNSPEC);
3122
3123 /* Support for printing out const unspec relocations. */
3124 name = nios2_unspec_reloc_name (XINT (op, 1));
3125 if (name)
3126 {
3127 fprintf (file, "%%%s(", name);
3128 output_addr_const (file, XVECEXP (op, 0, 0));
3129 fprintf (file, ")");
3130 return true;
3131 }
3132 return false;
3133 }
3134
3135 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
3136 static void
3137 nios2_print_operand_address (FILE *file, machine_mode mode, rtx op)
3138 {
3139 switch (GET_CODE (op))
3140 {
3141 case CONST:
3142 case CONST_INT:
3143 case LABEL_REF:
3144 case CONST_DOUBLE:
3145 case SYMBOL_REF:
3146 if (gprel_constant_p (op))
3147 {
3148 fprintf (file, "%%gprel(");
3149 output_addr_const (file, op);
3150 fprintf (file, ")(%s)", reg_names[GP_REGNO]);
3151 return;
3152 }
3153 else if (r0rel_constant_p (op))
3154 {
3155 if (CONST_INT_P (op))
3156 {
3157 output_addr_const (file, op);
3158 fprintf (file, "(r0)");
3159 return;
3160 }
3161 else
3162 {
3163 fprintf (file, "%%lo(");
3164 output_addr_const (file, op);
3165 fprintf (file, ")(r0)");
3166 return;
3167 }
3168 }
3169 break;
3170
3171 case PLUS:
3172 {
3173 rtx op0 = XEXP (op, 0);
3174 rtx op1 = XEXP (op, 1);
3175
3176 if (REG_P (op0) && CONSTANT_P (op1))
3177 {
3178 output_addr_const (file, op1);
3179 fprintf (file, "(%s)", reg_names[REGNO (op0)]);
3180 return;
3181 }
3182 else if (REG_P (op1) && CONSTANT_P (op0))
3183 {
3184 output_addr_const (file, op0);
3185 fprintf (file, "(%s)", reg_names[REGNO (op1)]);
3186 return;
3187 }
3188 }
3189 break;
3190
3191 case LO_SUM:
3192 {
3193 rtx op0 = XEXP (op, 0);
3194 rtx op1 = XEXP (op, 1);
3195
3196 if (REG_P (op0) && CONSTANT_P (op1))
3197 {
3198 nios2_print_operand (file, op1, 'L');
3199 fprintf (file, "(%s)", reg_names[REGNO (op0)]);
3200 return;
3201 }
3202 }
3203 break;
3204
3205 case REG:
3206 fprintf (file, "0(%s)", reg_names[REGNO (op)]);
3207 return;
3208
3209 case MEM:
3210 {
3211 rtx base = XEXP (op, 0);
3212 nios2_print_operand_address (file, mode, base);
3213 return;
3214 }
3215 default:
3216 break;
3217 }
3218
3219 fprintf (stderr, "Missing way to print address\n");
3220 debug_rtx (op);
3221 gcc_unreachable ();
3222 }
3223
3224 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
3225 static void
3226 nios2_output_dwarf_dtprel (FILE *file, int size, rtx x)
3227 {
3228 gcc_assert (size == 4);
3229 fprintf (file, "\t.4byte\t%%tls_ldo(");
3230 output_addr_const (file, x);
3231 fprintf (file, ")");
3232 }
3233
3234 /* Implement TARGET_ASM_FILE_END. */
3235
3236 static void
3237 nios2_asm_file_end (void)
3238 {
3239 /* The Nios II Linux stack is mapped non-executable by default, so add a
3240 .note.GNU-stack section for switching to executable stacks only when
3241 trampolines are generated. */
3242 if (TARGET_LINUX_ABI && trampolines_created)
3243 file_end_indicate_exec_stack ();
3244 }
3245
3246 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
3247 static void
3248 nios2_asm_function_prologue (FILE *file)
3249 {
3250 if (flag_verbose_asm || flag_debug_asm)
3251 {
3252 nios2_compute_frame_layout ();
3253 nios2_dump_frame_layout (file);
3254 }
3255 }
3256
3257 /* Emit assembly of custom FPU instructions. */
3258 const char *
3259 nios2_fpu_insn_asm (enum n2fpu_code code)
3260 {
3261 static char buf[256];
3262 const char *op1, *op2, *op3;
3263 int ln = 256, n = 0;
3264
3265 int N = N2FPU_N (code);
3266 int num_operands = N2FPU (code).num_operands;
3267 const char *insn_name = N2FPU_NAME (code);
3268 tree ftype = nios2_ftype (N2FPU_FTCODE (code));
3269 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
3270 machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
3271
3272 /* Prepare X register for DF input operands. */
3273 if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
3274 n = snprintf (buf, ln, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
3275 N2FPU_N (n2fpu_fwrx));
3276
3277 if (src_mode == SFmode)
3278 {
3279 if (dst_mode == VOIDmode)
3280 {
3281 /* The fwry case. */
3282 op1 = op3 = "zero";
3283 op2 = "%0";
3284 num_operands -= 1;
3285 }
3286 else
3287 {
3288 op1 = (dst_mode == DFmode ? "%D0" : "%0");
3289 op2 = "%1";
3290 op3 = (num_operands == 2 ? "zero" : "%2");
3291 }
3292 }
3293 else if (src_mode == DFmode)
3294 {
3295 if (dst_mode == VOIDmode)
3296 {
3297 /* The fwrx case. */
3298 op1 = "zero";
3299 op2 = "%0";
3300 op3 = "%D0";
3301 num_operands -= 1;
3302 }
3303 else
3304 {
3305 op1 = (dst_mode == DFmode ? "%D0" : "%0");
3306 op2 = (num_operands == 2 ? "%1" : "%2");
3307 op3 = (num_operands == 2 ? "%D1" : "%D2");
3308 }
3309 }
3310 else if (src_mode == VOIDmode)
3311 {
3312 /* frdxlo, frdxhi, frdy cases. */
3313 gcc_assert (dst_mode == SFmode);
3314 op1 = "%0";
3315 op2 = op3 = "zero";
3316 }
3317 else if (src_mode == SImode)
3318 {
3319 /* Conversion operators. */
3320 gcc_assert (num_operands == 2);
3321 op1 = (dst_mode == DFmode ? "%D0" : "%0");
3322 op2 = "%1";
3323 op3 = "zero";
3324 }
3325 else
3326 gcc_unreachable ();
3327
3328 /* Main instruction string. */
3329 n += snprintf (buf + n, ln - n, "custom\t%d, %s, %s, %s # %s %%0%s%s",
3330 N, op1, op2, op3, insn_name,
3331 (num_operands >= 2 ? ", %1" : ""),
3332 (num_operands == 3 ? ", %2" : ""));
3333
3334 /* Extraction of Y register for DF results. */
3335 if (dst_mode == DFmode)
3336 snprintf (buf + n, ln - n, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
3337 N2FPU_N (n2fpu_frdy));
3338 return buf;
3339 }
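/* A hedged example of the output: assuming -mcustom-fadds=253 assigns
   opcode 253 to the single-precision add, an SFmode fadds prints as

     custom	253, r2, r4, r5 # fadds r2, r4, r5

   while DFmode operations bracket the main instruction with the fwrx
   write of the 64-bit input and the frdy read of the result, as built
   above.  */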
3340
3341 \f
3342
3343 /* Function argument related. */
3344
3345 /* Define where to put the arguments to a function. Value is zero to
3346 push the argument on the stack, or a hard register in which to
3347 store the argument.
3348
3349 MODE is the argument's machine mode.
3350 TYPE is the data type of the argument (as a tree).
3351 This is null for libcalls where that information may
3352 not be available.
3353 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3354 the preceding args and about the function being called.
3355 NAMED is nonzero if this argument is a named parameter
3356 (otherwise it is an extra parameter matching an ellipsis). */
3357
3358 static rtx
3359 nios2_function_arg (cumulative_args_t cum_v, machine_mode mode,
3360 const_tree type ATTRIBUTE_UNUSED,
3361 bool named ATTRIBUTE_UNUSED)
3362 {
3363 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3364 rtx return_rtx = NULL_RTX;
3365
3366 if (cum->regs_used < NUM_ARG_REGS)
3367 return_rtx = gen_rtx_REG (mode, FIRST_ARG_REGNO + cum->regs_used);
3368
3369 return return_rtx;
3370 }
3371
3372 /* Return the number of bytes, at the beginning of the argument, that must
3373 be put in registers. 0 if the argument is entirely in registers or
3374 entirely in memory. */
3375
3376 static int
3377 nios2_arg_partial_bytes (cumulative_args_t cum_v,
3378 machine_mode mode, tree type ATTRIBUTE_UNUSED,
3379 bool named ATTRIBUTE_UNUSED)
3380 {
3381 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3382 HOST_WIDE_INT param_size;
3383
3384 if (mode == BLKmode)
3385 {
3386 param_size = int_size_in_bytes (type);
3387 gcc_assert (param_size >= 0);
3388 }
3389 else
3390 param_size = GET_MODE_SIZE (mode);
3391
3392 /* Convert to words (round up). */
3393 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
3394
3395 if (cum->regs_used < NUM_ARG_REGS
3396 && cum->regs_used + param_size > NUM_ARG_REGS)
3397 return (NUM_ARG_REGS - cum->regs_used) * UNITS_PER_WORD;
3398
3399 return 0;
3400 }
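/* Worked example (hypothetical call site): with three of the
   NUM_ARG_REGS == 4 argument registers (r4-r7) already consumed, an
   8-byte argument (param_size == 2 words) returns
   (4 - 3) * UNITS_PER_WORD == 4, i.e. its first word is passed in r7
   and the second word on the stack.  */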
3401
3402 /* Update the data in CUM to advance over an argument of mode MODE
3403 and data type TYPE; TYPE is null for libcalls where that information
3404 may not be available. */
3405
3406 static void
3407 nios2_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
3408 const_tree type ATTRIBUTE_UNUSED,
3409 bool named ATTRIBUTE_UNUSED)
3410 {
3411 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3412 HOST_WIDE_INT param_size;
3413
3414 if (mode == BLKmode)
3415 {
3416 param_size = int_size_in_bytes (type);
3417 gcc_assert (param_size >= 0);
3418 }
3419 else
3420 param_size = GET_MODE_SIZE (mode);
3421
3422 /* Convert to words (round up). */
3423 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
3424
3425 if (cum->regs_used + param_size > NUM_ARG_REGS)
3426 cum->regs_used = NUM_ARG_REGS;
3427 else
3428 cum->regs_used += param_size;
3429 }
3430
3431 static pad_direction
3432 nios2_function_arg_padding (machine_mode mode, const_tree type)
3433 {
3434 /* On little-endian targets, the first byte of every stack argument
3435 is passed in the first byte of the stack slot. */
3436 if (!BYTES_BIG_ENDIAN)
3437 return PAD_UPWARD;
3438
3439 /* Otherwise, integral types are padded downward: the last byte of a
3440 stack argument is passed in the last byte of the stack slot. */
3441 if (type != 0
3442 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
3443 : GET_MODE_CLASS (mode) == MODE_INT)
3444 return PAD_DOWNWARD;
3445
3446 /* Arguments smaller than a stack slot are padded downward. */
3447 if (mode != BLKmode)
3448 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY
3449 ? PAD_UPWARD : PAD_DOWNWARD);
3450
3451 return ((int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT))
3452 ? PAD_UPWARD : PAD_DOWNWARD);
3453 }
3454
3455 pad_direction
3456 nios2_block_reg_padding (machine_mode mode, tree type,
3457 int first ATTRIBUTE_UNUSED)
3458 {
3459 return nios2_function_arg_padding (mode, type);
3460 }
3461
3462 /* Emit RTL insns to initialize the variable parts of a trampoline.
3463 M_TRAMP is the trampoline memory; FNDECL is the function whose
3464 code address is installed; CXT is an RTX for the static chain value.
3465 On Nios II, we handle this by a library call. */
3466 static void
3467 nios2_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
3468 {
3469 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3470 rtx ctx_reg = force_reg (Pmode, cxt);
3471 rtx addr = force_reg (Pmode, XEXP (m_tramp, 0));
3472
3473 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
3474 LCT_NORMAL, VOIDmode, addr, Pmode, fnaddr, Pmode,
3475 ctx_reg, Pmode);
3476 }
3477
3478 /* Implement TARGET_FUNCTION_VALUE. */
3479 static rtx
3480 nios2_function_value (const_tree ret_type, const_tree fn ATTRIBUTE_UNUSED,
3481 bool outgoing ATTRIBUTE_UNUSED)
3482 {
3483 return gen_rtx_REG (TYPE_MODE (ret_type), FIRST_RETVAL_REGNO);
3484 }
3485
3486 /* Implement TARGET_LIBCALL_VALUE. */
3487 static rtx
3488 nios2_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
3489 {
3490 return gen_rtx_REG (mode, FIRST_RETVAL_REGNO);
3491 }
3492
3493 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
3494 static bool
3495 nios2_function_value_regno_p (const unsigned int regno)
3496 {
3497 return regno == FIRST_RETVAL_REGNO;
3498 }
3499
3500 /* Implement TARGET_RETURN_IN_MEMORY. */
3501 static bool
3502 nios2_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3503 {
3504 return (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3505 || int_size_in_bytes (type) == -1);
3506 }
3507
3508 /* TODO: It may be possible to eliminate the copyback and implement
3509 our own va_arg type. */
3510 static void
3511 nios2_setup_incoming_varargs (cumulative_args_t cum_v,
3512 machine_mode mode, tree type,
3513 int *pretend_size, int second_time)
3514 {
3515 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3516 CUMULATIVE_ARGS local_cum;
3517 cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
3518 int regs_to_push;
3519 int pret_size;
3520
3521 cfun->machine->uses_anonymous_args = 1;
3522 local_cum = *cum;
3523 nios2_function_arg_advance (local_cum_v, mode, type, true);
3524
3525 regs_to_push = NUM_ARG_REGS - local_cum.regs_used;
3526
3527 /* If we can use CDX stwm to push the arguments on the stack,
3528 nios2_expand_prologue will do that instead. */
3529 if (!TARGET_HAS_CDX && !second_time && regs_to_push > 0)
3530 {
3531 rtx ptr = virtual_incoming_args_rtx;
3532 rtx mem = gen_rtx_MEM (BLKmode, ptr);
3533 emit_insn (gen_blockage ());
3534 move_block_from_reg (local_cum.regs_used + FIRST_ARG_REGNO, mem,
3535 regs_to_push);
3536 emit_insn (gen_blockage ());
3537 }
3538
3539 pret_size = regs_to_push * UNITS_PER_WORD;
3540 if (pret_size)
3541 *pretend_size = pret_size;
3542 }
3543
3544 \f
3545
3546 /* Init FPU builtins. */
3547 static void
3548 nios2_init_fpu_builtins (int start_code)
3549 {
3550 tree fndecl;
3551 char builtin_name[64] = "__builtin_custom_";
3552 unsigned int i, n = strlen ("__builtin_custom_");
3553
3554 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3555 {
3556 snprintf (builtin_name + n, sizeof (builtin_name) - n,
3557 "%s", N2FPU_NAME (i));
3558 fndecl =
3559 add_builtin_function (builtin_name, nios2_ftype (N2FPU_FTCODE (i)),
3560 start_code + i, BUILT_IN_MD, NULL, NULL_TREE);
3561 nios2_register_builtin_fndecl (start_code + i, fndecl);
3562 }
3563 }
3564
3565 /* Helper function for expanding FPU builtins. */
3566 static rtx
3567 nios2_expand_fpu_builtin (tree exp, unsigned int code, rtx target)
3568 {
3569 struct expand_operand ops[MAX_RECOG_OPERANDS];
3570 enum insn_code icode = N2FPU_ICODE (code);
3571 int nargs, argno, opno = 0;
3572 int num_operands = N2FPU (code).num_operands;
3573 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
3574 bool has_target_p = (dst_mode != VOIDmode);
3575
3576 if (N2FPU_N (code) < 0)
3577 fatal_error (input_location,
3578 "Cannot call %<__builtin_custom_%s%> without specifying switch"
3579 " %<-mcustom-%s%>", N2FPU_NAME (code), N2FPU_NAME (code));
3580 if (has_target_p)
3581 create_output_operand (&ops[opno++], target, dst_mode);
3582 else
3583 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
3584 num_operands -= 1;
3585 nargs = call_expr_nargs (exp);
3586 for (argno = 0; argno < nargs; argno++)
3587 {
3588 tree arg = CALL_EXPR_ARG (exp, argno);
3589 create_input_operand (&ops[opno++], expand_normal (arg),
3590 TYPE_MODE (TREE_TYPE (arg)));
3591 }
3592 if (!maybe_expand_insn (icode, num_operands, ops))
3593 {
3594 error ("invalid argument to built-in function");
3595 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
3596 }
3597 return has_target_p ? ops[0].value : const0_rtx;
3598 }
3599
3600 /* Nios II has custom instruction built-in functions of the forms:
3601 __builtin_custom_n
3602 __builtin_custom_nX
3603 __builtin_custom_nXX
3604 __builtin_custom_Xn
3605 __builtin_custom_XnX
3606 __builtin_custom_XnXX
3607
3608 where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
3609 Therefore with 0-1 return values, and 0-2 arguments, we have a
3610 total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
3611 */
3612 #define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
3613 static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
3614
3615 static void
3616 nios2_init_custom_builtins (int start_code)
3617 {
3618 tree builtin_ftype, ret_type, fndecl;
3619 char builtin_name[32] = "__builtin_custom_";
3620 int n = strlen ("__builtin_custom_");
3621 int builtin_code = 0;
3622 int lhs, rhs1, rhs2;
3623
3624 struct { tree type; const char *c; } op[4];
3625 /* z */ op[0].c = ""; op[0].type = NULL_TREE;
3626 /* f */ op[1].c = "f"; op[1].type = float_type_node;
3627 /* i */ op[2].c = "i"; op[2].type = integer_type_node;
3628 /* p */ op[3].c = "p"; op[3].type = ptr_type_node;
3629
3630 /* We enumerate through the possible operand types to create all the
3631 __builtin_custom_XnXX function tree types. Note that these may slightly
3632 overlap with the function types created for other fixed builtins. */
3633
3634 for (lhs = 0; lhs < 4; lhs++)
3635 for (rhs1 = 0; rhs1 < 4; rhs1++)
3636 for (rhs2 = 0; rhs2 < 4; rhs2++)
3637 {
3638 if (rhs1 == 0 && rhs2 != 0)
3639 continue;
3640 ret_type = (op[lhs].type ? op[lhs].type : void_type_node);
3641 builtin_ftype
3642 = build_function_type_list (ret_type, integer_type_node,
3643 op[rhs1].type, op[rhs2].type,
3644 NULL_TREE);
3645 snprintf (builtin_name + n, 32 - n, "%sn%s%s",
3646 op[lhs].c, op[rhs1].c, op[rhs2].c);
3647 /* Save copy of parameter string into custom_builtin_name[]. */
3648 strncpy (custom_builtin_name[builtin_code], builtin_name + n, 5);
3649 fndecl =
3650 add_builtin_function (builtin_name, builtin_ftype,
3651 start_code + builtin_code,
3652 BUILT_IN_MD, NULL, NULL_TREE);
3653 nios2_register_builtin_fndecl (start_code + builtin_code, fndecl);
3654 builtin_code += 1;
3655 }
3656 }
3657
3658 /* Helper function for expanding custom builtins. */
3659 static rtx
3660 nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
3661 {
3662 bool has_target_p = (TREE_TYPE (exp) != void_type_node);
3663 machine_mode tmode = VOIDmode;
3664 int nargs, argno;
3665 rtx value, insn, unspec_args[3];
3666 tree arg;
3667
3668 /* XnXX form. */
3669 if (has_target_p)
3670 {
3671 tmode = TYPE_MODE (TREE_TYPE (exp));
3672 if (!target || GET_MODE (target) != tmode
3673 || !REG_P (target))
3674 target = gen_reg_rtx (tmode);
3675 }
3676
3677 nargs = call_expr_nargs (exp);
3678 for (argno = 0; argno < nargs; argno++)
3679 {
3680 arg = CALL_EXPR_ARG (exp, argno);
3681 value = expand_normal (arg);
3682 unspec_args[argno] = value;
3683 if (argno == 0)
3684 {
3685 if (!custom_insn_opcode (value, VOIDmode))
3686 error ("custom instruction opcode must be compile time "
3687 "constant in the range 0-255 for __builtin_custom_%s",
3688 custom_builtin_name[index]);
3689 }
3690 else
3691 /* For other arguments, force into a register. */
3692 unspec_args[argno] = force_reg (TYPE_MODE (TREE_TYPE (arg)),
3693 unspec_args[argno]);
3694 }
3695 /* Fill remaining unspec operands with zero. */
3696 for (; argno < 3; argno++)
3697 unspec_args[argno] = const0_rtx;
3698
3699 insn = (has_target_p
3700 ? gen_rtx_SET (target,
3701 gen_rtx_UNSPEC_VOLATILE (tmode,
3702 gen_rtvec_v (3, unspec_args),
3703 UNSPECV_CUSTOM_XNXX))
3704 : gen_rtx_UNSPEC_VOLATILE (VOIDmode, gen_rtvec_v (3, unspec_args),
3705 UNSPECV_CUSTOM_NXX));
3706 emit_insn (insn);
3707 return has_target_p ? target : const0_rtx;
3708 }
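/* For reference, the insn emitted above has roughly this shape in the
   value-producing XnXX case, with unused argument slots zero-filled:

     (set (reg:M target)
          (unspec_volatile:M [(const_int N) (reg arg1) (reg arg2)]
                             UNSPECV_CUSTOM_XNXX))

   and just the unspec_volatile without the outer SET in the no-result
   NXX case.  */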
3709
3710
3711 \f
3712
3713 /* Main definition of built-in functions. Nios II has a small number of fixed
3714 builtins, plus a large number of FPU insn builtins, and builtins for
3715 generating custom instructions. */
3716
3717 struct nios2_builtin_desc
3718 {
3719 enum insn_code icode;
3720 enum nios2_arch_type arch;
3721 enum nios2_ftcode ftype;
3722 const char *name;
3723 };
3724
3725 #define N2_BUILTINS \
3726 N2_BUILTIN_DEF (sync, R1, N2_FTYPE_VOID_VOID) \
3727 N2_BUILTIN_DEF (ldbio, R1, N2_FTYPE_SI_CVPTR) \
3728 N2_BUILTIN_DEF (ldbuio, R1, N2_FTYPE_UI_CVPTR) \
3729 N2_BUILTIN_DEF (ldhio, R1, N2_FTYPE_SI_CVPTR) \
3730 N2_BUILTIN_DEF (ldhuio, R1, N2_FTYPE_UI_CVPTR) \
3731 N2_BUILTIN_DEF (ldwio, R1, N2_FTYPE_SI_CVPTR) \
3732 N2_BUILTIN_DEF (stbio, R1, N2_FTYPE_VOID_VPTR_SI) \
3733 N2_BUILTIN_DEF (sthio, R1, N2_FTYPE_VOID_VPTR_SI) \
3734 N2_BUILTIN_DEF (stwio, R1, N2_FTYPE_VOID_VPTR_SI) \
3735 N2_BUILTIN_DEF (rdctl, R1, N2_FTYPE_SI_SI) \
3736 N2_BUILTIN_DEF (wrctl, R1, N2_FTYPE_VOID_SI_SI) \
3737 N2_BUILTIN_DEF (rdprs, R1, N2_FTYPE_SI_SI_SI) \
3738 N2_BUILTIN_DEF (flushd, R1, N2_FTYPE_VOID_VPTR) \
3739 N2_BUILTIN_DEF (flushda, R1, N2_FTYPE_VOID_VPTR) \
3740 N2_BUILTIN_DEF (wrpie, R2, N2_FTYPE_SI_SI) \
3741 N2_BUILTIN_DEF (eni, R2, N2_FTYPE_VOID_SI) \
3742 N2_BUILTIN_DEF (ldex, R2, N2_FTYPE_SI_CVPTR) \
3743 N2_BUILTIN_DEF (ldsex, R2, N2_FTYPE_SI_CVPTR) \
3744 N2_BUILTIN_DEF (stex, R2, N2_FTYPE_SI_VPTR_SI) \
3745 N2_BUILTIN_DEF (stsex, R2, N2_FTYPE_SI_VPTR_SI)
3746
3747 enum nios2_builtin_code {
3748 #define N2_BUILTIN_DEF(name, arch, ftype) NIOS2_BUILTIN_ ## name,
3749 N2_BUILTINS
3750 #undef N2_BUILTIN_DEF
3751 NUM_FIXED_NIOS2_BUILTINS
3752 };
3753
3754 static const struct nios2_builtin_desc nios2_builtins[] = {
3755 #define N2_BUILTIN_DEF(name, arch, ftype) \
3756 { CODE_FOR_ ## name, ARCH_ ## arch, ftype, "__builtin_" #name },
3757 N2_BUILTINS
3758 #undef N2_BUILTIN_DEF
3759 };
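/* To illustrate the X-macro expansion above: the 'sync' entry expands to
   the enumerator NIOS2_BUILTIN_sync and the descriptor
     { CODE_FOR_sync, ARCH_R1, N2_FTYPE_VOID_VOID, "__builtin_sync" }
   so the enum values index directly into nios2_builtins[].  */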
3760
3761 /* Start/end indices of the FPU and custom insn builtin ranges. */
3762 static unsigned int nios2_fpu_builtin_base;
3763 static unsigned int nios2_custom_builtin_base;
3764 static unsigned int nios2_custom_builtin_end;
3765
3766 /* Implement TARGET_INIT_BUILTINS. */
3767 static void
3768 nios2_init_builtins (void)
3769 {
3770 unsigned int i;
3771
3772 /* Initialize fixed builtins. */
3773 for (i = 0; i < ARRAY_SIZE (nios2_builtins); i++)
3774 {
3775 const struct nios2_builtin_desc *d = &nios2_builtins[i];
3776 tree fndecl =
3777 add_builtin_function (d->name, nios2_ftype (d->ftype), i,
3778 BUILT_IN_MD, NULL, NULL);
3779 nios2_register_builtin_fndecl (i, fndecl);
3780 }
3781
3782 /* Initialize FPU builtins. */
3783 nios2_fpu_builtin_base = ARRAY_SIZE (nios2_builtins);
3784 nios2_init_fpu_builtins (nios2_fpu_builtin_base);
3785
3786 /* Initialize custom insn builtins. */
3787 nios2_custom_builtin_base
3788 = nios2_fpu_builtin_base + ARRAY_SIZE (nios2_fpu_insn);
3789 nios2_custom_builtin_end
3790 = nios2_custom_builtin_base + NUM_CUSTOM_BUILTINS;
3791 nios2_init_custom_builtins (nios2_custom_builtin_base);
3792 }
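/* The resulting function-code space is three consecutive ranges:

     [0, nios2_fpu_builtin_base)                           fixed builtins
     [nios2_fpu_builtin_base, nios2_custom_builtin_base)   FPU builtins
     [nios2_custom_builtin_base, nios2_custom_builtin_end) custom builtins

   nios2_expand_builtin below dispatches on these boundaries.  */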
3793
3794 /* Array of fndecls for TARGET_BUILTIN_DECL. */
3795 #define NIOS2_NUM_BUILTINS \
3796 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
3797 static GTY(()) tree nios2_builtin_decls[NIOS2_NUM_BUILTINS];
3798
3799 static void
3800 nios2_register_builtin_fndecl (unsigned code, tree fndecl)
3801 {
3802 nios2_builtin_decls[code] = fndecl;
3803 }
3804
3805 /* Implement TARGET_BUILTIN_DECL. */
3806 static tree
3807 nios2_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3808 {
3809 gcc_assert (nios2_custom_builtin_end == ARRAY_SIZE (nios2_builtin_decls));
3810
3811 if (code >= nios2_custom_builtin_end)
3812 return error_mark_node;
3813
3814 if (code >= nios2_fpu_builtin_base
3815 && code < nios2_custom_builtin_base
3816 && ! N2FPU_ENABLED_P (code - nios2_fpu_builtin_base))
3817 return error_mark_node;
3818
3819 return nios2_builtin_decls[code];
3820 }
3821
3822 \f
3823 /* Low-level built-in expand routine. */
3824 static rtx
3825 nios2_expand_builtin_insn (const struct nios2_builtin_desc *d, int n,
3826 struct expand_operand *ops, bool has_target_p)
3827 {
3828 if (maybe_expand_insn (d->icode, n, ops))
3829 return has_target_p ? ops[0].value : const0_rtx;
3830 else
3831 {
3832 error ("invalid argument to built-in function %s", d->name);
3833 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
3834 }
3835 }
3836
3837 /* Expand ldio/stio and ldex/ldsex/stex/stsex form load-store
3838 instruction builtins. */
3839 static rtx
3840 nios2_expand_ldst_builtin (tree exp, rtx target,
3841 const struct nios2_builtin_desc *d)
3842 {
3843 bool has_target_p;
3844 rtx addr, mem, val;
3845 struct expand_operand ops[MAX_RECOG_OPERANDS];
3846 machine_mode mode = insn_data[d->icode].operand[0].mode;
3847
3848 addr = expand_normal (CALL_EXPR_ARG (exp, 0));
3849 mem = gen_rtx_MEM (mode, addr);
3850
3851 if (insn_data[d->icode].operand[0].allows_mem)
3852 {
3853 /* stxio/stex/stsex. */
3854 val = expand_normal (CALL_EXPR_ARG (exp, 1));
3855 if (CONST_INT_P (val))
3856 val = force_reg (mode, gen_int_mode (INTVAL (val), mode));
3857 val = simplify_gen_subreg (mode, val, GET_MODE (val), 0);
3858 create_output_operand (&ops[0], mem, mode);
3859 create_input_operand (&ops[1], val, mode);
3860 if (insn_data[d->icode].n_operands == 3)
3861 {
3862 /* stex/stsex status value, returned as result of function. */
3863 create_output_operand (&ops[2], target, mode);
3864 has_target_p = true;
3865 }
3866 else
3867 has_target_p = false;
3868 }
3869 else
3870 {
3871 /* ldxio. */
3872 create_output_operand (&ops[0], target, mode);
3873 create_input_operand (&ops[1], mem, mode);
3874 has_target_p = true;
3875 }
3876 return nios2_expand_builtin_insn (d, insn_data[d->icode].n_operands, ops,
3877 has_target_p);
3878 }
3879
3880 /* Expand rdctl/wrctl builtins. */
3881 static rtx
3882 nios2_expand_rdwrctl_builtin (tree exp, rtx target,
3883 const struct nios2_builtin_desc *d)
3884 {
3885 bool has_target_p = (insn_data[d->icode].operand[0].predicate
3886 == register_operand);
3887 rtx ctlcode = expand_normal (CALL_EXPR_ARG (exp, 0));
3888 struct expand_operand ops[MAX_RECOG_OPERANDS];
3889 if (!rdwrctl_operand (ctlcode, VOIDmode))
3890 {
3891 error ("control register number must be in range 0-31 for %s",
3892 d->name);
3893 return has_target_p ? gen_reg_rtx (SImode) : const0_rtx;
3894 }
3895 if (has_target_p)
3896 {
3897 create_output_operand (&ops[0], target, SImode);
3898 create_integer_operand (&ops[1], INTVAL (ctlcode));
3899 }
3900 else
3901 {
3902 rtx val = expand_normal (CALL_EXPR_ARG (exp, 1));
3903 create_integer_operand (&ops[0], INTVAL (ctlcode));
3904 create_input_operand (&ops[1], val, SImode);
3905 }
3906 return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
3907 }
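/* Illustrative user-level usage of the builtins expanded above; the
   control register number must be a constant in 0-31:

     int status = __builtin_rdctl (0);    read ctl0 (status)
     __builtin_wrctl (0, status | 1);     write ctl0
*/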
3908
3909 static rtx
3910 nios2_expand_rdprs_builtin (tree exp, rtx target,
3911 const struct nios2_builtin_desc *d)
3912 {
3913 rtx reg = expand_normal (CALL_EXPR_ARG (exp, 0));
3914 rtx imm = expand_normal (CALL_EXPR_ARG (exp, 1));
3915 struct expand_operand ops[MAX_RECOG_OPERANDS];
3916
3917 if (!rdwrctl_operand (reg, VOIDmode))
3918 {
3919 error ("register number must be in range 0-31 for %s",
3920 d->name);
3921 return gen_reg_rtx (SImode);
3922 }
3923
3924 if (!rdprs_dcache_operand (imm, VOIDmode))
3925 {
3926 error ("the immediate value must fit into a %d-bit integer for %s",
3927 (TARGET_ARCH_R2) ? 12 : 16, d->name);
3928 return gen_reg_rtx (SImode);
3929 }
3930
3931 create_output_operand (&ops[0], target, SImode);
3932 create_input_operand (&ops[1], reg, SImode);
3933 create_integer_operand (&ops[2], INTVAL (imm));
3934
3935 return nios2_expand_builtin_insn (d, 3, ops, true);
3936 }
3937
3938 static rtx
3939 nios2_expand_cache_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3940 const struct nios2_builtin_desc *d)
3941 {
3942 rtx mem, addr;
3943 struct expand_operand ops[MAX_RECOG_OPERANDS];
3944
3945 addr = expand_normal (CALL_EXPR_ARG (exp, 0));
3946 mem = gen_rtx_MEM (SImode, addr);
3947
3948 create_input_operand (&ops[0], mem, SImode);
3949
3950 return nios2_expand_builtin_insn (d, 1, ops, false);
3951 }
3952
3953 static rtx
3954 nios2_expand_wrpie_builtin (tree exp, rtx target,
3955 const struct nios2_builtin_desc *d)
3956 {
3957 rtx val;
3958 struct expand_operand ops[MAX_RECOG_OPERANDS];
3959
3960 val = expand_normal (CALL_EXPR_ARG (exp, 0));
3961 create_input_operand (&ops[1], val, SImode);
3962 create_output_operand (&ops[0], target, SImode);
3963
3964 return nios2_expand_builtin_insn (d, 2, ops, true);
3965 }
3966
3967 static rtx
3968 nios2_expand_eni_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3969 const struct nios2_builtin_desc *d)
3970 {
3971 rtx imm = expand_normal (CALL_EXPR_ARG (exp, 0));
3972 struct expand_operand ops[MAX_RECOG_OPERANDS];
3973
3974 if (INTVAL (imm) != 0 && INTVAL (imm) != 1)
3975 {
3976 error ("the ENI instruction operand must be either 0 or 1");
3977 return const0_rtx;
3978 }
3979 create_integer_operand (&ops[0], INTVAL (imm));
3980
3981 return nios2_expand_builtin_insn (d, 1, ops, false);
3982 }
3983
3984 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
3985 a built-in function, with result going to TARGET if that's convenient
3986 (and in mode MODE if that's convenient).
3987 SUBTARGET may be used as the target for computing one of EXP's operands.
3988 IGNORE is nonzero if the value is to be ignored. */
3989
3990 static rtx
3991 nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
3992 machine_mode mode ATTRIBUTE_UNUSED,
3993 int ignore ATTRIBUTE_UNUSED)
3994 {
3995 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3996 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3997
3998 if (fcode < nios2_fpu_builtin_base)
3999 {
4000 const struct nios2_builtin_desc *d = &nios2_builtins[fcode];
4001
4002 if (d->arch > nios2_arch_option)
4003 {
4004 error ("built-in function %s requires Nios II R%d",
4005 d->name, (int) d->arch);
4006 /* Given it is invalid, just generate a normal call. */
4007 return expand_call (exp, target, ignore);
4008 }
4009
4010 switch (fcode)
4011 {
4012 case NIOS2_BUILTIN_sync:
4013 emit_insn (gen_sync ());
4014 return const0_rtx;
4015
4016 case NIOS2_BUILTIN_ldbio:
4017 case NIOS2_BUILTIN_ldbuio:
4018 case NIOS2_BUILTIN_ldhio:
4019 case NIOS2_BUILTIN_ldhuio:
4020 case NIOS2_BUILTIN_ldwio:
4021 case NIOS2_BUILTIN_stbio:
4022 case NIOS2_BUILTIN_sthio:
4023 case NIOS2_BUILTIN_stwio:
4024 case NIOS2_BUILTIN_ldex:
4025 case NIOS2_BUILTIN_ldsex:
4026 case NIOS2_BUILTIN_stex:
4027 case NIOS2_BUILTIN_stsex:
4028 return nios2_expand_ldst_builtin (exp, target, d);
4029
4030 case NIOS2_BUILTIN_rdctl:
4031 case NIOS2_BUILTIN_wrctl:
4032 return nios2_expand_rdwrctl_builtin (exp, target, d);
4033
4034 case NIOS2_BUILTIN_rdprs:
4035 return nios2_expand_rdprs_builtin (exp, target, d);
4036
4037 case NIOS2_BUILTIN_flushd:
4038 case NIOS2_BUILTIN_flushda:
4039 return nios2_expand_cache_builtin (exp, target, d);
4040
4041 case NIOS2_BUILTIN_wrpie:
4042 return nios2_expand_wrpie_builtin (exp, target, d);
4043
4044 case NIOS2_BUILTIN_eni:
4045 return nios2_expand_eni_builtin (exp, target, d);
4046
4047 default:
4048 gcc_unreachable ();
4049 }
4050 }
4051 else if (fcode < nios2_custom_builtin_base)
4052 /* FPU builtin range. */
4053 return nios2_expand_fpu_builtin (exp, fcode - nios2_fpu_builtin_base,
4054 target);
4055 else if (fcode < nios2_custom_builtin_end)
4056 /* Custom insn builtin range. */
4057 return nios2_expand_custom_builtin (exp, fcode - nios2_custom_builtin_base,
4058 target);
4059 else
4060 gcc_unreachable ();
4061 }
4062
4063 /* Implement TARGET_INIT_LIBFUNCS. */
4064 static void ATTRIBUTE_UNUSED
4065 nios2_init_libfuncs (void)
4066 {
4067 init_sync_libfuncs (UNITS_PER_WORD);
4068 }
4069
4070 \f
4071
4072 /* Register a custom code use; signal an error if a conflict is found. */
4073 static void
4074 nios2_register_custom_code (unsigned int N, enum nios2_ccs_code status,
4075 int index)
4076 {
4077 gcc_assert (N <= 255);
4078
4079 if (status == CCS_FPU)
4080 {
4081 if (custom_code_status[N] == CCS_FPU && index != custom_code_index[N])
4082 {
4083 custom_code_conflict = true;
4084 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
4085 N2FPU_NAME (custom_code_index[N]), N2FPU_NAME (index));
4086 }
4087 else if (custom_code_status[N] == CCS_BUILTIN_CALL)
4088 {
4089 custom_code_conflict = true;
4090 error ("call to %<__builtin_custom_%s%> conflicts with switch "
4091 "%<-mcustom-%s%>", custom_builtin_name[custom_code_index[N]],
4092 N2FPU_NAME (index));
4093 }
4094 }
4095 else if (status == CCS_BUILTIN_CALL)
4096 {
4097 if (custom_code_status[N] == CCS_FPU)
4098 {
4099 custom_code_conflict = true;
4100 error ("call to %<__builtin_custom_%s%> conflicts with switch "
4101 "%<-mcustom-%s%>", custom_builtin_name[index],
4102 N2FPU_NAME (custom_code_index[N]));
4103 }
4104 else
4105 {
4106 /* Note that code conflicts between different __builtin_custom_xnxx
4107 calls are not checked. */
4108 }
4109 }
4110 else
4111 gcc_unreachable ();
4112
4113 custom_code_status[N] = status;
4114 custom_code_index[N] = index;
4115 }
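/* Illustrative conflict example: if -mcustom-fadds=100 assigns opcode 100
   to the FPU add, a subsequent __builtin_custom_ini call using opcode 100
   in the same unit trips the CCS_BUILTIN_CALL vs. CCS_FPU check above.  */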
4116
4117 /* Mark a custom code as not in use. */
4118 static void
4119 nios2_deregister_custom_code (unsigned int N)
4120 {
4121 if (N <= 255)
4122 {
4123 custom_code_status[N] = CCS_UNUSED;
4124 custom_code_index[N] = 0;
4125 }
4126 }
4127
4128 /* Target attributes can affect per-function option state, so we need to
4129 save/restore the custom code tracking info using the
4130 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
4131
4132 static void
4133 nios2_option_save (struct cl_target_option *ptr,
4134 struct gcc_options *opts ATTRIBUTE_UNUSED)
4135 {
4136 unsigned int i;
4137 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
4138 ptr->saved_fpu_custom_code[i] = N2FPU_N (i);
4139 memcpy (ptr->saved_custom_code_status, custom_code_status,
4140 sizeof (custom_code_status));
4141 memcpy (ptr->saved_custom_code_index, custom_code_index,
4142 sizeof (custom_code_index));
4143 }
4144
4145 static void
4146 nios2_option_restore (struct gcc_options *opts ATTRIBUTE_UNUSED,
4147 struct cl_target_option *ptr)
4148 {
4149 unsigned int i;
4150 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
4151 N2FPU_N (i) = ptr->saved_fpu_custom_code[i];
4152 memcpy (custom_code_status, ptr->saved_custom_code_status,
4153 sizeof (custom_code_status));
4154 memcpy (custom_code_index, ptr->saved_custom_code_index,
4155 sizeof (custom_code_index));
4156 }
4157
4158 /* Inner function to process the attribute ((target (...))): take an argument
4159 and set the current options from that argument. If we have a list,
4160 recursively process each entry in the list. */
4161
4162 static bool
4163 nios2_valid_target_attribute_rec (tree args)
4164 {
4165 if (TREE_CODE (args) == TREE_LIST)
4166 {
4167 bool ret = true;
4168 for (; args; args = TREE_CHAIN (args))
4169 if (TREE_VALUE (args)
4170 && !nios2_valid_target_attribute_rec (TREE_VALUE (args)))
4171 ret = false;
4172 return ret;
4173 }
4174 else if (TREE_CODE (args) == STRING_CST)
4175 {
4176 char *argstr = ASTRDUP (TREE_STRING_POINTER (args));
4177 while (argstr && *argstr != '\0')
4178 {
4179 bool no_opt = false, end_p = false;
4180 char *eq = NULL, *p;
4181 while (ISSPACE (*argstr))
4182 argstr++;
4183 p = argstr;
4184 while (*p != '\0' && *p != ',')
4185 {
4186 if (!eq && *p == '=')
4187 eq = p;
4188 ++p;
4189 }
4190 if (*p == '\0')
4191 end_p = true;
4192 else
4193 *p = '\0';
4194 if (eq) *eq = '\0';
4195
4196 if (!strncmp (argstr, "no-", 3))
4197 {
4198 no_opt = true;
4199 argstr += 3;
4200 }
4201 if (!strncmp (argstr, "custom-fpu-cfg", 14))
4202 {
4203 char *end_eq = p;
4204 if (no_opt)
4205 {
4206 error ("%<custom-fpu-cfg%> option does not support %<no-%>");
4207 return false;
4208 }
4209 if (!eq)
4210 {
4211 error ("%<custom-fpu-cfg%> option requires configuration"
4212 " argument");
4213 return false;
4214 }
4215 /* Increment and skip whitespace. */
4216 while (ISSPACE (*(++eq))) ;
4217 /* Decrement and skip to before any trailing whitespace. */
4218 while (ISSPACE (*(--end_eq))) ;
4219
4220 nios2_handle_custom_fpu_cfg (eq, end_eq + 1, true);
4221 }
4222 else if (!strncmp (argstr, "custom-", 7))
4223 {
4224 int code = -1;
4225 unsigned int i;
4226 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
4227 if (!strncmp (argstr + 7, N2FPU_NAME (i),
4228 strlen (N2FPU_NAME (i))))
4229 {
4230 /* Found insn. */
4231 code = i;
4232 break;
4233 }
4234 if (code >= 0)
4235 {
4236 if (no_opt)
4237 {
4238 if (eq)
4239 {
4240 error ("%<no-custom-%s%> does not accept arguments",
4241 N2FPU_NAME (code));
4242 return false;
4243 }
4244 /* Disable option by setting to -1. */
4245 nios2_deregister_custom_code (N2FPU_N (code));
4246 N2FPU_N (code) = -1;
4247 }
4248 else
4249 {
4250 char *t;
4251 if (eq)
4252 while (ISSPACE (*(++eq))) ;
4253 if (!eq || eq == p)
4254 {
4255 error ("%<custom-%s=%> requires argument",
4256 N2FPU_NAME (code));
4257 return false;
4258 }
4259 for (t = eq; t != p; ++t)
4260 {
4261 if (ISSPACE (*t))
4262 continue;
4263 if (!ISDIGIT (*t))
4264 {
4265 error ("%<custom-%s=%> argument requires "
4266 "numeric digits", N2FPU_NAME (code));
4267 return false;
4268 }
4269 }
4270 /* Set option to argument. */
4271 N2FPU_N (code) = atoi (eq);
4272 nios2_handle_custom_fpu_insn_option (code);
4273 }
4274 }
4275 else
4276 {
4277 error ("%<custom-%s=%> is not recognized as an FPU instruction",
4278 argstr + 7);
4279 return false;
4280 }
4281 }
4282 else
4283 {
4284 error ("%<%s%> is unknown", argstr);
4285 return false;
4286 }
4287
4288 if (end_p)
4289 break;
4290 else
4291 argstr = p + 1;
4292 }
4293 return true;
4294 }
4295 else
4296 gcc_unreachable ();
4297 }
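/* Illustrative attribute strings accepted by the parser above, taking
   "fdivs" as a stand-in for any FPU insn name in nios2_fpu_insn:

     __attribute__ ((target ("custom-fdivs=246")))
     __attribute__ ((target ("no-custom-fdivs")))
     __attribute__ ((target ("custom-fpu-cfg=60-1")))
*/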
4298
4299 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
4300
4301 static tree
4302 nios2_valid_target_attribute_tree (tree args)
4303 {
4304 if (!nios2_valid_target_attribute_rec (args))
4305 return NULL_TREE;
4306 nios2_custom_check_insns ();
4307 return build_target_option_node (&global_options);
4308 }
4309
4310 /* Hook to validate attribute((target("string"))). */
4311
4312 static bool
4313 nios2_valid_target_attribute_p (tree fndecl, tree ARG_UNUSED (name),
4314 tree args, int ARG_UNUSED (flags))
4315 {
4316 struct cl_target_option cur_target;
4317 bool ret = true;
4318 tree old_optimize = build_optimization_node (&global_options);
4319 tree new_target, new_optimize;
4320 tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);
4321
4322 /* If the function changed the optimization levels as well as setting target
4323 options, start with the optimizations specified. */
4324 if (func_optimize && func_optimize != old_optimize)
4325 cl_optimization_restore (&global_options,
4326 TREE_OPTIMIZATION (func_optimize));
4327
4328 /* The target attributes may also change some optimization flags, so update
4329 the optimization options if necessary. */
4330 cl_target_option_save (&cur_target, &global_options);
4331 new_target = nios2_valid_target_attribute_tree (args);
4332 new_optimize = build_optimization_node (&global_options);
4333
4334 if (!new_target)
4335 ret = false;
4336
4337 else if (fndecl)
4338 {
4339 DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;
4340
4341 if (old_optimize != new_optimize)
4342 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
4343 }
4344
4345 cl_target_option_restore (&global_options, &cur_target);
4346
4347 if (old_optimize != new_optimize)
4348 cl_optimization_restore (&global_options,
4349 TREE_OPTIMIZATION (old_optimize));
4350 return ret;
4351 }
4352
4353 /* Remember the last target of nios2_set_current_function. */
4354 static GTY(()) tree nios2_previous_fndecl;
4355
4356 /* Establish appropriate back-end context for processing the function
4357 FNDECL. The argument might be NULL to indicate processing at top
4358 level, outside of any function scope. */
4359 static void
4360 nios2_set_current_function (tree fndecl)
4361 {
4362 tree old_tree = (nios2_previous_fndecl
4363 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl)
4364 : NULL_TREE);
4365
4366 tree new_tree = (fndecl
4367 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
4368 : NULL_TREE);
4369
4370 if (fndecl && fndecl != nios2_previous_fndecl)
4371 {
4372 nios2_previous_fndecl = fndecl;
4373 if (old_tree == new_tree)
4374 ;
4375
4376 else if (new_tree)
4377 {
4378 cl_target_option_restore (&global_options,
4379 TREE_TARGET_OPTION (new_tree));
4380 target_reinit ();
4381 }
4382
4383 else if (old_tree)
4384 {
4385 struct cl_target_option *def
4386 = TREE_TARGET_OPTION (target_option_current_node);
4387
4388 cl_target_option_restore (&global_options, def);
4389 target_reinit ();
4390 }
4391 }
4392 }
4393
4394 /* Hook to validate the current #pragma GCC target and set the FPU custom
4395 code option state. If ARGS is NULL, then POP_TARGET is used to reset
4396 the options. */
4397 static bool
4398 nios2_pragma_target_parse (tree args, tree pop_target)
4399 {
4400 tree cur_tree;
4401 if (! args)
4402 {
4403 cur_tree = ((pop_target)
4404 ? pop_target
4405 : target_option_default_node);
4406 cl_target_option_restore (&global_options,
4407 TREE_TARGET_OPTION (cur_tree));
4408 }
4409 else
4410 {
4411 cur_tree = nios2_valid_target_attribute_tree (args);
4412 if (!cur_tree)
4413 return false;
4414 }
4415
4416 target_option_current_node = cur_tree;
4417 return true;
4418 }
4419
4420 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
4421 We are just using this hook to add some additional error checking to
4422 the default behavior. GCC does not provide a target hook for merging
4423 the target options, and only correctly handles merging empty vs non-empty
4424 option data; see merge_decls() in c-decl.c.
4425 So here we require either that at least one of the decls has empty
4426 target options, or that the target options/data be identical. */
4427 static tree
4428 nios2_merge_decl_attributes (tree olddecl, tree newdecl)
4429 {
4430 tree oldopts = lookup_attribute ("target", DECL_ATTRIBUTES (olddecl));
4431 tree newopts = lookup_attribute ("target", DECL_ATTRIBUTES (newdecl));
4432 if (newopts && oldopts && newopts != oldopts)
4433 {
4434 tree oldtree = DECL_FUNCTION_SPECIFIC_TARGET (olddecl);
4435 tree newtree = DECL_FUNCTION_SPECIFIC_TARGET (newdecl);
4436 if (oldtree && newtree && oldtree != newtree)
4437 {
4438 struct cl_target_option *olddata = TREE_TARGET_OPTION (oldtree);
4439 struct cl_target_option *newdata = TREE_TARGET_OPTION (newtree);
4440 if (olddata != newdata
4441 && memcmp (olddata, newdata, sizeof (struct cl_target_option)))
4442 error ("%qE redeclared with conflicting %qs attributes",
4443 DECL_NAME (newdecl), "target");
4444 }
4445 }
4446 return merge_attributes (DECL_ATTRIBUTES (olddecl),
4447 DECL_ATTRIBUTES (newdecl));
4448 }
4449
4450 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
4451 static void
4452 nios2_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
4453 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
4454 tree function)
4455 {
4456 rtx this_rtx, funexp;
4457 rtx_insn *insn;
4458
4459 /* Pretend to be a post-reload pass while generating rtl. */
4460 reload_completed = 1;
4461
4462 if (flag_pic)
4463 nios2_load_pic_register ();
4464
4465 /* Mark the end of the (empty) prologue. */
4466 emit_note (NOTE_INSN_PROLOGUE_END);
4467
4468 /* Find the "this" pointer. If the function returns a structure,
4469 the structure return pointer is in $5. */
4470 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
4471 this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO + 1);
4472 else
4473 this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
4474
4475 /* Add DELTA to THIS_RTX. */
4476 nios2_emit_add_constant (this_rtx, delta);
4477
4478 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
4479 if (vcall_offset)
4480 {
4481 rtx tmp;
4482
4483 tmp = gen_rtx_REG (Pmode, 2);
4484 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
4485 nios2_emit_add_constant (tmp, vcall_offset);
4486 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
4487 emit_insn (gen_add2_insn (this_rtx, tmp));
4488 }
4489
4490 /* Generate a tail call to the target function. */
4491 if (!TREE_USED (function))
4492 {
4493 assemble_external (function);
4494 TREE_USED (function) = 1;
4495 }
4496 funexp = XEXP (DECL_RTL (function), 0);
4497 /* Function address needs to be constructed under PIC,
4498 provide r2 to use here. */
4499 nios2_adjust_call_address (&funexp, gen_rtx_REG (Pmode, 2));
4500 insn = emit_call_insn (gen_sibcall_internal (funexp, const0_rtx));
4501 SIBLING_CALL_P (insn) = 1;
4502
4503 /* Run just enough of rest_of_compilation to get the insns emitted.
4504 There's not really enough bulk here to make other passes such as
4505 instruction scheduling worthwhile. Note that use_thunk calls
4506 assemble_start_function and assemble_end_function. */
4507 insn = get_insns ();
4508 shorten_branches (insn);
4509 final_start_function (insn, file, 1);
4510 final (insn, file, 1);
4511 final_end_function ();
4512
4513 /* Stop pretending to be a post-reload pass. */
4514 reload_completed = 0;
4515 }
4516
4517
4518 /* Utility function to break a memory address into
4519 base register + constant offset. Return false if something
4520 unexpected is seen. */
4521 static bool
4522 split_mem_address (rtx addr, rtx *base_reg, rtx *offset)
4523 {
4524 if (REG_P (addr))
4525 {
4526 *base_reg = addr;
4527 *offset = const0_rtx;
4528 return true;
4529 }
4530 else if (GET_CODE (addr) == PLUS)
4531 {
4532 *base_reg = XEXP (addr, 0);
4533 *offset = XEXP (addr, 1);
4534 return true;
4535 }
4536 return false;
4537 }
4538
4539 /* Splits out the operands of an ALU insn, placing them in *LHS, *RHS1, *RHS2 (unary ops leave *RHS2 untouched). */
4540 static void
4541 split_alu_insn (rtx_insn *insn, rtx *lhs, rtx *rhs1, rtx *rhs2)
4542 {
4543 rtx pat = PATTERN (insn);
4544 gcc_assert (GET_CODE (pat) == SET);
4545 *lhs = SET_DEST (pat);
4546 *rhs1 = XEXP (SET_SRC (pat), 0);
4547 if (GET_RTX_CLASS (GET_CODE (SET_SRC (pat))) != RTX_UNARY)
4548 *rhs2 = XEXP (SET_SRC (pat), 1);
4549 return;
4550 }
4551
4552 /* Returns true if OP is a REG that is assigned a CDX reg; before reload, any REG qualifies. */
4553 static bool
4554 cdxreg (rtx op)
4555 {
4556 return REG_P (op) && (!reload_completed || CDX_REG_P (REGNO (op)));
4557 }
4558
4559 /* Returns true if OP is within range of CDX addi.n immediates. */
4560 static bool
4561 cdx_add_immed (rtx op)
4562 {
4563 if (CONST_INT_P (op))
4564 {
4565 HOST_WIDE_INT ival = INTVAL (op);
4566 return ival <= 128 && ival > 0 && (ival & (ival - 1)) == 0;
4567 }
4568 return false;
4569 }
4570
4571 /* Returns true if OP is within range of CDX andi.n immediates. */
4572 static bool
4573 cdx_and_immed (rtx op)
4574 {
4575 if (CONST_INT_P (op))
4576 {
4577 HOST_WIDE_INT ival = INTVAL (op);
4578 return (ival == 1 || ival == 2 || ival == 3 || ival == 4
4579 || ival == 8 || ival == 0xf || ival == 0x10
4580 || ival == 0x1f || ival == 0x20
4581 || ival == 0x3f || ival == 0x7f
4582 || ival == 0x80 || ival == 0xff || ival == 0x7ff
4583 || ival == 0xff00 || ival == 0xffff);
4584 }
4585 return false;
4586 }
4587
4588 /* Returns true if OP is within range of CDX movi.n immediates. */
4589 static bool
4590 cdx_mov_immed (rtx op)
4591 {
4592 if (CONST_INT_P (op))
4593 {
4594 HOST_WIDE_INT ival = INTVAL (op);
4595 return ((ival >= 0 && ival <= 124)
4596 || ival == 0xff || ival == -2 || ival == -1);
4597 }
4598 return false;
4599 }
4600
4601 /* Returns true if OP is within range of CDX slli.n/srli.n immediates. */
4602 static bool
4603 cdx_shift_immed (rtx op)
4604 {
4605 if (CONST_INT_P (op))
4606 {
4607 HOST_WIDE_INT ival = INTVAL (op);
4608 return (ival == 1 || ival == 2 || ival == 3 || ival == 8
4609 || ival == 12 || ival == 16 || ival == 24
4610 || ival == 31);
4611 }
4612 return false;
4613 }
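/* Summarizing the CDX immediate sets accepted above:
     addi.n/subi.n : powers of two from 1 to 128
     andi.n        : the 16 mask-like constants listed
     movi.n        : 0..124, 0xff, -1, -2
     slli.n/srli.n : 1, 2, 3, 8, 12, 16, 24, 31  */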
4614
4615
4616
4617 /* Classification of different kinds of add instructions. */
4618 enum nios2_add_insn_kind {
4619 nios2_add_n_kind,
4620 nios2_addi_n_kind,
4621 nios2_subi_n_kind,
4622 nios2_spaddi_n_kind,
4623 nios2_spinci_n_kind,
4624 nios2_spdeci_n_kind,
4625 nios2_add_kind,
4626 nios2_addi_kind
4627 };
4628
4629 static const char *nios2_add_insn_names[] = {
4630 "add.n", "addi.n", "subi.n", "spaddi.n", "spinci.n", "spdeci.n",
4631 "add", "addi" };
4632 static bool nios2_add_insn_narrow[] = {
4633 true, true, true, true, true, true,
4634 false, false};
4635
4636 /* Function to classify kinds of add instruction patterns. */
4637 static enum nios2_add_insn_kind
4638 nios2_add_insn_classify (rtx_insn *insn ATTRIBUTE_UNUSED,
4639 rtx lhs, rtx rhs1, rtx rhs2)
4640 {
4641 if (TARGET_HAS_CDX)
4642 {
4643 if (cdxreg (lhs) && cdxreg (rhs1))
4644 {
4645 if (cdxreg (rhs2))
4646 return nios2_add_n_kind;
4647 if (CONST_INT_P (rhs2))
4648 {
4649 HOST_WIDE_INT ival = INTVAL (rhs2);
4650 if (ival > 0 && cdx_add_immed (rhs2))
4651 return nios2_addi_n_kind;
4652 if (ival < 0 && cdx_add_immed (GEN_INT (-ival)))
4653 return nios2_subi_n_kind;
4654 }
4655 }
4656 else if (rhs1 == stack_pointer_rtx
4657 && CONST_INT_P (rhs2))
4658 {
4659 HOST_WIDE_INT imm7 = INTVAL (rhs2) >> 2;
4660 HOST_WIDE_INT rem = INTVAL (rhs2) & 3;
4661 if (rem == 0 && (imm7 & ~0x7f) == 0)
4662 {
4663 if (cdxreg (lhs))
4664 return nios2_spaddi_n_kind;
4665 if (lhs == stack_pointer_rtx)
4666 return nios2_spinci_n_kind;
4667 }
4668 imm7 = -INTVAL(rhs2) >> 2;
4669 rem = -INTVAL (rhs2) & 3;
4670 if (lhs == stack_pointer_rtx
4671 && rem == 0 && (imm7 & ~0x7f) == 0)
4672 return nios2_spdeci_n_kind;
4673 }
4674 }
4675 return ((REG_P (rhs2) || rhs2 == const0_rtx)
4676 ? nios2_add_kind : nios2_addi_kind);
4677 }
4678
4679 /* Emit assembly language for the different kinds of add instructions. */
4680 const char*
4681 nios2_add_insn_asm (rtx_insn *insn, rtx *operands)
4682 {
4683 static char buf[256];
4684 int ln = 256;
4685 enum nios2_add_insn_kind kind
4686 = nios2_add_insn_classify (insn, operands[0], operands[1], operands[2]);
4687 if (kind == nios2_subi_n_kind)
4688 snprintf (buf, ln, "subi.n\t%%0, %%1, %d", (int) -INTVAL (operands[2]));
4689 else if (kind == nios2_spaddi_n_kind)
4690 snprintf (buf, ln, "spaddi.n\t%%0, %%2");
4691 else if (kind == nios2_spinci_n_kind)
4692 snprintf (buf, ln, "spinci.n\t%%2");
4693 else if (kind == nios2_spdeci_n_kind)
4694 snprintf (buf, ln, "spdeci.n\t%d", (int) -INTVAL (operands[2]));
4695 else
4696 snprintf (buf, ln, "%s\t%%0, %%1, %%z2", nios2_add_insn_names[(int)kind]);
4697 return buf;
4698 }
4699
4700 /* This routine, which the default "length" attribute computation is
4701 based on, encapsulates information about all the cases where CDX
4702 provides a narrow 2-byte instruction form. */
4703 bool
4704 nios2_cdx_narrow_form_p (rtx_insn *insn)
4705 {
4706 rtx pat, lhs, rhs1, rhs2;
4707 enum attr_type type;
4708 if (!TARGET_HAS_CDX)
4709 return false;
4710 type = get_attr_type (insn);
4711 pat = PATTERN (insn);
4712 gcc_assert (reload_completed);
4713 switch (type)
4714 {
4715 case TYPE_CONTROL:
4716 if (GET_CODE (pat) == SIMPLE_RETURN)
4717 return true;
4718 if (GET_CODE (pat) == PARALLEL)
4719 pat = XVECEXP (pat, 0, 0);
4720 if (GET_CODE (pat) == SET)
4721 pat = SET_SRC (pat);
4722 if (GET_CODE (pat) == IF_THEN_ELSE)
4723 {
4724 /* Conditional branch patterns; for these we
4725 only check the comparison to find beqz.n/bnez.n cases.
4726 For the 'nios2_cbranch' pattern, we cannot also check
4727 the branch range here. That will be done at the md
4728 pattern "length" attribute computation. */
4729 rtx cmp = XEXP (pat, 0);
4730 return ((GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
4731 && cdxreg (XEXP (cmp, 0))
4732 && XEXP (cmp, 1) == const0_rtx);
4733 }
4734 if (GET_CODE (pat) == TRAP_IF)
4735 /* trap.n is always usable. */
4736 return true;
4737 if (GET_CODE (pat) == CALL)
4738 pat = XEXP (XEXP (pat, 0), 0);
4739 if (REG_P (pat))
4740 /* Control instructions taking a register operand are indirect
4741 jumps and calls. The CDX instructions have a 5-bit register
4742 field so any reg is valid. */
4743 return true;
4744 else
4745 {
4746 gcc_assert (!insn_variable_length_p (insn));
4747 return false;
4748 }
4749 case TYPE_ADD:
4750 {
4751 enum nios2_add_insn_kind kind;
4752 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4753 kind = nios2_add_insn_classify (insn, lhs, rhs1, rhs2);
4754 return nios2_add_insn_narrow[(int)kind];
4755 }
4756 case TYPE_LD:
4757 {
4758 bool ret;
4759 HOST_WIDE_INT offset, rem = 0;
4760 rtx addr, reg = SET_DEST (pat), mem = SET_SRC (pat);
4761 if (GET_CODE (mem) == SIGN_EXTEND)
4762 /* No CDX form for sign-extended load. */
4763 return false;
4764 if (GET_CODE (mem) == ZERO_EXTEND)
4765 /* The load alternatives in the zero_extend* patterns. */
4766 mem = XEXP (mem, 0);
4767 if (MEM_P (mem))
4768 {
4769 /* ldxio. */
4770 if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
4771 || TARGET_BYPASS_CACHE)
4772 return false;
4773 addr = XEXP (mem, 0);
4774 /* GP-based and R0-based references are never narrow. */
4775 if (gprel_constant_p (addr) || r0rel_constant_p (addr))
4776 return false;
4777 /* %lo requires a 16-bit relocation and is never narrow. */
4778 if (GET_CODE (addr) == LO_SUM)
4779 return false;
4780 ret = split_mem_address (addr, &rhs1, &rhs2);
4781 gcc_assert (ret);
4782 }
4783 else
4784 return false;
4785
4786 offset = INTVAL (rhs2);
4787 if (GET_MODE (mem) == SImode)
4788 {
4789 rem = offset & 3;
4790 offset >>= 2;
4791 /* ldwsp.n case. */
4792 if (rtx_equal_p (rhs1, stack_pointer_rtx)
4793 && rem == 0 && (offset & ~0x1f) == 0)
4794 return true;
4795 }
4796 else if (GET_MODE (mem) == HImode)
4797 {
4798 rem = offset & 1;
4799 offset >>= 1;
4800 }
4801 /* ldbu.n, ldhu.n, ldw.n cases. */
4802 return (cdxreg (reg) && cdxreg (rhs1)
4803 && rem == 0 && (offset & ~0xf) == 0);
4804 }
4805 case TYPE_ST:
4806 if (GET_CODE (pat) == PARALLEL)
4807 /* stex, stsex. */
4808 return false;
4809 else
4810 {
4811 bool ret;
4812 HOST_WIDE_INT offset, rem = 0;
4813 rtx addr, reg = SET_SRC (pat), mem = SET_DEST (pat);
4814 if (!MEM_P (mem))
4815 return false;
4816 /* stxio. */
4817 if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
4818 || TARGET_BYPASS_CACHE)
4819 return false;
4820 addr = XEXP (mem, 0);
4821 /* GP-based and r0-based references are never narrow. */
4822 if (gprel_constant_p (addr) || r0rel_constant_p (addr))
4823 return false;
4824 /* %lo requires a 16-bit relocation and is never narrow. */
4825 if (GET_CODE (addr) == LO_SUM)
4826 return false;
4827 ret = split_mem_address (addr, &rhs1, &rhs2);
4828 gcc_assert (ret);
4829 offset = INTVAL (rhs2);
4830 if (GET_MODE (mem) == SImode)
4831 {
4832 rem = offset & 3;
4833 offset >>= 2;
4834 /* stwsp.n case. */
4835 if (rtx_equal_p (rhs1, stack_pointer_rtx)
4836 && rem == 0 && (offset & ~0x1f) == 0)
4837 return true;
4838 /* stwz.n case. */
4839 else if (reg == const0_rtx && cdxreg (rhs1)
4840 && rem == 0 && (offset & ~0x3f) == 0)
4841 return true;
4842 }
4843 else if (GET_MODE (mem) == HImode)
4844 {
4845 rem = offset & 1;
4846 offset >>= 1;
4847 }
4848 else
4849 {
4850 gcc_assert (GET_MODE (mem) == QImode);
4851 /* stbz.n case. */
4852 if (reg == const0_rtx && cdxreg (rhs1)
4853 && (offset & ~0x3f) == 0)
4854 return true;
4855 }
4856
4857 /* stbu.n, sthu.n, stw.n cases. */
4858 return (cdxreg (reg) && cdxreg (rhs1)
4859 && rem == 0 && (offset & ~0xf) == 0);
4860 }
4861 case TYPE_MOV:
4862 lhs = SET_DEST (pat);
4863 rhs1 = SET_SRC (pat);
4864 if (CONST_INT_P (rhs1))
4865 return (cdxreg (lhs) && cdx_mov_immed (rhs1));
4866 gcc_assert (REG_P (lhs) && REG_P (rhs1));
4867 return true;
4868
4869 case TYPE_AND:
4870 /* Some zero_extend* alternatives are and insns. */
4871 if (GET_CODE (SET_SRC (pat)) == ZERO_EXTEND)
4872 return (cdxreg (SET_DEST (pat))
4873 && cdxreg (XEXP (SET_SRC (pat), 0)));
4874 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4875 if (CONST_INT_P (rhs2))
4876 return (cdxreg (lhs) && cdxreg (rhs1) && cdx_and_immed (rhs2));
4877 return (cdxreg (lhs) && cdxreg (rhs2)
4878 && (!reload_completed || rtx_equal_p (lhs, rhs1)));
4879
4880 case TYPE_OR:
4881 case TYPE_XOR:
4882 /* Note the two-address limitation for CDX form. */
4883 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4884 return (cdxreg (lhs) && cdxreg (rhs2)
4885 && (!reload_completed || rtx_equal_p (lhs, rhs1)));
4886
4887 case TYPE_SUB:
4888 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4889 return (cdxreg (lhs) && cdxreg (rhs1) && cdxreg (rhs2));
4890
4891 case TYPE_NEG:
4892 case TYPE_NOT:
4893 split_alu_insn (insn, &lhs, &rhs1, NULL);
4894 return (cdxreg (lhs) && cdxreg (rhs1));
4895
4896 case TYPE_SLL:
4897 case TYPE_SRL:
4898 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4899 return (cdxreg (lhs)
4900 && ((cdxreg (rhs1) && cdx_shift_immed (rhs2))
4901 || (cdxreg (rhs2)
4902 && (!reload_completed || rtx_equal_p (lhs, rhs1)))));
4903 case TYPE_NOP:
4904 case TYPE_PUSH:
4905 case TYPE_POP:
4906 return true;
4907 default:
4908 break;
4909 }
4910 return false;
4911 }
4912
4913 /* Main function to implement the pop_operation predicate that
4914 checks pop.n insn pattern integrity. The CDX pop.n patterns mostly
4915 hardcode the restored registers, so the main checking is for the
4916 SP offsets. */
4917 bool
4918 pop_operation_p (rtx op)
4919 {
4920 int i;
4921 HOST_WIDE_INT last_offset = -1, len = XVECLEN (op, 0);
4922 rtx base_reg, offset;
4923
4924 if (len < 3 /* Must have at least a return, an SP update, and an RA restore. */
4925 || GET_CODE (XVECEXP (op, 0, 0)) != RETURN
4926 || !base_reg_adjustment_p (XVECEXP (op, 0, 1), &base_reg, &offset)
4927 || !rtx_equal_p (base_reg, stack_pointer_rtx)
4928 || !CONST_INT_P (offset)
4929 || (INTVAL (offset) & 3) != 0)
4930 return false;
4931
4932 for (i = len - 1; i > 1; i--)
4933 {
4934 rtx set = XVECEXP (op, 0, i);
4935 rtx curr_base_reg, curr_offset;
4936
4937 if (GET_CODE (set) != SET || !MEM_P (SET_SRC (set))
4938 || !split_mem_address (XEXP (SET_SRC (set), 0),
4939 &curr_base_reg, &curr_offset)
4940 || !rtx_equal_p (base_reg, curr_base_reg)
4941 || !CONST_INT_P (curr_offset))
4942 return false;
4943 if (i == len - 1)
4944 {
4945 last_offset = INTVAL (curr_offset);
4946 if ((last_offset & 3) != 0 || last_offset > 60)
4947 return false;
4948 }
4949 else
4950 {
4951 last_offset += 4;
4952 if (INTVAL (curr_offset) != last_offset)
4953 return false;
4954 }
4955 }
4956 if (last_offset < 0 || last_offset + 4 != INTVAL (offset))
4957 return false;
4958
4959 return true;
4960 }
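/* As a sketch, a pop.n restoring RA and r16 while popping a 12-byte
   frame satisfies the predicate above:

     (parallel [(return)
                (set (reg sp) (plus (reg sp) (const_int 12)))
                (set (reg ra) (mem (plus (reg sp) (const_int 8))))
                (set (reg r16) (mem (plus (reg sp) (const_int 4))))])

   restore offsets decrease by 4 through the vector, and the first
   restore's offset plus 4 equals the SP adjustment.  */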
4961
4962
4963 /* Masks of registers that are valid for CDX ldwm/stwm instructions.
4964 The instruction can encode subsets drawn from either R2-R13 or
4965 R14-R23 + FP + RA. */
4966 #define CDX_LDSTWM_VALID_REGS_0 0x00003ffc
4967 #define CDX_LDSTWM_VALID_REGS_1 0x90ffc000
4968
4969 static bool
4970 nios2_ldstwm_regset_p (unsigned int regno, unsigned int *regset)
4971 {
4972 if (*regset == 0)
4973 {
4974 if (CDX_LDSTWM_VALID_REGS_0 & (1 << regno))
4975 *regset = CDX_LDSTWM_VALID_REGS_0;
4976 else if (CDX_LDSTWM_VALID_REGS_1 & (1 << regno))
4977 *regset = CDX_LDSTWM_VALID_REGS_1;
4978 else
4979 return false;
4980 return true;
4981 }
4982 else
4983 return (*regset & (1 << regno)) != 0;
4984 }
4985
4986 /* Main function to implement ldwm_operation/stwm_operation
4987 predicates that check ldwm/stwm insn pattern integrity. */
4988 bool
4989 ldstwm_operation_p (rtx op, bool load_p)
4990 {
4991 int start, i, end = XVECLEN (op, 0) - 1, last_regno = -1;
4992 unsigned int regset = 0;
4993 rtx base_reg, offset;
4994 rtx first_elt = XVECEXP (op, 0, 0);
4995 bool inc_p = true;
4996 bool wb_p = base_reg_adjustment_p (first_elt, &base_reg, &offset);
4997 if (GET_CODE (XVECEXP (op, 0, end)) == RETURN)
4998 end--;
4999 start = wb_p ? 1 : 0;
5000 for (i = start; i <= end; i++)
5001 {
5002 int regno;
5003 rtx reg, mem, elt = XVECEXP (op, 0, i);
5004 /* Return early if not a SET at all. */
5005 if (GET_CODE (elt) != SET)
5006 return false;
5007 reg = load_p ? SET_DEST (elt) : SET_SRC (elt);
5008 mem = load_p ? SET_SRC (elt) : SET_DEST (elt);
5009 if (!REG_P (reg) || !MEM_P (mem))
5010 return false;
5011 regno = REGNO (reg);
5012 if (!nios2_ldstwm_regset_p (regno, &regset))
5013 return false;
5014 /* If no writeback to determine direction, use offset of first MEM. */
5015 if (wb_p)
5016 inc_p = INTVAL (offset) > 0;
5017 else if (i == start)
5018 {
5019 rtx first_base, first_offset;
5020 if (!split_mem_address (XEXP (mem, 0),
5021 &first_base, &first_offset))
5022 return false;
5023 if (!REG_P (first_base) || !CONST_INT_P (first_offset))
5024 return false;
5025 base_reg = first_base;
5026 inc_p = INTVAL (first_offset) >= 0;
5027 }
5028 /* Ensure that the base register is not loaded into. */
5029 if (load_p && regno == (int) REGNO (base_reg))
5030 return false;
5031 /* Check for register order inc/dec integrity. */
5032 if (last_regno >= 0)
5033 {
5034 if (inc_p && last_regno >= regno)
5035 return false;
5036 if (!inc_p && last_regno <= regno)
5037 return false;
5038 }
5039 last_regno = regno;
5040 }
5041 return true;
5042 }
5043
5044 /* Helper for nios2_ldst_parallel, generating one SET element of the
5045 parallel vector. */
5046 static rtx
5047 gen_ldst (bool load_p, int regno, rtx base_mem, int offset)
5048 {
5049 rtx reg = gen_rtx_REG (SImode, regno);
5050 rtx mem = adjust_address_nv (base_mem, SImode, offset);
5051 return gen_rtx_SET (load_p ? reg : mem,
5052 load_p ? mem : reg);
5053 }
5054
5055 /* A general routine for creating the body RTL pattern of
5056 ldwm/stwm/push.n/pop.n insns.
5057 LOAD_P: true/false for load/store direction.
5058 REG_INC_P: whether registers are incrementing/decrementing in the
5059 *RTL vector* (not necessarily the order defined in the ISA specification).
5060 OFFSET_INC_P: Same as REG_INC_P, but for the memory offset order.
5061 BASE_MEM: starting MEM.
5062 BASE_UPDATE: amount to update base register; zero means no writeback.
5063 REGMASK: register mask to load/store.
5064 RET_P: true to tag a (return) element at the start of the vector.
5065
5066 Note that this routine does not do any checking. It's the job of the
5067 caller to do the right thing, and the insn patterns to do the
5068 safe-guarding. */
5069 static rtx
5070 nios2_ldst_parallel (bool load_p, bool reg_inc_p, bool offset_inc_p,
5071 rtx base_mem, int base_update,
5072 unsigned HOST_WIDE_INT regmask, bool ret_p)
5073 {
5074 rtvec p;
5075 int regno, b = 0, i = 0, n = 0, len = popcount_hwi (regmask);
5076 if (ret_p) len++, i++, b++;
5077 if (base_update != 0) len++, i++;
5078 p = rtvec_alloc (len);
5079 for (regno = (reg_inc_p ? 0 : 31);
5080 regno != (reg_inc_p ? 32 : -1);
5081 regno += (reg_inc_p ? 1 : -1))
5082 if ((regmask & (1 << regno)) != 0)
5083 {
5084 int offset = (offset_inc_p ? 4 : -4) * n++;
5085 RTVEC_ELT (p, i++) = gen_ldst (load_p, regno, base_mem, offset);
5086 }
5087 if (ret_p)
5088 RTVEC_ELT (p, 0) = ret_rtx;
5089 if (base_update != 0)
5090 {
5091 rtx reg, offset;
5092 if (!split_mem_address (XEXP (base_mem, 0), &reg, &offset))
5093 gcc_unreachable ();
5094 RTVEC_ELT (p, b) =
5095 gen_rtx_SET (reg, plus_constant (Pmode, reg, base_update));
5096 }
5097 return gen_rtx_PARALLEL (VOIDmode, p);
5098 }
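/* For example (illustrative; RA_REGNO comes from nios2.h), the pop.n
   body sketched after pop_operation_p above could be built with:

     nios2_ldst_parallel (true, false, false,
                          gen_frame_mem (SImode,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        8)),
                          12, (1 << RA_REGNO) | (1 << 16), true);

   i.e. loads, descending registers and offsets in the vector, a 12-byte
   SP writeback, and a leading (return) element.  */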
5099
5100 /* CDX ldwm/stwm peephole optimization pattern related routines. */
5101
5102 /* Data structure and sorting function for ldwm/stwm peephole optimizers. */
5103 struct ldstwm_operand
5104 {
5105 int offset; /* Offset from base register. */
5106 rtx reg; /* Register to load/store at this offset. */
5107 rtx mem; /* Original mem. */
5108 bool bad; /* True if this load/store can't be combined. */
5109 bool rewrite; /* True if we should rewrite using scratch. */
5110 };
5111
5112 static int
5113 compare_ldstwm_operands (const void *arg1, const void *arg2)
5114 {
5115 const struct ldstwm_operand *op1 = (const struct ldstwm_operand *) arg1;
5116 const struct ldstwm_operand *op2 = (const struct ldstwm_operand *) arg2;
5117 if (op1->bad)
5118 return op2->bad ? 0 : 1;
5119 else if (op2->bad)
5120 return -1;
5121 else
5122 return op1->offset - op2->offset;
5123 }
5124
5125 /* Helper function: return true if a load/store using REGNO with address
5126 BASEREG and offset OFFSET meets the constraints for a 2-byte CDX ldw.n,
5127 stw.n, ldwsp.n, or stwsp.n instruction. */
5128 static bool
5129 can_use_cdx_ldstw (int regno, int basereg, int offset)
5130 {
5131 if (CDX_REG_P (regno) && CDX_REG_P (basereg)
5132 && (offset & 0x3) == 0 && 0 <= offset && offset < 0x40)
5133 return true;
5134 else if (basereg == SP_REGNO
5135 && offset >= 0 && offset < 0x80 && (offset & 0x3) == 0)
5136 return true;
5137 return false;
5138 }
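/* That is: ldw.n/stw.n need a CDX data reg, a CDX base reg, and a
   word-aligned offset in [0, 0x40), while ldwsp.n/stwsp.n take any data
   reg with an SP base and a word-aligned offset in [0, 0x80).  */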
5139
5140 /* This function is called from peephole2 optimizers to try to merge
5141 a series of individual loads and stores into a ldwm or stwm. It
5142 can also rewrite addresses inside the individual loads and stores
5143 sharing a common base register to use a scratch register and smaller
5144 offsets, if that allows them to use CDX ldw.n or stw.n instructions
5145 instead of 4-byte loads or stores.
5146 N is the number of insns we are trying to merge. SCRATCH is non-null
5147 if there is a scratch register available. The OPERANDS array contains
5148 alternating REG (even) and MEM (odd) operands. */
5149 bool
5150 gen_ldstwm_peep (bool load_p, int n, rtx scratch, rtx *operands)
5151 {
5152 /* CDX ldwm/stwm instructions allow a maximum of 12 registers to be
5153 specified. */
5154 #define MAX_LDSTWM_OPS 12
5155 struct ldstwm_operand sort[MAX_LDSTWM_OPS];
5156 int basereg = -1;
5157 int baseoffset;
5158 int i, m, lastoffset, lastreg;
5159 unsigned int regmask = 0, usemask = 0, regset;
5160 bool needscratch;
5161 int newbasereg;
5162 int nbytes;
5163
5164 if (!TARGET_HAS_CDX)
5165 return false;
5166 if (n < 2 || n > MAX_LDSTWM_OPS)
5167 return false;
5168
5169 /* Check all the operands for validity and initialize the sort array.
5170 The places where we return false here are all situations that aren't
5171 expected to ever happen -- invalid patterns, invalid registers, etc. */
5172 for (i = 0; i < n; i++)
5173 {
5174 rtx base, offset;
5175 rtx reg = operands[i];
5176 rtx mem = operands[i + n];
5177 int r, o, regno;
5178 bool bad = false;
5179
5180 if (!REG_P (reg) || !MEM_P (mem))
5181 return false;
5182
5183 regno = REGNO (reg);
5184 if (regno > 31)
5185 return false;
5186 if (load_p && (regmask & (1 << regno)) != 0)
5187 return false;
5188 regmask |= 1 << regno;
5189
5190 if (!split_mem_address (XEXP (mem, 0), &base, &offset))
5191 return false;
5192 r = REGNO (base);
5193 o = INTVAL (offset);
5194
5195 if (basereg == -1)
5196 basereg = r;
5197 else if (r != basereg)
5198 bad = true;
5199 usemask |= 1 << r;
5200
5201 sort[i].bad = bad;
5202 sort[i].rewrite = false;
5203 sort[i].offset = o;
5204 sort[i].reg = reg;
5205 sort[i].mem = mem;
5206 }
5207
5208 /* If we are doing a series of register loads, we can't safely reorder
5209 them if any of the regs used in addr expressions are also being set. */
5210 if (load_p && (regmask & usemask))
5211 return false;
5212
5213 /* Sort the array by increasing mem offset order, then check that
5214 offsets are valid and register order matches mem order. At the
5215 end of this loop, m is the number of loads/stores we will try to
5216 combine; the rest are leftovers. */
5217 qsort (sort, n, sizeof (struct ldstwm_operand), compare_ldstwm_operands);
5218
5219 baseoffset = sort[0].offset;
5220 needscratch = baseoffset != 0;
5221 if (needscratch && !scratch)
5222 return false;
5223
5224 lastreg = regmask = regset = 0;
5225 lastoffset = baseoffset;
5226 for (m = 0; m < n && !sort[m].bad; m++)
5227 {
5228 int thisreg = REGNO (sort[m].reg);
5229 if (sort[m].offset != lastoffset
5230 || (m > 0 && lastreg >= thisreg)
5231 || !nios2_ldstwm_regset_p (thisreg, &regset))
5232 break;
5233 lastoffset += 4;
5234 lastreg = thisreg;
5235 regmask |= (1 << thisreg);
5236 }
5237
5238 /* For loads, make sure we are not overwriting the scratch reg.
5239 The peephole2 pattern isn't supposed to match unless the register is
5240 unused all the way through, so this isn't supposed to happen anyway. */
5241 if (load_p
5242 && needscratch
5243 && ((1 << REGNO (scratch)) & regmask) != 0)
5244 return false;
5245 newbasereg = needscratch ? (int) REGNO (scratch) : basereg;
5246
5247 /* We may be able to combine only the first m of the n total loads/stores
5248 into a single instruction. If m < 2, there's no point in emitting
5249 a ldwm/stwm at all, but we might be able to do further optimizations
5250 if we have a scratch. We will count the instruction lengths of the
5251 old and new patterns and store the savings in nbytes. */
5252 if (m < 2)
5253 {
5254 if (!needscratch)
5255 return false;
5256 m = 0;
5257 nbytes = 0;
5258 }
5259 else
5260 nbytes = -4; /* Size of ldwm/stwm. */
5261 if (needscratch)
5262 {
5263 int bo = baseoffset > 0 ? baseoffset : -baseoffset;
5264 if (CDX_REG_P (newbasereg)
5265 && CDX_REG_P (basereg)
5266 && bo <= 128 && bo > 0 && (bo & (bo - 1)) == 0)
5267 nbytes -= 2; /* Size of addi.n/subi.n. */
5268 else
5269 nbytes -= 4; /* Size of non-CDX addi. */
5270 }
5271
5272 /* Count the size of the input load/store instructions being replaced. */
5273 for (i = 0; i < m; i++)
5274 if (can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset))
5275 nbytes += 2;
5276 else
5277 nbytes += 4;
5278
5279 /* We may also be able to save a bit if we can rewrite non-CDX
5280 load/stores that can't be combined into the ldwm/stwm into CDX
5281 load/stores using the scratch reg. For example, this might happen
5282 if baseoffset is large, by bringing in the offsets in the load/store
5283 instructions within the range that fits in the CDX instruction. */
5284 if (needscratch && CDX_REG_P (newbasereg))
5285 for (i = m; i < n && !sort[i].bad; i++)
5286 if (!can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset)
5287 && can_use_cdx_ldstw (REGNO (sort[i].reg), newbasereg,
5288 sort[i].offset - baseoffset))
5289 {
5290 sort[i].rewrite = true;
5291 nbytes += 2;
5292 }
5293
5294 /* Are we good to go? */
5295 if (nbytes <= 0)
5296 return false;
5297
5298 /* Emit the scratch load. */
5299 if (needscratch)
5300 emit_insn (gen_rtx_SET (scratch, XEXP (sort[0].mem, 0)));
5301
5302 /* Emit the ldwm/stwm insn. */
5303 if (m > 0)
5304 {
5305 rtvec p = rtvec_alloc (m);
5306 for (i = 0; i < m; i++)
5307 {
5308 int offset = sort[i].offset;
5309 rtx mem, reg = sort[i].reg;
5310 rtx base_reg = gen_rtx_REG (Pmode, newbasereg);
5311 if (needscratch)
5312 offset -= baseoffset;
5313 mem = gen_rtx_MEM (SImode, plus_constant (Pmode, base_reg, offset));
5314 if (load_p)
5315 RTVEC_ELT (p, i) = gen_rtx_SET (reg, mem);
5316 else
5317 RTVEC_ELT (p, i) = gen_rtx_SET (mem, reg);
5318 }
5319 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
5320 }
5321
5322 /* Emit any leftover load/stores as individual instructions, doing
5323 the previously-noted rewrites to use the scratch reg. */
5324 for (i = m; i < n; i++)
5325 {
5326 rtx reg = sort[i].reg;
5327 rtx mem = sort[i].mem;
5328 if (sort[i].rewrite)
5329 {
5330 int offset = sort[i].offset - baseoffset;
5331 mem = gen_rtx_MEM (SImode, plus_constant (Pmode, scratch, offset));
5332 }
5333 if (load_p)
5334 emit_move_insn (reg, mem);
5335 else
5336 emit_move_insn (mem, reg);
5337 }
5338 return true;
5339 }
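/* Illustrative transformation: presented with four word loads

     ldw r2, 0(r4);  ldw r3, 4(r4);  ldw r5, 8(r4);  ldw r6, 12(r4)

   (load_p true, n == 4, no scratch needed since baseoffset is 0), the
   routine above emits a single 4-byte ldwm of r2/r3/r5/r6 from (r4),
   saving bytes per the nbytes accounting.  */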
5340
5341 /* Implement TARGET_MACHINE_DEPENDENT_REORG:
5342 We use this hook when emitting CDX code to enforce the 4-byte
5343 alignment requirement for labels that are used as the targets of
5344 jmpi instructions. CDX code can otherwise contain a mix of 16-bit
5345 and 32-bit instructions aligned on any 16-bit boundary, but functions
5346 and jmpi labels have to be 32-bit aligned because of the way the address
5347 is encoded in the instruction. */
5348
5349 static unsigned char *label_align;
5350 static int min_labelno, max_labelno;
5351
5352 static void
5353 nios2_reorg (void)
5354 {
5355 bool changed = true;
5356 rtx_insn *insn;
5357
5358 if (!TARGET_HAS_CDX)
5359 return;
5360
5361 /* Initialize the data structures. */
5362 if (label_align)
5363 free (label_align);
5364 max_labelno = max_label_num ();
5365 min_labelno = get_first_label_num ();
5366 label_align = XCNEWVEC (unsigned char, max_labelno - min_labelno + 1);
5367
5368 /* Iterate on inserting alignment and adjusting branch lengths until
5369 no more changes. */
5370 while (changed)
5371 {
5372 changed = false;
5373 shorten_branches (get_insns ());
5374
5375 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
5376 if (JUMP_P (insn) && insn_variable_length_p (insn))
5377 {
5378 rtx label = JUMP_LABEL (insn);
5379 /* We use the current fact that all cases of 'jmpi'
5380 doing the actual branch in the machine description
5381 have a computed length of 6 or 8. Length 4 and below
5382 are all PC-relative 'br' branches without the jump-align
5383 problem. */
5384 if (label && LABEL_P (label) && get_attr_length (insn) > 4)
5385 {
5386 int index = CODE_LABEL_NUMBER (label) - min_labelno;
5387 if (label_align[index] != 2)
5388 {
5389 label_align[index] = 2;
5390 changed = true;
5391 }
5392 }
5393 }
5394 }
5395 }
5396
5397 /* Implement LABEL_ALIGN, using the information gathered in nios2_reorg. */
5398 int
5399 nios2_label_align (rtx label)
5400 {
5401 int n = CODE_LABEL_NUMBER (label);
5402
5403 if (label_align && n >= min_labelno && n <= max_labelno)
5404 return MAX (label_align[n - min_labelno], align_labels_log);
5405 return align_labels_log;
5406 }
5407
5408 /* Implement ADJUST_REG_ALLOC_ORDER. We use the default ordering
5409 for R1 and non-CDX R2 code; for CDX we tweak things to prefer
5410 the registers that can be used as operands to instructions that
5411 have 3-bit register fields. */
5412 void
5413 nios2_adjust_reg_alloc_order (void)
5414 {
5415 const int cdx_reg_alloc_order[] =
5416 {
5417 /* Call-clobbered GPRs within CDX 3-bit encoded range. */
5418 2, 3, 4, 5, 6, 7,
5419 /* Call-saved GPRs within CDX 3-bit encoded range. */
5420 16, 17,
5421 /* Other call-clobbered GPRs. */
5422 8, 9, 10, 11, 12, 13, 14, 15,
5423 /* Other call-saved GPRs. RA placed first since it is always saved. */
5424 31, 18, 19, 20, 21, 22, 23, 28,
5425 /* Fixed GPRs, not used by the register allocator. */
5426 0, 1, 24, 25, 26, 27, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39
5427 };
5428
5429 if (TARGET_HAS_CDX)
5430 memcpy (reg_alloc_order, cdx_reg_alloc_order,
5431 sizeof (int) * FIRST_PSEUDO_REGISTER);
5432 }
5433
5434 \f
5435 /* Initialize the GCC target structure. */
5436 #undef TARGET_ASM_FUNCTION_PROLOGUE
5437 #define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
5438
5439 #undef TARGET_IN_SMALL_DATA_P
5440 #define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
5441
5442 #undef TARGET_SECTION_TYPE_FLAGS
5443 #define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
5444
5445 #undef TARGET_INIT_BUILTINS
5446 #define TARGET_INIT_BUILTINS nios2_init_builtins
5447 #undef TARGET_EXPAND_BUILTIN
5448 #define TARGET_EXPAND_BUILTIN nios2_expand_builtin
5449 #undef TARGET_BUILTIN_DECL
5450 #define TARGET_BUILTIN_DECL nios2_builtin_decl
5451
5452 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5453 #define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
5454
5455 #undef TARGET_CAN_ELIMINATE
5456 #define TARGET_CAN_ELIMINATE nios2_can_eliminate
5457
5458 #undef TARGET_FUNCTION_ARG
5459 #define TARGET_FUNCTION_ARG nios2_function_arg
5460
5461 #undef TARGET_FUNCTION_ARG_ADVANCE
5462 #define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
5463
5464 #undef TARGET_FUNCTION_ARG_PADDING
5465 #define TARGET_FUNCTION_ARG_PADDING nios2_function_arg_padding
5466
5467 #undef TARGET_ARG_PARTIAL_BYTES
5468 #define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
5469
5470 #undef TARGET_TRAMPOLINE_INIT
5471 #define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
5472
5473 #undef TARGET_FUNCTION_VALUE
5474 #define TARGET_FUNCTION_VALUE nios2_function_value
5475
5476 #undef TARGET_LIBCALL_VALUE
5477 #define TARGET_LIBCALL_VALUE nios2_libcall_value
5478
5479 #undef TARGET_FUNCTION_VALUE_REGNO_P
5480 #define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
5481
5482 #undef TARGET_RETURN_IN_MEMORY
5483 #define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
5484
5485 #undef TARGET_PROMOTE_PROTOTYPES
5486 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
5487
5488 #undef TARGET_SETUP_INCOMING_VARARGS
5489 #define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
5490
5491 #undef TARGET_MUST_PASS_IN_STACK
5492 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
5493
5494 #undef TARGET_LEGITIMATE_CONSTANT_P
5495 #define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
5496
5497 #undef TARGET_LEGITIMIZE_ADDRESS
5498 #define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
5499
5500 #undef TARGET_DELEGITIMIZE_ADDRESS
5501 #define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address
5502
5503 #undef TARGET_LEGITIMATE_ADDRESS_P
5504 #define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
5505
5506 #undef TARGET_PREFERRED_RELOAD_CLASS
5507 #define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
5508
5509 #undef TARGET_RTX_COSTS
5510 #define TARGET_RTX_COSTS nios2_rtx_costs
5511
5512 #undef TARGET_ADDRESS_COST
5513 #define TARGET_ADDRESS_COST nios2_address_cost
5514
5515 #undef TARGET_HAVE_TLS
5516 #define TARGET_HAVE_TLS TARGET_LINUX_ABI
5517
5518 #undef TARGET_CANNOT_FORCE_CONST_MEM
5519 #define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
5520
5521 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
5522 #define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
5523
5524 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
5525 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P nios2_print_operand_punct_valid_p
5526
5527 #undef TARGET_PRINT_OPERAND
5528 #define TARGET_PRINT_OPERAND nios2_print_operand
5529
5530 #undef TARGET_PRINT_OPERAND_ADDRESS
5531 #define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
5532
5533 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
5534 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra
5535
5536 #undef TARGET_ASM_FILE_END
5537 #define TARGET_ASM_FILE_END nios2_asm_file_end
5538
5539 #undef TARGET_OPTION_OVERRIDE
5540 #define TARGET_OPTION_OVERRIDE nios2_option_override
5541
5542 #undef TARGET_OPTION_SAVE
5543 #define TARGET_OPTION_SAVE nios2_option_save
5544
5545 #undef TARGET_OPTION_RESTORE
5546 #define TARGET_OPTION_RESTORE nios2_option_restore
5547
5548 #undef TARGET_SET_CURRENT_FUNCTION
5549 #define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
5550
5551 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
5552 #define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
5553
5554 #undef TARGET_OPTION_PRAGMA_PARSE
5555 #define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
5556
5557 #undef TARGET_MERGE_DECL_ATTRIBUTES
5558 #define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
5559
5560 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5561 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
5562 hook_bool_const_tree_hwi_hwi_const_tree_true
5563
5564 #undef TARGET_ASM_OUTPUT_MI_THUNK
5565 #define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk
5566
5567 #undef TARGET_MACHINE_DEPENDENT_REORG
5568 #define TARGET_MACHINE_DEPENDENT_REORG nios2_reorg
5569
5570 #undef TARGET_CONSTANT_ALIGNMENT
5571 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
5572
5573 struct gcc_target targetm = TARGET_INITIALIZER;
5574
5575 #include "gt-nios2.h"