1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2020 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #define IN_TARGET_CODE 1
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "df.h"
35 #include "memmodel.h"
36 #include "tm_p.h"
37 #include "optabs.h"
38 #include "regs.h"
39 #include "emit-rtl.h"
40 #include "recog.h"
41 #include "diagnostic-core.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "flags.h"
45 #include "explow.h"
46 #include "calls.h"
47 #include "varasm.h"
48 #include "expr.h"
49 #include "toplev.h"
50 #include "langhooks.h"
51 #include "stor-layout.h"
52 #include "builtins.h"
53 #include "tree-pass.h"
54 #include "xregex.h"
55
56 /* This file should be included last. */
57 #include "target-def.h"
58
59 /* Forward function declarations. */
60 static bool nios2_symbolic_constant_p (rtx);
61 static bool prologue_saved_reg_p (unsigned);
62 static void nios2_load_pic_register (void);
63 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
64 static const char *nios2_unspec_reloc_name (int);
65 static void nios2_register_builtin_fndecl (unsigned, tree);
66 static rtx nios2_ldst_parallel (bool, bool, bool, rtx, int,
67 unsigned HOST_WIDE_INT, bool);
68 static int nios2_address_cost (rtx, machine_mode, addr_space_t, bool);
69
70 /* Threshold for data being put into the small data/bss area, instead
71 of the normal data area (references to the small data/bss area take
72 1 instruction and use the global pointer; references to the normal
73 data area take 2 instructions). */
74 unsigned HOST_WIDE_INT nios2_section_threshold = NIOS2_DEFAULT_GVALUE;
75
76 struct GTY (()) machine_function
77 {
78 /* Current frame information, to be filled in by nios2_compute_frame_layout
79 with register save masks, and offsets for the current function. */
80
81 /* Mask of registers to save. */
82 unsigned int save_mask;
83 /* Number of bytes that the entire frame takes up. */
84 int total_size;
85 /* Number of bytes that variables take up. */
86 int var_size;
87 /* Number of bytes that outgoing arguments take up. */
88 int args_size;
89 /* Number of bytes needed to store registers in frame. */
90 int save_reg_size;
91 /* Number of bytes used to store callee-saved registers. */
92 int callee_save_reg_size;
93 /* Offset from new stack pointer to store registers. */
94 int save_regs_offset;
95 /* Offset from save_regs_offset to store frame pointer register. */
96 int fp_save_offset;
97 /* != 0 if function has a variable argument list. */
98 int uses_anonymous_args;
99 /* != 0 if frame layout already calculated. */
100 int initialized;
101 };
102
103 /* State to track the assignment of custom codes to FPU/custom builtins. */
104 static enum nios2_ccs_code custom_code_status[256];
105 static int custom_code_index[256];
106 /* Set to true if any conflicts (re-use of a code between 0-255) are found. */
107 static bool custom_code_conflict = false;
108
109 /* State for command-line options. */
110 regex_t nios2_gprel_sec_regex;
111 regex_t nios2_r0rel_sec_regex;
112
113 \f
114 /* Definition of builtin function types for nios2. */
115
116 #define N2_FTYPES \
117 N2_FTYPE(1, (SF)) \
118 N2_FTYPE(1, (VOID)) \
119 N2_FTYPE(2, (DF, DF)) \
120 N2_FTYPE(3, (DF, DF, DF)) \
121 N2_FTYPE(2, (DF, SF)) \
122 N2_FTYPE(2, (DF, SI)) \
123 N2_FTYPE(2, (DF, UI)) \
124 N2_FTYPE(2, (SF, DF)) \
125 N2_FTYPE(2, (SF, SF)) \
126 N2_FTYPE(3, (SF, SF, SF)) \
127 N2_FTYPE(2, (SF, SI)) \
128 N2_FTYPE(2, (SF, UI)) \
129 N2_FTYPE(2, (SI, CVPTR)) \
130 N2_FTYPE(2, (SI, DF)) \
131 N2_FTYPE(3, (SI, DF, DF)) \
132 N2_FTYPE(2, (SI, SF)) \
133 N2_FTYPE(3, (SI, SF, SF)) \
134 N2_FTYPE(2, (SI, SI)) \
135 N2_FTYPE(3, (SI, SI, SI)) \
136 N2_FTYPE(3, (SI, VPTR, SI)) \
137 N2_FTYPE(2, (UI, CVPTR)) \
138 N2_FTYPE(2, (UI, DF)) \
139 N2_FTYPE(2, (UI, SF)) \
140 N2_FTYPE(2, (VOID, DF)) \
141 N2_FTYPE(2, (VOID, SF)) \
142 N2_FTYPE(2, (VOID, SI)) \
143 N2_FTYPE(3, (VOID, SI, SI)) \
144 N2_FTYPE(2, (VOID, VPTR)) \
145 N2_FTYPE(3, (VOID, VPTR, SI))
146
147 #define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
148 #define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
149 #define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
150
151 /* Expand ftcode enumeration. */
152 enum nios2_ftcode {
153 #define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
154 N2_FTYPES
155 #undef N2_FTYPE
156 N2_FTYPE_MAX
157 };
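
/* Illustrative annotation (not part of the original source): an entry
   such as N2_FTYPE(2, (SF, UI)) in the N2_FTYPES list expands through
   N2_FTYPE_OP2 to the enumerator N2_FTYPE_SF_UI, so each line of the
   table above contributes exactly one ftcode, named after the return
   type followed by the argument types.  */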
158
159 /* Return the tree function type, based on the ftcode. */
160 static tree
161 nios2_ftype (enum nios2_ftcode ftcode)
162 {
163 static tree types[(int) N2_FTYPE_MAX];
164
165 tree N2_TYPE_SF = float_type_node;
166 tree N2_TYPE_DF = double_type_node;
167 tree N2_TYPE_SI = integer_type_node;
168 tree N2_TYPE_UI = unsigned_type_node;
169 tree N2_TYPE_VOID = void_type_node;
170
171 static const_tree N2_TYPE_CVPTR, N2_TYPE_VPTR;
172 if (!N2_TYPE_CVPTR)
173 {
174 /* const volatile void *. */
175 N2_TYPE_CVPTR
176 = build_pointer_type (build_qualified_type (void_type_node,
177 (TYPE_QUAL_CONST
178 | TYPE_QUAL_VOLATILE)));
179 /* volatile void *. */
180 N2_TYPE_VPTR
181 = build_pointer_type (build_qualified_type (void_type_node,
182 TYPE_QUAL_VOLATILE));
183 }
184 if (types[(int) ftcode] == NULL_TREE)
185 switch (ftcode)
186 {
187 #define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
188 #define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
189 #define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
190 #define N2_FTYPE(N,ARGS) \
191 case N2_FTYPE_OP ## N ARGS: \
192 types[(int) ftcode] \
193 = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
194 break;
195 N2_FTYPES
196 #undef N2_FTYPE
197 default: gcc_unreachable ();
198 }
199 return types[(int) ftcode];
200 }
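
/* Annotation: for example, nios2_ftype (N2_FTYPE_SF_SF) builds and
   caches the tree for a "float (float)" function type via
   build_function_type_list; later calls return the cached tree.  */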
201
202 \f
203 /* Definition of FPU instruction descriptions. */
204
205 struct nios2_fpu_insn_info
206 {
207 const char *name;
208 int num_operands, *optvar;
209 int opt, no_opt;
210 #define N2F_DF 0x1
211 #define N2F_DFREQ 0x2
212 #define N2F_UNSAFE 0x4
213 #define N2F_FINITE 0x8
214 #define N2F_NO_ERRNO 0x10
215 unsigned int flags;
216 enum insn_code icode;
217 enum nios2_ftcode ftcode;
218 };
219
220 /* Base macro for defining FPU instructions. */
221 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
222 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
223 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
224 N2_FTYPE_OP ## nop args }
225
226 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
227 #define N2FPU_OP2(mode) (mode, mode)
228 #define N2FPU_OP3(mode) (mode, mode, mode)
229 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
230 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
231 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
232 #define N2FPU_INSN_SF(code, nop, flags) \
233 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
234 #define N2FPU_INSN_DF(code, nop, flags) \
235 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
236
237 /* Compare instructions, 3 operand FP operation with a SI result. */
238 #define N2FPU_CMP_DEF(code, flags, m, M) \
239 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
240 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
241 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
242 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
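
/* Worked expansion, for illustration only: the table entry
   N2FPU_INSN_SF (add, 3, 0) below goes through N2FPU_INSN_DEF and
   N2FPU_INSN_DEF_BASE to produce the initializer
     { "fadds", 3, &nios2_custom_fadds, OPT_mcustom_fadds_,
       OPT_mno_custom_fadds, 0, CODE_FOR_addsf3,
       N2_FTYPE_OP3 (SF, SF, SF) }
   tying together the option variable, insn code, and builtin function
   type for one FPU custom instruction.  */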
243
244 /* The order of definition needs to be kept consistent with
245 enum n2fpu_code in nios2-opts.h. */
246 struct nios2_fpu_insn_info nios2_fpu_insn[] =
247 {
248 /* Single precision instructions. */
249 N2FPU_INSN_SF (add, 3, 0),
250 N2FPU_INSN_SF (sub, 3, 0),
251 N2FPU_INSN_SF (mul, 3, 0),
252 N2FPU_INSN_SF (div, 3, 0),
253 /* Due to textual difference between min/max and smin/smax. */
254 N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, s, S),
255 N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, s, S),
256 N2FPU_INSN_SF (neg, 2, 0),
257 N2FPU_INSN_SF (abs, 2, 0),
258 N2FPU_INSN_SF (sqrt, 2, 0),
259 N2FPU_INSN_SF (sin, 2, N2F_UNSAFE),
260 N2FPU_INSN_SF (cos, 2, N2F_UNSAFE),
261 N2FPU_INSN_SF (tan, 2, N2F_UNSAFE),
262 N2FPU_INSN_SF (atan, 2, N2F_UNSAFE),
263 N2FPU_INSN_SF (exp, 2, N2F_UNSAFE),
264 N2FPU_INSN_SF (log, 2, N2F_UNSAFE),
265 /* Single precision compares. */
266 N2FPU_CMP_SF (eq), N2FPU_CMP_SF (ne),
267 N2FPU_CMP_SF (lt), N2FPU_CMP_SF (le),
268 N2FPU_CMP_SF (gt), N2FPU_CMP_SF (ge),
269
270 /* Double precision instructions. */
271 N2FPU_INSN_DF (add, 3, 0),
272 N2FPU_INSN_DF (sub, 3, 0),
273 N2FPU_INSN_DF (mul, 3, 0),
274 N2FPU_INSN_DF (div, 3, 0),
275 /* Due to textual difference between min/max and smin/smax. */
276 N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, d, D),
277 N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, d, D),
278 N2FPU_INSN_DF (neg, 2, 0),
279 N2FPU_INSN_DF (abs, 2, 0),
280 N2FPU_INSN_DF (sqrt, 2, 0),
281 N2FPU_INSN_DF (sin, 2, N2F_UNSAFE),
282 N2FPU_INSN_DF (cos, 2, N2F_UNSAFE),
283 N2FPU_INSN_DF (tan, 2, N2F_UNSAFE),
284 N2FPU_INSN_DF (atan, 2, N2F_UNSAFE),
285 N2FPU_INSN_DF (exp, 2, N2F_UNSAFE),
286 N2FPU_INSN_DF (log, 2, N2F_UNSAFE),
287 /* Double precision compares. */
288 N2FPU_CMP_DF (eq), N2FPU_CMP_DF (ne),
289 N2FPU_CMP_DF (lt), N2FPU_CMP_DF (le),
290 N2FPU_CMP_DF (gt), N2FPU_CMP_DF (ge),
291
292 /* Conversion instructions. */
293 N2FPU_INSN_DEF_BASE (floatis, 2, 0, floatsisf2, (SF, SI)),
294 N2FPU_INSN_DEF_BASE (floatus, 2, 0, floatunssisf2, (SF, UI)),
295 N2FPU_INSN_DEF_BASE (floatid, 2, 0, floatsidf2, (DF, SI)),
296 N2FPU_INSN_DEF_BASE (floatud, 2, 0, floatunssidf2, (DF, UI)),
297 N2FPU_INSN_DEF_BASE (round, 2, N2F_NO_ERRNO, lroundsfsi2, (SI, SF)),
298 N2FPU_INSN_DEF_BASE (fixsi, 2, 0, fix_truncsfsi2, (SI, SF)),
299 N2FPU_INSN_DEF_BASE (fixsu, 2, 0, fixuns_truncsfsi2, (UI, SF)),
300 N2FPU_INSN_DEF_BASE (fixdi, 2, 0, fix_truncdfsi2, (SI, DF)),
301 N2FPU_INSN_DEF_BASE (fixdu, 2, 0, fixuns_truncdfsi2, (UI, DF)),
302 N2FPU_INSN_DEF_BASE (fextsd, 2, 0, extendsfdf2, (DF, SF)),
303 N2FPU_INSN_DEF_BASE (ftruncds, 2, 0, truncdfsf2, (SF, DF)),
304
305 /* X, Y access instructions. */
306 N2FPU_INSN_DEF_BASE (fwrx, 2, N2F_DFREQ, nios2_fwrx, (VOID, DF)),
307 N2FPU_INSN_DEF_BASE (fwry, 2, N2F_DFREQ, nios2_fwry, (VOID, SF)),
308 N2FPU_INSN_DEF_BASE (frdxlo, 1, N2F_DFREQ, nios2_frdxlo, (SF)),
309 N2FPU_INSN_DEF_BASE (frdxhi, 1, N2F_DFREQ, nios2_frdxhi, (SF)),
310 N2FPU_INSN_DEF_BASE (frdy, 1, N2F_DFREQ, nios2_frdy, (SF))
311 };
312
313 /* Some macros for ease of access. */
314 #define N2FPU(code) nios2_fpu_insn[(int) code]
315 #define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
316 #define N2FPU_N(code) (*N2FPU(code).optvar)
317 #define N2FPU_NAME(code) (N2FPU(code).name)
318 #define N2FPU_ICODE(code) (N2FPU(code).icode)
319 #define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
320 #define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
321 #define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
322 #define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
323 #define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
324 #define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
325
326 /* Same as above, but for cases where using only the op part is shorter. */
327 #define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
328 #define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
329 #define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
330
331 /* Export the FPU insn enabled predicate to nios2.md. */
332 bool
333 nios2_fpu_insn_enabled (enum n2fpu_code code)
334 {
335 return N2FPU_ENABLED_P (code);
336 }
337
338 /* Return true if COND comparison for mode MODE is enabled under current
339 settings. */
340
341 static bool
342 nios2_fpu_compare_enabled (enum rtx_code cond, machine_mode mode)
343 {
344 if (mode == SFmode)
345 switch (cond)
346 {
347 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqs);
348 case NE: return N2FPU_OP_ENABLED_P (fcmpnes);
349 case GT: return N2FPU_OP_ENABLED_P (fcmpgts);
350 case GE: return N2FPU_OP_ENABLED_P (fcmpges);
351 case LT: return N2FPU_OP_ENABLED_P (fcmplts);
352 case LE: return N2FPU_OP_ENABLED_P (fcmples);
353 default: break;
354 }
355 else if (mode == DFmode)
356 switch (cond)
357 {
358 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqd);
359 case NE: return N2FPU_OP_ENABLED_P (fcmpned);
360 case GT: return N2FPU_OP_ENABLED_P (fcmpgtd);
361 case GE: return N2FPU_OP_ENABLED_P (fcmpged);
362 case LT: return N2FPU_OP_ENABLED_P (fcmpltd);
363 case LE: return N2FPU_OP_ENABLED_P (fcmpled);
364 default: break;
365 }
366 return false;
367 }
368
369 /* Stack layout and calling conventions. */
370
371 #define NIOS2_STACK_ALIGN(LOC) \
372 (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
373 & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
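
/* Worked example (annotation; assumes PREFERRED_STACK_BOUNDARY is
   32 bits, i.e. 4 bytes): NIOS2_STACK_ALIGN (13) computes
   (13 + 3) & ~3 == 16, rounding LOC up to the next multiple of the
   preferred stack boundary.  */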
374
375 /* Compute the frame layout for the current function; return the total
376 frame size in bytes. */
377 static int
378 nios2_compute_frame_layout (void)
379 {
380 unsigned int regno;
381 unsigned int save_mask = 0;
382 int total_size;
383 int var_size;
384 int out_args_size;
385 int save_reg_size;
386 int callee_save_reg_size;
387
388 if (cfun->machine->initialized)
389 return cfun->machine->total_size;
390
391 /* Calculate space needed for gp registers. */
392 save_reg_size = 0;
393 for (regno = 0; regno <= LAST_GP_REG; regno++)
394 if (prologue_saved_reg_p (regno))
395 {
396 save_mask |= 1 << regno;
397 save_reg_size += 4;
398 }
399
400 /* If we are saving any callee-save register, then assume
401 push.n/pop.n should be used. Make sure RA is saved, and
402 contiguous registers starting from r16 upward are all saved. */
403 if (TARGET_HAS_CDX && save_reg_size != 0)
404 {
405 if ((save_mask & (1 << RA_REGNO)) == 0)
406 {
407 save_mask |= 1 << RA_REGNO;
408 save_reg_size += 4;
409 }
410
411 for (regno = 23; regno >= 16; regno--)
412 if ((save_mask & (1 << regno)) != 0)
413 {
414 /* Starting from the highest-numbered callee-saved
415 register that is used, make sure all regs down
416 to r16 are saved, to maintain a contiguous range
417 for push.n/pop.n. */
418 unsigned int i;
419 for (i = regno - 1; i >= 16; i--)
420 if ((save_mask & (1 << i)) == 0)
421 {
422 save_mask |= 1 << i;
423 save_reg_size += 4;
424 }
425 break;
426 }
427 }
428
429 callee_save_reg_size = save_reg_size;
430
431 /* If we call eh_return, we need to save the EH data registers. */
432 if (crtl->calls_eh_return)
433 {
434 unsigned i;
435 unsigned r;
436
437 for (i = 0; (r = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
438 if (!(save_mask & (1 << r)))
439 {
440 save_mask |= 1 << r;
441 save_reg_size += 4;
442 }
443 }
444
445 cfun->machine->fp_save_offset = 0;
446 if (save_mask & (1 << HARD_FRAME_POINTER_REGNUM))
447 {
448 int fp_save_offset = 0;
449 for (regno = 0; regno < HARD_FRAME_POINTER_REGNUM; regno++)
450 if (save_mask & (1 << regno))
451 fp_save_offset += 4;
452
453 cfun->machine->fp_save_offset = fp_save_offset;
454 }
455
456 var_size = NIOS2_STACK_ALIGN (get_frame_size ());
457 out_args_size = NIOS2_STACK_ALIGN (crtl->outgoing_args_size);
458 total_size = var_size + out_args_size;
459
460 save_reg_size = NIOS2_STACK_ALIGN (save_reg_size);
461 total_size += save_reg_size;
462 total_size += NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
463
464 /* Save other computed information. */
465 cfun->machine->save_mask = save_mask;
466 cfun->machine->total_size = total_size;
467 cfun->machine->var_size = var_size;
468 cfun->machine->args_size = out_args_size;
469 cfun->machine->save_reg_size = save_reg_size;
470 cfun->machine->callee_save_reg_size = callee_save_reg_size;
471 cfun->machine->initialized = reload_completed;
472 cfun->machine->save_regs_offset = out_args_size + var_size;
473
474 return total_size;
475 }
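
/* Sketch of the frame layout implied by the computation above
   (annotation only; the stack grows downward and offsets are from the
   post-prologue stack pointer):

       incoming SP -> +-------------------------------+
                      | pretend args (aligned)        |
                      +-------------------------------+
                      | register save area            | <- SP + save_regs_offset
                      +-------------------------------+
                      | local variables (var_size)    |
                      +-------------------------------+
                      | outgoing arguments (args_size)|
          final SP -> +-------------------------------+  */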
476
477 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
478 prologue/epilogue expand routines. */
479 static void
480 save_reg (int regno, unsigned offset)
481 {
482 rtx reg = gen_rtx_REG (SImode, regno);
483 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
484 rtx_insn *insn = emit_move_insn (gen_frame_mem (Pmode, addr), reg);
485 RTX_FRAME_RELATED_P (insn) = 1;
486 }
487
488 static void
489 restore_reg (int regno, unsigned offset)
490 {
491 rtx reg = gen_rtx_REG (SImode, regno);
492 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
493 rtx_insn *insn = emit_move_insn (reg, gen_frame_mem (Pmode, addr));
494 /* Tag epilogue unwind note. */
495 add_reg_note (insn, REG_CFA_RESTORE, reg);
496 RTX_FRAME_RELATED_P (insn) = 1;
497 }
498
499 /* This routine tests for the base register update SET in load/store
500 multiple RTL insns, used in pop_operation_p and ldstwm_operation_p. */
501 static bool
502 base_reg_adjustment_p (rtx set, rtx *base_reg, rtx *offset)
503 {
504 if (GET_CODE (set) == SET
505 && REG_P (SET_DEST (set))
506 && GET_CODE (SET_SRC (set)) == PLUS
507 && REG_P (XEXP (SET_SRC (set), 0))
508 && rtx_equal_p (SET_DEST (set), XEXP (SET_SRC (set), 0))
509 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
510 {
511 *base_reg = XEXP (SET_SRC (set), 0);
512 *offset = XEXP (SET_SRC (set), 1);
513 return true;
514 }
515 return false;
516 }
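
/* For reference (annotation): the SET shape matched above is
     (set (reg N) (plus (reg N) (const_int IMM)))
   i.e. the base-register update element of a push/pop or ldwm/stwm
   PARALLEL.  */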
517
518 /* Do the CFA note bookkeeping for push/pop prologue/epilogue instructions. */
519 static void
520 nios2_create_cfa_notes (rtx_insn *insn, bool epilogue_p)
521 {
522 int i = 0;
523 rtx base_reg, offset, elt, pat = PATTERN (insn);
524 if (epilogue_p)
525 {
526 elt = XVECEXP (pat, 0, 0);
527 if (GET_CODE (elt) == RETURN)
528 i++;
529 elt = XVECEXP (pat, 0, i);
530 if (base_reg_adjustment_p (elt, &base_reg, &offset))
531 {
532 add_reg_note (insn, REG_CFA_ADJUST_CFA, copy_rtx (elt));
533 i++;
534 }
535 for (; i < XVECLEN (pat, 0); i++)
536 {
537 elt = SET_DEST (XVECEXP (pat, 0, i));
538 gcc_assert (REG_P (elt));
539 add_reg_note (insn, REG_CFA_RESTORE, elt);
540 }
541 }
542 else
543 {
544 /* Tag each of the prologue sets. */
545 for (i = 0; i < XVECLEN (pat, 0); i++)
546 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
547 }
548 }
549
550 /* Temp regno used inside prologue/epilogue. */
551 #define TEMP_REG_NUM 8
552
553 /* Emit conditional trap for checking stack limit. SIZE is the number of
554 additional bytes required.
555
556 GDB prologue analysis depends on this generating a direct comparison
557 to the SP register, so the adjustment to add SIZE needs to be done on
558 the other operand to the comparison. Use TEMP_REG_NUM as a temporary,
559 if necessary. */
560 static void
561 nios2_emit_stack_limit_check (int size)
562 {
563 rtx sum = NULL_RTX;
564
565 if (GET_CODE (stack_limit_rtx) == SYMBOL_REF)
566 {
567 /* This generates a %hiadj/%lo pair with the constant size
568 add handled by the relocations. */
569 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
570 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
571 }
572 else if (!REG_P (stack_limit_rtx))
573 sorry ("Unknown form for stack limit expression");
574 else if (size == 0)
575 sum = stack_limit_rtx;
576 else if (SMALL_INT (size))
577 {
578 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
579 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
580 }
581 else
582 {
583 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
584 emit_move_insn (sum, gen_int_mode (size, Pmode));
585 emit_insn (gen_add2_insn (sum, stack_limit_rtx));
586 }
587
588 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode, stack_pointer_rtx, sum),
589 stack_pointer_rtx, sum, GEN_INT (3)));
590 }
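
/* Annotation (a sketch of the expected expansion, not checked against
   the machine description): the ctrapsi4 above should emit RTL of the
   form
     (trap_if (ltu (reg sp) SUM) (const_int 3))
   trapping once SP falls below the adjusted limit, while keeping the
   comparison a direct use of SP as the comment above requires.  */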
591
592 static rtx_insn *
593 nios2_emit_add_constant (rtx reg, HOST_WIDE_INT immed)
594 {
595 rtx_insn *insn;
596 if (SMALL_INT (immed))
597 insn = emit_insn (gen_add2_insn (reg, gen_int_mode (immed, Pmode)));
598 else
599 {
600 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
601 emit_move_insn (tmp, gen_int_mode (immed, Pmode));
602 insn = emit_insn (gen_add2_insn (reg, tmp));
603 }
604 return insn;
605 }
606
607 static rtx_insn *
608 nios2_adjust_stack (int sp_adjust, bool epilogue_p)
609 {
610 enum reg_note note_kind = REG_NOTE_MAX;
611 rtx_insn *insn = NULL;
612 if (sp_adjust)
613 {
614 if (SMALL_INT (sp_adjust))
615 insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
616 gen_int_mode (sp_adjust, Pmode)));
617 else
618 {
619 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
620 emit_move_insn (tmp, gen_int_mode (sp_adjust, Pmode));
621 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
622 /* Attach a note indicating what happened. */
623 if (!epilogue_p)
624 note_kind = REG_FRAME_RELATED_EXPR;
625 }
626 if (epilogue_p)
627 note_kind = REG_CFA_ADJUST_CFA;
628 if (note_kind != REG_NOTE_MAX)
629 {
630 rtx cfa_adj = gen_rtx_SET (stack_pointer_rtx,
631 plus_constant (Pmode, stack_pointer_rtx,
632 sp_adjust));
633 add_reg_note (insn, note_kind, cfa_adj);
634 }
635 RTX_FRAME_RELATED_P (insn) = 1;
636 }
637 return insn;
638 }
639
640 void
641 nios2_expand_prologue (void)
642 {
643 unsigned int regno;
644 int total_frame_size, save_offset;
645 int sp_offset; /* offset from base_reg to final stack value. */
646 int save_regs_base; /* offset from base_reg to register save area. */
647 rtx_insn *insn;
648
649 total_frame_size = nios2_compute_frame_layout ();
650
651 if (flag_stack_usage_info)
652 current_function_static_stack_size = total_frame_size;
653
654 /* When R2 CDX push.n/stwm is available, arrange for stack frame to be built
655 using them. */
656 if (TARGET_HAS_CDX
657 && (cfun->machine->save_reg_size != 0
658 || cfun->machine->uses_anonymous_args))
659 {
660 unsigned int regmask = cfun->machine->save_mask;
661 unsigned int callee_save_regs = regmask & 0xffff0000;
662 unsigned int caller_save_regs = regmask & 0x0000ffff;
663 int push_immed = 0;
664 int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
665 rtx stack_mem =
666 gen_frame_mem (SImode, plus_constant (Pmode, stack_pointer_rtx, -4));
667
668 /* Check that there is room for the entire stack frame before doing
669 any SP adjustments or pushes. */
670 if (crtl->limit_stack)
671 nios2_emit_stack_limit_check (total_frame_size);
672
673 if (pretend_args_size)
674 {
675 if (cfun->machine->uses_anonymous_args)
676 {
677 /* Emit a stwm to push a copy of the argument registers onto
678 the stack for va_arg processing. */
679 unsigned int r, mask = 0, n = pretend_args_size / 4;
680 for (r = LAST_ARG_REGNO - n + 1; r <= LAST_ARG_REGNO; r++)
681 mask |= (1 << r);
682 insn = emit_insn (nios2_ldst_parallel
683 (false, false, false, stack_mem,
684 -pretend_args_size, mask, false));
685 /* Tag first SP adjustment as frame-related. */
686 RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 0)) = 1;
687 RTX_FRAME_RELATED_P (insn) = 1;
688 }
689 else
690 nios2_adjust_stack (-pretend_args_size, false);
691 }
692 if (callee_save_regs)
693 {
694 /* Emit a push.n to save registers and optionally allocate
695 push_immed extra bytes on the stack. */
696 int sp_adjust;
697 if (caller_save_regs)
698 /* Can't allocate extra stack space yet. */
699 push_immed = 0;
700 else if (cfun->machine->save_regs_offset <= 60)
701 /* Stack adjustment fits entirely in the push.n. */
702 push_immed = cfun->machine->save_regs_offset;
703 else if (frame_pointer_needed
704 && cfun->machine->fp_save_offset == 0)
705 /* Deferring the entire stack adjustment until later
706 allows us to use a mov.n instead of a 32-bit addi
707 instruction to set the frame pointer. */
708 push_immed = 0;
709 else
710 /* Splitting the stack adjustment between the push.n
711 and an explicit adjustment makes it more likely that
712 we can use spdeci.n for the explicit part. */
713 push_immed = 60;
714 sp_adjust = -(cfun->machine->callee_save_reg_size + push_immed);
715 insn = emit_insn (nios2_ldst_parallel (false, false, false,
716 stack_mem, sp_adjust,
717 callee_save_regs, false));
718 nios2_create_cfa_notes (insn, false);
719 RTX_FRAME_RELATED_P (insn) = 1;
720 }
721
722 if (caller_save_regs)
723 {
724 /* Emit a stwm to save the EH data regs, r4-r7. */
725 int caller_save_size = (cfun->machine->save_reg_size
726 - cfun->machine->callee_save_reg_size);
727 gcc_assert ((caller_save_regs & ~0xf0) == 0);
728 insn = emit_insn (nios2_ldst_parallel
729 (false, false, false, stack_mem,
730 -caller_save_size, caller_save_regs, false));
731 nios2_create_cfa_notes (insn, false);
732 RTX_FRAME_RELATED_P (insn) = 1;
733 }
734
735 save_regs_base = push_immed;
736 sp_offset = -(cfun->machine->save_regs_offset - push_immed);
737 }
738 /* The non-CDX cases decrement the stack pointer, to prepare for individual
739 register saves to the stack. */
740 else if (!SMALL_INT (total_frame_size))
741 {
742 /* We need an intermediate point; this will point at the spill block. */
743 nios2_adjust_stack (cfun->machine->save_regs_offset - total_frame_size,
744 false);
745 save_regs_base = 0;
746 sp_offset = -cfun->machine->save_regs_offset;
747 if (crtl->limit_stack)
748 nios2_emit_stack_limit_check (cfun->machine->save_regs_offset);
749 }
750 else if (total_frame_size)
751 {
752 nios2_adjust_stack (-total_frame_size, false);
753 save_regs_base = cfun->machine->save_regs_offset;
754 sp_offset = 0;
755 if (crtl->limit_stack)
756 nios2_emit_stack_limit_check (0);
757 }
758 else
759 save_regs_base = sp_offset = 0;
760
761 /* Save the registers individually in the non-CDX case. */
762 if (!TARGET_HAS_CDX)
763 {
764 save_offset = save_regs_base + cfun->machine->save_reg_size;
765
766 for (regno = LAST_GP_REG; regno > 0; regno--)
767 if (cfun->machine->save_mask & (1 << regno))
768 {
769 save_offset -= 4;
770 save_reg (regno, save_offset);
771 }
772 }
773
774 /* Set the hard frame pointer. */
775 if (frame_pointer_needed)
776 {
777 int fp_save_offset = save_regs_base + cfun->machine->fp_save_offset;
778 insn =
779 (fp_save_offset == 0
780 ? emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx)
781 : emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
782 stack_pointer_rtx,
783 gen_int_mode (fp_save_offset, Pmode))));
784 RTX_FRAME_RELATED_P (insn) = 1;
785 }
786
787 /* Allocate sp_offset more bytes in the stack frame. */
788 nios2_adjust_stack (sp_offset, false);
789
790 /* Load the PIC register if needed. */
791 if (crtl->uses_pic_offset_table)
792 nios2_load_pic_register ();
793
794 /* If we are profiling, make sure no instructions are scheduled before
795 the call to mcount. */
796 if (crtl->profile)
797 emit_insn (gen_blockage ());
798 }
799
800 void
801 nios2_expand_epilogue (bool sibcall_p)
802 {
803 rtx_insn *insn;
804 rtx cfa_adj;
805 int total_frame_size;
806 int sp_adjust, save_offset;
807 unsigned int regno;
808
809 if (!sibcall_p && nios2_can_use_return_insn ())
810 {
811 emit_jump_insn (gen_return ());
812 return;
813 }
814
815 emit_insn (gen_blockage ());
816
817 total_frame_size = nios2_compute_frame_layout ();
818 if (frame_pointer_needed)
819 {
820 /* Recover the stack pointer. */
821 insn =
822 (cfun->machine->fp_save_offset == 0
823 ? emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx)
824 : emit_insn (gen_add3_insn
825 (stack_pointer_rtx, hard_frame_pointer_rtx,
826 gen_int_mode (-cfun->machine->fp_save_offset, Pmode))));
827 cfa_adj = plus_constant (Pmode, stack_pointer_rtx,
828 (total_frame_size
829 - cfun->machine->save_regs_offset));
830 add_reg_note (insn, REG_CFA_DEF_CFA, cfa_adj);
831 RTX_FRAME_RELATED_P (insn) = 1;
832
833 save_offset = 0;
834 sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
835 }
836 else if (!SMALL_INT (total_frame_size))
837 {
838 nios2_adjust_stack (cfun->machine->save_regs_offset, true);
839 save_offset = 0;
840 sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
841 }
842 else
843 {
844 save_offset = cfun->machine->save_regs_offset;
845 sp_adjust = total_frame_size;
846 }
847
848 if (!TARGET_HAS_CDX)
849 {
850 /* Generate individual register restores. */
851 save_offset += cfun->machine->save_reg_size;
852
853 for (regno = LAST_GP_REG; regno > 0; regno--)
854 if (cfun->machine->save_mask & (1 << regno))
855 {
856 save_offset -= 4;
857 restore_reg (regno, save_offset);
858 }
859 nios2_adjust_stack (sp_adjust, true);
860 }
861 else if (cfun->machine->save_reg_size == 0)
862 {
863 /* Nothing to restore, just recover the stack position. */
864 nios2_adjust_stack (sp_adjust, true);
865 }
866 else
867 {
868 /* Emit CDX pop.n/ldwm to restore registers and optionally return. */
869 unsigned int regmask = cfun->machine->save_mask;
870 unsigned int callee_save_regs = regmask & 0xffff0000;
871 unsigned int caller_save_regs = regmask & 0x0000ffff;
872 int callee_save_size = cfun->machine->callee_save_reg_size;
873 int caller_save_size = cfun->machine->save_reg_size - callee_save_size;
874 int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
875 bool ret_p = (!pretend_args_size && !crtl->calls_eh_return
876 && !sibcall_p);
877
878 if (!ret_p || caller_save_size > 0)
879 sp_adjust = save_offset;
880 else
881 sp_adjust = (save_offset > 60 ? save_offset - 60 : 0);
882
883 save_offset -= sp_adjust;
884
885 nios2_adjust_stack (sp_adjust, true);
886
887 if (caller_save_regs)
888 {
889 /* Emit a ldwm to restore EH data regs. */
890 rtx stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
891 insn = emit_insn (nios2_ldst_parallel
892 (true, true, true, stack_mem,
893 caller_save_size, caller_save_regs, false));
894 RTX_FRAME_RELATED_P (insn) = 1;
895 nios2_create_cfa_notes (insn, true);
896 }
897
898 if (callee_save_regs)
899 {
900 int sp_adjust = save_offset + callee_save_size;
901 rtx stack_mem;
902 if (ret_p)
903 {
904 /* Emit a pop.n to restore regs and return. */
905 stack_mem =
906 gen_frame_mem (SImode,
907 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
908 gen_int_mode (sp_adjust - 4,
909 Pmode)));
910 insn =
911 emit_jump_insn (nios2_ldst_parallel (true, false, false,
912 stack_mem, sp_adjust,
913 callee_save_regs, ret_p));
914 RTX_FRAME_RELATED_P (insn) = 1;
915 /* No need to attach CFA notes since we cannot step over
916 a return. */
917 return;
918 }
919 else
920 {
921 /* If no return, we have to use the ldwm form. */
922 stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
923 insn =
924 emit_insn (nios2_ldst_parallel (true, true, true,
925 stack_mem, sp_adjust,
926 callee_save_regs, ret_p));
927 RTX_FRAME_RELATED_P (insn) = 1;
928 nios2_create_cfa_notes (insn, true);
929 }
930 }
931
932 if (pretend_args_size)
933 nios2_adjust_stack (pretend_args_size, true);
934 }
935
936 /* Add in the __builtin_eh_return stack adjustment. */
937 if (crtl->calls_eh_return)
938 emit_insn (gen_add2_insn (stack_pointer_rtx, EH_RETURN_STACKADJ_RTX));
939
940 if (!sibcall_p)
941 emit_jump_insn (gen_simple_return ());
942 }
943
944 bool
945 nios2_expand_return (void)
946 {
947 /* If CDX is available, generate a pop.n instruction to do both
948 the stack pop and return. */
949 if (TARGET_HAS_CDX)
950 {
951 int total_frame_size = nios2_compute_frame_layout ();
952 int sp_adjust = (cfun->machine->save_regs_offset
953 + cfun->machine->callee_save_reg_size);
954 gcc_assert (sp_adjust == total_frame_size);
955 if (sp_adjust != 0)
956 {
957 rtx mem =
958 gen_frame_mem (SImode,
959 plus_constant (Pmode, stack_pointer_rtx,
960 sp_adjust - 4, false));
961 rtx_insn *insn =
962 emit_jump_insn (nios2_ldst_parallel (true, false, false,
963 mem, sp_adjust,
964 cfun->machine->save_mask,
965 true));
966 RTX_FRAME_RELATED_P (insn) = 1;
967 /* No need to create CFA notes since we can't step over
968 a return. */
969 return true;
970 }
971 }
972 return false;
973 }
974
975 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
976 back to a previous frame. */
977 rtx
978 nios2_get_return_address (int count)
979 {
980 if (count != 0)
981 return const0_rtx;
982
983 return get_hard_reg_initial_val (Pmode, RA_REGNO);
984 }
985
986 /* Emit code to change the current function's return address to
987 ADDRESS. SCRATCH is available as a scratch register, if needed.
988 ADDRESS and SCRATCH are both word-mode GPRs. */
989 void
990 nios2_set_return_address (rtx address, rtx scratch)
991 {
992 nios2_compute_frame_layout ();
993 if (cfun->machine->save_mask & (1 << RA_REGNO))
994 {
995 unsigned offset = cfun->machine->save_reg_size - 4;
996 rtx base;
997
998 if (frame_pointer_needed)
999 base = hard_frame_pointer_rtx;
1000 else
1001 {
1002 base = stack_pointer_rtx;
1003 offset += cfun->machine->save_regs_offset;
1004
1005 if (!SMALL_INT (offset))
1006 {
1007 emit_move_insn (scratch, gen_int_mode (offset, Pmode));
1008 emit_insn (gen_add2_insn (scratch, base));
1009 base = scratch;
1010 offset = 0;
1011 }
1012 }
1013 if (offset)
1014 base = plus_constant (Pmode, base, offset);
1015 emit_move_insn (gen_rtx_MEM (Pmode, base), address);
1016 }
1017 else
1018 emit_move_insn (gen_rtx_REG (Pmode, RA_REGNO), address);
1019 }
1020
1021 /* Implement FUNCTION_PROFILER macro. */
1022 void
1023 nios2_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
1024 {
1025 fprintf (file, "\tmov\tr8, ra\n");
1026 if (flag_pic == 1)
1027 {
1028 fprintf (file, "\tnextpc\tr2\n");
1029 fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1030 fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1031 fprintf (file, "\tadd\tr2, r2, r3\n");
1032 fprintf (file, "\tldw\tr2, %%call(_mcount)(r2)\n");
1033 fprintf (file, "\tcallr\tr2\n");
1034 }
1035 else if (flag_pic == 2)
1036 {
1037 fprintf (file, "\tnextpc\tr2\n");
1038 fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
1039 fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
1040 fprintf (file, "\tadd\tr2, r2, r3\n");
1041 fprintf (file, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
1042 fprintf (file, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
1043 fprintf (file, "\tadd\tr3, r2, r3\n");
1044 fprintf (file, "\tldw\tr2, 0(r3)\n");
1045 fprintf (file, "\tcallr\tr2\n");
1046 }
1047 else
1048 fprintf (file, "\tcall\t_mcount\n");
1049 fprintf (file, "\tmov\tra, r8\n");
1050 }
1051
1052 /* Dump stack layout. */
1053 static void
1054 nios2_dump_frame_layout (FILE *file)
1055 {
1056 fprintf (file, "\t%s Current Frame Info\n", ASM_COMMENT_START);
1057 fprintf (file, "\t%s total_size = %d\n", ASM_COMMENT_START,
1058 cfun->machine->total_size);
1059 fprintf (file, "\t%s var_size = %d\n", ASM_COMMENT_START,
1060 cfun->machine->var_size);
1061 fprintf (file, "\t%s args_size = %d\n", ASM_COMMENT_START,
1062 cfun->machine->args_size);
1063 fprintf (file, "\t%s save_reg_size = %d\n", ASM_COMMENT_START,
1064 cfun->machine->save_reg_size);
1065 fprintf (file, "\t%s initialized = %d\n", ASM_COMMENT_START,
1066 cfun->machine->initialized);
1067 fprintf (file, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START,
1068 cfun->machine->save_regs_offset);
1069 fprintf (file, "\t%s is_leaf = %d\n", ASM_COMMENT_START,
1070 crtl->is_leaf);
1071 fprintf (file, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START,
1072 frame_pointer_needed);
1073 fprintf (file, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START,
1074 crtl->args.pretend_args_size);
1075 }
1076
1077 /* Return true if REGNO should be saved in the prologue. */
1078 static bool
1079 prologue_saved_reg_p (unsigned regno)
1080 {
1081 gcc_assert (GP_REG_P (regno));
1082
1083 if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
1084 return true;
1085
1086 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
1087 return true;
1088
1089 if (regno == PIC_OFFSET_TABLE_REGNUM && crtl->uses_pic_offset_table)
1090 return true;
1091
1092 if (regno == RA_REGNO && df_regs_ever_live_p (RA_REGNO))
1093 return true;
1094
1095 return false;
1096 }
1097
1098 /* Implement TARGET_CAN_ELIMINATE. */
1099 static bool
1100 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1101 {
1102 if (to == STACK_POINTER_REGNUM)
1103 return !frame_pointer_needed;
1104 return true;
1105 }
1106
1107 /* Implement INITIAL_ELIMINATION_OFFSET macro. */
1108 int
1109 nios2_initial_elimination_offset (int from, int to)
1110 {
1111 int offset;
1112
1113 nios2_compute_frame_layout ();
1114
1115 /* Set OFFSET to the offset from the stack pointer. */
1116 switch (from)
1117 {
1118 case FRAME_POINTER_REGNUM:
1119 /* This is the high end of the local variable storage, not the
1120 hard frame pointer. */
1121 offset = cfun->machine->args_size + cfun->machine->var_size;
1122 break;
1123
1124 case ARG_POINTER_REGNUM:
1125 offset = cfun->machine->total_size;
1126 offset -= crtl->args.pretend_args_size;
1127 break;
1128
1129 default:
1130 gcc_unreachable ();
1131 }
1132
1133 /* If we are asked for the frame pointer offset, then adjust OFFSET
1134 by the offset from the frame pointer to the stack pointer. */
1135 if (to == HARD_FRAME_POINTER_REGNUM)
1136 offset -= (cfun->machine->save_regs_offset
1137 + cfun->machine->fp_save_offset);
1138
1139 return offset;
1140 }
1141
1142 /* Return nonzero if this function is known to have a null epilogue.
1143 This allows the optimizer to omit jumps to jumps if no stack
1144 was created. */
1145 int
1146 nios2_can_use_return_insn (void)
1147 {
1148 int total_frame_size;
1149
1150 if (!reload_completed || crtl->profile)
1151 return 0;
1152
1153 total_frame_size = nios2_compute_frame_layout ();
1154
1155 /* If CDX is available, check if we can return using a
1156 single pop.n instruction. */
1157 if (TARGET_HAS_CDX
1158 && !frame_pointer_needed
1159 && cfun->machine->save_regs_offset <= 60
1160 && (cfun->machine->save_mask & 0x80000000) != 0
1161 && (cfun->machine->save_mask & 0xffff) == 0
1162 && crtl->args.pretend_args_size == 0)
1163 return true;
1164
1165 return total_frame_size == 0;
1166 }
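
/* Annotation: in the CDX test above, mask bit 0x80000000 corresponds
   to RA (r31) and bits 0xffff cover r0-r15, so the condition requires
   that RA is saved and that no caller-saved low registers need
   restoring -- exactly the shape a single pop.n can handle.  */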
1167
1168 \f
1169 /* Check and signal some warnings/errors on FPU insn options. */
1170 static void
1171 nios2_custom_check_insns (void)
1172 {
1173 unsigned int i, j;
1174 bool errors = false;
1175
1176 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1177 if (N2FPU_ENABLED_P (i) && N2FPU_DOUBLE_P (i))
1178 {
1179 for (j = 0; j < ARRAY_SIZE (nios2_fpu_insn); j++)
1180 if (N2FPU_DOUBLE_REQUIRED_P (j) && ! N2FPU_ENABLED_P (j))
1181 {
1182 error ("switch %<-mcustom-%s%> is required for double "
1183 "precision floating point", N2FPU_NAME (j));
1184 errors = true;
1185 }
1186 break;
1187 }
1188
1189 /* Warn if the user has enabled certain exotic operations that won't get used
1190 without -funsafe-math-optimizations. See expand_builtin () in
1191 builtins.c. */
1192 if (!flag_unsafe_math_optimizations)
1193 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1194 if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
1195 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1196 "%<-funsafe-math-optimizations%> is specified",
1197 N2FPU_NAME (i));
1198
1199 /* Warn if the user is trying to use -mcustom-fmins et al., which won't
1200 get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
1201 in builtins.c. */
1202 if (!flag_finite_math_only)
1203 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1204 if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
1205 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1206 "%<-ffinite-math-only%> is specified", N2FPU_NAME (i));
1207
1208 /* Warn if the user is trying to use a custom rounding instruction
1209 that won't get used without -fno-math-errno. See
1210 expand_builtin_int_roundingfn_2 () in builtins.c. */
1211 if (flag_errno_math)
1212 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1213 if (N2FPU_ENABLED_P (i) && N2FPU_NO_ERRNO_P (i))
1214 warning (0, "switch %<-mcustom-%s%> has no effect unless "
1215 "%<-fno-math-errno%> is specified", N2FPU_NAME (i));
1216
1217 if (errors || custom_code_conflict)
1218 fatal_error (input_location,
1219 "conflicting use of %<-mcustom%> switches, target attributes, "
1220 "and/or %<__builtin_custom_%> functions");
1221 }
1222
1223 static void
1224 nios2_set_fpu_custom_code (enum n2fpu_code code, int n, bool override_p)
1225 {
1226 if (override_p || N2FPU_N (code) == -1)
1227 N2FPU_N (code) = n;
1228 nios2_register_custom_code (n, CCS_FPU, (int) code);
1229 }
1230
1231 /* Type to represent a standard FPU config. */
1232 struct nios2_fpu_config
1233 {
1234 const char *name;
1235 bool set_sp_constants;
1236 int code[n2fpu_code_num];
1237 };
1238
1239 #define NIOS2_FPU_CONFIG_NUM 3
1240 static struct nios2_fpu_config custom_fpu_config[NIOS2_FPU_CONFIG_NUM];
1241
1242 static void
1243 nios2_init_fpu_configs (void)
1244 {
1245 struct nios2_fpu_config* cfg;
1246 int i = 0;
1247 #define NEXT_FPU_CONFIG \
1248 do { \
1249 cfg = &custom_fpu_config[i++]; \
1250 memset (cfg, -1, sizeof (struct nios2_fpu_config));\
1251 } while (0)
1252
1253 NEXT_FPU_CONFIG;
1254 cfg->name = "60-1";
1255 cfg->set_sp_constants = true;
1256 cfg->code[n2fpu_fmuls] = 252;
1257 cfg->code[n2fpu_fadds] = 253;
1258 cfg->code[n2fpu_fsubs] = 254;
1259
1260 NEXT_FPU_CONFIG;
1261 cfg->name = "60-2";
1262 cfg->set_sp_constants = true;
1263 cfg->code[n2fpu_fmuls] = 252;
1264 cfg->code[n2fpu_fadds] = 253;
1265 cfg->code[n2fpu_fsubs] = 254;
1266 cfg->code[n2fpu_fdivs] = 255;
1267
1268 NEXT_FPU_CONFIG;
1269 cfg->name = "72-3";
1270 cfg->set_sp_constants = true;
1271 cfg->code[n2fpu_floatus] = 243;
1272 cfg->code[n2fpu_fixsi] = 244;
1273 cfg->code[n2fpu_floatis] = 245;
1274 cfg->code[n2fpu_fcmpgts] = 246;
1275 cfg->code[n2fpu_fcmples] = 249;
1276 cfg->code[n2fpu_fcmpeqs] = 250;
1277 cfg->code[n2fpu_fcmpnes] = 251;
1278 cfg->code[n2fpu_fmuls] = 252;
1279 cfg->code[n2fpu_fadds] = 253;
1280 cfg->code[n2fpu_fsubs] = 254;
1281 cfg->code[n2fpu_fdivs] = 255;
1282
1283 #undef NEXT_FPU_CONFIG
1284 gcc_assert (i == NIOS2_FPU_CONFIG_NUM);
1285 }
1286
1287 static struct nios2_fpu_config *
1288 nios2_match_custom_fpu_cfg (const char *cfgname, const char *endp)
1289 {
1290 int i;
1291 for (i = 0; i < NIOS2_FPU_CONFIG_NUM; i++)
1292 {
1293 bool match = !(endp != NULL
1294 ? strncmp (custom_fpu_config[i].name, cfgname,
1295 endp - cfgname)
1296 : strcmp (custom_fpu_config[i].name, cfgname));
1297 if (match)
1298 return &custom_fpu_config[i];
1299 }
1300 return NULL;
1301 }
1302
1303 /* Use CFGNAME to look up an FPU config; ENDP, if not NULL, marks end of string.
1304 OVERRIDE is true if loaded config codes should overwrite current state. */
1305 static void
1306 nios2_handle_custom_fpu_cfg (const char *cfgname, const char *endp,
1307 bool override)
1308 {
1309 struct nios2_fpu_config *cfg = nios2_match_custom_fpu_cfg (cfgname, endp);
1310 if (cfg)
1311 {
1312 unsigned int i;
1313 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1314 if (cfg->code[i] >= 0)
1315 nios2_set_fpu_custom_code ((enum n2fpu_code) i, cfg->code[i],
1316 override);
1317 if (cfg->set_sp_constants)
1318 flag_single_precision_constant = 1;
1319 }
1320 else
1321 warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
1322 "value %<%s%>", cfgname);
1323
1324 /* Guard against errors in the standard configurations. */
1325 nios2_custom_check_insns ();
1326 }
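
/* Annotation: given the "60-1" entry registered above, a command line
   containing -mcustom-fpu-cfg=60-1 behaves like
     -mcustom-fmuls=252 -mcustom-fadds=253 -mcustom-fsubs=254
     -fsingle-precision-constant
   except that explicit individual -mcustom-<insn>= switches still take
   precedence, as arranged in nios2_option_override below.  */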
1327
1328 /* Check individual FPU insn options, and register custom code. */
1329 static void
1330 nios2_handle_custom_fpu_insn_option (int fpu_insn_index)
1331 {
1332 int param = N2FPU_N (fpu_insn_index);
1333
1334 if (param >= 0 && param <= 255)
1335 nios2_register_custom_code (param, CCS_FPU, fpu_insn_index);
1336
1337 /* Valid values are 0-255, but also allow -1 so that the
1338 -mno-custom-<opt> switches work. */
1339 else if (param != -1)
1340 error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
1341 N2FPU_NAME (fpu_insn_index), param);
1342 }
1343
1344 /* Allocate a chunk of memory for per-function machine-dependent data. */
1345 static struct machine_function *
1346 nios2_init_machine_status (void)
1347 {
1348 return ggc_cleared_alloc<machine_function> ();
1349 }
1350
1351 /* Implement TARGET_OPTION_OVERRIDE. */
1352 static void
1353 nios2_option_override (void)
1354 {
1355 unsigned int i;
1356
1357 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1358 SUBTARGET_OVERRIDE_OPTIONS;
1359 #endif
1360
1361 /* Check for unsupported options. */
1362 if (flag_pic && !TARGET_LINUX_ABI)
1363 sorry ("position-independent code requires the Linux ABI");
1364 if (flag_pic && stack_limit_rtx
1365 && GET_CODE (stack_limit_rtx) == SYMBOL_REF)
1366 sorry ("PIC support for %<-fstack-limit-symbol%>");
1367
1368 /* Function to allocate machine-dependent function status. */
1369 init_machine_status = &nios2_init_machine_status;
1370
1371 nios2_section_threshold
1372 = (global_options_set.x_g_switch_value
1373 ? g_switch_value : NIOS2_DEFAULT_GVALUE);
1374
1375 if (nios2_gpopt_option == gpopt_unspecified)
1376 {
1377 /* Default to -mgpopt unless -fpic or -fPIC. */
1378 if (flag_pic)
1379 nios2_gpopt_option = gpopt_none;
1380 else
1381 nios2_gpopt_option = gpopt_local;
1382 }
1383
1384 /* GP-relative and r0-relative addressing don't make sense for PIC. */
1385 if (flag_pic)
1386 {
1387 if (nios2_gpopt_option != gpopt_none)
1388 error ("%<-mgpopt%> not supported with PIC.");
1389 if (nios2_gprel_sec)
1390 error ("%<-mgprel-sec=%> not supported with PIC.");
1391 if (nios2_r0rel_sec)
1392 error ("%<-mr0rel-sec=%> not supported with PIC.");
1393 }
1394
1395 /* Process -mgprel-sec= and -mr0rel-sec=. */
1396 if (nios2_gprel_sec)
1397 {
1398 if (regcomp (&nios2_gprel_sec_regex, nios2_gprel_sec,
1399 REG_EXTENDED | REG_NOSUB))
1400 error ("%<-mgprel-sec=%> argument is not a valid regular expression.");
1401 }
1402 if (nios2_r0rel_sec)
1403 {
1404 if (regcomp (&nios2_r0rel_sec_regex, nios2_r0rel_sec,
1405 REG_EXTENDED | REG_NOSUB))
1406 error ("%<-mr0rel-sec=%> argument is not a valid regular expression.");
1407 }
1408
1409 /* If we don't have mul, we don't have mulx either! */
1410 if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
1411 target_flags &= ~MASK_HAS_MULX;
1412
1413 /* Optional BMX and CDX instructions only make sense for R2. */
1414 if (!TARGET_ARCH_R2)
1415 {
1416 if (TARGET_HAS_BMX)
1417 error ("BMX instructions are only supported with R2 architecture");
1418 if (TARGET_HAS_CDX)
1419 error ("CDX instructions are only supported with R2 architecture");
1420 }
1421
1422 /* R2 is little-endian only. */
1423 if (TARGET_ARCH_R2 && TARGET_BIG_ENDIAN)
1424 error ("R2 architecture is little-endian only");
1425
1426 /* Initialize default FPU configurations. */
1427 nios2_init_fpu_configs ();
1428
1429 /* Set up default handling for floating point custom instructions.
1430
1431 Putting things in this order means that the -mcustom-fpu-cfg=
1432 switch will always be overridden by individual -mcustom-fadds=
1433 switches, regardless of the order in which they were specified
1434 on the command line.
1435
1436 This behavior of prioritization of individual -mcustom-<insn>=
1437 options before the -mcustom-fpu-cfg= switch is maintained for
1438 compatibility. */
1439 if (nios2_custom_fpu_cfg_string && *nios2_custom_fpu_cfg_string)
1440 nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string, NULL, false);
1441
1442 /* Handle options for individual FPU insns. */
1443 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
1444 nios2_handle_custom_fpu_insn_option (i);
1445
1446 nios2_custom_check_insns ();
1447
1448 /* Save the initial options in case the user does function specific
1449 options. */
1450 target_option_default_node = target_option_current_node
1451 = build_target_option_node (&global_options, &global_options_set);
1452 }
1453
1454 \f
1455 /* Return true if CST is a constant within range of movi/movui/movhi. */
1456 static bool
1457 nios2_simple_const_p (const_rtx cst)
1458 {
1459 if (!CONST_INT_P (cst))
1460 return false;
1461 HOST_WIDE_INT val = INTVAL (cst);
1462 return SMALL_INT (val) || SMALL_INT_UNSIGNED (val) || UPPER16_INT (val);
1463 }
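
/* Annotation: SMALL_INT, SMALL_INT_UNSIGNED, and UPPER16_INT are
   assumed here (per their definitions in nios2.h) to test for signed
   16-bit values (movi), unsigned 16-bit values (movui), and values
   with the low 16 bits clear (movhi), respectively -- i.e. anything
   loadable with a single move-immediate instruction.  */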
1464
1465 /* Compute a (partial) cost for rtx X. Return true if the complete
1466 cost has been computed, and false if subexpressions should be
1467 scanned. In either case, *TOTAL contains the cost result. */
1468 static bool
1469 nios2_rtx_costs (rtx x, machine_mode mode,
1470 int outer_code,
1471 int opno,
1472 int *total, bool speed)
1473 {
1474 int code = GET_CODE (x);
1475
1476 switch (code)
1477 {
1478 case CONST_INT:
1479 if (INTVAL (x) == 0 || nios2_simple_const_p (x))
1480 {
1481 *total = COSTS_N_INSNS (0);
1482 return true;
1483 }
1484 else
1485 {
1486 /* High + lo_sum. */
1487 *total = COSTS_N_INSNS (1);
1488 return true;
1489 }
1490
1491 case LABEL_REF:
1492 case SYMBOL_REF:
1493 case CONST:
1494 case CONST_DOUBLE:
1495 if (gprel_constant_p (x) || r0rel_constant_p (x))
1496 {
1497 *total = COSTS_N_INSNS (1);
1498 return true;
1499 }
1500 else
1501 {
1502 /* High + lo_sum. */
1503 *total = COSTS_N_INSNS (1);
1504 return true;
1505 }
1506
1507 case HIGH:
1508 {
1509 /* This is essentially a constant. */
1510 *total = COSTS_N_INSNS (0);
1511 return true;
1512 }
1513
1514 case LO_SUM:
1515 {
1516 *total = COSTS_N_INSNS (0);
1517 return true;
1518 }
1519
1520 case AND:
1521 {
1522 /* Recognize 'nor' insn pattern. */
1523 if (GET_CODE (XEXP (x, 0)) == NOT
1524 && GET_CODE (XEXP (x, 1)) == NOT)
1525 {
1526 *total = COSTS_N_INSNS (1);
1527 return true;
1528 }
1529 return false;
1530 }
1531
1532 /* For insns that have an execution latency (3 cycles), don't
1533 penalize by the full amount since we can often schedule
1534 to avoid it. */
1535 case MULT:
1536 {
1537 if (!TARGET_HAS_MUL)
1538 *total = COSTS_N_INSNS (5); /* Guess? */
1539 else if (speed)
1540 *total = COSTS_N_INSNS (2); /* Latency adjustment. */
1541 else
1542 *total = COSTS_N_INSNS (1);
1543 if (TARGET_HAS_MULX && GET_MODE (x) == DImode)
1544 {
1545 enum rtx_code c0 = GET_CODE (XEXP (x, 0));
1546 enum rtx_code c1 = GET_CODE (XEXP (x, 1));
1547 if ((c0 == SIGN_EXTEND && c1 == SIGN_EXTEND)
1548 || (c0 == ZERO_EXTEND && c1 == ZERO_EXTEND))
1549 /* This is the <mul>sidi3 pattern, which expands into 4 insns,
1550 2 multiplies and 2 moves. */
1551 {
1552 *total = *total * 2 + COSTS_N_INSNS (2);
1553 return true;
1554 }
1555 }
1556 return false;
1557 }
1558
1559 case DIV:
1560 {
1561 if (!TARGET_HAS_DIV)
1562 *total = COSTS_N_INSNS (5); /* Guess? */
1563 else if (speed)
1564 *total = COSTS_N_INSNS (2); /* Latency adjustment. */
1565 else
1566 *total = COSTS_N_INSNS (1);
1567 return false;
1568 }
1569
1570 case ASHIFT:
1571 case ASHIFTRT:
1572 case LSHIFTRT:
1573 case ROTATE:
1574 {
1575 if (!speed)
1576 *total = COSTS_N_INSNS (1);
1577 else
1578 *total = COSTS_N_INSNS (2); /* Latency adjustment. */
1579 return false;
1580 }
1581
1582 case ZERO_EXTRACT:
1583 if (TARGET_HAS_BMX)
1584 {
1585 *total = COSTS_N_INSNS (1);
1586 return true;
1587 }
1588 return false;
1589
1590 case SIGN_EXTEND:
1591 {
1592 if (MEM_P (XEXP (x, 0)))
1593 *total = COSTS_N_INSNS (1);
1594 else
1595 *total = COSTS_N_INSNS (3);
1596 return false;
1597 }
1598
1599 case MEM:
1600 {
1601 rtx addr = XEXP (x, 0);
1602
1603 /* Account for cost of different addressing modes. */
1604 *total = nios2_address_cost (addr, mode, ADDR_SPACE_GENERIC, speed);
1605
1606 if (outer_code == SET && opno == 0)
1607 /* Stores execute in 1 cycle accounted for by
1608 the outer SET. */
1609 ;
1610 else if (outer_code == SET || outer_code == SIGN_EXTEND
1611 || outer_code == ZERO_EXTEND)
1612 /* Latency adjustment. */
1613 {
1614 if (speed)
1615 *total += COSTS_N_INSNS (1);
1616 }
1617 else
1618 /* This is going to have to be split into a load. */
1619 *total += COSTS_N_INSNS (speed ? 2 : 1);
1620 return true;
1621 }
1622
1623 default:
1624 return false;
1625 }
1626 }
1627
1628 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1629 static reg_class_t
1630 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t regclass)
1631 {
1632 return regclass == NO_REGS ? GENERAL_REGS : regclass;
1633 }
1634
1635 /* Emit a call to __tls_get_addr. TI is the argument to the call.
1636 The call's result is left in the standard return-value register,
1637 and that register is what this function returns. */
1638 static GTY(()) rtx nios2_tls_symbol;
1639
1640 static rtx
1641 nios2_call_tls_get_addr (rtx ti)
1642 {
1643 rtx arg = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
1644 rtx ret = gen_rtx_REG (Pmode, FIRST_RETVAL_REGNO);
1645 rtx fn;
1646 rtx_insn *insn;
1647
1648 if (!nios2_tls_symbol)
1649 nios2_tls_symbol = init_one_libfunc ("__tls_get_addr");
1650
1651 emit_move_insn (arg, ti);
1652 fn = gen_rtx_MEM (QImode, nios2_tls_symbol);
1653 insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
1654 RTL_CONST_CALL_P (insn) = 1;
1655 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
1656 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);
1657
1658 return ret;
1659 }
1660
1661 /* Return true for large offsets requiring hiadj/lo relocation pairs. */
1662 static bool
1663 nios2_large_offset_p (int unspec)
1664 {
1665 gcc_assert (nios2_unspec_reloc_name (unspec) != NULL);
1666
1667 if (flag_pic == 2
1668 /* FIXME: TLS GOT offset relocations will eventually also get this
1669 treatment, after binutils support for them is also completed. */
1670 && (unspec == UNSPEC_PIC_SYM || unspec == UNSPEC_PIC_CALL_SYM))
1671 return true;
1672
1673 /* 'gotoff' offsets are always hiadj/lo. */
1674 if (unspec == UNSPEC_PIC_GOTOFF_SYM)
1675 return true;
1676
1677 return false;
1678 }
1679
1680 /* Return true for conforming unspec relocations. Also used in
1681 constraints.md and predicates.md. */
1682 bool
1683 nios2_unspec_reloc_p (rtx op)
1684 {
1685 return (GET_CODE (op) == CONST
1686 && GET_CODE (XEXP (op, 0)) == UNSPEC
1687 && ! nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1688 }
1689
1690 static bool
1691 nios2_large_unspec_reloc_p (rtx op)
1692 {
1693 return (GET_CODE (op) == CONST
1694 && GET_CODE (XEXP (op, 0)) == UNSPEC
1695 && nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1696 }
1697
1698 /* Helper to generate unspec constant. */
1699 static rtx
1700 nios2_unspec_offset (rtx loc, int unspec)
1701 {
1702 return gen_rtx_CONST (Pmode, gen_rtx_UNSPEC (Pmode, gen_rtvec (1, loc),
1703 unspec));
1704 }
1705
1706 /* Generate GOT pointer based address with large offset. */
1707 static rtx
1708 nios2_large_got_address (rtx offset, rtx tmp)
1709 {
1710 if (!tmp)
1711 tmp = gen_reg_rtx (Pmode);
1712 emit_move_insn (tmp, offset);
1713 return gen_rtx_PLUS (Pmode, tmp, pic_offset_table_rtx);
1714 }
1715
1716 /* Generate a GOT pointer based address. */
1717 static rtx
1718 nios2_got_address (rtx loc, int unspec)
1719 {
1720 rtx offset = nios2_unspec_offset (loc, unspec);
1721 crtl->uses_pic_offset_table = 1;
1722
1723 if (nios2_large_offset_p (unspec))
1724 return force_reg (Pmode, nios2_large_got_address (offset, NULL_RTX));
1725
1726 return gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
1727 }
1728
1729 /* Generate the code to access LOC, a thread local SYMBOL_REF. The
1730 return value will be a valid address and move_operand (either a REG
1731 or a LO_SUM). */
1732 static rtx
1733 nios2_legitimize_tls_address (rtx loc)
1734 {
1735 rtx tmp, mem, tp;
1736 enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);
1737
1738 switch (model)
1739 {
1740 case TLS_MODEL_GLOBAL_DYNAMIC:
1741 tmp = gen_reg_rtx (Pmode);
1742 emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_GD));
1743 return nios2_call_tls_get_addr (tmp);
1744
1745 case TLS_MODEL_LOCAL_DYNAMIC:
1746 tmp = gen_reg_rtx (Pmode);
1747 emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_LDM));
1748 return gen_rtx_PLUS (Pmode, nios2_call_tls_get_addr (tmp),
1749 nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LDO));
1750
1751 case TLS_MODEL_INITIAL_EXEC:
1752 tmp = gen_reg_rtx (Pmode);
1753 mem = gen_const_mem (Pmode, nios2_got_address (loc, UNSPEC_LOAD_TLS_IE));
1754 emit_move_insn (tmp, mem);
1755 tp = gen_rtx_REG (Pmode, TP_REGNO);
1756 return gen_rtx_PLUS (Pmode, tp, tmp);
1757
1758 case TLS_MODEL_LOCAL_EXEC:
1759 tp = gen_rtx_REG (Pmode, TP_REGNO);
1760 return gen_rtx_PLUS (Pmode, tp,
1761 nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LE));
1762 default:
1763 gcc_unreachable ();
1764 }
1765 }
1766
1767 /* Divide Support
1768
1769 If -O3 is used, we want to output a table lookup for
1770 divides between small numbers (both num and den >= 0
1771 and < 0x10). The overhead of this method in the worst
1772 case is 40 bytes in the text section (10 insns) and
1773 256 bytes in the data section. Additional divides do
1774 not incur additional penalties in the data section.
1775
1776 Code speed is improved for small divides by about 5x
1777 when using this method in the worst case (~9 cycles
1778 vs ~45). And in the worst case, divides not within the
1779 table are penalized by about 10% (~5 cycles vs ~45).
1780 However, in the typical case the penalty is not as bad
1781 because doing the long divide in only 45 cycles is
1782 quite optimistic.
1783
1784 ??? It would be nice to have some benchmarks other
1785 than Dhrystone to back this up.
1786
1787 This bit of expansion creates the following instruction
1788 sequence as RTL:
1789 or $8, $4, $5
1790 slli $9, $4, 4
1791 cmpgeui $3, $8, 16
1792 beq $3, $0, .L3
1793 or $10, $9, $5
1794 add $12, $11, divide_table
1795 ldbu $2, 0($12)
1796 br .L1
1797 .L3:
1798 call slow_div
1799 .L1:
1800 # continue here with result in $2
1801
1802 ??? Ideally I would like the libcall block to contain all
1803 of this code, but I don't know how to do that. What it
1804 means is that if the divide can be eliminated, it may not
1805 completely disappear.
1806
1807 ??? The __divsi3_table label should ideally be moved out
1808 of this block and into a global. If it is placed into the
1809 sdata section we can save even more cycles by doing things
1810 gp relative. */
1811 void
1812 nios2_emit_expensive_div (rtx *operands, machine_mode mode)
1813 {
1814 rtx or_result, shift_left_result;
1815 rtx lookup_value;
1816 rtx_code_label *lab1, *lab3;
1817 rtx_insn *insns;
1818 rtx libfunc;
1819 rtx final_result;
1820 rtx_insn *tmp;
1821 rtx table;
1822
1823 /* It may look a little generic, but only SImode is supported for now. */
1824 gcc_assert (mode == SImode);
1825 libfunc = optab_libfunc (sdiv_optab, SImode);
1826
1827 lab1 = gen_label_rtx ();
1828 lab3 = gen_label_rtx ();
1829
1830 or_result = expand_simple_binop (SImode, IOR,
1831 operands[1], operands[2],
1832 0, 0, OPTAB_LIB_WIDEN);
1833
1834 emit_cmp_and_jump_insns (or_result, GEN_INT (15), GTU, 0,
1835 GET_MODE (or_result), 0, lab3);
1836 JUMP_LABEL (get_last_insn ()) = lab3;
1837
1838 shift_left_result = expand_simple_binop (SImode, ASHIFT,
1839 operands[1], GEN_INT (4),
1840 0, 0, OPTAB_LIB_WIDEN);
1841
1842 lookup_value = expand_simple_binop (SImode, IOR,
1843 shift_left_result, operands[2],
1844 0, 0, OPTAB_LIB_WIDEN);
1845 table = gen_rtx_PLUS (SImode, lookup_value,
1846 gen_rtx_SYMBOL_REF (SImode, "__divsi3_table"));
1847 convert_move (operands[0], gen_rtx_MEM (QImode, table), 1);
1848
1849 tmp = emit_jump_insn (gen_jump (lab1));
1850 JUMP_LABEL (tmp) = lab1;
1851 emit_barrier ();
1852
1853 emit_label (lab3);
1854 LABEL_NUSES (lab3) = 1;
1855
1856 start_sequence ();
1857 final_result = emit_library_call_value (libfunc, NULL_RTX,
1858 LCT_CONST, SImode,
1859 operands[1], SImode,
1860 operands[2], SImode);
1861
1862 insns = get_insns ();
1863 end_sequence ();
1864 emit_libcall_block (insns, operands[0], final_result,
1865 gen_rtx_DIV (SImode, operands[1], operands[2]));
1866
1867 emit_label (lab1);
1868 LABEL_NUSES (lab1) = 1;
1869 }
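/* To illustrate the table lookup above: the index is
(num << 4) | den, so e.g. 7 / 3 loads byte (7 << 4) | 3 == 0x73 of
__divsi3_table, which holds the quotient 2. Any pair with
(num | den) >= 16 takes the out-of-line library call instead. */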
1870
1871 \f
1872 /* Branches and compares. */
1873
1874 /* Return in *ALT_CODE and *ALT_OP an alternate equivalent constant
1875 comparison, e.g. turning >= 1 into > 0. */
1876 static void
1877 nios2_alternate_compare_const (enum rtx_code code, rtx op,
1878 enum rtx_code *alt_code, rtx *alt_op,
1879 machine_mode mode)
1880 {
1881 gcc_assert (CONST_INT_P (op));
1882
1883 HOST_WIDE_INT opval = INTVAL (op);
1884 enum rtx_code scode = signed_condition (code);
1885 bool dec_p = (scode == LT || scode == GE);
1886
1887 if (code == EQ || code == NE)
1888 {
1889 *alt_code = code;
1890 *alt_op = op;
1891 return;
1892 }
1893
1894 *alt_op = (dec_p
1895 ? gen_int_mode (opval - 1, mode)
1896 : gen_int_mode (opval + 1, mode));
1897
1898 /* The required conversion between [>,>=] and [<,<=] is captured
1899 by a reverse + swap of condition codes. */
1900 *alt_code = reverse_condition (swap_condition (code));
1901
1902 {
1903 /* Test if the incremented/decremented value crosses the over/underflow
1904 boundary. Supposedly, such boundary cases should already be transformed
1905 into always-true/false or EQ conditions, so use an assertion here. */
1906 unsigned HOST_WIDE_INT alt_opval = INTVAL (*alt_op);
1907 if (code == scode)
1908 alt_opval ^= (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode) - 1));
1909 alt_opval &= GET_MODE_MASK (mode);
1910 gcc_assert (dec_p ? alt_opval != GET_MODE_MASK (mode) : alt_opval != 0);
1911 }
1912 }
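/* For example, the signed comparison "x < 5" yields the alternate
"x <= 4" (decrement, then reverse + swap turns LT into LE), while
"x > 5" yields "x >= 6". */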
1913
1914 /* Return true if the constant comparison is supported by nios2. */
1915 static bool
1916 nios2_valid_compare_const_p (enum rtx_code code, rtx op)
1917 {
1918 gcc_assert (CONST_INT_P (op));
1919 switch (code)
1920 {
1921 case EQ: case NE: case GE: case LT:
1922 return SMALL_INT (INTVAL (op));
1923 case GEU: case LTU:
1924 return SMALL_INT_UNSIGNED (INTVAL (op));
1925 default:
1926 return false;
1927 }
1928 }
1929
1930 /* Check whether the FPU comparison in *CMP, *OP1, and *OP2 can be
1931 supported in the current configuration. Modify the operands if
1932 MODIFY_P is true. Return true if the FPU compare can be done. */
1933
1934 bool
1935 nios2_validate_fpu_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
1936 bool modify_p)
1937 {
1938 bool rev_p = false;
1939 enum rtx_code code = GET_CODE (*cmp);
1940
1941 if (!nios2_fpu_compare_enabled (code, mode))
1942 {
1943 code = swap_condition (code);
1944 if (nios2_fpu_compare_enabled (code, mode))
1945 rev_p = true;
1946 else
1947 return false;
1948 }
1949
1950 if (modify_p)
1951 {
1952 if (rev_p)
1953 {
1954 rtx tmp = *op1;
1955 *op1 = *op2;
1956 *op2 = tmp;
1957 }
1958 *op1 = force_reg (mode, *op1);
1959 *op2 = force_reg (mode, *op2);
1960 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
1961 }
1962 return true;
1963 }
1964
1965 /* Check and modify the comparison in *CMP, *OP1, and *OP2 into a form
1966 supported by nios2. Return true on success. */
1967 bool
1968 nios2_validate_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
1969 {
1970 enum rtx_code code = GET_CODE (*cmp);
1971 enum rtx_code alt_code;
1972 rtx alt_op2;
1973
1974 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1975 return nios2_validate_fpu_compare (mode, cmp, op1, op2, true);
1976
1977 if (CONST_INT_P (*op2) && *op2 != const0_rtx)
1978 {
1979 /* Create alternate constant compare. */
1980 nios2_alternate_compare_const (code, *op2, &alt_code, &alt_op2, mode);
1981
1982 /* If the alternate op2 is zero (0), we can use it directly, possibly
1983 swapping the compare code. */
1984 if (alt_op2 == const0_rtx)
1985 {
1986 code = alt_code;
1987 *op2 = alt_op2;
1988 goto check_rebuild_cmp;
1989 }
1990
1991 /* Check if either constant compare can be used. */
1992 if (nios2_valid_compare_const_p (code, *op2))
1993 return true;
1994 else if (nios2_valid_compare_const_p (alt_code, alt_op2))
1995 {
1996 code = alt_code;
1997 *op2 = alt_op2;
1998 goto rebuild_cmp;
1999 }
2000
2001 /* We have to force op2 into a register now. Try to pick one
2002 with a lower cost. */
2003 if (! nios2_simple_const_p (*op2)
2004 && nios2_simple_const_p (alt_op2))
2005 {
2006 code = alt_code;
2007 *op2 = alt_op2;
2008 }
2009 *op2 = force_reg (mode, *op2);
2010 }
2011 else if (!reg_or_0_operand (*op2, mode))
2012 *op2 = force_reg (mode, *op2);
2013
2014 check_rebuild_cmp:
2015 if (code == GT || code == GTU || code == LE || code == LEU)
2016 {
2017 rtx t = *op1; *op1 = *op2; *op2 = t;
2018 code = swap_condition (code);
2019 }
2020 rebuild_cmp:
2021 *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
2022 return true;
2023 }
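/* For example, "x > 5" is not directly supported with an immediate,
but its alternate form "x >= 6" is (cmpgei), so the comparison is
rebuilt as GE 6. A register-register GT instead reaches
check_rebuild_cmp, which swaps the operands and tests with LT. */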
2024
2025
2026 /* Addressing modes and constants. */
2027
2028 /* Symbol references and other 32-bit constants are split into
2029 high/lo_sum pairs during the split1 pass. After that, they are not
2030 considered legitimate addresses.
2031 This function returns true if we are in a pre-split context
2032 where these constants are allowed. */
2033 static bool
2034 nios2_large_constant_allowed (void)
2035 {
2036 /* The reload_completed check is for the benefit of
2037 nios2_asm_output_mi_thunk and perhaps other places that try to
2038 emulate a post-reload pass. */
2039 return !(cfun->curr_properties & PROP_rtl_split_insns) && !reload_completed;
2040 }
2041
2042 /* Return true if X is a constant expression with a reference to an
2043 "ordinary" symbol; not GOT-relative, not GP-relative, not TLS. */
2044 static bool
2045 nios2_symbolic_constant_p (rtx x)
2046 {
2047 rtx base, offset;
2048
2049 if (flag_pic)
2050 return false;
2051 if (GET_CODE (x) == LABEL_REF)
2052 return true;
2053 else if (CONSTANT_P (x))
2054 {
2055 split_const (x, &base, &offset);
2056 return (SYMBOL_REF_P (base)
2057 && !SYMBOL_REF_TLS_MODEL (base)
2058 && !gprel_constant_p (base)
2059 && !r0rel_constant_p (base)
2060 && SMALL_INT (INTVAL (offset)));
2061 }
2062 return false;
2063 }
2064
2065 /* Return true if X is an expression of the form
2066 (PLUS reg large_constant). */
2067 static bool
2068 nios2_plus_large_constant_p (rtx x)
2069 {
2070 return (GET_CODE (x) == PLUS
2071 && REG_P (XEXP (x, 0))
2072 && nios2_large_constant_p (XEXP (x, 1)));
2073 }
2074
2075 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
2076 static bool
2077 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2078 {
2079 rtx base, offset;
2080 split_const (x, &base, &offset);
2081 return GET_CODE (base) != SYMBOL_REF || !SYMBOL_REF_TLS_MODEL (base);
2082 }
2083
2084 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
2085 static bool
2086 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2087 {
2088 return !nios2_legitimate_constant_p (mode, x);
2089 }
2090
2091 /* Return true if register REGNO is a valid base register.
2092 STRICT_P is true if REG_OK_STRICT is in effect. */
2093
2094 bool
2095 nios2_regno_ok_for_base_p (int regno, bool strict_p)
2096 {
2097 if (!HARD_REGISTER_NUM_P (regno))
2098 {
2099 if (!strict_p)
2100 return true;
2101
2102 if (!reg_renumber)
2103 return false;
2104
2105 regno = reg_renumber[regno];
2106 }
2107
2108 /* The fake registers will be eliminated to either the stack or
2109 hard frame pointer, both of which are usually valid base registers.
2110 Reload deals with the cases where the eliminated form isn't valid. */
2111 return (GP_REG_P (regno)
2112 || regno == FRAME_POINTER_REGNUM
2113 || regno == ARG_POINTER_REGNUM);
2114 }
2115
2116 /* Return true if OFFSET is permitted in a load/store address expression.
2117 Normally any 16-bit signed value (-32768 to 32767) is permitted, but
2118 on R2, if we may be emitting the IO forms of these instructions, we
2119 must restrict the offset to a signed 12-bit field (-2048 to 2047). */
2120
2121 static bool
2122 nios2_valid_addr_offset_p (rtx offset)
2123 {
2124 return (CONST_INT_P (offset)
2125 && ((TARGET_ARCH_R2 && (TARGET_BYPASS_CACHE
2126 || TARGET_BYPASS_CACHE_VOLATILE))
2127 ? SMALL_INT12 (INTVAL (offset))
2128 : SMALL_INT (INTVAL (offset))));
2129 }
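/* E.g., "ldw r2, 32767(r3)" is valid in the default configuration,
but when -mbypass-cache is in effect on R2 the access may have to be
emitted as "ldwio", whose offset field only covers -2048 to 2047. */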
2130
2131 /* Return true if the address expression formed by BASE + OFFSET is
2132 valid. */
2133 static bool
2134 nios2_valid_addr_expr_p (rtx base, rtx offset, bool strict_p)
2135 {
2136 if (!strict_p && GET_CODE (base) == SUBREG)
2137 base = SUBREG_REG (base);
2138 return (REG_P (base)
2139 && nios2_regno_ok_for_base_p (REGNO (base), strict_p)
2140 && (offset == NULL_RTX
2141 || nios2_valid_addr_offset_p (offset)
2142 || (nios2_large_constant_allowed ()
2143 && nios2_symbolic_constant_p (offset))
2144 || nios2_unspec_reloc_p (offset)));
2145 }
2146
2147 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2148 static bool
2149 nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
2150 rtx operand, bool strict_p)
2151 {
2152 switch (GET_CODE (operand))
2153 {
2154 /* Direct. */
2155 case SYMBOL_REF:
2156 if (SYMBOL_REF_TLS_MODEL (operand))
2157 return false;
2158
2159 /* Else, fall through. */
2160 case CONST:
2161 if (gprel_constant_p (operand) || r0rel_constant_p (operand))
2162 return true;
2163
2164 /* Else, fall through. */
2165 case LABEL_REF:
2166 if (nios2_large_constant_allowed ()
2167 && nios2_symbolic_constant_p (operand))
2168 return true;
2169 return false;
2170
2171 case CONST_INT:
2172 if (r0rel_constant_p (operand))
2173 return true;
2174 return nios2_large_constant_allowed ();
2175
2176 case CONST_DOUBLE:
2177 return false;
2178
2179 /* Register indirect. */
2180 case REG:
2181 return nios2_regno_ok_for_base_p (REGNO (operand), strict_p);
2182
2183 /* Register indirect with displacement. */
2184 case PLUS:
2185 {
2186 rtx op0 = XEXP (operand, 0);
2187 rtx op1 = XEXP (operand, 1);
2188
2189 if (nios2_valid_addr_expr_p (op0, op1, strict_p)
2190 || nios2_valid_addr_expr_p (op1, op0, strict_p))
2191 return true;
2192 }
2193 break;
2194
2195 /* %lo(constant)(reg)
2196 This requires a 16-bit relocation and isn't valid with R2
2197 io-variant load/stores. */
2198 case LO_SUM:
2199 if (TARGET_ARCH_R2
2200 && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
2201 return false;
2202 else
2203 {
2204 rtx op0 = XEXP (operand, 0);
2205 rtx op1 = XEXP (operand, 1);
2206
2207 return (REG_P (op0)
2208 && nios2_regno_ok_for_base_p (REGNO (op0), strict_p)
2209 && nios2_large_constant_p (op1));
2210 }
2211
2212 default:
2213 break;
2214 }
2215 return false;
2216 }
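/* In summary, the address forms accepted above are: (reg);
(plus reg const_int) with an in-range offset; gp- or r0-relative
constants; (lo_sum reg constant) except with R2 io-variant
load/stores; and, before the split1 pass only, bare 32-bit symbolic
constants and (plus reg symbolic_constant). */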
2217
2218 /* Implement TARGET_ADDRESS_COST.
2219 Experimentation has shown that we get better code by penalizing
2220 the (plus reg symbolic_constant) and (plus reg (const ...)) forms
2221 but giving (plus reg symbol_ref) address modes the same cost as those
2222 that don't require splitting. Also, from a theoretical point of view:
2223 - This is in line with the recommendation in the GCC internals
2224 documentation to make address forms involving multiple
2225 registers more expensive than single-register forms.
2226 - OTOH it still encourages fwprop1 to propagate constants into
2227 address expressions more aggressively.
2228 - We should discourage splitting (symbol + offset) into hi/lo pairs
2229 to allow CSE'ing the symbol when it's used with more than one offset,
2230 but not so heavily as to avoid this addressing mode at all. */
2231 static int
2232 nios2_address_cost (rtx address,
2233 machine_mode mode ATTRIBUTE_UNUSED,
2234 addr_space_t as ATTRIBUTE_UNUSED,
2235 bool speed ATTRIBUTE_UNUSED)
2236 {
2237 if (nios2_plus_large_constant_p (address))
2238 return COSTS_N_INSNS (1);
2239 if (nios2_large_constant_p (address))
2240 {
2241 if (GET_CODE (address) == CONST)
2242 return COSTS_N_INSNS (1);
2243 else
2244 return COSTS_N_INSNS (0);
2245 }
2246 return COSTS_N_INSNS (0);
2247 }
2248
2249 /* Return true if X is a MEM whose address expression involves a large (32-bit)
2250 constant. */
2251 bool
2252 nios2_large_constant_memory_operand_p (rtx x)
2253 {
2254 rtx addr;
2255
2256 if (GET_CODE (x) != MEM)
2257 return false;
2258 addr = XEXP (x, 0);
2259
2260 return (nios2_large_constant_p (addr)
2261 || nios2_plus_large_constant_p (addr));
2262 }
2263
2264
2265 /* Return true if X is something that needs to be split into a
2266 high/lo_sum pair. */
2267 bool
2268 nios2_large_constant_p (rtx x)
2269 {
2270 return (nios2_symbolic_constant_p (x)
2271 || nios2_large_unspec_reloc_p (x)
2272 || (CONST_INT_P (x) && !SMALL_INT (INTVAL (x))));
2273 }
2274
2275 /* Given an RTX X that satisfies nios2_large_constant_p, split it into
2276 high and lo_sum parts using TEMP as a scratch register. Emit the high
2277 instruction and return the lo_sum expression.
2278 Also handle special cases involving constant integers. */
2279 rtx
2280 nios2_split_large_constant (rtx x, rtx temp)
2281 {
2282 if (CONST_INT_P (x))
2283 {
2284 HOST_WIDE_INT val = INTVAL (x);
2285 if (SMALL_INT (val))
2286 return x;
2287 else if (SMALL_INT_UNSIGNED (val) || UPPER16_INT (val))
2288 {
2289 emit_move_insn (temp, x);
2290 return temp;
2291 }
2292 else
2293 {
2294 HOST_WIDE_INT high = (val + 0x8000) & ~0xffff;
2295 HOST_WIDE_INT low = val - high;
2296 emit_move_insn (temp, gen_int_mode (high, Pmode));
2297 return gen_rtx_PLUS (Pmode, temp, gen_int_mode (low, Pmode));
2298 }
2299 }
2300
2301 emit_insn (gen_rtx_SET (temp, gen_rtx_HIGH (Pmode, copy_rtx (x))));
2302 return gen_rtx_LO_SUM (Pmode, temp, copy_rtx (x));
2303 }
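/* A worked example of the rounding in the final case above: for
x == 0x1234f678, high = (0x1234f678 + 0x8000) & ~0xffff == 0x12350000
and low == -0x988, rebuilding the value as 0x12350000 + (-2440);
rounding high upward keeps low within the signed 16-bit range of an
addi immediate. */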
2304
2305 /* Split an RTX of the form
2306 (plus op0 op1)
2307 where op1 is a large constant into
2308 (set temp (high op1))
2309 (set temp (plus op0 temp))
2310 (lo_sum temp op1)
2311 returning the lo_sum expression as the value. */
2312 static rtx
2313 nios2_split_plus_large_constant (rtx op0, rtx op1)
2314 {
2315 rtx temp = gen_reg_rtx (Pmode);
2316 op0 = force_reg (Pmode, op0);
2317
2318 emit_insn (gen_rtx_SET (temp, gen_rtx_HIGH (Pmode, copy_rtx (op1))));
2319 emit_insn (gen_rtx_SET (temp, gen_rtx_PLUS (Pmode, op0, temp)));
2320 return gen_rtx_LO_SUM (Pmode, temp, copy_rtx (op1));
2321 }
2322
2323 /* Given a MEM OP with an address that includes a splittable symbol or
2324 other large constant, emit some instructions to do the split and
2325 return a new MEM. */
2326 rtx
2327 nios2_split_large_constant_memory_operand (rtx op)
2328 {
2329 rtx addr = XEXP (op, 0);
2330
2331 if (nios2_large_constant_p (addr))
2332 addr = nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
2333 else if (nios2_plus_large_constant_p (addr))
2334 addr = nios2_split_plus_large_constant (XEXP (addr, 0), XEXP (addr, 1));
2335 else
2336 gcc_unreachable ();
2337 return replace_equiv_address (op, addr, false);
2338 }
2339
2340 /* Return true if SECTION is a small section name. */
2341 static bool
2342 nios2_small_section_name_p (const char *section)
2343 {
2344 return (strcmp (section, ".sbss") == 0
2345 || strncmp (section, ".sbss.", 6) == 0
2346 || strcmp (section, ".sdata") == 0
2347 || strncmp (section, ".sdata.", 7) == 0
2348 || (nios2_gprel_sec
2349 && regexec (&nios2_gprel_sec_regex, section, 0, NULL, 0) == 0));
2350 }
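/* E.g., ".sdata", ".sdata.foo", and ".sbss.bar" are small sections
under this test, while ".sbss2" and ".data" are not, unless matched
by a user-supplied -mgprel-sec= regular expression. */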
2351
2352 /* Return true if SECTION is a r0-relative section name. */
2353 static bool
2354 nios2_r0rel_section_name_p (const char *section)
2355 {
2356 return (nios2_r0rel_sec
2357 && regexec (&nios2_r0rel_sec_regex, section, 0, NULL, 0) == 0);
2358 }
2359
2360 /* Return true if EXP should be placed in the small data section. */
2361 static bool
2362 nios2_in_small_data_p (const_tree exp)
2363 {
2364 /* We want to merge strings, so we never consider them small data. */
2365 if (TREE_CODE (exp) == STRING_CST)
2366 return false;
2367
2368 if (TREE_CODE (exp) == VAR_DECL)
2369 {
2370 if (DECL_SECTION_NAME (exp))
2371 {
2372 const char *section = DECL_SECTION_NAME (exp);
2373 if (nios2_small_section_name_p (section))
2374 return true;
2375 }
2376 else if (flexible_array_type_p (TREE_TYPE (exp))
2377 && (!TREE_PUBLIC (exp) || DECL_EXTERNAL (exp)))
2378 {
2379 /* We really should not consider any objects of any flexibly-sized
2380 type to be small data, but pre-GCC 10 did not test
2381 for this and just fell through to the next case. Thus older
2382 code compiled with -mgpopt=global could contain GP-relative
2383 accesses to objects defined in this compilation unit with
2384 external linkage. We retain the possible small-data treatment
2385 of such definitions for backward ABI compatibility, but
2386 no longer generate GP-relative accesses for external
2387 references (so that the ABI could be changed in the future
2388 with less potential impact), or objects with internal
2389 linkage. */
2390 return false;
2391 }
2392 else
2393 {
2394 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
2395
2396 /* If this is an incomplete type with size 0, then we can't put it
2397 in sdata because it might be too big when completed. */
2398 if (size > 0
2399 && (unsigned HOST_WIDE_INT) size <= nios2_section_threshold)
2400 return true;
2401 }
2402 }
2403
2404 return false;
2405 }
2406
2407 /* Return true if SYM is in the small data section. */
2408
2409 static bool
2410 nios2_symbol_ref_in_small_data_p (rtx sym)
2411 {
2412 tree decl;
2413
2414 gcc_assert (GET_CODE (sym) == SYMBOL_REF);
2415 decl = SYMBOL_REF_DECL (sym);
2416
2417 /* TLS variables are not accessed through the GP. */
2418 if (SYMBOL_REF_TLS_MODEL (sym) != 0)
2419 return false;
2420
2421 /* On Nios II R2, there is no GP-relative relocation that can be
2422 used with "io" instructions. So, if we are implicitly generating
2423 those instructions, we cannot emit GP-relative accesses. */
2424 if (TARGET_ARCH_R2
2425 && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
2426 return false;
2427
2428 /* If the user has explicitly placed the symbol in a small data section
2429 via an attribute, generate gp-relative addressing even if the symbol
2430 is external, weak, or larger than we'd automatically put in the
2431 small data section. OTOH, if the symbol is located in some
2432 non-small-data section, we can't use gp-relative accesses on it
2433 unless the user has requested gpopt_data or gpopt_all. */
2434
2435 switch (nios2_gpopt_option)
2436 {
2437 case gpopt_none:
2438 /* Don't generate a gp-relative addressing mode if that's been
2439 disabled. */
2440 return false;
2441
2442 case gpopt_local:
2443 /* Use GP-relative addressing for small data symbols that are
2444 not external or weak or uninitialized common, plus any symbols
2445 that have explicitly been placed in a small data section. */
2446 if (decl && DECL_SECTION_NAME (decl))
2447 return nios2_small_section_name_p (DECL_SECTION_NAME (decl));
2448 return (SYMBOL_REF_SMALL_P (sym)
2449 && !SYMBOL_REF_EXTERNAL_P (sym)
2450 && !(decl && DECL_WEAK (decl))
2451 && !(decl && DECL_COMMON (decl)
2452 && (DECL_INITIAL (decl) == NULL
2453 || (!in_lto_p
2454 && DECL_INITIAL (decl) == error_mark_node))));
2455
2456 case gpopt_global:
2457 /* Use GP-relative addressing for small data symbols, even if
2458 they are external or weak. Note that SYMBOL_REF_SMALL_P
2459 is also true of symbols that have explicitly been placed
2460 in a small data section. */
2461 return SYMBOL_REF_SMALL_P (sym);
2462
2463 case gpopt_data:
2464 /* Use GP-relative addressing for all data symbols regardless
2465 of the object size, but not for code symbols. This option
2466 is equivalent to the user asserting that the entire data
2467 section is accessible from the GP. */
2468 return !SYMBOL_REF_FUNCTION_P (sym);
2469
2470 case gpopt_all:
2471 /* Use GP-relative addressing for everything, including code.
2472 Effectively, the user has asserted that the entire program
2473 fits within the 64K range of the GP offset. */
2474 return true;
2475
2476 default:
2477 /* We shouldn't get here. */
2478 return false;
2479 }
2480 }
2481
2482 /* Likewise for r0-relative addressing. */
2483 static bool
2484 nios2_symbol_ref_in_r0rel_data_p (rtx sym)
2485 {
2486 tree decl;
2487
2488 gcc_assert (GET_CODE (sym) == SYMBOL_REF);
2489 decl = SYMBOL_REF_DECL (sym);
2490
2491 /* TLS variables are not accessed through r0. */
2492 if (SYMBOL_REF_TLS_MODEL (sym) != 0)
2493 return false;
2494
2495 /* On Nios II R2, there is no r0-relative relocation that can be
2496 used with "io" instructions. So, if we are implicitly generating
2497 those instructions, we cannot emit r0-relative accesses. */
2498 if (TARGET_ARCH_R2
2499 && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
2500 return false;
2501
2502 /* If the user has explicitly placed the symbol in an r0rel section
2503 via an attribute, generate r0-relative addressing. */
2504 if (decl && DECL_SECTION_NAME (decl))
2505 return nios2_r0rel_section_name_p (DECL_SECTION_NAME (decl));
2506 return false;
2507 }
2508
2509 /* Implement TARGET_SECTION_TYPE_FLAGS. */
2510
2511 static unsigned int
2512 nios2_section_type_flags (tree decl, const char *name, int reloc)
2513 {
2514 unsigned int flags;
2515
2516 flags = default_section_type_flags (decl, name, reloc);
2517
2518 if (nios2_small_section_name_p (name))
2519 flags |= SECTION_SMALL;
2520
2521 return flags;
2522 }
2523
2524 /* Return true if SYMBOL_REF X binds locally. */
2525
2526 static bool
2527 nios2_symbol_binds_local_p (const_rtx x)
2528 {
2529 return (SYMBOL_REF_DECL (x)
2530 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
2531 : SYMBOL_REF_LOCAL_P (x));
2532 }
2533
2534 /* Position independent code related. */
2535
2536 /* Emit code to load the PIC register. */
2537 static void
2538 nios2_load_pic_register (void)
2539 {
2540 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
2541
2542 emit_insn (gen_load_got_register (pic_offset_table_rtx, tmp));
2543 emit_insn (gen_add3_insn (pic_offset_table_rtx, pic_offset_table_rtx, tmp));
2544 }
2545
2546 /* Generate a PIC address as a MEM rtx. */
2547 static rtx
2548 nios2_load_pic_address (rtx sym, int unspec, rtx tmp)
2549 {
2550 if (flag_pic == 2
2551 && GET_CODE (sym) == SYMBOL_REF
2552 && nios2_symbol_binds_local_p (sym))
2553 /* Under -fPIC, generate a GOTOFF address for local symbols. */
2554 {
2555 rtx offset = nios2_unspec_offset (sym, UNSPEC_PIC_GOTOFF_SYM);
2556 crtl->uses_pic_offset_table = 1;
2557 return nios2_large_got_address (offset, tmp);
2558 }
2559
2560 return gen_const_mem (Pmode, nios2_got_address (sym, unspec));
2561 }
2562
2563 /* Return true if the constant value X is a legitimate general operand
2564 when generating PIC code. It is given that flag_pic is on and
2565 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2566 bool
2567 nios2_legitimate_pic_operand_p (rtx x)
2568 {
2569 if (nios2_large_unspec_reloc_p (x))
2570 return true;
2571
2572 return ! (GET_CODE (x) == SYMBOL_REF
2573 || GET_CODE (x) == LABEL_REF || GET_CODE (x) == CONST);
2574 }
2575
2576 /* Return TRUE if X is a thread-local symbol. */
2577 static bool
2578 nios2_tls_symbol_p (rtx x)
2579 {
2580 return (targetm.have_tls && GET_CODE (x) == SYMBOL_REF
2581 && SYMBOL_REF_TLS_MODEL (x) != 0);
2582 }
2583
2584 /* Legitimize addresses that are CONSTANT_P expressions. */
2585 static rtx
2586 nios2_legitimize_constant_address (rtx addr)
2587 {
2588 rtx base, offset;
2589 split_const (addr, &base, &offset);
2590
2591 if (nios2_tls_symbol_p (base))
2592 base = nios2_legitimize_tls_address (base);
2593 else if (flag_pic)
2594 base = nios2_load_pic_address (base, UNSPEC_PIC_SYM, NULL_RTX);
2595 else if (!nios2_large_constant_allowed ()
2596 && nios2_symbolic_constant_p (addr))
2597 return nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
2598 else if (CONST_INT_P (addr))
2599 {
2600 HOST_WIDE_INT val = INTVAL (addr);
2601 if (SMALL_INT (val))
2602 /* Use r0-relative addressing. */
2603 return addr;
2604 else if (!nios2_large_constant_allowed ())
2605 /* Split into high/lo pair. */
2606 return nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
2607 }
2608 else
2609 return addr;
2610
2611 if (offset != const0_rtx)
2612 {
2613 gcc_assert (can_create_pseudo_p ());
2614 return gen_rtx_PLUS (Pmode, force_reg (Pmode, base),
2615 (CONST_INT_P (offset)
2616 ? (SMALL_INT (INTVAL (offset))
2617 ? offset : force_reg (Pmode, offset))
2618 : offset));
2619 }
2620 return base;
2621 }
2622
2623 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
2624 static rtx
2625 nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2626 machine_mode mode ATTRIBUTE_UNUSED)
2627 {
2628 rtx op0, op1;
2629
2630 if (CONSTANT_P (x))
2631 return nios2_legitimize_constant_address (x);
2632
2633 /* Remaining cases all involve something + a constant. */
2634 if (GET_CODE (x) != PLUS)
2635 return x;
2636
2637 op0 = XEXP (x, 0);
2638 op1 = XEXP (x, 1);
2639
2640 /* Target-independent code turns (exp + constant) into plain
2641 register indirect. Although subsequent optimization passes will
2642 eventually sort that out, ivopts uses the unoptimized form for
2643 computing its cost model, so we get better results by generating
2644 the correct form from the start. */
2645 if (nios2_valid_addr_offset_p (op1))
2646 return gen_rtx_PLUS (Pmode, force_reg (Pmode, op0), copy_rtx (op1));
2647
2648 /* We may need to split symbolic constants now. */
2649 else if (nios2_symbolic_constant_p (op1))
2650 {
2651 if (nios2_large_constant_allowed ())
2652 return gen_rtx_PLUS (Pmode, force_reg (Pmode, op0), copy_rtx (op1));
2653 else
2654 return nios2_split_plus_large_constant (op0, op1);
2655 }
2656
2657 /* For the TLS LE (Local Exec) model, the compiler may try to
2658 combine constant offsets with unspec relocs, creating address RTXs
2659 looking like this:
2660 (plus:SI (reg:SI 23 r23)
2661 (const:SI
2662 (plus:SI
2663 (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
2664 (const_int 48 [0x30]))))
2665
2666 This usually happens when 'var' is a thread-local struct variable,
2667 and an access to one of its fields introduces the addend.
2668
2669 We typically want this combining, so transform the above into this
2670 form, which is allowed:
2671 (plus:SI (reg:SI 23 r23)
2672 (const:SI
2673 (unspec:SI
2674 [(const:SI
2675 (plus:SI (symbol_ref:SI ("var"))
2676 (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
2677
2678 Which will be output as '%tls_le(var+48)(r23)' in assembly. */
2679 else if (GET_CODE (op1) == CONST)
2680 {
2681 rtx unspec, offset;
2682 split_const (op1, &unspec, &offset);
2683 if (GET_CODE (unspec) == UNSPEC
2684 && !nios2_large_offset_p (XINT (unspec, 1))
2685 && offset != const0_rtx)
2686 {
2687 rtx reg = force_reg (Pmode, op0);
2688 unspec = copy_rtx (unspec);
2689 XVECEXP (unspec, 0, 0)
2690 = plus_constant (Pmode, XVECEXP (unspec, 0, 0), INTVAL (offset));
2691 return gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
2692 }
2693 }
2694
2695 return x;
2696 }
2697
2698 static rtx
2699 nios2_delegitimize_address (rtx x)
2700 {
2701 x = delegitimize_mem_from_attrs (x);
2702
2703 if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
2704 {
2705 switch (XINT (XEXP (x, 0), 1))
2706 {
2707 case UNSPEC_PIC_SYM:
2708 case UNSPEC_PIC_CALL_SYM:
2709 case UNSPEC_PIC_GOTOFF_SYM:
2710 case UNSPEC_ADD_TLS_GD:
2711 case UNSPEC_ADD_TLS_LDM:
2712 case UNSPEC_LOAD_TLS_IE:
2713 case UNSPEC_ADD_TLS_LE:
2714 x = XVECEXP (XEXP (x, 0), 0, 0);
2715 gcc_assert (CONSTANT_P (x));
2716 break;
2717 }
2718 }
2719 return x;
2720 }
2721
2722 /* Main expander function for RTL moves. */
2723 bool
2724 nios2_emit_move_sequence (rtx *operands, machine_mode mode)
2725 {
2726 rtx to = operands[0];
2727 rtx from = operands[1];
2728
2729 if (!register_operand (to, mode) && !reg_or_0_operand (from, mode))
2730 {
2731 gcc_assert (can_create_pseudo_p ());
2732 from = copy_to_mode_reg (mode, from);
2733 }
2734
2735 if (CONSTANT_P (from))
2736 {
2737 if (CONST_INT_P (from))
2738 {
2739 if (!SMALL_INT (INTVAL (from))
2740 && !SMALL_INT_UNSIGNED (INTVAL (from))
2741 && !UPPER16_INT (INTVAL (from)))
2742 {
2743 HOST_WIDE_INT high = (INTVAL (from) + 0x8000) & ~0xffff;
2744 HOST_WIDE_INT low = INTVAL (from) & 0xffff;
2745 emit_move_insn (to, gen_int_mode (high, SImode));
2746 emit_insn (gen_add2_insn (to, gen_int_mode (low, HImode)));
2747 set_unique_reg_note (get_last_insn (), REG_EQUAL,
2748 copy_rtx (from));
2749 return true;
2750 }
2751 }
2752 else if (gprel_constant_p (from) || r0rel_constant_p (from))
2753 /* Handled directly by movsi_internal as gp + offset
2754 or r0 + offset. */
2755 ;
2756 else if (nios2_large_constant_p (from))
2757 /* This case covers either a regular symbol reference or an UNSPEC
2758 representing a 32-bit offset. We split the former
2759 only conditionally and the latter always. */
2760 {
2761 if (!nios2_large_constant_allowed ()
2762 || nios2_large_unspec_reloc_p (from))
2763 {
2764 rtx lo = nios2_split_large_constant (from, to);
2765 emit_insn (gen_rtx_SET (to, lo));
2766 set_unique_reg_note (get_last_insn (), REG_EQUAL,
2767 copy_rtx (operands[1]));
2768 return true;
2769 }
2770 }
2771 else
2772 /* This is a TLS or PIC symbol. */
2773 {
2774 from = nios2_legitimize_constant_address (from);
2775 if (CONSTANT_P (from))
2776 {
2777 emit_insn (gen_rtx_SET (to,
2778 gen_rtx_HIGH (Pmode, copy_rtx (from))));
2779 emit_insn (gen_rtx_SET (to, gen_rtx_LO_SUM (Pmode, to, from)));
2780 set_unique_reg_note (get_last_insn (), REG_EQUAL,
2781 copy_rtx (operands[1]));
2782 return true;
2783 }
2784 }
2785 }
2786
2787 operands[0] = to;
2788 operands[1] = from;
2789 return false;
2790 }
2791
2792 /* The function with address *CALL_OP is being called. If the address
2793 needs to be loaded from the GOT, emit the instruction to do so and
2794 update *CALL_OP to point to the rtx for the loaded value.
2795 If REG != NULL_RTX, it is used as the target/scratch register in the
2796 GOT address calculation. */
2797 void
2798 nios2_adjust_call_address (rtx *call_op, rtx reg)
2799 {
2800 if (MEM_P (*call_op))
2801 call_op = &XEXP (*call_op, 0);
2802
2803 rtx addr = *call_op;
2804 if (flag_pic && CONSTANT_P (addr))
2805 {
2806 rtx tmp = reg ? reg : NULL_RTX;
2807 if (!reg)
2808 reg = gen_reg_rtx (Pmode);
2809 addr = nios2_load_pic_address (addr, UNSPEC_PIC_CALL_SYM, tmp);
2810 emit_insn (gen_rtx_SET (reg, addr));
2811 *call_op = reg;
2812 }
2813 }
2814
2815 \f
2816 /* Output assembly language related definitions. */
2817
2818 /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
2819 static bool
2820 nios2_print_operand_punct_valid_p (unsigned char code)
2821 {
2822 return (code == '.' || code == '!');
2823 }
2824
2825
2826 /* Print the operand OP to file stream FILE modified by LETTER.
2827 LETTER can be one of:
2828
2829 i: print i/hi/ui suffixes for mov instruction variants (so 'mov%i'
2830 emits movi, movhi, or movui), when OP is the appropriate immediate operand.
2831
2832 u: like 'i', except without "ui" suffix case (used for cmpgeu/cmpltu)
2833
2834 o: print "io" if OP needs volatile access (due to TARGET_BYPASS_CACHE
2835 or TARGET_BYPASS_CACHE_VOLATILE).
2836
2837 x: print i/hi/ci/chi suffixes for the and instruction,
2838 when OP is the appropriate immediate operand.
2839
2840 z: prints the third register or immediate operand in assembly
2841 instructions. Outputs const0_rtx as the 'zero' register
2842 instead of '0'.
2843
2844 y: same as 'z', but specifically for logical instructions,
2845 where the processing of immediates is slightly different.
2846
2847 H: for %hiadj
2848 L: for %lo
2849 D: for the upper 32-bits of a 64-bit double value
2850 R: prints reverse condition.
2851 A: prints (reg) operand for ld[s]ex and st[s]ex.
2852
2853 .: print .n suffix for 16-bit instructions.
2854 !: print r.n suffix for 16-bit instructions. Used for jmpr.n.
2855 */
2856 static void
2857 nios2_print_operand (FILE *file, rtx op, int letter)
2858 {
2859
2860 /* First take care of the format letters that just insert a string
2861 into the output stream. */
2862 switch (letter)
2863 {
2864 case '.':
2865 if (current_output_insn && get_attr_length (current_output_insn) == 2)
2866 fprintf (file, ".n");
2867 return;
2868
2869 case '!':
2870 if (current_output_insn && get_attr_length (current_output_insn) == 2)
2871 fprintf (file, "r.n");
2872 return;
2873
2874 case 'x':
2875 if (CONST_INT_P (op))
2876 {
2877 HOST_WIDE_INT val = INTVAL (op);
2878 HOST_WIDE_INT low = val & 0xffff;
2879 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2880
2881 if (val != 0)
2882 {
2883 if (high != 0)
2884 {
2885 if (low != 0)
2886 {
2887 gcc_assert (TARGET_ARCH_R2);
2888 if (high == 0xffff)
2889 fprintf (file, "c");
2890 else if (low == 0xffff)
2891 fprintf (file, "ch");
2892 else
2893 gcc_unreachable ();
2894 }
2895 else
2896 fprintf (file, "h");
2897 }
2898 fprintf (file, "i");
2899 }
2900 }
2901 return;
2902
2903 case 'u':
2904 case 'i':
2905 if (CONST_INT_P (op))
2906 {
2907 HOST_WIDE_INT val = INTVAL (op);
2908 HOST_WIDE_INT low = val & 0xffff;
2909 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2910 if (val != 0)
2911 {
2912 if (low == 0 && high != 0)
2913 fprintf (file, "h");
2914 else if (high == 0 && (low & 0x8000) != 0 && letter != 'u')
2915 fprintf (file, "u");
2916 }
2917 }
2918 if (CONSTANT_P (op) && op != const0_rtx)
2919 fprintf (file, "i");
2920 return;
2921
2922 case 'o':
2923 if (GET_CODE (op) == MEM
2924 && ((MEM_VOLATILE_P (op) && TARGET_BYPASS_CACHE_VOLATILE)
2925 || TARGET_BYPASS_CACHE))
2926 {
2927 gcc_assert (current_output_insn
2928 && get_attr_length (current_output_insn) == 4);
2929 fprintf (file, "io");
2930 }
2931 return;
2932
2933 default:
2934 break;
2935 }
2936
2937 /* Handle comparison operator names. */
2938 if (comparison_operator (op, VOIDmode))
2939 {
2940 enum rtx_code cond = GET_CODE (op);
2941 if (letter == 0)
2942 {
2943 fprintf (file, "%s", GET_RTX_NAME (cond));
2944 return;
2945 }
2946 if (letter == 'R')
2947 {
2948 fprintf (file, "%s", GET_RTX_NAME (reverse_condition (cond)));
2949 return;
2950 }
2951 }
2952
2953 /* Now handle the cases where we actually need to format an operand. */
2954 switch (GET_CODE (op))
2955 {
2956 case REG:
2957 if (letter == 0 || letter == 'z' || letter == 'y')
2958 {
2959 fprintf (file, "%s", reg_names[REGNO (op)]);
2960 return;
2961 }
2962 else if (letter == 'D')
2963 {
2964 fprintf (file, "%s", reg_names[REGNO (op)+1]);
2965 return;
2966 }
2967 break;
2968
2969 case CONST_INT:
2970 {
2971 rtx int_rtx = op;
2972 HOST_WIDE_INT val = INTVAL (int_rtx);
2973 HOST_WIDE_INT low = val & 0xffff;
2974 HOST_WIDE_INT high = (val >> 16) & 0xffff;
2975
2976 if (letter == 'y')
2977 {
2978 if (val == 0)
2979 fprintf (file, "zero");
2980 else
2981 {
2982 if (high != 0)
2983 {
2984 if (low != 0)
2985 {
2986 gcc_assert (TARGET_ARCH_R2);
2987 if (high == 0xffff)
2988 /* andci. */
2989 int_rtx = gen_int_mode (low, SImode);
2990 else if (low == 0xffff)
2991 /* andchi. */
2992 int_rtx = gen_int_mode (high, SImode);
2993 else
2994 gcc_unreachable ();
2995 }
2996 else
2997 /* andhi. */
2998 int_rtx = gen_int_mode (high, SImode);
2999 }
3000 else
3001 /* andi. */
3002 int_rtx = gen_int_mode (low, SImode);
3003 output_addr_const (file, int_rtx);
3004 }
3005 return;
3006 }
3007 else if (letter == 'z')
3008 {
3009 if (val == 0)
3010 fprintf (file, "zero");
3011 else
3012 {
3013 if (low == 0 && high != 0)
3014 int_rtx = gen_int_mode (high, SImode);
3015 else if (low != 0)
3016 {
3017 gcc_assert (high == 0 || high == 0xffff);
3018 int_rtx = gen_int_mode (low, high == 0 ? SImode : HImode);
3019 }
3020 else
3021 gcc_unreachable ();
3022 output_addr_const (file, int_rtx);
3023 }
3024 return;
3025 }
3026 }
3027
3028 /* Else, fall through. */
3029
3030 case CONST:
3031 case LABEL_REF:
3032 case SYMBOL_REF:
3033 case CONST_DOUBLE:
3034 if (letter == 0 || letter == 'z')
3035 {
3036 output_addr_const (file, op);
3037 return;
3038 }
3039 else if (letter == 'H' || letter == 'L')
3040 {
3041 fprintf (file, "%%");
3042 if (GET_CODE (op) == CONST
3043 && GET_CODE (XEXP (op, 0)) == UNSPEC)
3044 {
3045 rtx unspec = XEXP (op, 0);
3046 int unspec_reloc = XINT (unspec, 1);
3047 gcc_assert (nios2_large_offset_p (unspec_reloc));
3048 fprintf (file, "%s_", nios2_unspec_reloc_name (unspec_reloc));
3049 op = XVECEXP (unspec, 0, 0);
3050 }
3051 fprintf (file, letter == 'H' ? "hiadj(" : "lo(");
3052 output_addr_const (file, op);
3053 fprintf (file, ")");
3054 return;
3055 }
3056 break;
3057
3058 case SUBREG:
3059 case MEM:
3060 if (letter == 'A')
3061 {
3062 /* Address of '(reg)' form, with no index. */
3063 fprintf (file, "(%s)", reg_names[REGNO (XEXP (op, 0))]);
3064 return;
3065 }
3066 if (letter == 0)
3067 {
3068 output_address (VOIDmode, op);
3069 return;
3070 }
3071 break;
3072
3073 case CODE_LABEL:
3074 if (letter == 0)
3075 {
3076 output_addr_const (file, op);
3077 return;
3078 }
3079 break;
3080
3081 default:
3082 break;
3083 }
3084
3085 debug_rtx (op);
3086 output_operand_lossage ("unsupported operand for code '%c'", letter);
3087 gcc_unreachable ();
3088 }
3089
3090 /* Return true if this is a GP-relative accessible reference. */
3091 bool
3092 gprel_constant_p (rtx op)
3093 {
3094 if (GET_CODE (op) == SYMBOL_REF
3095 && nios2_symbol_ref_in_small_data_p (op))
3096 return true;
3097 else if (GET_CODE (op) == CONST
3098 && GET_CODE (XEXP (op, 0)) == PLUS)
3099 return gprel_constant_p (XEXP (XEXP (op, 0), 0));
3100
3101 return false;
3102 }
3103
3104 /* Likewise if this is a zero-relative accessible reference. */
3105 bool
3106 r0rel_constant_p (rtx op)
3107 {
3108 if (GET_CODE (op) == SYMBOL_REF
3109 && nios2_symbol_ref_in_r0rel_data_p (op))
3110 return true;
3111 else if (GET_CODE (op) == CONST
3112 && GET_CODE (XEXP (op, 0)) == PLUS)
3113 return r0rel_constant_p (XEXP (XEXP (op, 0), 0));
3114 else if (GET_CODE (op) == CONST_INT
3115 && SMALL_INT (INTVAL (op)))
3116 return true;
3117
3118 return false;
3119 }
3120
3121 /* Return the name string for a supported unspec reloc offset. */
3122 static const char *
3123 nios2_unspec_reloc_name (int unspec)
3124 {
3125 switch (unspec)
3126 {
3127 case UNSPEC_PIC_SYM:
3128 return "got";
3129 case UNSPEC_PIC_CALL_SYM:
3130 return "call";
3131 case UNSPEC_PIC_GOTOFF_SYM:
3132 return "gotoff";
3133 case UNSPEC_LOAD_TLS_IE:
3134 return "tls_ie";
3135 case UNSPEC_ADD_TLS_LE:
3136 return "tls_le";
3137 case UNSPEC_ADD_TLS_GD:
3138 return "tls_gd";
3139 case UNSPEC_ADD_TLS_LDM:
3140 return "tls_ldm";
3141 case UNSPEC_ADD_TLS_LDO:
3142 return "tls_ldo";
3143 default:
3144 return NULL;
3145 }
3146 }
3147
3148 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
3149 static bool
3150 nios2_output_addr_const_extra (FILE *file, rtx op)
3151 {
3152 const char *name;
3153 gcc_assert (GET_CODE (op) == UNSPEC);
3154
3155 /* Support for printing out const unspec relocations. */
3156 name = nios2_unspec_reloc_name (XINT (op, 1));
3157 if (name)
3158 {
3159 fprintf (file, "%%%s(", name);
3160 output_addr_const (file, XVECEXP (op, 0, 0));
3161 fprintf (file, ")");
3162 return true;
3163 }
3164 return false;
3165 }
3166
3167 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
3168 static void
3169 nios2_print_operand_address (FILE *file, machine_mode mode, rtx op)
3170 {
3171 switch (GET_CODE (op))
3172 {
3173 case CONST:
3174 case CONST_INT:
3175 case LABEL_REF:
3176 case CONST_DOUBLE:
3177 case SYMBOL_REF:
3178 if (gprel_constant_p (op))
3179 {
3180 fprintf (file, "%%gprel(");
3181 output_addr_const (file, op);
3182 fprintf (file, ")(%s)", reg_names[GP_REGNO]);
3183 return;
3184 }
3185 else if (r0rel_constant_p (op))
3186 {
3187 if (CONST_INT_P (op))
3188 {
3189 output_addr_const (file, op);
3190 fprintf (file, "(r0)");
3191 return;
3192 }
3193 else
3194 {
3195 fprintf (file, "%%lo(");
3196 output_addr_const (file, op);
3197 fprintf (file, ")(r0)");
3198 return;
3199 }
3200 }
3201 break;
3202
3203 case PLUS:
3204 {
3205 rtx op0 = XEXP (op, 0);
3206 rtx op1 = XEXP (op, 1);
3207
3208 if (REG_P (op0) && CONSTANT_P (op1))
3209 {
3210 output_addr_const (file, op1);
3211 fprintf (file, "(%s)", reg_names[REGNO (op0)]);
3212 return;
3213 }
3214 else if (REG_P (op1) && CONSTANT_P (op0))
3215 {
3216 output_addr_const (file, op0);
3217 fprintf (file, "(%s)", reg_names[REGNO (op1)]);
3218 return;
3219 }
3220 }
3221 break;
3222
3223 case LO_SUM:
3224 {
3225 rtx op0 = XEXP (op, 0);
3226 rtx op1 = XEXP (op, 1);
3227
3228 if (REG_P (op0) && CONSTANT_P (op1))
3229 {
3230 nios2_print_operand (file, op1, 'L');
3231 fprintf (file, "(%s)", reg_names[REGNO (op0)]);
3232 return;
3233 }
3234 }
3235 break;
3236
3237 case REG:
3238 fprintf (file, "0(%s)", reg_names[REGNO (op)]);
3239 return;
3240
3241 case MEM:
3242 {
3243 rtx base = XEXP (op, 0);
3244 nios2_print_operand_address (file, mode, base);
3245 return;
3246 }
3247 default:
3248 break;
3249 }
3250
3251 fprintf (stderr, "Missing way to print address\n");
3252 debug_rtx (op);
3253 gcc_unreachable ();
3254 }
3255
3256 /* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL. */
3257 static void
3258 nios2_output_dwarf_dtprel (FILE *file, int size, rtx x)
3259 {
3260 gcc_assert (size == 4);
3261 fprintf (file, "\t.4byte\t%%tls_ldo(");
3262 output_addr_const (file, x);
3263 fprintf (file, ")");
3264 }
3265
3266 /* Implement TARGET_ASM_FILE_END. */
3267
3268 static void
3269 nios2_asm_file_end (void)
3270 {
3271 /* The Nios II Linux stack is mapped non-executable by default, so add a
3272 .note.GNU-stack section for switching to executable stacks only when
3273 trampolines are generated. */
3274 if (TARGET_LINUX_ABI && trampolines_created)
3275 file_end_indicate_exec_stack ();
3276 }
3277
3278 /* Implement TARGET_ASM_FUNCTION_PROLOGUE. */
3279 static void
3280 nios2_asm_function_prologue (FILE *file)
3281 {
3282 if (flag_verbose_asm || flag_debug_asm)
3283 {
3284 nios2_compute_frame_layout ();
3285 nios2_dump_frame_layout (file);
3286 }
3287 }
3288
3289 /* Emit assembly of custom FPU instructions. */
3290 const char *
3291 nios2_fpu_insn_asm (enum n2fpu_code code)
3292 {
3293 static char buf[256];
3294 const char *op1, *op2, *op3;
3295 int ln = 256, n = 0;
3296
3297 int N = N2FPU_N (code);
3298 int num_operands = N2FPU (code).num_operands;
3299 const char *insn_name = N2FPU_NAME (code);
3300 tree ftype = nios2_ftype (N2FPU_FTCODE (code));
3301 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
3302 machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
3303
3304 /* Prepare X register for DF input operands. */
3305 if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
3306 n = snprintf (buf, ln, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
3307 N2FPU_N (n2fpu_fwrx));
3308
3309 if (src_mode == SFmode)
3310 {
3311 if (dst_mode == VOIDmode)
3312 {
3313 /* The fwry case. */
3314 op1 = op3 = "zero";
3315 op2 = "%0";
3316 num_operands -= 1;
3317 }
3318 else
3319 {
3320 op1 = (dst_mode == DFmode ? "%D0" : "%0");
3321 op2 = "%1";
3322 op3 = (num_operands == 2 ? "zero" : "%2");
3323 }
3324 }
3325 else if (src_mode == DFmode)
3326 {
3327 if (dst_mode == VOIDmode)
3328 {
3329 /* The fwrx case. */
3330 op1 = "zero";
3331 op2 = "%0";
3332 op3 = "%D0";
3333 num_operands -= 1;
3334 }
3335 else
3336 {
3337 op1 = (dst_mode == DFmode ? "%D0" : "%0");
3338 op2 = (num_operands == 2 ? "%1" : "%2");
3339 op3 = (num_operands == 2 ? "%D1" : "%D2");
3340 }
3341 }
3342 else if (src_mode == VOIDmode)
3343 {
3344 /* frdxlo, frdxhi, frdy cases. */
3345 gcc_assert (dst_mode == SFmode);
3346 op1 = "%0";
3347 op2 = op3 = "zero";
3348 }
3349 else if (src_mode == SImode)
3350 {
3351 /* Conversion operators. */
3352 gcc_assert (num_operands == 2);
3353 op1 = (dst_mode == DFmode ? "%D0" : "%0");
3354 op2 = "%1";
3355 op3 = "zero";
3356 }
3357 else
3358 gcc_unreachable ();
3359
3360 /* Main instruction string. */
3361 n += snprintf (buf + n, ln - n, "custom\t%d, %s, %s, %s # %s %%0%s%s",
3362 N, op1, op2, op3, insn_name,
3363 (num_operands >= 2 ? ", %1" : ""),
3364 (num_operands == 3 ? ", %2" : ""));
3365
3366 /* Extraction of Y register for DF results. */
3367 if (dst_mode == DFmode)
3368 snprintf (buf + n, ln - n, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
3369 N2FPU_N (n2fpu_frdy));
3370 return buf;
3371 }
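/* For illustration, assuming the FPH2 assignment -mcustom-fadds=253,
a single-precision add comes out of the routine above as:
custom 253, %0, %1, %2 # fadds %0, %1, %2
with the double-precision cases additionally wrapping fwrx/frdy
transfers around the main instruction. */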
3372
3373 \f
3374
3375 /* Function argument related. */
3376
3377 /* Define where to put the arguments to a function. Value is zero to
3378 push the argument on the stack, or a hard register in which to
3379 store the argument.
3380
3381 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3382 the preceding args and about the function being called.
3383 ARG is a description of the argument. */
3384
3385 static rtx
3386 nios2_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
3387 {
3388 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3389 rtx return_rtx = NULL_RTX;
3390
3391 if (cum->regs_used < NUM_ARG_REGS)
3392 return_rtx = gen_rtx_REG (arg.mode, FIRST_ARG_REGNO + cum->regs_used);
3393
3394 return return_rtx;
3395 }
3396
3397 /* Return the number of bytes, at the beginning of the argument, that must
3398 be put in registers. 0 means the argument is entirely in registers or
3399 entirely in memory. */
3400
3401 static int
3402 nios2_arg_partial_bytes (cumulative_args_t cum_v, const function_arg_info &arg)
3403 {
3404 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3405 HOST_WIDE_INT param_size = arg.promoted_size_in_bytes ();
3406 gcc_assert (param_size >= 0);
3407
3408 /* Convert to words (round up). */
3409 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
3410
3411 if (cum->regs_used < NUM_ARG_REGS
3412 && cum->regs_used + param_size > NUM_ARG_REGS)
3413 return (NUM_ARG_REGS - cum->regs_used) * UNITS_PER_WORD;
3414
3415 return 0;
3416 }
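/* For example, with the four argument registers r4-r7: if three words
of register arguments are already used and the next argument needs two
words, the first word goes in r7, this hook returns 4, and the
remaining word is passed on the stack. */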
3417
3418 /* Update the data in CUM to advance over argument ARG. */
3419
3420 static void
3421 nios2_function_arg_advance (cumulative_args_t cum_v,
3422 const function_arg_info &arg)
3423 {
3424 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3425 HOST_WIDE_INT param_size = arg.promoted_size_in_bytes ();
3426 gcc_assert (param_size >= 0);
3427
3428 /* Convert to words (round up). */
3429 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
3430
3431 if (cum->regs_used + param_size > NUM_ARG_REGS)
3432 cum->regs_used = NUM_ARG_REGS;
3433 else
3434 cum->regs_used += param_size;
3435 }
3436
3437 static pad_direction
3438 nios2_function_arg_padding (machine_mode mode, const_tree type)
3439 {
3440 /* On little-endian targets, the first byte of every stack argument
3441 is passed in the first byte of the stack slot. */
3442 if (!BYTES_BIG_ENDIAN)
3443 return PAD_UPWARD;
3444
3445 /* Otherwise, integral types are padded downward: the last byte of a
3446 stack argument is passed in the last byte of the stack slot. */
3447 if (type != 0
3448 ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
3449 : GET_MODE_CLASS (mode) == MODE_INT)
3450 return PAD_DOWNWARD;
3451
3452 /* Arguments smaller than a stack slot are padded downward. */
3453 if (mode != BLKmode)
3454 return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY
3455 ? PAD_UPWARD : PAD_DOWNWARD);
3456
3457 return ((int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT))
3458 ? PAD_UPWARD : PAD_DOWNWARD);
3459 }
3460
3461 pad_direction
3462 nios2_block_reg_padding (machine_mode mode, tree type,
3463 int first ATTRIBUTE_UNUSED)
3464 {
3465 return nios2_function_arg_padding (mode, type);
3466 }
3467
3468 /* Emit RTL insns to initialize the variable parts of a trampoline.
3469 FNDECL is the function whose address (its pure code) is stored in
3470 the trampoline; CXT is an RTX for its static chain value.
3471 On Nios II, we handle this by a library call. */
3472 static void
3473 nios2_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
3474 {
3475 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3476 rtx ctx_reg = force_reg (Pmode, cxt);
3477 rtx addr = force_reg (Pmode, XEXP (m_tramp, 0));
3478
3479 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
3480 LCT_NORMAL, VOIDmode, addr, Pmode, fnaddr, Pmode,
3481 ctx_reg, Pmode);
3482 }
3483
3484 /* Implement TARGET_FUNCTION_VALUE. */
3485 static rtx
3486 nios2_function_value (const_tree ret_type, const_tree fn ATTRIBUTE_UNUSED,
3487 bool outgoing ATTRIBUTE_UNUSED)
3488 {
3489 return gen_rtx_REG (TYPE_MODE (ret_type), FIRST_RETVAL_REGNO);
3490 }
3491
3492 /* Implement TARGET_LIBCALL_VALUE. */
3493 static rtx
3494 nios2_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
3495 {
3496 return gen_rtx_REG (mode, FIRST_RETVAL_REGNO);
3497 }
3498
3499 /* Implement TARGET_FUNCTION_VALUE_REGNO_P. */
3500 static bool
3501 nios2_function_value_regno_p (const unsigned int regno)
3502 {
3503 return regno == FIRST_RETVAL_REGNO;
3504 }
3505
3506 /* Implement TARGET_RETURN_IN_MEMORY. */
3507 static bool
3508 nios2_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3509 {
3510 return (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3511 || int_size_in_bytes (type) == -1);
3512 }
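/* E.g., an 8-byte struct is returned in registers (r2/r3), while a
12-byte struct, or any type of variable size, is returned in memory. */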
3513
3514 /* TODO: It may be possible to eliminate the copyback and implement
3515 our own va_arg type. */
3516 static void
3517 nios2_setup_incoming_varargs (cumulative_args_t cum_v,
3518 const function_arg_info &arg,
3519 int *pretend_size, int second_time)
3520 {
3521 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3522 CUMULATIVE_ARGS local_cum;
3523 cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
3524 int regs_to_push;
3525 int pret_size;
3526
3527 cfun->machine->uses_anonymous_args = 1;
3528 local_cum = *cum;
3529 nios2_function_arg_advance (local_cum_v, arg);
3530
3531 regs_to_push = NUM_ARG_REGS - local_cum.regs_used;
3532
3533 /* If we can use CDX stwm to push the arguments on the stack,
3534 nios2_expand_prologue will do that instead. */
3535 if (!TARGET_HAS_CDX && !second_time && regs_to_push > 0)
3536 {
3537 rtx ptr = virtual_incoming_args_rtx;
3538 rtx mem = gen_rtx_MEM (BLKmode, ptr);
3539 emit_insn (gen_blockage ());
3540 move_block_from_reg (local_cum.regs_used + FIRST_ARG_REGNO, mem,
3541 regs_to_push);
3542 emit_insn (gen_blockage ());
3543 }
3544
3545 pret_size = regs_to_push * UNITS_PER_WORD;
3546 if (pret_size)
3547 *pretend_size = pret_size;
3548 }
3549
3550 \f
3551
3552 /* Init FPU builtins. */
3553 static void
3554 nios2_init_fpu_builtins (int start_code)
3555 {
3556 tree fndecl;
3557 char builtin_name[64] = "__builtin_custom_";
3558 unsigned int i, n = strlen ("__builtin_custom_");
3559
3560 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
3561 {
3562 snprintf (builtin_name + n, sizeof (builtin_name) - n,
3563 "%s", N2FPU_NAME (i));
3564 fndecl =
3565 add_builtin_function (builtin_name, nios2_ftype (N2FPU_FTCODE (i)),
3566 start_code + i, BUILT_IN_MD, NULL, NULL_TREE);
3567 nios2_register_builtin_fndecl (start_code + i, fndecl);
3568 }
3569 }
3570
3571 /* Helper function for expanding FPU builtins. */
3572 static rtx
3573 nios2_expand_fpu_builtin (tree exp, unsigned int code, rtx target)
3574 {
3575 struct expand_operand ops[MAX_RECOG_OPERANDS];
3576 enum insn_code icode = N2FPU_ICODE (code);
3577 int nargs, argno, opno = 0;
3578 int num_operands = N2FPU (code).num_operands;
3579 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
3580 bool has_target_p = (dst_mode != VOIDmode);
3581
3582 if (N2FPU_N (code) < 0)
3583 fatal_error (input_location,
3584 "Cannot call %<__builtin_custom_%s%> without specifying switch"
3585 " %<-mcustom-%s%>", N2FPU_NAME (code), N2FPU_NAME (code));
3586 if (has_target_p)
3587 create_output_operand (&ops[opno++], target, dst_mode);
3588 else
3589 /* Subtract away the count of the VOID return, mainly for fwrx/fwry. */
3590 num_operands -= 1;
3591 nargs = call_expr_nargs (exp);
3592 for (argno = 0; argno < nargs; argno++)
3593 {
3594 tree arg = CALL_EXPR_ARG (exp, argno);
3595 create_input_operand (&ops[opno++], expand_normal (arg),
3596 TYPE_MODE (TREE_TYPE (arg)));
3597 }
3598 if (!maybe_expand_insn (icode, num_operands, ops))
3599 {
3600 error ("invalid argument to built-in function");
3601 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
3602 }
3603 return has_target_p ? ops[0].value : const0_rtx;
3604 }
3605
3606 /* Nios II has custom instruction built-in functions of the forms:
3607 __builtin_custom_n
3608 __builtin_custom_nX
3609 __builtin_custom_nXX
3610 __builtin_custom_Xn
3611 __builtin_custom_XnX
3612 __builtin_custom_XnXX
3613
3614 where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
3615 Therefore with 0-1 return values, and 0-2 arguments, we have a
3616 total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
3617 */
3618 #define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
3619 static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
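/* For example, "__builtin_custom_fnff (42, a, b)" issues custom
instruction opcode 42 with two float inputs and a float result; the
leading 'f' encodes the return type and the trailing "ff" the
argument types, per the naming scheme above. */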
3620
3621 static void
3622 nios2_init_custom_builtins (int start_code)
3623 {
3624 tree builtin_ftype, ret_type, fndecl;
3625 char builtin_name[32] = "__builtin_custom_";
3626 int n = strlen ("__builtin_custom_");
3627 int builtin_code = 0;
3628 int lhs, rhs1, rhs2;
3629
3630 struct { tree type; const char *c; } op[4];
3631 /* z */ op[0].c = ""; op[0].type = NULL_TREE;
3632 /* f */ op[1].c = "f"; op[1].type = float_type_node;
3633 /* i */ op[2].c = "i"; op[2].type = integer_type_node;
3634 /* p */ op[3].c = "p"; op[3].type = ptr_type_node;
3635
3636 /* We enumerate through the possible operand types to create all the
3637 __builtin_custom_XnXX function tree types. Note that these may slightly
3638 overlap with the function types created for other fixed builtins. */
3639
3640 for (lhs = 0; lhs < 4; lhs++)
3641 for (rhs1 = 0; rhs1 < 4; rhs1++)
3642 for (rhs2 = 0; rhs2 < 4; rhs2++)
3643 {
3644 if (rhs1 == 0 && rhs2 != 0)
3645 continue;
3646 ret_type = (op[lhs].type ? op[lhs].type : void_type_node);
3647 builtin_ftype
3648 = build_function_type_list (ret_type, integer_type_node,
3649 op[rhs1].type, op[rhs2].type,
3650 NULL_TREE);
3651 snprintf (builtin_name + n, 32 - n, "%sn%s%s",
3652 op[lhs].c, op[rhs1].c, op[rhs2].c);
3653 /* Save copy of parameter string into custom_builtin_name[]. */
3654 strncpy (custom_builtin_name[builtin_code], builtin_name + n, 5);
3655 fndecl =
3656 add_builtin_function (builtin_name, builtin_ftype,
3657 start_code + builtin_code,
3658 BUILT_IN_MD, NULL, NULL_TREE);
3659 nios2_register_builtin_fndecl (start_code + builtin_code, fndecl);
3660 builtin_code += 1;
3661 }
3662 }
3663
3664 /* Helper function for expanding custom builtins. */
3665 static rtx
3666 nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
3667 {
3668 bool has_target_p = (TREE_TYPE (exp) != void_type_node);
3669 machine_mode tmode = VOIDmode;
3670 int nargs, argno;
3671 rtx value, insn, unspec_args[3];
3672 tree arg;
3673
3674 /* XnXX form. */
3675 if (has_target_p)
3676 {
3677 tmode = TYPE_MODE (TREE_TYPE (exp));
3678 if (!target || GET_MODE (target) != tmode
3679 || !REG_P (target))
3680 target = gen_reg_rtx (tmode);
3681 }
3682
3683 nargs = call_expr_nargs (exp);
3684 for (argno = 0; argno < nargs; argno++)
3685 {
3686 arg = CALL_EXPR_ARG (exp, argno);
3687 value = expand_normal (arg);
3688 unspec_args[argno] = value;
3689 if (argno == 0)
3690 {
3691 if (!custom_insn_opcode (value, VOIDmode))
3692 error ("custom instruction opcode must be compile time "
3693 "constant in the range 0-255 for %<__builtin_custom_%s%>",
3694 custom_builtin_name[index]);
3695 }
3696 else
3697 /* For other arguments, force into a register. */
3698 unspec_args[argno] = force_reg (TYPE_MODE (TREE_TYPE (arg)),
3699 unspec_args[argno]);
3700 }
3701 /* Fill remaining unspec operands with zero. */
3702 for (; argno < 3; argno++)
3703 unspec_args[argno] = const0_rtx;
3704
3705 insn = (has_target_p
3706 ? gen_rtx_SET (target,
3707 gen_rtx_UNSPEC_VOLATILE (tmode,
3708 gen_rtvec_v (3, unspec_args),
3709 UNSPECV_CUSTOM_XNXX))
3710 : gen_rtx_UNSPEC_VOLATILE (VOIDmode, gen_rtvec_v (3, unspec_args),
3711 UNSPECV_CUSTOM_NXX));
3712 emit_insn (insn);
3713 return has_target_p ? target : const0_rtx;
3714 }
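/* As a sketch, expanding, e.g., __builtin_custom_ini (5, x) above emits
   (illustrative pseudo-RTL):

   (set (reg:SI target)
        (unspec_volatile:SI [(const_int 5) (reg:SI x) (const_int 0)]
         UNSPECV_CUSTOM_XNXX))

   with the unused third operand slot padded by const0_rtx.  */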
3715
3716
3717 \f
3718
3719 /* Main definition of built-in functions. Nios II has a small number of fixed
3720 builtins, plus a large number of FPU insn builtins, and builtins for
3721 generating custom instructions. */
3722
3723 struct nios2_builtin_desc
3724 {
3725 enum insn_code icode;
3726 enum nios2_arch_type arch;
3727 enum nios2_ftcode ftype;
3728 const char *name;
3729 };
3730
3731 #define N2_BUILTINS \
3732 N2_BUILTIN_DEF (sync, R1, N2_FTYPE_VOID_VOID) \
3733 N2_BUILTIN_DEF (ldbio, R1, N2_FTYPE_SI_CVPTR) \
3734 N2_BUILTIN_DEF (ldbuio, R1, N2_FTYPE_UI_CVPTR) \
3735 N2_BUILTIN_DEF (ldhio, R1, N2_FTYPE_SI_CVPTR) \
3736 N2_BUILTIN_DEF (ldhuio, R1, N2_FTYPE_UI_CVPTR) \
3737 N2_BUILTIN_DEF (ldwio, R1, N2_FTYPE_SI_CVPTR) \
3738 N2_BUILTIN_DEF (stbio, R1, N2_FTYPE_VOID_VPTR_SI) \
3739 N2_BUILTIN_DEF (sthio, R1, N2_FTYPE_VOID_VPTR_SI) \
3740 N2_BUILTIN_DEF (stwio, R1, N2_FTYPE_VOID_VPTR_SI) \
3741 N2_BUILTIN_DEF (rdctl, R1, N2_FTYPE_SI_SI) \
3742 N2_BUILTIN_DEF (wrctl, R1, N2_FTYPE_VOID_SI_SI) \
3743 N2_BUILTIN_DEF (rdprs, R1, N2_FTYPE_SI_SI_SI) \
3744 N2_BUILTIN_DEF (flushd, R1, N2_FTYPE_VOID_VPTR) \
3745 N2_BUILTIN_DEF (flushda, R1, N2_FTYPE_VOID_VPTR) \
3746 N2_BUILTIN_DEF (wrpie, R2, N2_FTYPE_SI_SI) \
3747 N2_BUILTIN_DEF (eni, R2, N2_FTYPE_VOID_SI) \
3748 N2_BUILTIN_DEF (ldex, R2, N2_FTYPE_SI_CVPTR) \
3749 N2_BUILTIN_DEF (ldsex, R2, N2_FTYPE_SI_CVPTR) \
3750 N2_BUILTIN_DEF (stex, R2, N2_FTYPE_SI_VPTR_SI) \
3751 N2_BUILTIN_DEF (stsex, R2, N2_FTYPE_SI_VPTR_SI)
3752
3753 enum nios2_builtin_code {
3754 #define N2_BUILTIN_DEF(name, arch, ftype) NIOS2_BUILTIN_ ## name,
3755 N2_BUILTINS
3756 #undef N2_BUILTIN_DEF
3757 NUM_FIXED_NIOS2_BUILTINS
3758 };
3759
3760 static const struct nios2_builtin_desc nios2_builtins[] = {
3761 #define N2_BUILTIN_DEF(name, arch, ftype) \
3762 { CODE_FOR_ ## name, ARCH_ ## arch, ftype, "__builtin_" #name },
3763 N2_BUILTINS
3764 #undef N2_BUILTIN_DEF
3765 };
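/* To make the X-macro expansion concrete: the first N2_BUILTIN_DEF
   entry above produces NIOS2_BUILTIN_sync in the enum and

     { CODE_FOR_sync, ARCH_R1, N2_FTYPE_VOID_VOID, "__builtin_sync" }

   in the descriptor table.  */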
3766
3767 /* Start/ends of FPU/custom insn builtin index ranges. */
3768 static unsigned int nios2_fpu_builtin_base;
3769 static unsigned int nios2_custom_builtin_base;
3770 static unsigned int nios2_custom_builtin_end;
3771
3772 /* Implement TARGET_INIT_BUILTINS. */
3773 static void
3774 nios2_init_builtins (void)
3775 {
3776 unsigned int i;
3777
3778 /* Initialize fixed builtins. */
3779 for (i = 0; i < ARRAY_SIZE (nios2_builtins); i++)
3780 {
3781 const struct nios2_builtin_desc *d = &nios2_builtins[i];
3782 tree fndecl =
3783 add_builtin_function (d->name, nios2_ftype (d->ftype), i,
3784 BUILT_IN_MD, NULL, NULL);
3785 nios2_register_builtin_fndecl (i, fndecl);
3786 }
3787
3788 /* Initialize FPU builtins. */
3789 nios2_fpu_builtin_base = ARRAY_SIZE (nios2_builtins);
3790 nios2_init_fpu_builtins (nios2_fpu_builtin_base);
3791
3792 /* Initialize custom insn builtins. */
3793 nios2_custom_builtin_base
3794 = nios2_fpu_builtin_base + ARRAY_SIZE (nios2_fpu_insn);
3795 nios2_custom_builtin_end
3796 = nios2_custom_builtin_base + NUM_CUSTOM_BUILTINS;
3797 nios2_init_custom_builtins (nios2_custom_builtin_base);
3798 }
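/* After initialization, the builtin function codes are laid out in
   three contiguous ranges:

     [0, nios2_fpu_builtin_base)                             fixed builtins
     [nios2_fpu_builtin_base, nios2_custom_builtin_base)     FPU builtins
     [nios2_custom_builtin_base, nios2_custom_builtin_end)   custom builtins

   nios2_expand_builtin below dispatches on these ranges.  */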
3799
3800 /* Array of fndecls for TARGET_BUILTIN_DECL. */
3801 #define NIOS2_NUM_BUILTINS \
3802 (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
3803 static GTY(()) tree nios2_builtin_decls[NIOS2_NUM_BUILTINS];
3804
3805 static void
3806 nios2_register_builtin_fndecl (unsigned code, tree fndecl)
3807 {
3808 nios2_builtin_decls[code] = fndecl;
3809 }
3810
3811 /* Implement TARGET_BUILTIN_DECL. */
3812 static tree
3813 nios2_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3814 {
3815 gcc_assert (nios2_custom_builtin_end == ARRAY_SIZE (nios2_builtin_decls));
3816
3817 if (code >= nios2_custom_builtin_end)
3818 return error_mark_node;
3819
3820 if (code >= nios2_fpu_builtin_base
3821 && code < nios2_custom_builtin_base
3822 && ! N2FPU_ENABLED_P (code - nios2_fpu_builtin_base))
3823 return error_mark_node;
3824
3825 return nios2_builtin_decls[code];
3826 }
3827
3828 \f
3829 /* Low-level built-in expand routine. */
3830 static rtx
3831 nios2_expand_builtin_insn (const struct nios2_builtin_desc *d, int n,
3832 struct expand_operand *ops, bool has_target_p)
3833 {
3834 if (maybe_expand_insn (d->icode, n, ops))
3835 return has_target_p ? ops[0].value : const0_rtx;
3836 else
3837 {
3838 error ("invalid argument to built-in function %s", d->name);
3839 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
3840 }
3841 }
3842
3843 /* Expand ldio/stio and ldex/ldsex/stex/stsex form load-store
3844 instruction builtins. */
3845 static rtx
3846 nios2_expand_ldst_builtin (tree exp, rtx target,
3847 const struct nios2_builtin_desc *d)
3848 {
3849 bool has_target_p;
3850 rtx addr, mem, val;
3851 struct expand_operand ops[MAX_RECOG_OPERANDS];
3852 machine_mode mode = insn_data[d->icode].operand[0].mode;
3853
3854 addr = expand_normal (CALL_EXPR_ARG (exp, 0));
3855 mem = gen_rtx_MEM (mode, addr);
3856
3857 if (insn_data[d->icode].operand[0].allows_mem)
3858 {
3859 /* stxio/stex/stsex. */
3860 val = expand_normal (CALL_EXPR_ARG (exp, 1));
3861 if (CONST_INT_P (val))
3862 val = force_reg (mode, gen_int_mode (INTVAL (val), mode));
3863 val = simplify_gen_subreg (mode, val, GET_MODE (val), 0);
3864 create_output_operand (&ops[0], mem, mode);
3865 create_input_operand (&ops[1], val, mode);
3866 if (insn_data[d->icode].n_operands == 3)
3867 {
3868 /* stex/stsex status value, returned as result of function. */
3869 create_output_operand (&ops[2], target, mode);
3870 has_target_p = true;
3871 }
3872 else
3873 has_target_p = false;
3874 }
3875 else
3876 {
3877 /* ldxio. */
3878 create_output_operand (&ops[0], target, mode);
3879 create_input_operand (&ops[1], mem, mode);
3880 has_target_p = true;
3881 }
3882 return nios2_expand_builtin_insn (d, insn_data[d->icode].n_operands, ops,
3883 has_target_p);
3884 }
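/* A sketch of typical user-level calls handled here (signatures
   inferred from the N2_FTYPE_* codes in the builtin table below):

     int v = __builtin_ldwio (addr);   cache-bypassing I/O word load
     __builtin_stwio (addr, v);        cache-bypassing I/O word store  */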
3885
3886 /* Expand rdctl/wrctl builtins. */
3887 static rtx
3888 nios2_expand_rdwrctl_builtin (tree exp, rtx target,
3889 const struct nios2_builtin_desc *d)
3890 {
3891 bool has_target_p = (insn_data[d->icode].operand[0].predicate
3892 == register_operand);
3893 rtx ctlcode = expand_normal (CALL_EXPR_ARG (exp, 0));
3894 struct expand_operand ops[MAX_RECOG_OPERANDS];
3895 if (!rdwrctl_operand (ctlcode, VOIDmode))
3896 {
3897 error ("Control register number must be in range 0-31 for %s",
3898 d->name);
3899 return has_target_p ? gen_reg_rtx (SImode) : const0_rtx;
3900 }
3901 if (has_target_p)
3902 {
3903 create_output_operand (&ops[0], target, SImode);
3904 create_integer_operand (&ops[1], INTVAL (ctlcode));
3905 }
3906 else
3907 {
3908 rtx val = expand_normal (CALL_EXPR_ARG (exp, 1));
3909 create_integer_operand (&ops[0], INTVAL (ctlcode));
3910 create_input_operand (&ops[1], val, SImode);
3911 }
3912 return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
3913 }
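/* For example, a user might read and then write back control register
   0 as:

     int v = __builtin_rdctl (0);
     __builtin_wrctl (0, v);

   where the register number must be a constant in the range 0-31, as
   diagnosed above.  */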
3914
3915 static rtx
3916 nios2_expand_rdprs_builtin (tree exp, rtx target,
3917 const struct nios2_builtin_desc *d)
3918 {
3919 rtx reg = expand_normal (CALL_EXPR_ARG (exp, 0));
3920 rtx imm = expand_normal (CALL_EXPR_ARG (exp, 1));
3921 struct expand_operand ops[MAX_RECOG_OPERANDS];
3922
3923 if (!rdwrctl_operand (reg, VOIDmode))
3924 {
3925 error ("Register number must be in range 0-31 for %s",
3926 d->name);
3927 return gen_reg_rtx (SImode);
3928 }
3929
3930 if (!rdprs_dcache_operand (imm, VOIDmode))
3931 {
3932 error ("The immediate value must fit into a %d-bit integer for %s",
3933 (TARGET_ARCH_R2) ? 12 : 16, d->name);
3934 return gen_reg_rtx (SImode);
3935 }
3936
3937 create_output_operand (&ops[0], target, SImode);
3938 create_input_operand (&ops[1], reg, SImode);
3939 create_integer_operand (&ops[2], INTVAL (imm));
3940
3941 return nios2_expand_builtin_insn (d, 3, ops, true);
3942 }
3943
3944 static rtx
3945 nios2_expand_cache_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3946 const struct nios2_builtin_desc *d)
3947 {
3948 rtx mem, addr;
3949 struct expand_operand ops[MAX_RECOG_OPERANDS];
3950
3951 addr = expand_normal (CALL_EXPR_ARG (exp, 0));
3952 mem = gen_rtx_MEM (SImode, addr);
3953
3954 create_input_operand (&ops[0], mem, SImode);
3955
3956 return nios2_expand_builtin_insn (d, 1, ops, false);
3957 }
3958
3959 static rtx
3960 nios2_expand_wrpie_builtin (tree exp, rtx target,
3961 const struct nios2_builtin_desc *d)
3962 {
3963 rtx val;
3964 struct expand_operand ops[MAX_RECOG_OPERANDS];
3965
3966 val = expand_normal (CALL_EXPR_ARG (exp, 0));
3967 create_input_operand (&ops[1], val, SImode);
3968 create_output_operand (&ops[0], target, SImode);
3969
3970 return nios2_expand_builtin_insn (d, 2, ops, true);
3971 }
3972
3973 static rtx
3974 nios2_expand_eni_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3975 const struct nios2_builtin_desc *d)
3976 {
3977 rtx imm = expand_normal (CALL_EXPR_ARG (exp, 0));
3978 struct expand_operand ops[MAX_RECOG_OPERANDS];
3979
3980 if (INTVAL (imm) != 0 && INTVAL (imm) != 1)
3981 {
3982 error ("The ENI instruction operand must be either 0 or 1");
3983 return const0_rtx;
3984 }
3985 create_integer_operand (&ops[0], INTVAL (imm));
3986
3987 return nios2_expand_builtin_insn (d, 1, ops, false);
3988 }
3989
3990 /* Implement TARGET_EXPAND_BUILTIN. Expand an expression EXP that calls
3991 a built-in function, with result going to TARGET if that's convenient
3992 (and in mode MODE if that's convenient).
3993 SUBTARGET may be used as the target for computing one of EXP's operands.
3994 IGNORE is nonzero if the value is to be ignored. */
3995
3996 static rtx
3997 nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
3998 machine_mode mode ATTRIBUTE_UNUSED,
3999 int ignore ATTRIBUTE_UNUSED)
4000 {
4001 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
4002 unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
4003
4004 if (fcode < nios2_fpu_builtin_base)
4005 {
4006 const struct nios2_builtin_desc *d = &nios2_builtins[fcode];
4007
4008 if (d->arch > nios2_arch_option)
4009 {
4010 error ("Builtin function %s requires Nios II R%d",
4011 d->name, (int) d->arch);
4012 /* Given it is invalid, just generate a normal call. */
4013 return expand_call (exp, target, ignore);
4014 }
4015
4016 switch (fcode)
4017 {
4018 case NIOS2_BUILTIN_sync:
4019 emit_insn (gen_sync ());
4020 return const0_rtx;
4021
4022 case NIOS2_BUILTIN_ldbio:
4023 case NIOS2_BUILTIN_ldbuio:
4024 case NIOS2_BUILTIN_ldhio:
4025 case NIOS2_BUILTIN_ldhuio:
4026 case NIOS2_BUILTIN_ldwio:
4027 case NIOS2_BUILTIN_stbio:
4028 case NIOS2_BUILTIN_sthio:
4029 case NIOS2_BUILTIN_stwio:
4030 case NIOS2_BUILTIN_ldex:
4031 case NIOS2_BUILTIN_ldsex:
4032 case NIOS2_BUILTIN_stex:
4033 case NIOS2_BUILTIN_stsex:
4034 return nios2_expand_ldst_builtin (exp, target, d);
4035
4036 case NIOS2_BUILTIN_rdctl:
4037 case NIOS2_BUILTIN_wrctl:
4038 return nios2_expand_rdwrctl_builtin (exp, target, d);
4039
4040 case NIOS2_BUILTIN_rdprs:
4041 return nios2_expand_rdprs_builtin (exp, target, d);
4042
4043 case NIOS2_BUILTIN_flushd:
4044 case NIOS2_BUILTIN_flushda:
4045 return nios2_expand_cache_builtin (exp, target, d);
4046
4047 case NIOS2_BUILTIN_wrpie:
4048 return nios2_expand_wrpie_builtin (exp, target, d);
4049
4050 case NIOS2_BUILTIN_eni:
4051 return nios2_expand_eni_builtin (exp, target, d);
4052
4053 default:
4054 gcc_unreachable ();
4055 }
4056 }
4057 else if (fcode < nios2_custom_builtin_base)
4058 /* FPU builtin range. */
4059 return nios2_expand_fpu_builtin (exp, fcode - nios2_fpu_builtin_base,
4060 target);
4061 else if (fcode < nios2_custom_builtin_end)
4062 /* Custom insn builtin range. */
4063 return nios2_expand_custom_builtin (exp, fcode - nios2_custom_builtin_base,
4064 target);
4065 else
4066 gcc_unreachable ();
4067 }
4068
4069 /* Implement TARGET_INIT_LIBFUNCS. */
4070 static void ATTRIBUTE_UNUSED
4071 nios2_init_libfuncs (void)
4072 {
4073 init_sync_libfuncs (UNITS_PER_WORD);
4074 }
4075
4076 \f
4077
4078 /* Register a custom code use, and signal an error if a conflict is found. */
4079 static void
4080 nios2_register_custom_code (unsigned int N, enum nios2_ccs_code status,
4081 int index)
4082 {
4083 gcc_assert (N <= 255);
4084
4085 if (status == CCS_FPU)
4086 {
4087 if (custom_code_status[N] == CCS_FPU && index != custom_code_index[N])
4088 {
4089 custom_code_conflict = true;
4090 error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
4091 N2FPU_NAME (custom_code_index[N]), N2FPU_NAME (index));
4092 }
4093 else if (custom_code_status[N] == CCS_BUILTIN_CALL)
4094 {
4095 custom_code_conflict = true;
4096 error ("call to %<__builtin_custom_%s%> conflicts with switch "
4097 "%<-mcustom-%s%>", custom_builtin_name[custom_code_index[N]],
4098 N2FPU_NAME (index));
4099 }
4100 }
4101 else if (status == CCS_BUILTIN_CALL)
4102 {
4103 if (custom_code_status[N] == CCS_FPU)
4104 {
4105 custom_code_conflict = true;
4106 error ("call to %<__builtin_custom_%s%> conflicts with switch "
4107 "%<-mcustom-%s%>", custom_builtin_name[index],
4108 N2FPU_NAME (custom_code_index[N]));
4109 }
4110 else
4111 {
4112 /* Note that code conflicts between different __builtin_custom_xnxx
4113 calls are not checked. */
4114 }
4115 }
4116 else
4117 gcc_unreachable ();
4118
4119 custom_code_status[N] = status;
4120 custom_code_index[N] = index;
4121 }
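/* For example (illustrative): if -mcustom-fadds=100 registers code 100
   with CCS_FPU, a later registration of code 100 with CCS_BUILTIN_CALL
   (from a __builtin_custom_* call) triggers the conflict error above,
   since one opcode cannot serve both uses.  */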
4122
4123 /* Mark a custom code as not in use. */
4124 static void
4125 nios2_deregister_custom_code (unsigned int N)
4126 {
4127 if (N <= 255)
4128 {
4129 custom_code_status[N] = CCS_UNUSED;
4130 custom_code_index[N] = 0;
4131 }
4132 }
4133
4134 /* Target attributes can affect per-function option state, so we need to
4135 save/restore the custom code tracking info using the
4136 TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks. */
4137
4138 static void
4139 nios2_option_save (struct cl_target_option *ptr,
4140 struct gcc_options *opts ATTRIBUTE_UNUSED,
4141 struct gcc_options *opts_set ATTRIBUTE_UNUSED)
4142 {
4143 unsigned int i;
4144 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
4145 ptr->saved_fpu_custom_code[i] = N2FPU_N (i);
4146 memcpy (ptr->saved_custom_code_status, custom_code_status,
4147 sizeof (custom_code_status));
4148 memcpy (ptr->saved_custom_code_index, custom_code_index,
4149 sizeof (custom_code_index));
4150 }
4151
4152 static void
4153 nios2_option_restore (struct gcc_options *opts ATTRIBUTE_UNUSED,
4154 struct gcc_options *opts_set ATTRIBUTE_UNUSED,
4155 struct cl_target_option *ptr)
4156 {
4157 unsigned int i;
4158 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
4159 N2FPU_N (i) = ptr->saved_fpu_custom_code[i];
4160 memcpy (custom_code_status, ptr->saved_custom_code_status,
4161 sizeof (custom_code_status));
4162 memcpy (custom_code_index, ptr->saved_custom_code_index,
4163 sizeof (custom_code_index));
4164 }
4165
4166 /* Inner function to process the attribute((target(...))); it takes an
4167 argument and sets the current options from that argument. If we have
4168 a list, recursively go over the list. */
4169
4170 static bool
4171 nios2_valid_target_attribute_rec (tree args)
4172 {
4173 if (TREE_CODE (args) == TREE_LIST)
4174 {
4175 bool ret = true;
4176 for (; args; args = TREE_CHAIN (args))
4177 if (TREE_VALUE (args)
4178 && !nios2_valid_target_attribute_rec (TREE_VALUE (args)))
4179 ret = false;
4180 return ret;
4181 }
4182 else if (TREE_CODE (args) == STRING_CST)
4183 {
4184 char *argstr = ASTRDUP (TREE_STRING_POINTER (args));
4185 while (argstr && *argstr != '\0')
4186 {
4187 bool no_opt = false, end_p = false;
4188 char *eq = NULL, *p;
4189 while (ISSPACE (*argstr))
4190 argstr++;
4191 p = argstr;
4192 while (*p != '\0' && *p != ',')
4193 {
4194 if (!eq && *p == '=')
4195 eq = p;
4196 ++p;
4197 }
4198 if (*p == '\0')
4199 end_p = true;
4200 else
4201 *p = '\0';
4202 if (eq) *eq = '\0';
4203
4204 if (!strncmp (argstr, "no-", 3))
4205 {
4206 no_opt = true;
4207 argstr += 3;
4208 }
4209 if (!strncmp (argstr, "custom-fpu-cfg", 14))
4210 {
4211 char *end_eq = p;
4212 if (no_opt)
4213 {
4214 error ("custom-fpu-cfg option does not support %<no-%>");
4215 return false;
4216 }
4217 if (!eq)
4218 {
4219 error ("custom-fpu-cfg option requires configuration"
4220 " argument");
4221 return false;
4222 }
4223 /* Increment and skip whitespace. */
4224 while (ISSPACE (*(++eq))) ;
4225 /* Decrement and skip to before any trailing whitespace. */
4226 while (ISSPACE (*(--end_eq))) ;
4227
4228 nios2_handle_custom_fpu_cfg (eq, end_eq + 1, true);
4229 }
4230 else if (!strncmp (argstr, "custom-", 7))
4231 {
4232 int code = -1;
4233 unsigned int i;
4234 for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
4235 if (!strncmp (argstr + 7, N2FPU_NAME (i),
4236 strlen (N2FPU_NAME (i))))
4237 {
4238 /* Found insn. */
4239 code = i;
4240 break;
4241 }
4242 if (code >= 0)
4243 {
4244 if (no_opt)
4245 {
4246 if (eq)
4247 {
4248 error ("%<no-custom-%s%> does not accept arguments",
4249 N2FPU_NAME (code));
4250 return false;
4251 }
4252 /* Disable option by setting to -1. */
4253 nios2_deregister_custom_code (N2FPU_N (code));
4254 N2FPU_N (code) = -1;
4255 }
4256 else
4257 {
4258 char *t;
4259 if (eq)
4260 while (ISSPACE (*(++eq))) ;
4261 if (!eq || eq == p)
4262 {
4263 error ("%<custom-%s=%> requires argument",
4264 N2FPU_NAME (code));
4265 return false;
4266 }
4267 for (t = eq; t != p; ++t)
4268 {
4269 if (ISSPACE (*t))
4270 continue;
4271 if (!ISDIGIT (*t))
4272 {
4273 error ("%<custom-%s=%> argument should be "
4274 "a non-negative integer", N2FPU_NAME (code));
4275 return false;
4276 }
4277 }
4278 /* Set option to argument. */
4279 N2FPU_N (code) = atoi (eq);
4280 nios2_handle_custom_fpu_insn_option (code);
4281 }
4282 }
4283 else
4284 {
4285 error ("%<custom-%s=%> is not recognized as FPU instruction",
4286 argstr + 7);
4287 return false;
4288 }
4289 }
4290 else
4291 {
4292 error ("%<%s%> is unknown", argstr);
4293 return false;
4294 }
4295
4296 if (end_p)
4297 break;
4298 else
4299 argstr = p + 1;
4300 }
4301 return true;
4302 }
4303 else
4304 gcc_unreachable ();
4305 }
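/* Examples of attribute strings accepted by the parser above
   (values illustrative; see the GCC manual for the full syntax):

     __attribute__ ((target ("custom-fmuls=252")))
     __attribute__ ((target ("no-custom-fmuls")))
     __attribute__ ((target ("custom-fpu-cfg=60-1")))  */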
4306
4307 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
4308
4309 static tree
4310 nios2_valid_target_attribute_tree (tree args)
4311 {
4312 if (!nios2_valid_target_attribute_rec (args))
4313 return NULL_TREE;
4314 nios2_custom_check_insns ();
4315 return build_target_option_node (&global_options, &global_options_set);
4316 }
4317
4318 /* Hook to validate attribute((target("string"))). */
4319
4320 static bool
4321 nios2_valid_target_attribute_p (tree fndecl, tree ARG_UNUSED (name),
4322 tree args, int ARG_UNUSED (flags))
4323 {
4324 struct cl_target_option cur_target;
4325 bool ret = true;
4326 tree old_optimize
4327 = build_optimization_node (&global_options, &global_options_set);
4328 tree new_target, new_optimize;
4329 tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);
4330
4331 /* If the function changed the optimization levels as well as setting target
4332 options, start with the optimizations specified. */
4333 if (func_optimize && func_optimize != old_optimize)
4334 cl_optimization_restore (&global_options, &global_options_set,
4335 TREE_OPTIMIZATION (func_optimize));
4336
4337 /* The target attributes may also change some optimization flags, so update
4338 the optimization options if necessary. */
4339 cl_target_option_save (&cur_target, &global_options, &global_options_set);
4340 new_target = nios2_valid_target_attribute_tree (args);
4341 new_optimize = build_optimization_node (&global_options, &global_options_set);
4342
4343 if (!new_target)
4344 ret = false;
4345
4346 else if (fndecl)
4347 {
4348 DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;
4349
4350 if (old_optimize != new_optimize)
4351 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
4352 }
4353
4354 cl_target_option_restore (&global_options, &global_options_set, &cur_target);
4355
4356 if (old_optimize != new_optimize)
4357 cl_optimization_restore (&global_options, &global_options_set,
4358 TREE_OPTIMIZATION (old_optimize));
4359 return ret;
4360 }
4361
4362 /* Remember the fndecl from the last call to nios2_set_current_function. */
4363 static GTY(()) tree nios2_previous_fndecl;
4364
4365 /* Establish appropriate back-end context for processing the function
4366 FNDECL. The argument might be NULL to indicate processing at top
4367 level, outside of any function scope. */
4368 static void
4369 nios2_set_current_function (tree fndecl)
4370 {
4371 tree old_tree = (nios2_previous_fndecl
4372 ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl)
4373 : NULL_TREE);
4374
4375 tree new_tree = (fndecl
4376 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
4377 : NULL_TREE);
4378
4379 if (fndecl && fndecl != nios2_previous_fndecl)
4380 {
4381 nios2_previous_fndecl = fndecl;
4382 if (old_tree == new_tree)
4383 ;
4384
4385 else if (new_tree)
4386 {
4387 cl_target_option_restore (&global_options, &global_options_set,
4388 TREE_TARGET_OPTION (new_tree));
4389 target_reinit ();
4390 }
4391
4392 else if (old_tree)
4393 {
4394 struct cl_target_option *def
4395 = TREE_TARGET_OPTION (target_option_current_node);
4396
4397 cl_target_option_restore (&global_options, &global_options_set, def);
4398 target_reinit ();
4399 }
4400 }
4401 }
4402
4403 /* Hook to validate the current #pragma GCC target and set the FPU custom
4404 code option state. If ARGS is NULL, then POP_TARGET is used to reset
4405 the options. */
4406 static bool
4407 nios2_pragma_target_parse (tree args, tree pop_target)
4408 {
4409 tree cur_tree;
4410 if (! args)
4411 {
4412 cur_tree = ((pop_target)
4413 ? pop_target
4414 : target_option_default_node);
4415 cl_target_option_restore (&global_options, &global_options_set,
4416 TREE_TARGET_OPTION (cur_tree));
4417 }
4418 else
4419 {
4420 cur_tree = nios2_valid_target_attribute_tree (args);
4421 if (!cur_tree)
4422 return false;
4423 }
4424
4425 target_option_current_node = cur_tree;
4426 return true;
4427 }
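/* For example (illustrative):

     #pragma GCC push_options
     #pragma GCC target ("custom-fdivs=253")
     ... functions using the fdivs custom instruction ...
     #pragma GCC pop_options
 */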
4428
4429 /* Implement TARGET_MERGE_DECL_ATTRIBUTES.
4430 We are just using this hook to add some additional error checking to
4431 the default behavior. GCC does not provide a target hook for merging
4432 the target options, and only correctly handles merging empty vs non-empty
4433 option data; see merge_decls() in c-decl.c.
4434 So here we require either that at least one of the decls has empty
4435 target options, or that the target options/data be identical. */
4436 static tree
4437 nios2_merge_decl_attributes (tree olddecl, tree newdecl)
4438 {
4439 tree oldopts = lookup_attribute ("target", DECL_ATTRIBUTES (olddecl));
4440 tree newopts = lookup_attribute ("target", DECL_ATTRIBUTES (newdecl));
4441 if (newopts && oldopts && newopts != oldopts)
4442 {
4443 tree oldtree = DECL_FUNCTION_SPECIFIC_TARGET (olddecl);
4444 tree newtree = DECL_FUNCTION_SPECIFIC_TARGET (newdecl);
4445 if (oldtree && newtree && oldtree != newtree)
4446 {
4447 struct cl_target_option *olddata = TREE_TARGET_OPTION (oldtree);
4448 struct cl_target_option *newdata = TREE_TARGET_OPTION (newtree);
4449 if (olddata != newdata
4450 && memcmp (olddata, newdata, sizeof (struct cl_target_option)))
4451 error ("%qE redeclared with conflicting %qs attributes",
4452 DECL_NAME (newdecl), "target");
4453 }
4454 }
4455 return merge_attributes (DECL_ATTRIBUTES (olddecl),
4456 DECL_ATTRIBUTES (newdecl));
4457 }
4458
4459 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
4460 static void
4461 nios2_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
4462 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
4463 tree function)
4464 {
4465 const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
4466 rtx this_rtx, funexp;
4467 rtx_insn *insn;
4468
4469 /* Pretend to be a post-reload pass while generating rtl. */
4470 reload_completed = 1;
4471
4472 if (flag_pic)
4473 nios2_load_pic_register ();
4474
4475 /* Mark the end of the (empty) prologue. */
4476 emit_note (NOTE_INSN_PROLOGUE_END);
4477
4478 /* Find the "this" pointer. If the function returns a structure,
4479 the structure return pointer is in $5. */
4480 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
4481 this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO + 1);
4482 else
4483 this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
4484
4485 /* Add DELTA to THIS_RTX. */
4486 nios2_emit_add_constant (this_rtx, delta);
4487
4488 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
4489 if (vcall_offset)
4490 {
4491 rtx tmp;
4492
4493 tmp = gen_rtx_REG (Pmode, 2);
4494 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
4495 nios2_emit_add_constant (tmp, vcall_offset);
4496 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
4497 emit_insn (gen_add2_insn (this_rtx, tmp));
4498 }
4499
4500 /* Generate a tail call to the target function. */
4501 if (!TREE_USED (function))
4502 {
4503 assemble_external (function);
4504 TREE_USED (function) = 1;
4505 }
4506 funexp = XEXP (DECL_RTL (function), 0);
4507 /* Under PIC, the function address needs to be constructed;
4508 provide r2 for use here. */
4509 nios2_adjust_call_address (&funexp, gen_rtx_REG (Pmode, 2));
4510 insn = emit_call_insn (gen_sibcall_internal (funexp, const0_rtx));
4511 SIBLING_CALL_P (insn) = 1;
4512
4513 /* Run just enough of rest_of_compilation to get the insns emitted.
4514 There's not really enough bulk here to make other passes such as
4515 instruction scheduling worthwhile. */
4516 insn = get_insns ();
4517 shorten_branches (insn);
4518 assemble_start_function (thunk_fndecl, fnname);
4519 final_start_function (insn, file, 1);
4520 final (insn, file, 1);
4521 final_end_function ();
4522 assemble_end_function (thunk_fndecl, fnname);
4523
4524 /* Stop pretending to be a post-reload pass. */
4525 reload_completed = 0;
4526 }
4527
4528
4529 /* Utility function to break a memory address into
4530 base register + constant offset. Return false if something
4531 unexpected is seen. */
4532 static bool
4533 split_mem_address (rtx addr, rtx *base_reg, rtx *offset)
4534 {
4535 if (REG_P (addr))
4536 {
4537 *base_reg = addr;
4538 *offset = const0_rtx;
4539 return true;
4540 }
4541 else if (GET_CODE (addr) == PLUS)
4542 {
4543 *base_reg = XEXP (addr, 0);
4544 *offset = XEXP (addr, 1);
4545 return true;
4546 }
4547 return false;
4548 }
4549
4550 /* Splits out the operands of an ALU insn, places them in *LHS, *RHS1, *RHS2. */
4551 static void
4552 split_alu_insn (rtx_insn *insn, rtx *lhs, rtx *rhs1, rtx *rhs2)
4553 {
4554 rtx pat = PATTERN (insn);
4555 gcc_assert (GET_CODE (pat) == SET);
4556 *lhs = SET_DEST (pat);
4557 *rhs1 = XEXP (SET_SRC (pat), 0);
4558 if (GET_RTX_CLASS (GET_CODE (SET_SRC (pat))) != RTX_UNARY)
4559 *rhs2 = XEXP (SET_SRC (pat), 1);
4560 return;
4561 }
4562
4563 /* Returns true if OP is a REG and assigned a CDX reg. */
4564 static bool
4565 cdxreg (rtx op)
4566 {
4567 return REG_P (op) && (!reload_completed || CDX_REG_P (REGNO (op)));
4568 }
4569
4570 /* Returns true if OP is within range of CDX addi.n immediates. */
4571 static bool
4572 cdx_add_immed (rtx op)
4573 {
4574 if (CONST_INT_P (op))
4575 {
4576 HOST_WIDE_INT ival = INTVAL (op);
4577 return ival <= 128 && ival > 0 && (ival & (ival - 1)) == 0;
4578 }
4579 return false;
4580 }
4581
4582 /* Returns true if OP is within range of CDX andi.n immediates. */
4583 static bool
4584 cdx_and_immed (rtx op)
4585 {
4586 if (CONST_INT_P (op))
4587 {
4588 HOST_WIDE_INT ival = INTVAL (op);
4589 return (ival == 1 || ival == 2 || ival == 3 || ival == 4
4590 || ival == 8 || ival == 0xf || ival == 0x10
4591 || ival == 0x1f || ival == 0x20
4592 || ival == 0x3f || ival == 0x7f
4593 || ival == 0x80 || ival == 0xff || ival == 0x7ff
4594 || ival == 0xff00 || ival == 0xffff);
4595 }
4596 return false;
4597 }
4598
4599 /* Returns true if OP is within range of CDX movi.n immediates. */
4600 static bool
4601 cdx_mov_immed (rtx op)
4602 {
4603 if (CONST_INT_P (op))
4604 {
4605 HOST_WIDE_INT ival = INTVAL (op);
4606 return ((ival >= 0 && ival <= 124)
4607 || ival == 0xff || ival == -2 || ival == -1);
4608 }
4609 return false;
4610 }
4611
4612 /* Returns true if OP is within range of CDX slli.n/srli.n immediates. */
4613 static bool
4614 cdx_shift_immed (rtx op)
4615 {
4616 if (CONST_INT_P (op))
4617 {
4618 HOST_WIDE_INT ival = INTVAL (op);
4619 return (ival == 1 || ival == 2 || ival == 3 || ival == 8
4620 || ival == 12 || ival == 16 || ival == 24
4621 || ival == 31);
4622 }
4623 return false;
4624 }
4625
4626
4627
4628 /* Classification of different kinds of add instructions. */
4629 enum nios2_add_insn_kind {
4630 nios2_add_n_kind,
4631 nios2_addi_n_kind,
4632 nios2_subi_n_kind,
4633 nios2_spaddi_n_kind,
4634 nios2_spinci_n_kind,
4635 nios2_spdeci_n_kind,
4636 nios2_add_kind,
4637 nios2_addi_kind
4638 };
4639
4640 static const char *nios2_add_insn_names[] = {
4641 "add.n", "addi.n", "subi.n", "spaddi.n", "spinci.n", "spdeci.n",
4642 "add", "addi" };
4643 static bool nios2_add_insn_narrow[] = {
4644 true, true, true, true, true, true,
4645 false, false};
4646
4647 /* Function to classify kinds of add instruction patterns. */
4648 static enum nios2_add_insn_kind
4649 nios2_add_insn_classify (rtx_insn *insn ATTRIBUTE_UNUSED,
4650 rtx lhs, rtx rhs1, rtx rhs2)
4651 {
4652 if (TARGET_HAS_CDX)
4653 {
4654 if (cdxreg (lhs) && cdxreg (rhs1))
4655 {
4656 if (cdxreg (rhs2))
4657 return nios2_add_n_kind;
4658 if (CONST_INT_P (rhs2))
4659 {
4660 HOST_WIDE_INT ival = INTVAL (rhs2);
4661 if (ival > 0 && cdx_add_immed (rhs2))
4662 return nios2_addi_n_kind;
4663 if (ival < 0 && cdx_add_immed (GEN_INT (-ival)))
4664 return nios2_subi_n_kind;
4665 }
4666 }
4667 else if (rhs1 == stack_pointer_rtx
4668 && CONST_INT_P (rhs2))
4669 {
4670 HOST_WIDE_INT imm7 = INTVAL (rhs2) >> 2;
4671 HOST_WIDE_INT rem = INTVAL (rhs2) & 3;
4672 if (rem == 0 && (imm7 & ~0x7f) == 0)
4673 {
4674 if (cdxreg (lhs))
4675 return nios2_spaddi_n_kind;
4676 if (lhs == stack_pointer_rtx)
4677 return nios2_spinci_n_kind;
4678 }
4679 imm7 = -INTVAL(rhs2) >> 2;
4680 rem = -INTVAL (rhs2) & 3;
4681 if (lhs == stack_pointer_rtx
4682 && rem == 0 && (imm7 & ~0x7f) == 0)
4683 return nios2_spdeci_n_kind;
4684 }
4685 }
4686 return ((REG_P (rhs2) || rhs2 == const0_rtx)
4687 ? nios2_add_kind : nios2_addi_kind);
4688 }
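/* Examples of the classification above, assuming CDX and suitably
   allocated registers (register numbers illustrative):

     r2 = r3 + r4     ->  nios2_add_n_kind     "add.n"
     r2 = r3 + 4      ->  nios2_addi_n_kind    "addi.n"
     r2 = r3 + (-4)   ->  nios2_subi_n_kind    "subi.n"
     sp = sp + 16     ->  nios2_spinci_n_kind  "spinci.n"  */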
4689
4690 /* Emit assembly language for the different kinds of add instructions. */
4691 const char*
4692 nios2_add_insn_asm (rtx_insn *insn, rtx *operands)
4693 {
4694 static char buf[256];
4695 int ln = 256;
4696 enum nios2_add_insn_kind kind
4697 = nios2_add_insn_classify (insn, operands[0], operands[1], operands[2]);
4698 if (kind == nios2_subi_n_kind)
4699 snprintf (buf, ln, "subi.n\t%%0, %%1, %d", (int) -INTVAL (operands[2]));
4700 else if (kind == nios2_spaddi_n_kind)
4701 snprintf (buf, ln, "spaddi.n\t%%0, %%2");
4702 else if (kind == nios2_spinci_n_kind)
4703 snprintf (buf, ln, "spinci.n\t%%2");
4704 else if (kind == nios2_spdeci_n_kind)
4705 snprintf (buf, ln, "spdeci.n\t%d", (int) -INTVAL (operands[2]));
4706 else
4707 snprintf (buf, ln, "%s\t%%0, %%1, %%z2", nios2_add_insn_names[(int)kind]);
4708 return buf;
4709 }
4710
4711 /* This routine, which the default "length" attribute computation is
4712 based on, encapsulates information about all the cases where CDX
4713 provides a narrow 2-byte instruction form. */
4714 bool
4715 nios2_cdx_narrow_form_p (rtx_insn *insn)
4716 {
4717 rtx pat, lhs, rhs1, rhs2;
4718 enum attr_type type;
4719 if (!TARGET_HAS_CDX)
4720 return false;
4721 type = get_attr_type (insn);
4722 pat = PATTERN (insn);
4723 gcc_assert (reload_completed);
4724 switch (type)
4725 {
4726 case TYPE_CONTROL:
4727 if (GET_CODE (pat) == SIMPLE_RETURN)
4728 return true;
4729 if (GET_CODE (pat) == PARALLEL)
4730 pat = XVECEXP (pat, 0, 0);
4731 if (GET_CODE (pat) == SET)
4732 pat = SET_SRC (pat);
4733 if (GET_CODE (pat) == IF_THEN_ELSE)
4734 {
4735 /* Conditional branch patterns; for these we
4736 only check the comparison to find beqz.n/bnez.n cases.
4737 For the 'nios2_cbranch' pattern, we cannot also check
4738 the branch range here. That will be done at the md
4739 pattern "length" attribute computation. */
4740 rtx cmp = XEXP (pat, 0);
4741 return ((GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
4742 && cdxreg (XEXP (cmp, 0))
4743 && XEXP (cmp, 1) == const0_rtx);
4744 }
4745 if (GET_CODE (pat) == TRAP_IF)
4746 /* trap.n is always usable. */
4747 return true;
4748 if (GET_CODE (pat) == CALL)
4749 pat = XEXP (XEXP (pat, 0), 0);
4750 if (REG_P (pat))
4751 /* Control instructions taking a register operand are indirect
4752 jumps and calls. The CDX instructions have a 5-bit register
4753 field so any reg is valid. */
4754 return true;
4755 else
4756 {
4757 gcc_assert (!insn_variable_length_p (insn));
4758 return false;
4759 }
4760 case TYPE_ADD:
4761 {
4762 enum nios2_add_insn_kind kind;
4763 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4764 kind = nios2_add_insn_classify (insn, lhs, rhs1, rhs2);
4765 return nios2_add_insn_narrow[(int)kind];
4766 }
4767 case TYPE_LD:
4768 {
4769 bool ret;
4770 HOST_WIDE_INT offset, rem = 0;
4771 rtx addr, reg = SET_DEST (pat), mem = SET_SRC (pat);
4772 if (GET_CODE (mem) == SIGN_EXTEND)
4773 /* No CDX form for sign-extended load. */
4774 return false;
4775 if (GET_CODE (mem) == ZERO_EXTEND)
4776 /* The load alternatives in the zero_extend* patterns. */
4777 mem = XEXP (mem, 0);
4778 if (MEM_P (mem))
4779 {
4780 /* ldxio. */
4781 if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
4782 || TARGET_BYPASS_CACHE)
4783 return false;
4784 addr = XEXP (mem, 0);
4785 /* GP-based and R0-based references are never narrow. */
4786 if (gprel_constant_p (addr) || r0rel_constant_p (addr))
4787 return false;
4788 /* %lo requires a 16-bit relocation and is never narrow. */
4789 if (GET_CODE (addr) == LO_SUM)
4790 return false;
4791 ret = split_mem_address (addr, &rhs1, &rhs2);
4792 gcc_assert (ret);
4793 }
4794 else
4795 return false;
4796
4797 offset = INTVAL (rhs2);
4798 if (GET_MODE (mem) == SImode)
4799 {
4800 rem = offset & 3;
4801 offset >>= 2;
4802 /* ldwsp.n case. */
4803 if (rtx_equal_p (rhs1, stack_pointer_rtx)
4804 && rem == 0 && (offset & ~0x1f) == 0)
4805 return true;
4806 }
4807 else if (GET_MODE (mem) == HImode)
4808 {
4809 rem = offset & 1;
4810 offset >>= 1;
4811 }
4812 /* ldbu.n, ldhu.n, ldw.n cases. */
4813 return (cdxreg (reg) && cdxreg (rhs1)
4814 && rem == 0 && (offset & ~0xf) == 0);
4815 }
4816 case TYPE_ST:
4817 if (GET_CODE (pat) == PARALLEL)
4818 /* stex, stsex. */
4819 return false;
4820 else
4821 {
4822 bool ret;
4823 HOST_WIDE_INT offset, rem = 0;
4824 rtx addr, reg = SET_SRC (pat), mem = SET_DEST (pat);
4825 if (!MEM_P (mem))
4826 return false;
4827 /* stxio. */
4828 if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
4829 || TARGET_BYPASS_CACHE)
4830 return false;
4831 addr = XEXP (mem, 0);
4832 /* GP-based and r0-based references are never narrow. */
4833 if (gprel_constant_p (addr) || r0rel_constant_p (addr))
4834 return false;
4835 /* %lo requires a 16-bit relocation and is never narrow. */
4836 if (GET_CODE (addr) == LO_SUM)
4837 return false;
4838 ret = split_mem_address (addr, &rhs1, &rhs2);
4839 gcc_assert (ret);
4840 offset = INTVAL (rhs2);
4841 if (GET_MODE (mem) == SImode)
4842 {
4843 rem = offset & 3;
4844 offset >>= 2;
4845 /* stwsp.n case. */
4846 if (rtx_equal_p (rhs1, stack_pointer_rtx)
4847 && rem == 0 && (offset & ~0x1f) == 0)
4848 return true;
4849 /* stwz.n case. */
4850 else if (reg == const0_rtx && cdxreg (rhs1)
4851 && rem == 0 && (offset & ~0x3f) == 0)
4852 return true;
4853 }
4854 else if (GET_MODE (mem) == HImode)
4855 {
4856 rem = offset & 1;
4857 offset >>= 1;
4858 }
4859 else
4860 {
4861 gcc_assert (GET_MODE (mem) == QImode);
4862 /* stbz.n case. */
4863 if (reg == const0_rtx && cdxreg (rhs1)
4864 && (offset & ~0x3f) == 0)
4865 return true;
4866 }
4867
4868 /* stbu.n, sthu.n, stw.n cases. */
4869 return (cdxreg (reg) && cdxreg (rhs1)
4870 && rem == 0 && (offset & ~0xf) == 0);
4871 }
4872 case TYPE_MOV:
4873 lhs = SET_DEST (pat);
4874 rhs1 = SET_SRC (pat);
4875 if (CONST_INT_P (rhs1))
4876 return (cdxreg (lhs) && cdx_mov_immed (rhs1));
4877 gcc_assert (REG_P (lhs) && REG_P (rhs1));
4878 return true;
4879
4880 case TYPE_AND:
4881 /* Some zero_extend* alternatives are and insns. */
4882 if (GET_CODE (SET_SRC (pat)) == ZERO_EXTEND)
4883 return (cdxreg (SET_DEST (pat))
4884 && cdxreg (XEXP (SET_SRC (pat), 0)));
4885 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4886 if (CONST_INT_P (rhs2))
4887 return (cdxreg (lhs) && cdxreg (rhs1) && cdx_and_immed (rhs2));
4888 return (cdxreg (lhs) && cdxreg (rhs2)
4889 && (!reload_completed || rtx_equal_p (lhs, rhs1)));
4890
4891 case TYPE_OR:
4892 case TYPE_XOR:
4893 /* Note the two-address limitation for CDX form. */
4894 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4895 return (cdxreg (lhs) && cdxreg (rhs2)
4896 && (!reload_completed || rtx_equal_p (lhs, rhs1)));
4897
4898 case TYPE_SUB:
4899 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4900 return (cdxreg (lhs) && cdxreg (rhs1) && cdxreg (rhs2));
4901
4902 case TYPE_NEG:
4903 case TYPE_NOT:
4904 split_alu_insn (insn, &lhs, &rhs1, NULL);
4905 return (cdxreg (lhs) && cdxreg (rhs1));
4906
4907 case TYPE_SLL:
4908 case TYPE_SRL:
4909 split_alu_insn (insn, &lhs, &rhs1, &rhs2);
4910 return (cdxreg (lhs)
4911 && ((cdxreg (rhs1) && cdx_shift_immed (rhs2))
4912 || (cdxreg (rhs2)
4913 && (!reload_completed || rtx_equal_p (lhs, rhs1)))));
4914 case TYPE_NOP:
4915 case TYPE_PUSH:
4916 case TYPE_POP:
4917 return true;
4918 default:
4919 break;
4920 }
4921 return false;
4922 }
4923
4924 /* Main function to implement the pop_operation predicate that
4925 checks pop.n insn pattern integrity. The CDX pop.n patterns mostly
4926 hardcode the restored registers, so the main checking is for the
4927 SP offsets. */
4928 bool
4929 pop_operation_p (rtx op)
4930 {
4931 int i;
4932 HOST_WIDE_INT last_offset = -1, len = XVECLEN (op, 0);
4933 rtx base_reg, offset;
4934
4935 if (len < 3 /* At least has a return, SP-update, and RA restore. */
4936 || GET_CODE (XVECEXP (op, 0, 0)) != RETURN
4937 || !base_reg_adjustment_p (XVECEXP (op, 0, 1), &base_reg, &offset)
4938 || !rtx_equal_p (base_reg, stack_pointer_rtx)
4939 || !CONST_INT_P (offset)
4940 || (INTVAL (offset) & 3) != 0)
4941 return false;
4942
4943 for (i = len - 1; i > 1; i--)
4944 {
4945 rtx set = XVECEXP (op, 0, i);
4946 rtx curr_base_reg, curr_offset;
4947
4948 if (GET_CODE (set) != SET || !MEM_P (SET_SRC (set))
4949 || !split_mem_address (XEXP (SET_SRC (set), 0),
4950 &curr_base_reg, &curr_offset)
4951 || !rtx_equal_p (base_reg, curr_base_reg)
4952 || !CONST_INT_P (curr_offset))
4953 return false;
4954 if (i == len - 1)
4955 {
4956 last_offset = INTVAL (curr_offset);
4957 if ((last_offset & 3) != 0 || last_offset > 60)
4958 return false;
4959 }
4960 else
4961 {
4962 last_offset += 4;
4963 if (INTVAL (curr_offset) != last_offset)
4964 return false;
4965 }
4966 }
4967 if (last_offset < 0 || last_offset + 4 != INTVAL (offset))
4968 return false;
4969
4970 return true;
4971 }
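/* A sketch of a pop.n body accepted by the predicate above, restoring
   RA and one callee-saved register (register choice illustrative):

   (parallel [(return)
              (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int 8)))
              (set (reg:SI ra) (mem:SI (plus:SI (reg:SI sp) (const_int 4))))
              (set (reg:SI r16) (mem:SI (reg:SI sp)))])

   Offsets decrease by 4 toward the end of the vector, and the SP
   adjustment equals the first restore's offset plus 4.  */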
4972
4973
4974 /* Masks of registers that are valid for CDX ldwm/stwm instructions.
4975 The instruction can encode subsets drawn from either R2-R13 or
4976 R14-R23 + FP + RA. */
4977 #define CDX_LDSTWM_VALID_REGS_0 0x00003ffc
4978 #define CDX_LDSTWM_VALID_REGS_1 0x90ffc000
4979
4980 static bool
4981 nios2_ldstwm_regset_p (unsigned int regno, unsigned int *regset)
4982 {
4983 if (*regset == 0)
4984 {
4985 if (CDX_LDSTWM_VALID_REGS_0 & (1 << regno))
4986 *regset = CDX_LDSTWM_VALID_REGS_0;
4987 else if (CDX_LDSTWM_VALID_REGS_1 & (1 << regno))
4988 *regset = CDX_LDSTWM_VALID_REGS_1;
4989 else
4990 return false;
4991 return true;
4992 }
4993 else
4994 return (*regset & (1 << regno)) != 0;
4995 }
4996
4997 /* Main function to implement ldwm_operation/stwm_operation
4998 predicates that check ldwm/stwm insn pattern integrity. */
4999 bool
5000 ldstwm_operation_p (rtx op, bool load_p)
5001 {
5002 int start, i, end = XVECLEN (op, 0) - 1, last_regno = -1;
5003 unsigned int regset = 0;
5004 rtx base_reg, offset;
5005 rtx first_elt = XVECEXP (op, 0, 0);
5006 bool inc_p = true;
5007 bool wb_p = base_reg_adjustment_p (first_elt, &base_reg, &offset);
5008 if (GET_CODE (XVECEXP (op, 0, end)) == RETURN)
5009 end--;
5010 start = wb_p ? 1 : 0;
5011 for (i = start; i <= end; i++)
5012 {
5013 int regno;
5014 rtx reg, mem, elt = XVECEXP (op, 0, i);
5015 /* Return early if not a SET at all. */
5016 if (GET_CODE (elt) != SET)
5017 return false;
5018 reg = load_p ? SET_DEST (elt) : SET_SRC (elt);
5019 mem = load_p ? SET_SRC (elt) : SET_DEST (elt);
5020 if (!REG_P (reg) || !MEM_P (mem))
5021 return false;
5022 regno = REGNO (reg);
5023 if (!nios2_ldstwm_regset_p (regno, &regset))
5024 return false;
5025 /* If no writeback to determine direction, use offset of first MEM. */
5026 if (wb_p)
5027 inc_p = INTVAL (offset) > 0;
5028 else if (i == start)
5029 {
5030 rtx first_base, first_offset;
5031 if (!split_mem_address (XEXP (mem, 0),
5032 &first_base, &first_offset))
5033 return false;
5034 if (!REG_P (first_base) || !CONST_INT_P (first_offset))
5035 return false;
5036 base_reg = first_base;
5037 inc_p = INTVAL (first_offset) >= 0;
5038 }
5039 /* Ensure that the base register is never among the loaded registers. */
5040 if (load_p && regno == (int) REGNO (base_reg))
5041 return false;
5042 /* Check for register order inc/dec integrity. */
5043 if (last_regno >= 0)
5044 {
5045 if (inc_p && last_regno >= regno)
5046 return false;
5047 if (!inc_p && last_regno <= regno)
5048 return false;
5049 }
5050 last_regno = regno;
5051 }
5052 return true;
5053 }
5054
5055 /* Helper for nios2_ldst_parallel, for generating a parallel vector
5056 SET element. */
5057 static rtx
5058 gen_ldst (bool load_p, int regno, rtx base_mem, int offset)
5059 {
5060 rtx reg = gen_rtx_REG (SImode, regno);
5061 rtx mem = adjust_address_nv (base_mem, SImode, offset);
5062 return gen_rtx_SET (load_p ? reg : mem,
5063 load_p ? mem : reg);
5064 }
5065
5066 /* A general routine for creating the body RTL pattern of
5067 ldwm/stwm/push.n/pop.n insns.
5068 LOAD_P: true/false for load/store direction.
5069 REG_INC_P: whether registers are incrementing/decrementing in the
5070 *RTL vector* (not necessarily the order defined in the ISA specification).
5071 OFFSET_INC_P: Same as REG_INC_P, but for the memory offset order.
5072 BASE_MEM: starting MEM.
5073 BASE_UPDATE: amount to update base register; zero means no writeback.
5074 REGMASK: register mask to load/store.
5075 RET_P: true if to tag a (return) element at the end.
5076
5077 Note that this routine does not do any checking. It's the job of the
5078 caller to do the right thing, and the insn patterns to do the
5079 safe-guarding. */
5080 static rtx
5081 nios2_ldst_parallel (bool load_p, bool reg_inc_p, bool offset_inc_p,
5082 rtx base_mem, int base_update,
5083 unsigned HOST_WIDE_INT regmask, bool ret_p)
5084 {
5085 rtvec p;
5086 int regno, b = 0, i = 0, n = 0, len = popcount_hwi (regmask);
5087 if (ret_p) len++, i++, b++;
5088 if (base_update != 0) len++, i++;
5089 p = rtvec_alloc (len);
5090 for (regno = (reg_inc_p ? 0 : 31);
5091 regno != (reg_inc_p ? 32 : -1);
5092 regno += (reg_inc_p ? 1 : -1))
5093 if ((regmask & (1 << regno)) != 0)
5094 {
5095 int offset = (offset_inc_p ? 4 : -4) * n++;
5096 RTVEC_ELT (p, i++) = gen_ldst (load_p, regno, base_mem, offset);
5097 }
5098 if (ret_p)
5099 RTVEC_ELT (p, 0) = ret_rtx;
5100 if (base_update != 0)
5101 {
5102 rtx reg, offset;
5103 if (!split_mem_address (XEXP (base_mem, 0), &reg, &offset))
5104 gcc_unreachable ();
5105 RTVEC_ELT (p, b) =
5106 gen_rtx_SET (reg, plus_constant (Pmode, reg, base_update));
5107 }
5108 return gen_rtx_PARALLEL (VOIDmode, p);
5109 }
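/* As an illustration (register choice arbitrary, and assuming BASE_MEM
   addresses the stack pointer), a call such as

     nios2_ldst_parallel (false, false, false, base_mem, -8,
                          (1 << RA_REGNO) | (1 << 16), false);

   builds a push-style store of RA and r16 with an SP writeback of -8:

   (parallel [(set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -8)))
              (set (mem:SI (reg:SI sp)) (reg:SI ra))
              (set (mem:SI (plus:SI (reg:SI sp) (const_int -4)))
                   (reg:SI r16))])
 */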
5110
5111 /* CDX ldwm/stwm peephole optimization pattern related routines. */
5112
5113 /* Data structure and sorting function for ldwm/stwm peephole optimizers. */
5114 struct ldstwm_operand
5115 {
5116 int offset; /* Offset from base register. */
5117 rtx reg; /* Register to store at this offset. */
5118 rtx mem; /* Original mem. */
5119 bool bad; /* True if this load/store can't be combined. */
5120 bool rewrite; /* True if we should rewrite using scratch. */
5121 };
5122
5123 static int
5124 compare_ldstwm_operands (const void *arg1, const void *arg2)
5125 {
5126 const struct ldstwm_operand *op1 = (const struct ldstwm_operand *) arg1;
5127 const struct ldstwm_operand *op2 = (const struct ldstwm_operand *) arg2;
5128 if (op1->bad)
5129 return op2->bad ? 0 : 1;
5130 else if (op2->bad)
5131 return -1;
5132 else
5133 return op1->offset - op2->offset;
5134 }
5135
5136 /* Helper function: return true if a load/store using REGNO with address
5137 BASEREG and offset OFFSET meets the constraints for a 2-byte CDX ldw.n,
5138 stw.n, ldwsp.n, or stwsp.n instruction. */
5139 static bool
5140 can_use_cdx_ldstw (int regno, int basereg, int offset)
5141 {
5142 if (CDX_REG_P (regno) && CDX_REG_P (basereg)
5143 && (offset & 0x3) == 0 && offset >= 0 && offset < 0x40)
5144 return true;
5145 else if (basereg == SP_REGNO
5146 && offset >= 0 && offset < 0x80 && (offset & 0x3) == 0)
5147 return true;
5148 return false;
5149 }
5150
5151 /* This function is called from peephole2 optimizers to try to merge
5152 a series of individual loads and stores into a ldwm or stwm. It
5153 can also rewrite the addresses inside the individual loads and stores
5154 that share a common base register to use a scratch register and smaller
5155 offsets, if that allows them to use CDX ldw.n or stw.n instructions
5156 instead of 4-byte loads or stores.
5157 N is the number of insns we are trying to merge. SCRATCH is non-null
5158 if there is a scratch register available. The OPERANDS array contains
5159 alternating REG (even) and MEM (odd) operands. */
5160 bool
5161 gen_ldstwm_peep (bool load_p, int n, rtx scratch, rtx *operands)
5162 {
5163 /* CDX ldwm/stwm instructions allow a maximum of 12 registers to be
5164 specified. */
5165 #define MAX_LDSTWM_OPS 12
5166 struct ldstwm_operand sort[MAX_LDSTWM_OPS];
5167 int basereg = -1;
5168 int baseoffset;
5169 int i, m, lastoffset, lastreg;
5170 unsigned int regmask = 0, usemask = 0, regset;
5171 bool needscratch;
5172 int newbasereg;
5173 int nbytes;
5174
5175 if (!TARGET_HAS_CDX)
5176 return false;
5177 if (n < 2 || n > MAX_LDSTWM_OPS)
5178 return false;
5179
5180 /* Check all the operands for validity and initialize the sort array.
5181 The places where we return false here are all situations that aren't
5182 expected to ever happen -- invalid patterns, invalid registers, etc. */
5183 for (i = 0; i < n; i++)
5184 {
5185 rtx base, offset;
5186 rtx reg = operands[i];
5187 rtx mem = operands[i + n];
5188 int r, o, regno;
5189 bool bad = false;
5190
5191 if (!REG_P (reg) || !MEM_P (mem))
5192 return false;
5193
5194 regno = REGNO (reg);
5195 if (regno > 31)
5196 return false;
5197 if (load_p && (regmask & (1 << regno)) != 0)
5198 return false;
5199 regmask |= 1 << regno;
5200
5201 if (!split_mem_address (XEXP (mem, 0), &base, &offset))
5202 return false;
5203 r = REGNO (base);
5204 o = INTVAL (offset);
5205
5206 if (basereg == -1)
5207 basereg = r;
5208 else if (r != basereg)
5209 bad = true;
5210 usemask |= 1 << r;
5211
5212 sort[i].bad = bad;
5213 sort[i].rewrite = false;
5214 sort[i].offset = o;
5215 sort[i].reg = reg;
5216 sort[i].mem = mem;
5217 }
5218
5219 /* If we are doing a series of register loads, we can't safely reorder
5220 them if any of the regs used in addr expressions are also being set. */
5221 if (load_p && (regmask & usemask))
5222 return false;
5223
5224 /* Sort the array by increasing mem offset order, then check that
5225 offsets are valid and register order matches mem order. At the
5226 end of this loop, m is the number of loads/stores we will try to
5227 combine; the rest are leftovers. */
5228 qsort (sort, n, sizeof (struct ldstwm_operand), compare_ldstwm_operands);
5229
5230 baseoffset = sort[0].offset;
5231 needscratch = baseoffset != 0;
5232 if (needscratch && !scratch)
5233 return false;
5234
5235 lastreg = regmask = regset = 0;
5236 lastoffset = baseoffset;
5237 for (m = 0; m < n && !sort[m].bad; m++)
5238 {
5239 int thisreg = REGNO (sort[m].reg);
5240 if (sort[m].offset != lastoffset
5241 || (m > 0 && lastreg >= thisreg)
5242 || !nios2_ldstwm_regset_p (thisreg, &regset))
5243 break;
5244 lastoffset += 4;
5245 lastreg = thisreg;
5246 regmask |= (1 << thisreg);
5247 }
5248
5249 /* For loads, make sure we are not overwriting the scratch reg.
5250 The peephole2 pattern isn't supposed to match unless the register is
5251 unused all the way through, so this isn't supposed to happen anyway. */
5252 if (load_p
5253 && needscratch
5254 && ((1 << REGNO (scratch)) & regmask) != 0)
5255 return false;
5256 newbasereg = needscratch ? (int) REGNO (scratch) : basereg;
5257
5258 /* We may be able to combine only the first m of the n total loads/stores
5259 into a single instruction. If m < 2, there's no point in emitting
5260 a ldwm/stwm at all, but we might be able to do further optimizations
5261 if we have a scratch. We will count the instruction lengths of the
5262 old and new patterns and store the savings in nbytes. */
5263 if (m < 2)
5264 {
5265 if (!needscratch)
5266 return false;
5267 m = 0;
5268 nbytes = 0;
5269 }
5270 else
5271 nbytes = -4; /* Size of ldwm/stwm. */
5272 if (needscratch)
5273 {
5274 int bo = baseoffset > 0 ? baseoffset : -baseoffset;
5275 if (CDX_REG_P (newbasereg)
5276 && CDX_REG_P (basereg)
5277 && bo <= 128 && bo > 0 && (bo & (bo - 1)) == 0)
5278 nbytes -= 2; /* Size of addi.n/subi.n. */
5279 else
5280 nbytes -= 4; /* Size of non-CDX addi. */
5281 }
5282
5283 /* Count the size of the input load/store instructions being replaced. */
5284 for (i = 0; i < m; i++)
5285 if (can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset))
5286 nbytes += 2;
5287 else
5288 nbytes += 4;
5289
5290 /* We may also be able to save a bit if we can rewrite non-CDX
5291 load/stores that can't be combined into the ldwm/stwm into CDX
5292 load/stores using the scratch reg. For example, this might happen
5293 if baseoffset is large, by bringing in the offsets in the load/store
5294 instructions within the range that fits in the CDX instruction. */
5295 if (needscratch && CDX_REG_P (newbasereg))
5296 for (i = m; i < n && !sort[i].bad; i++)
5297 if (!can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset)
5298 && can_use_cdx_ldstw (REGNO (sort[i].reg), newbasereg,
5299 sort[i].offset - baseoffset))
5300 {
5301 sort[i].rewrite = true;
5302 nbytes += 2;
5303 }
5304
5305 /* Are we good to go? */
5306 if (nbytes <= 0)
5307 return false;
5308
5309 /* Emit the scratch load. */
5310 if (needscratch)
5311 emit_insn (gen_rtx_SET (scratch, XEXP (sort[0].mem, 0)));
5312
5313 /* Emit the ldwm/stwm insn. */
5314 if (m > 0)
5315 {
5316 rtvec p = rtvec_alloc (m);
5317 for (i = 0; i < m; i++)
5318 {
5319 int offset = sort[i].offset;
5320 rtx mem, reg = sort[i].reg;
5321 rtx base_reg = gen_rtx_REG (Pmode, newbasereg);
5322 if (needscratch)
5323 offset -= baseoffset;
5324 mem = gen_rtx_MEM (SImode, plus_constant (Pmode, base_reg, offset));
5325 if (load_p)
5326 RTVEC_ELT (p, i) = gen_rtx_SET (reg, mem);
5327 else
5328 RTVEC_ELT (p, i) = gen_rtx_SET (mem, reg);
5329 }
5330 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
5331 }
5332
5333 /* Emit any leftover load/stores as individual instructions, doing
5334 the previously-noted rewrites to use the scratch reg. */
5335 for (i = m; i < n; i++)
5336 {
5337 rtx reg = sort[i].reg;
5338 rtx mem = sort[i].mem;
5339 if (sort[i].rewrite)
5340 {
5341 int offset = sort[i].offset - baseoffset;
5342 mem = gen_rtx_MEM (SImode, plus_constant (Pmode, scratch, offset));
5343 }
5344 if (load_p)
5345 emit_move_insn (reg, mem);
5346 else
5347 emit_move_insn (mem, reg);
5348 }
5349 return true;
5350 }
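/* For example, a peephole2 sequence of word loads from consecutive
   stack offsets, such as

     ldw r16, 0(sp); ldw r17, 4(sp); ldw r18, 8(sp)

   can be merged by the routine above into a single CDX ldwm
   (registers and offsets illustrative).  */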
5351
5352 /* Implement TARGET_MACHINE_DEPENDENT_REORG:
5353 We use this hook when emitting CDX code to enforce the 4-byte
5354 alignment requirement for labels that are used as the targets of
5355 jmpi instructions. CDX code can otherwise contain a mix of 16-bit
5356 and 32-bit instructions aligned on any 16-bit boundary, but functions
5357 and jmpi labels have to be 32-bit aligned because of the way the address
5358 is encoded in the instruction. */
5359
5360 static unsigned char *label_align;
5361 static int min_labelno, max_labelno;
5362
5363 static void
5364 nios2_reorg (void)
5365 {
5366 bool changed = true;
5367 rtx_insn *insn;
5368
5369 if (!TARGET_HAS_CDX)
5370 return;
5371
5372 /* Initialize the data structures. */
5373 if (label_align)
5374 free (label_align);
5375 max_labelno = max_label_num ();
5376 min_labelno = get_first_label_num ();
5377 label_align = XCNEWVEC (unsigned char, max_labelno - min_labelno + 1);
5378
5379 /* Iterate on inserting alignment and adjusting branch lengths until
5380 no more changes. */
5381 while (changed)
5382 {
5383 changed = false;
5384 shorten_branches (get_insns ());
5385
5386 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
5387 if (JUMP_P (insn) && insn_variable_length_p (insn))
5388 {
5389 rtx label = JUMP_LABEL (insn);
5390 /* We rely on the fact that all cases of 'jmpi'
5391 doing the actual branch in the machine description
5392 have a computed length of 6 or 8. Lengths 4 and below
5393 are all PC-relative 'br' branches without the jump-align
5394 problem. */
5395 if (label && LABEL_P (label) && get_attr_length (insn) > 4)
5396 {
5397 int index = CODE_LABEL_NUMBER (label) - min_labelno;
5398 if (label_align[index] != 2)
5399 {
5400 label_align[index] = 2;
5401 changed = true;
5402 }
5403 }
5404 }
5405 }
5406 }
5407
5408 /* Implement LABEL_ALIGN, using the information gathered in nios2_reorg. */
5409 int
5410 nios2_label_align (rtx label)
5411 {
5412 int n = CODE_LABEL_NUMBER (label);
5413
5414 if (label_align && n >= min_labelno && n <= max_labelno)
5415 return MAX (label_align[n - min_labelno], align_labels.levels[0].log);
5416 return align_labels.levels[0].log;
5417 }
5418
5419 /* Implement ADJUST_REG_ALLOC_ORDER. We use the default ordering
5420 for R1 and non-CDX R2 code; for CDX we tweak things to prefer
5421 the registers that can be used as operands to instructions that
5422 have 3-bit register fields. */
5423 void
5424 nios2_adjust_reg_alloc_order (void)
5425 {
5426 const int cdx_reg_alloc_order[] =
5427 {
5428 /* Call-clobbered GPRs within CDX 3-bit encoded range. */
5429 2, 3, 4, 5, 6, 7,
5430 /* Call-saved GPRs within CDX 3-bit encoded range. */
5431 16, 17,
5432 /* Other call-clobbered GPRs. */
5433 8, 9, 10, 11, 12, 13, 14, 15,
5434 /* Other call-saved GPRs. RA placed first since it is always saved. */
5435 31, 18, 19, 20, 21, 22, 23, 28,
5436 /* Fixed GPRs, not used by the register allocator. */
5437 0, 1, 24, 25, 26, 27, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39
5438 };
5439
5440 if (TARGET_HAS_CDX)
5441 memcpy (reg_alloc_order, cdx_reg_alloc_order,
5442 sizeof (int) * FIRST_PSEUDO_REGISTER);
5443 }

\f
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p

#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS nios2_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN nios2_expand_builtin
#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL nios2_builtin_decl

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE nios2_can_eliminate

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG nios2_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance

#undef TARGET_FUNCTION_ARG_PADDING
#define TARGET_FUNCTION_ARG_PADDING nios2_function_arg_padding

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT nios2_trampoline_init

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE nios2_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE nios2_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY nios2_return_in_memory

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS nios2_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST nios2_address_cost

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS TARGET_LINUX_ABI

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem

#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel

#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P nios2_print_operand_punct_valid_p

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND nios2_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END nios2_asm_file_end

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE nios2_option_override

#undef TARGET_OPTION_SAVE
#define TARGET_OPTION_SAVE nios2_option_save

#undef TARGET_OPTION_RESTORE
#define TARGET_OPTION_RESTORE nios2_option_restore

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function

#undef TARGET_OPTION_VALID_ATTRIBUTE_P
#define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p

#undef TARGET_OPTION_PRAGMA_PARSE
#define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse

#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG nios2_reorg

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

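/* Collect the hook definitions above into the target vector consulted
   by the middle end.  */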
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-nios2.h"