]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/nios2/nios2.c
.
[thirdparty/gcc.git] / gcc / config / nios2 / nios2.c
1 /* Target machine subroutines for Altera Nios II.
2 Copyright (C) 2012-2017 Free Software Foundation, Inc.
3 Contributed by Jonah Graham (jgraham@altera.com),
4 Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
5 Contributed by Mentor Graphics, Inc.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 3, or (at your
12 option) any later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "stringpool.h"
31 #include "attribs.h"
32 #include "df.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "optabs.h"
36 #include "regs.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "diagnostic-core.h"
40 #include "output.h"
41 #include "insn-attr.h"
42 #include "flags.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "varasm.h"
46 #include "expr.h"
47 #include "toplev.h"
48 #include "langhooks.h"
49 #include "stor-layout.h"
50 #include "builtins.h"
51
52 /* This file should be included last. */
53 #include "target-def.h"
54
55 /* Forward function declarations. */
56 static bool prologue_saved_reg_p (unsigned);
57 static void nios2_load_pic_register (void);
58 static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
59 static const char *nios2_unspec_reloc_name (int);
60 static void nios2_register_builtin_fndecl (unsigned, tree);
61 static rtx nios2_ldst_parallel (bool, bool, bool, rtx, int,
62 unsigned HOST_WIDE_INT, bool);
63
/* Threshold for data being put into the small data/bss area, instead
   of the normal data area (references to the small data/bss area take
   1 instruction, and use the global pointer, references to the normal
   data area takes 2 instructions).  NOTE(review): presumably overridden
   by the -G command-line option -- confirm in the option-override code.  */
unsigned HOST_WIDE_INT nios2_section_threshold = NIOS2_DEFAULT_GVALUE;
69
/* Per-function machine-dependent state, reachable as cfun->machine.
   Filled in by nios2_compute_frame_layout with register save masks,
   and offsets for the current function.  All sizes and offsets below
   are in bytes.  */
struct GTY (()) machine_function
{
  /* Mask of registers to save (bit N set = save register N).  */
  unsigned int save_mask;
  /* Number of bytes that the entire frame takes up.  */
  int total_size;
  /* Number of bytes that variables take up.  */
  int var_size;
  /* Number of bytes that outgoing arguments take up.  */
  int args_size;
  /* Number of bytes needed to store registers in frame.  */
  int save_reg_size;
  /* Number of bytes used to store callee-saved registers.  */
  int callee_save_reg_size;
  /* Offset from new stack pointer to store registers.  */
  int save_regs_offset;
  /* Offset from save_regs_offset to store frame pointer register.  */
  int fp_save_offset;
  /* != 0 if function has a variable argument list.  */
  int uses_anonymous_args;
  /* != 0 if frame layout already calculated (set after reload, when
     the layout becomes final and may be cached).  */
  int initialized;
};
96
/* State to track the assignment of custom codes to FPU/custom builtins.
   Custom instruction numbers occupy an 8-bit field, so there are 256
   possible codes; both tables are indexed by that code.  */
static enum nios2_ccs_code custom_code_status[256];
static int custom_code_index[256];
/* Set to true if any conflicts (re-use of a code between 0-255) are found.  */
static bool custom_code_conflict = false;
102
103 \f
104 /* Definition of builtin function types for nios2. */
105
106 #define N2_FTYPES \
107 N2_FTYPE(1, (SF)) \
108 N2_FTYPE(1, (VOID)) \
109 N2_FTYPE(2, (DF, DF)) \
110 N2_FTYPE(3, (DF, DF, DF)) \
111 N2_FTYPE(2, (DF, SF)) \
112 N2_FTYPE(2, (DF, SI)) \
113 N2_FTYPE(2, (DF, UI)) \
114 N2_FTYPE(2, (SF, DF)) \
115 N2_FTYPE(2, (SF, SF)) \
116 N2_FTYPE(3, (SF, SF, SF)) \
117 N2_FTYPE(2, (SF, SI)) \
118 N2_FTYPE(2, (SF, UI)) \
119 N2_FTYPE(2, (SI, CVPTR)) \
120 N2_FTYPE(2, (SI, DF)) \
121 N2_FTYPE(3, (SI, DF, DF)) \
122 N2_FTYPE(2, (SI, SF)) \
123 N2_FTYPE(3, (SI, SF, SF)) \
124 N2_FTYPE(2, (SI, SI)) \
125 N2_FTYPE(3, (SI, SI, SI)) \
126 N2_FTYPE(3, (SI, VPTR, SI)) \
127 N2_FTYPE(2, (UI, CVPTR)) \
128 N2_FTYPE(2, (UI, DF)) \
129 N2_FTYPE(2, (UI, SF)) \
130 N2_FTYPE(2, (VOID, DF)) \
131 N2_FTYPE(2, (VOID, SF)) \
132 N2_FTYPE(2, (VOID, SI)) \
133 N2_FTYPE(3, (VOID, SI, SI)) \
134 N2_FTYPE(2, (VOID, VPTR)) \
135 N2_FTYPE(3, (VOID, VPTR, SI))
136
137 #define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
138 #define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
139 #define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
140
/* Expand ftcode enumeration: N2_FTYPE_OP<n> turns each N2_FTYPES entry
   into a distinct N2_FTYPE_<ret>_<args> enumerator.  */
enum nios2_ftcode {
#define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
  N2_FTYPES
#undef N2_FTYPE
  N2_FTYPE_MAX
};
148
/* Return the tree function type, based on the ftcode.  Types are built
   lazily and cached in a static table, so each distinct signature is
   constructed only once per compilation.  */
static tree
nios2_ftype (enum nios2_ftcode ftcode)
{
  static tree types[(int) N2_FTYPE_MAX];

  /* Short names consumed by the N2_FTYPE_ARGS* macro expansions below.  */
  tree N2_TYPE_SF = float_type_node;
  tree N2_TYPE_DF = double_type_node;
  tree N2_TYPE_SI = integer_type_node;
  tree N2_TYPE_UI = unsigned_type_node;
  tree N2_TYPE_VOID = void_type_node;

  /* The two pointer types are built once on first use.  */
  static const_tree N2_TYPE_CVPTR, N2_TYPE_VPTR;
  if (!N2_TYPE_CVPTR)
    {
      /* const volatile void *.  */
      N2_TYPE_CVPTR
	= build_pointer_type (build_qualified_type (void_type_node,
						    (TYPE_QUAL_CONST
						     | TYPE_QUAL_VOLATILE)));
      /* volatile void *.  */
      N2_TYPE_VPTR
	= build_pointer_type (build_qualified_type (void_type_node,
						    TYPE_QUAL_VOLATILE));
    }
  if (types[(int) ftcode] == NULL_TREE)
    switch (ftcode)
      {
#define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
#define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
#define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
#define N2_FTYPE(N,ARGS)						\
      case N2_FTYPE_OP ## N ARGS:					\
	types[(int) ftcode]						\
	  = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
	break;
	N2_FTYPES
#undef N2_FTYPE
      default: gcc_unreachable ();
      }
  return types[(int) ftcode];
}
191
192 \f
193 /* Definition of FPU instruction descriptions. */
194
/* Description of one FPU instruction implemented as a custom insn.  */
struct nios2_fpu_insn_info
{
  /* Instruction name, e.g. "fadds" (built by the N2FPU_* macros).  */
  const char *name;
  /* Number of operands (including the result), and pointer to the
     nios2_custom_<name> option variable holding its assigned code.  */
  int num_operands, *optvar;
  /* Option codes of the enabling (-mcustom-<name>=) and disabling
     (-mno-custom-<name>) command-line switches.  */
  int opt, no_opt;
#define N2F_DF 0x1
#define N2F_DFREQ 0x2
#define N2F_UNSAFE 0x4
#define N2F_FINITE 0x8
#define N2F_NO_ERRNO 0x10
  /* Mask of the N2F_* flags above.  */
  unsigned int flags;
  enum insn_code icode;
  enum nios2_ftcode ftcode;
};
209
210 /* Base macro for defining FPU instructions. */
211 #define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
212 { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
213 OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
214 N2_FTYPE_OP ## nop args }
215
216 /* Arithmetic and math functions; 2 or 3 operand FP operations. */
217 #define N2FPU_OP2(mode) (mode, mode)
218 #define N2FPU_OP3(mode) (mode, mode, mode)
219 #define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
220 N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
221 icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
222 #define N2FPU_INSN_SF(code, nop, flags) \
223 N2FPU_INSN_DEF (code, code, nop, flags, s, S)
224 #define N2FPU_INSN_DF(code, nop, flags) \
225 N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
226
227 /* Compare instructions, 3 operand FP operation with a SI result. */
228 #define N2FPU_CMP_DEF(code, flags, m, M) \
229 N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
230 nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
231 #define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
232 #define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
233
/* The order of definition needs to be maintained consistent with
   enum n2fpu_code in nios2-opts.h.  Entries are built with the N2FPU_*
   macros above; do not reorder or insert without updating that enum.  */
struct nios2_fpu_insn_info nios2_fpu_insn[] =
{
  /* Single precision instructions.  */
  N2FPU_INSN_SF (add, 3, 0),
  N2FPU_INSN_SF (sub, 3, 0),
  N2FPU_INSN_SF (mul, 3, 0),
  N2FPU_INSN_SF (div, 3, 0),
  /* Due to textual difference between min/max and smin/smax.  */
  N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, s, S),
  N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, s, S),
  N2FPU_INSN_SF (neg, 2, 0),
  N2FPU_INSN_SF (abs, 2, 0),
  N2FPU_INSN_SF (sqrt, 2, 0),
  N2FPU_INSN_SF (sin, 2, N2F_UNSAFE),
  N2FPU_INSN_SF (cos, 2, N2F_UNSAFE),
  N2FPU_INSN_SF (tan, 2, N2F_UNSAFE),
  N2FPU_INSN_SF (atan, 2, N2F_UNSAFE),
  N2FPU_INSN_SF (exp, 2, N2F_UNSAFE),
  N2FPU_INSN_SF (log, 2, N2F_UNSAFE),
  /* Single precision compares.  */
  N2FPU_CMP_SF (eq), N2FPU_CMP_SF (ne),
  N2FPU_CMP_SF (lt), N2FPU_CMP_SF (le),
  N2FPU_CMP_SF (gt), N2FPU_CMP_SF (ge),

  /* Double precision instructions.  */
  N2FPU_INSN_DF (add, 3, 0),
  N2FPU_INSN_DF (sub, 3, 0),
  N2FPU_INSN_DF (mul, 3, 0),
  N2FPU_INSN_DF (div, 3, 0),
  /* Due to textual difference between min/max and smin/smax.  */
  N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, d, D),
  N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, d, D),
  N2FPU_INSN_DF (neg, 2, 0),
  N2FPU_INSN_DF (abs, 2, 0),
  N2FPU_INSN_DF (sqrt, 2, 0),
  N2FPU_INSN_DF (sin, 2, N2F_UNSAFE),
  N2FPU_INSN_DF (cos, 2, N2F_UNSAFE),
  N2FPU_INSN_DF (tan, 2, N2F_UNSAFE),
  N2FPU_INSN_DF (atan, 2, N2F_UNSAFE),
  N2FPU_INSN_DF (exp, 2, N2F_UNSAFE),
  N2FPU_INSN_DF (log, 2, N2F_UNSAFE),
  /* Double precision compares.  */
  N2FPU_CMP_DF (eq), N2FPU_CMP_DF (ne),
  N2FPU_CMP_DF (lt), N2FPU_CMP_DF (le),
  N2FPU_CMP_DF (gt), N2FPU_CMP_DF (ge),

  /* Conversion instructions.  */
  N2FPU_INSN_DEF_BASE (floatis,  2, 0, floatsisf2,    (SF, SI)),
  N2FPU_INSN_DEF_BASE (floatus,  2, 0, floatunssisf2, (SF, UI)),
  N2FPU_INSN_DEF_BASE (floatid,  2, 0, floatsidf2,    (DF, SI)),
  N2FPU_INSN_DEF_BASE (floatud,  2, 0, floatunssidf2, (DF, UI)),
  N2FPU_INSN_DEF_BASE (round,    2, N2F_NO_ERRNO, lroundsfsi2, (SI, SF)),
  N2FPU_INSN_DEF_BASE (fixsi,    2, 0, fix_truncsfsi2,     (SI, SF)),
  N2FPU_INSN_DEF_BASE (fixsu,    2, 0, fixuns_truncsfsi2,  (UI, SF)),
  N2FPU_INSN_DEF_BASE (fixdi,    2, 0, fix_truncdfsi2,     (SI, DF)),
  N2FPU_INSN_DEF_BASE (fixdu,    2, 0, fixuns_truncdfsi2,  (UI, DF)),
  N2FPU_INSN_DEF_BASE (fextsd,   2, 0, extendsfdf2,   (DF, SF)),
  N2FPU_INSN_DEF_BASE (ftruncds, 2, 0, truncdfsf2,    (SF, DF)),

  /* X, Y access instructions.  */
  N2FPU_INSN_DEF_BASE (fwrx,     2, N2F_DFREQ, nios2_fwrx,   (VOID, DF)),
  N2FPU_INSN_DEF_BASE (fwry,     2, N2F_DFREQ, nios2_fwry,   (VOID, SF)),
  N2FPU_INSN_DEF_BASE (frdxlo,   1, N2F_DFREQ, nios2_frdxlo, (SF)),
  N2FPU_INSN_DEF_BASE (frdxhi,   1, N2F_DFREQ, nios2_frdxhi, (SF)),
  N2FPU_INSN_DEF_BASE (frdy,     1, N2F_DFREQ, nios2_frdy,   (SF))
};
302
303 /* Some macros for ease of access. */
304 #define N2FPU(code) nios2_fpu_insn[(int) code]
305 #define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
306 #define N2FPU_N(code) (*N2FPU(code).optvar)
307 #define N2FPU_NAME(code) (N2FPU(code).name)
308 #define N2FPU_ICODE(code) (N2FPU(code).icode)
309 #define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
310 #define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
311 #define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
312 #define N2FPU_NO_ERRNO_P(code) (N2FPU(code).flags & N2F_NO_ERRNO)
313 #define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
314 #define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
315
316 /* Same as above, but for cases where using only the op part is shorter. */
317 #define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
318 #define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
319 #define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
320
321 /* Export the FPU insn enabled predicate to nios2.md. */
322 bool
323 nios2_fpu_insn_enabled (enum n2fpu_code code)
324 {
325 return N2FPU_ENABLED_P (code);
326 }
327
328 /* Return true if COND comparison for mode MODE is enabled under current
329 settings. */
330
331 static bool
332 nios2_fpu_compare_enabled (enum rtx_code cond, machine_mode mode)
333 {
334 if (mode == SFmode)
335 switch (cond)
336 {
337 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqs);
338 case NE: return N2FPU_OP_ENABLED_P (fcmpnes);
339 case GT: return N2FPU_OP_ENABLED_P (fcmpgts);
340 case GE: return N2FPU_OP_ENABLED_P (fcmpges);
341 case LT: return N2FPU_OP_ENABLED_P (fcmplts);
342 case LE: return N2FPU_OP_ENABLED_P (fcmples);
343 default: break;
344 }
345 else if (mode == DFmode)
346 switch (cond)
347 {
348 case EQ: return N2FPU_OP_ENABLED_P (fcmpeqd);
349 case NE: return N2FPU_OP_ENABLED_P (fcmpned);
350 case GT: return N2FPU_OP_ENABLED_P (fcmpgtd);
351 case GE: return N2FPU_OP_ENABLED_P (fcmpged);
352 case LT: return N2FPU_OP_ENABLED_P (fcmpltd);
353 case LE: return N2FPU_OP_ENABLED_P (fcmpled);
354 default: break;
355 }
356 return false;
357 }
358
359 /* Stack layout and calling conventions. */
360
361 #define NIOS2_STACK_ALIGN(LOC) \
362 (((LOC) + ((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1)) \
363 & ~((PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) - 1))
364
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  Also fills in cfun->machine with the save mask and
   the various frame sizes/offsets.  */
static int
nios2_compute_frame_layout (void)
{
  unsigned int regno;
  unsigned int save_mask = 0;
  int total_size;
  int var_size;
  int out_args_size;
  int save_reg_size;
  int callee_save_reg_size;

  /* Reuse the cached layout once it has been finalized (post-reload).  */
  if (cfun->machine->initialized)
    return cfun->machine->total_size;

  /* Calculate space needed for gp registers.  */
  save_reg_size = 0;
  for (regno = 0; regno <= LAST_GP_REG; regno++)
    if (prologue_saved_reg_p (regno))
      {
	save_mask |= 1 << regno;
	save_reg_size += 4;
      }

  /* If we are saving any callee-save register, then assume
     push.n/pop.n should be used.  Make sure RA is saved, and
     contiguous registers starting from r16-- are all saved.  */
  if (TARGET_HAS_CDX && save_reg_size != 0)
    {
      if ((save_mask & (1 << RA_REGNO)) == 0)
	{
	  save_mask |= 1 << RA_REGNO;
	  save_reg_size += 4;
	}

      for (regno = 23; regno >= 16; regno--)
	if ((save_mask & (1 << regno)) != 0)
	  {
	    /* Starting from highest numbered callee-saved
	       register that is used, make sure all regs down
	       to r16 is saved, to maintain contiguous range
	       for push.n/pop.n.  */
	    unsigned int i;
	    for (i = regno - 1; i >= 16; i--)
	      if ((save_mask & (1 << i)) == 0)
		{
		  save_mask |= 1 << i;
		  save_reg_size += 4;
		}
	    break;
	  }
    }

  /* Remember the callee-saved portion before EH regs are added.  */
  callee_save_reg_size = save_reg_size;

  /* If we call eh_return, we need to save the EH data registers.  */
  if (crtl->calls_eh_return)
    {
      unsigned i;
      unsigned r;

      for (i = 0; (r = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
	if (!(save_mask & (1 << r)))
	  {
	    save_mask |= 1 << r;
	    save_reg_size += 4;
	  }
    }

  /* Offset of FP's save slot within the register save area: one word
     for each lower-numbered register that is also saved.  */
  cfun->machine->fp_save_offset = 0;
  if (save_mask & (1 << HARD_FRAME_POINTER_REGNUM))
    {
      int fp_save_offset = 0;
      for (regno = 0; regno < HARD_FRAME_POINTER_REGNUM; regno++)
	if (save_mask & (1 << regno))
	  fp_save_offset += 4;

      cfun->machine->fp_save_offset = fp_save_offset;
    }

  var_size = NIOS2_STACK_ALIGN (get_frame_size ());
  out_args_size = NIOS2_STACK_ALIGN (crtl->outgoing_args_size);
  total_size = var_size + out_args_size;

  save_reg_size = NIOS2_STACK_ALIGN (save_reg_size);
  total_size += save_reg_size;
  total_size += NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);

  /* Save other computed information.  */
  cfun->machine->save_mask = save_mask;
  cfun->machine->total_size = total_size;
  cfun->machine->var_size = var_size;
  cfun->machine->args_size = out_args_size;
  cfun->machine->save_reg_size = save_reg_size;
  cfun->machine->callee_save_reg_size = callee_save_reg_size;
  /* The layout is only cacheable once reload has finished.  */
  cfun->machine->initialized = reload_completed;
  cfun->machine->save_regs_offset = out_args_size + var_size;

  return total_size;
}
466
467 /* Generate save/restore of register REGNO at SP + OFFSET. Used by the
468 prologue/epilogue expand routines. */
469 static void
470 save_reg (int regno, unsigned offset)
471 {
472 rtx reg = gen_rtx_REG (SImode, regno);
473 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
474 rtx_insn *insn = emit_move_insn (gen_frame_mem (Pmode, addr), reg);
475 RTX_FRAME_RELATED_P (insn) = 1;
476 }
477
478 static void
479 restore_reg (int regno, unsigned offset)
480 {
481 rtx reg = gen_rtx_REG (SImode, regno);
482 rtx addr = plus_constant (Pmode, stack_pointer_rtx, offset, false);
483 rtx_insn *insn = emit_move_insn (reg, gen_frame_mem (Pmode, addr));
484 /* Tag epilogue unwind note. */
485 add_reg_note (insn, REG_CFA_RESTORE, reg);
486 RTX_FRAME_RELATED_P (insn) = 1;
487 }
488
489 /* This routine tests for the base register update SET in load/store
490 multiple RTL insns, used in pop_operation_p and ldstwm_operation_p. */
491 static bool
492 base_reg_adjustment_p (rtx set, rtx *base_reg, rtx *offset)
493 {
494 if (GET_CODE (set) == SET
495 && REG_P (SET_DEST (set))
496 && GET_CODE (SET_SRC (set)) == PLUS
497 && REG_P (XEXP (SET_SRC (set), 0))
498 && rtx_equal_p (SET_DEST (set), XEXP (SET_SRC (set), 0))
499 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
500 {
501 *base_reg = XEXP (SET_SRC (set), 0);
502 *offset = XEXP (SET_SRC (set), 1);
503 return true;
504 }
505 return false;
506 }
507
/* Does the CFA note work for push/pop prologue/epilogue instructions.
   INSN is a parallel multi-register load/store; EPILOGUE_P selects pop
   (true) versus push (false) handling.  */
static void
nios2_create_cfa_notes (rtx_insn *insn, bool epilogue_p)
{
  int i = 0;
  rtx base_reg, offset, elt, pat = PATTERN (insn);
  if (epilogue_p)
    {
      /* Skip a leading (return) element, if present.  */
      elt = XVECEXP (pat, 0, 0);
      if (GET_CODE (elt) == RETURN)
	i++;
      /* A base-register (SP) adjustment element becomes a CFA-adjust
	 note.  */
      elt = XVECEXP (pat, 0, i);
      if (base_reg_adjustment_p (elt, &base_reg, &offset))
	{
	  add_reg_note (insn, REG_CFA_ADJUST_CFA, copy_rtx (elt));
	  i++;
	}
      /* All remaining elements are register restores.  */
      for (; i < XVECLEN (pat, 0); i++)
	{
	  elt = SET_DEST (XVECEXP (pat, 0, i));
	  gcc_assert (REG_P (elt));
	  add_reg_note (insn, REG_CFA_RESTORE, elt);
	}
    }
  else
    {
      /* Tag each of the prologue sets.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
    }
}
539
540 /* Temp regno used inside prologue/epilogue. */
541 #define TEMP_REG_NUM 8
542
543 /* Emit conditional trap for checking stack limit. SIZE is the number of
544 additional bytes required.
545
546 GDB prologue analysis depends on this generating a direct comparison
547 to the SP register, so the adjustment to add SIZE needs to be done on
548 the other operand to the comparison. Use TEMP_REG_NUM as a temporary,
549 if necessary. */
550 static void
551 nios2_emit_stack_limit_check (int size)
552 {
553 rtx sum = NULL_RTX;
554
555 if (GET_CODE (stack_limit_rtx) == SYMBOL_REF)
556 {
557 /* This generates a %hiadj/%lo pair with the constant size
558 add handled by the relocations. */
559 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
560 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
561 }
562 else if (!REG_P (stack_limit_rtx))
563 sorry ("Unknown form for stack limit expression");
564 else if (size == 0)
565 sum = stack_limit_rtx;
566 else if (SMALL_INT (size))
567 {
568 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
569 emit_move_insn (sum, plus_constant (Pmode, stack_limit_rtx, size));
570 }
571 else
572 {
573 sum = gen_rtx_REG (Pmode, TEMP_REG_NUM);
574 emit_move_insn (sum, gen_int_mode (size, Pmode));
575 emit_insn (gen_add2_insn (sum, stack_limit_rtx));
576 }
577
578 emit_insn (gen_ctrapsi4 (gen_rtx_LTU (VOIDmode, stack_pointer_rtx, sum),
579 stack_pointer_rtx, sum, GEN_INT (3)));
580 }
581
582 static rtx_insn *
583 nios2_emit_add_constant (rtx reg, HOST_WIDE_INT immed)
584 {
585 rtx_insn *insn;
586 if (SMALL_INT (immed))
587 insn = emit_insn (gen_add2_insn (reg, gen_int_mode (immed, Pmode)));
588 else
589 {
590 rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
591 emit_move_insn (tmp, gen_int_mode (immed, Pmode));
592 insn = emit_insn (gen_add2_insn (reg, tmp));
593 }
594 return insn;
595 }
596
/* Adjust the stack pointer by SP_ADJUST bytes, attaching the unwind
   note appropriate for a prologue or (EPILOGUE_P) epilogue.  Return
   the add insn, or NULL when SP_ADJUST is zero.  */
static rtx_insn *
nios2_adjust_stack (int sp_adjust, bool epilogue_p)
{
  enum reg_note note_kind = REG_NOTE_MAX;
  rtx_insn *insn = NULL;
  if (sp_adjust)
    {
      if (SMALL_INT (sp_adjust))
	insn = emit_insn (gen_add2_insn (stack_pointer_rtx,
					 gen_int_mode (sp_adjust, Pmode)));
      else
	{
	  rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
	  emit_move_insn (tmp, gen_int_mode (sp_adjust, Pmode));
	  insn = emit_insn (gen_add2_insn (stack_pointer_rtx, tmp));
	  /* Attach a note indicating what happened.  In a prologue the
	     temp-register sequence needs an explicit FRAME_RELATED_EXPR
	     describing the net SP change.  */
	  if (!epilogue_p)
	    note_kind = REG_FRAME_RELATED_EXPR;
	}
      /* Epilogue adjustments always get a CFA-adjust note.  */
      if (epilogue_p)
	note_kind = REG_CFA_ADJUST_CFA;
      if (note_kind != REG_NOTE_MAX)
	{
	  rtx cfa_adj = gen_rtx_SET (stack_pointer_rtx,
				     plus_constant (Pmode, stack_pointer_rtx,
						    sp_adjust));
	  add_reg_note (insn, note_kind, cfa_adj);
	}
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  return insn;
}
629
/* Expand the function prologue: allocate the stack frame, save the
   registers recorded by nios2_compute_frame_layout, and set up the
   frame pointer and PIC register as needed.  */
void
nios2_expand_prologue (void)
{
  unsigned int regno;
  int total_frame_size, save_offset;
  int sp_offset;      /* offset from base_reg to final stack value.  */
  int save_regs_base; /* offset from base_reg to register save area.  */
  rtx_insn *insn;

  total_frame_size = nios2_compute_frame_layout ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_frame_size;

  /* When R2 CDX push.n/stwm is available, arrange for stack frame to be built
     using them.  */
  if (TARGET_HAS_CDX
      && (cfun->machine->save_reg_size != 0
	  || cfun->machine->uses_anonymous_args))
    {
      unsigned int regmask = cfun->machine->save_mask;
      unsigned int callee_save_regs = regmask & 0xffff0000;
      unsigned int caller_save_regs = regmask & 0x0000ffff;
      int push_immed = 0;
      int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
      rtx stack_mem =
	gen_frame_mem (SImode, plus_constant (Pmode, stack_pointer_rtx, -4));

      /* Check that there is room for the entire stack frame before doing
	 any SP adjustments or pushes.  */
      if (crtl->limit_stack)
	nios2_emit_stack_limit_check (total_frame_size);

      if (pretend_args_size)
	{
	  if (cfun->machine->uses_anonymous_args)
	    {
	      /* Emit a stwm to push copy of argument registers onto
	         the stack for va_arg processing.  */
	      unsigned int r, mask = 0, n = pretend_args_size / 4;
	      for (r = LAST_ARG_REGNO - n + 1; r <= LAST_ARG_REGNO; r++)
		mask |= (1 << r);
	      insn = emit_insn (nios2_ldst_parallel
				(false, false, false, stack_mem,
				 -pretend_args_size, mask, false));
	      /* Tag first SP adjustment as frame-related.  */
	      RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 0)) = 1;
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    nios2_adjust_stack (-pretend_args_size, false);
	}
      if (callee_save_regs)
	{
	  /* Emit a push.n to save registers and optionally allocate
	     push_immed extra bytes on the stack.  */
	  int sp_adjust;
	  if (caller_save_regs)
	    /* Can't allocate extra stack space yet.  */
	    push_immed = 0;
	  else if (cfun->machine->save_regs_offset <= 60)
	    /* Stack adjustment fits entirely in the push.n.  */
	    push_immed = cfun->machine->save_regs_offset;
	  else if (frame_pointer_needed
		   && cfun->machine->fp_save_offset == 0)
	    /* Deferring the entire stack adjustment until later
	       allows us to use a mov.n instead of a 32-bit addi
	       instruction to set the frame pointer.  */
	    push_immed = 0;
	  else
	    /* Splitting the stack adjustment between the push.n
	       and an explicit adjustment makes it more likely that
	       we can use spdeci.n for the explicit part.  */
	    push_immed = 60;
	  sp_adjust = -(cfun->machine->callee_save_reg_size + push_immed);
	  insn = emit_insn (nios2_ldst_parallel (false, false, false,
						 stack_mem, sp_adjust,
						 callee_save_regs, false));
	  nios2_create_cfa_notes (insn, false);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      if (caller_save_regs)
	{
	  /* Emit a stwm to save the EH data regs, r4-r7.  */
	  int caller_save_size = (cfun->machine->save_reg_size
				  - cfun->machine->callee_save_reg_size);
	  gcc_assert ((caller_save_regs & ~0xf0) == 0);
	  insn = emit_insn (nios2_ldst_parallel
			    (false, false, false, stack_mem,
			     -caller_save_size, caller_save_regs, false));
	  nios2_create_cfa_notes (insn, false);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      save_regs_base = push_immed;
      sp_offset = -(cfun->machine->save_regs_offset - push_immed);
    }
  /* The non-CDX cases decrement the stack pointer, to prepare for individual
     register saves to the stack.  */
  else if (!SMALL_INT (total_frame_size))
    {
      /* We need an intermediary point, this will point at the spill block.  */
      nios2_adjust_stack (cfun->machine->save_regs_offset - total_frame_size,
			  false);
      save_regs_base = 0;
      sp_offset = -cfun->machine->save_regs_offset;
      if (crtl->limit_stack)
	nios2_emit_stack_limit_check (cfun->machine->save_regs_offset);
    }
  else if (total_frame_size)
    {
      nios2_adjust_stack (-total_frame_size, false);
      save_regs_base = cfun->machine->save_regs_offset;
      sp_offset = 0;
      if (crtl->limit_stack)
	nios2_emit_stack_limit_check (0);
    }
  else
    save_regs_base = sp_offset = 0;

  /* Save the registers individually in the non-CDX case.  */
  if (!TARGET_HAS_CDX)
    {
      save_offset = save_regs_base + cfun->machine->save_reg_size;

      for (regno = LAST_GP_REG; regno > 0; regno--)
	if (cfun->machine->save_mask & (1 << regno))
	  {
	    save_offset -= 4;
	    save_reg (regno, save_offset);
	  }
    }

  /* Set the hard frame pointer.  */
  if (frame_pointer_needed)
    {
      int fp_save_offset = save_regs_base + cfun->machine->fp_save_offset;
      insn =
	(fp_save_offset == 0
	 ? emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx)
	 : emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
				     stack_pointer_rtx,
				     gen_int_mode (fp_save_offset, Pmode))));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate sp_offset more bytes in the stack frame.  */
  nios2_adjust_stack (sp_offset, false);

  /* Load the PIC register if needed.  */
  if (crtl->uses_pic_offset_table)
    nios2_load_pic_register ();

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());
}
789
/* Expand the function epilogue: restore saved registers and deallocate
   the frame.  If SIBCALL_P, this is being emitted before a sibling
   call, so do not emit the final return jump.  */
void
nios2_expand_epilogue (bool sibcall_p)
{
  rtx_insn *insn;
  rtx cfa_adj;
  int total_frame_size;
  int sp_adjust, save_offset;
  unsigned int regno;

  /* When no epilogue work is needed at all, a bare return suffices.  */
  if (!sibcall_p && nios2_can_use_return_insn ())
    {
      emit_jump_insn (gen_return ());
      return;
    }

  /* Block scheduling from moving insns across the epilogue boundary.  */
  emit_insn (gen_blockage ());

  total_frame_size = nios2_compute_frame_layout ();
  if (frame_pointer_needed)
    {
      /* Recover the stack pointer.  */
      insn =
	(cfun->machine->fp_save_offset == 0
	 ? emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx)
	 : emit_insn (gen_add3_insn
		      (stack_pointer_rtx, hard_frame_pointer_rtx,
		       gen_int_mode (-cfun->machine->fp_save_offset, Pmode))));
      cfa_adj = plus_constant (Pmode, stack_pointer_rtx,
			       (total_frame_size
				- cfun->machine->save_regs_offset));
      add_reg_note (insn, REG_CFA_DEF_CFA, cfa_adj);
      RTX_FRAME_RELATED_P (insn) = 1;

      save_offset = 0;
      sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
    }
  else if (!SMALL_INT (total_frame_size))
    {
      nios2_adjust_stack (cfun->machine->save_regs_offset, true);
      save_offset = 0;
      sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
    }
  else
    {
      save_offset = cfun->machine->save_regs_offset;
      sp_adjust = total_frame_size;
    }

  if (!TARGET_HAS_CDX)
    {
      /* Generate individual register restores.  */
      save_offset += cfun->machine->save_reg_size;

      for (regno = LAST_GP_REG; regno > 0; regno--)
	if (cfun->machine->save_mask & (1 << regno))
	  {
	    save_offset -= 4;
	    restore_reg (regno, save_offset);
	  }
      nios2_adjust_stack (sp_adjust, true);
    }
  else if (cfun->machine->save_reg_size == 0)
    {
      /* Nothing to restore, just recover the stack position.  */
      nios2_adjust_stack (sp_adjust, true);
    }
  else
    {
      /* Emit CDX pop.n/ldwm to restore registers and optionally return.  */
      unsigned int regmask = cfun->machine->save_mask;
      unsigned int callee_save_regs = regmask & 0xffff0000;
      unsigned int caller_save_regs = regmask & 0x0000ffff;
      int callee_save_size = cfun->machine->callee_save_reg_size;
      int caller_save_size = cfun->machine->save_reg_size - callee_save_size;
      int pretend_args_size = NIOS2_STACK_ALIGN (crtl->args.pretend_args_size);
      bool ret_p = (!pretend_args_size && !crtl->calls_eh_return
		    && !sibcall_p);

      /* When the return can be folded into the pop.n, defer up to 60
	 bytes of the adjustment to the pop.n itself.  */
      if (!ret_p || caller_save_size > 0)
	sp_adjust = save_offset;
      else
	sp_adjust = (save_offset > 60 ? save_offset - 60 : 0);

      save_offset -= sp_adjust;

      nios2_adjust_stack (sp_adjust, true);

      if (caller_save_regs)
	{
	  /* Emit a ldwm to restore EH data regs.  */
	  rtx stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
	  insn = emit_insn (nios2_ldst_parallel
			    (true, true, true, stack_mem,
			     caller_save_size, caller_save_regs, false));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  nios2_create_cfa_notes (insn, true);
	}

      if (callee_save_regs)
	{
	  int sp_adjust = save_offset + callee_save_size;
	  rtx stack_mem;
	  if (ret_p)
	    {
	      /* Emit a pop.n to restore regs and return.  */
	      stack_mem =
		gen_frame_mem (SImode,
			       gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					     gen_int_mode (sp_adjust - 4,
							   Pmode)));
	      insn =
		emit_jump_insn (nios2_ldst_parallel (true, false, false,
						     stack_mem, sp_adjust,
						     callee_save_regs, ret_p));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      /* No need to attach CFA notes since we cannot step over
		 a return.  */
	      return;
	    }
	  else
	    {
	      /* If no return, we have to use the ldwm form.  */
	      stack_mem = gen_frame_mem (SImode, stack_pointer_rtx);
	      insn =
		emit_insn (nios2_ldst_parallel (true, true, true,
						stack_mem, sp_adjust,
						callee_save_regs, ret_p));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      nios2_create_cfa_notes (insn, true);
	    }
	}

      if (pretend_args_size)
	nios2_adjust_stack (pretend_args_size, true);
    }

  /* Add in the __builtin_eh_return stack adjustment.  */
  if (crtl->calls_eh_return)
    emit_insn (gen_add2_insn (stack_pointer_rtx, EH_RETURN_STACKADJ_RTX));

  if (!sibcall_p)
    emit_jump_insn (gen_simple_return ());
}
933
/* Try to expand the function return as a single CDX pop.n that both
   deallocates the frame and returns.  Return true on success; on
   false the caller must emit an ordinary return sequence.  */
bool
nios2_expand_return (void)
{
  /* If CDX is available, generate a pop.n instruction to do both
     the stack pop and return.  */
  if (TARGET_HAS_CDX)
    {
      int total_frame_size = nios2_compute_frame_layout ();
      int sp_adjust = (cfun->machine->save_regs_offset
		       + cfun->machine->callee_save_reg_size);
      gcc_assert (sp_adjust == total_frame_size);
      if (sp_adjust != 0)
	{
	  rtx mem =
	    gen_frame_mem (SImode,
			   plus_constant (Pmode, stack_pointer_rtx,
					  sp_adjust - 4, false));
	  rtx_insn *insn =
	    emit_jump_insn (nios2_ldst_parallel (true, false, false,
						 mem, sp_adjust,
						 cfun->machine->save_mask,
						 true));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  /* No need to create CFA notes since we can't step over
	     a return.  */
	  return true;
	}
    }
  return false;
}
964
965 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
966 back to a previous frame. */
967 rtx
968 nios2_get_return_address (int count)
969 {
970 if (count != 0)
971 return const0_rtx;
972
973 return get_hard_reg_initial_val (Pmode, RA_REGNO);
974 }
975
/* Emit code to change the current function's return address to
   ADDRESS.  SCRATCH is available as a scratch register, if needed.
   ADDRESS and SCRATCH are both word-mode GPRs.  */
void
nios2_set_return_address (rtx address, rtx scratch)
{
  nios2_compute_frame_layout ();
  if (cfun->machine->save_mask & (1 << RA_REGNO))
    {
      /* RA was saved to the stack; store ADDRESS into its save slot,
	 which is the highest-addressed word of the register-save area
	 (offset save_reg_size - 4 from the area's base).  */
      unsigned offset = cfun->machine->save_reg_size - 4;
      rtx base;
      
      if (frame_pointer_needed)
	base = hard_frame_pointer_rtx;
      else
	{
	  base = stack_pointer_rtx;
	  offset += cfun->machine->save_regs_offset;

	  if (!SMALL_INT (offset))
	    {
	      /* The offset is out of SMALL_INT range for a direct
		 memory reference; compute the address into SCRATCH.  */
	      emit_move_insn (scratch, gen_int_mode (offset, Pmode));
	      emit_insn (gen_add2_insn (scratch, base));
	      base = scratch;
	      offset = 0;
	    }
	}
      if (offset)
	base = plus_constant (Pmode, base, offset);
      emit_move_insn (gen_rtx_MEM (Pmode, base), address);
    }
  else
    /* RA is not saved anywhere; simply overwrite the register.  */
    emit_move_insn (gen_rtx_REG (Pmode, RA_REGNO), address);
}
1010
/* Implement FUNCTION_PROFILER macro.  Emit assembly for the -pg
   _mcount call, preserving the return address around the call.  */
void
nios2_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  /* Save ra in r8 so it can be restored after the _mcount call.  */
  fprintf (file, "\tmov\tr8, ra\n");
  if (flag_pic == 1)
    {
      /* -fpic: compute the GOT base into r2 from the current PC, then
	 load the address of _mcount from the GOT with a %call reloc.  */
      fprintf (file, "\tnextpc\tr2\n");
      fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
      fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
      fprintf (file, "\tadd\tr2, r2, r3\n");
      fprintf (file, "\tldw\tr2, %%call(_mcount)(r2)\n");
      fprintf (file, "\tcallr\tr2\n");
    }
  else if (flag_pic == 2)
    {
      /* -fPIC: as above, but build the GOT offset of _mcount with a
	 %call_hiadj/%call_lo pair, so it is not limited to a 16-bit
	 immediate field.  */
      fprintf (file, "\tnextpc\tr2\n");
      fprintf (file, "\t1: movhi\tr3, %%hiadj(_gp_got - 1b)\n");
      fprintf (file, "\taddi\tr3, r3, %%lo(_gp_got - 1b)\n");
      fprintf (file, "\tadd\tr2, r2, r3\n");
      fprintf (file, "\tmovhi\tr3, %%call_hiadj(_mcount)\n");
      fprintf (file, "\taddi\tr3, r3, %%call_lo(_mcount)\n");
      fprintf (file, "\tadd\tr3, r2, r3\n");
      fprintf (file, "\tldw\tr2, 0(r3)\n");
      fprintf (file, "\tcallr\tr2\n");
    }
  else
    /* Non-PIC: a direct call reaches _mcount.  */
    fprintf (file, "\tcall\t_mcount\n");
  fprintf (file, "\tmov\tra, r8\n");
}
1041
/* Dump stack layout.  Debug helper: write the current function's frame
   layout fields to FILE as assembler comments.  */
static void
nios2_dump_frame_layout (FILE *file)
{
  fprintf (file, "\t%s Current Frame Info\n", ASM_COMMENT_START);
  fprintf (file, "\t%s total_size = %d\n", ASM_COMMENT_START,
	   cfun->machine->total_size);
  fprintf (file, "\t%s var_size = %d\n", ASM_COMMENT_START,
	   cfun->machine->var_size);
  fprintf (file, "\t%s args_size = %d\n", ASM_COMMENT_START,
	   cfun->machine->args_size);
  fprintf (file, "\t%s save_reg_size = %d\n", ASM_COMMENT_START,
	   cfun->machine->save_reg_size);
  fprintf (file, "\t%s initialized = %d\n", ASM_COMMENT_START,
	   cfun->machine->initialized);
  fprintf (file, "\t%s save_regs_offset = %d\n", ASM_COMMENT_START,
	   cfun->machine->save_regs_offset);
  fprintf (file, "\t%s is_leaf = %d\n", ASM_COMMENT_START,
	   crtl->is_leaf);
  fprintf (file, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START,
	   frame_pointer_needed);
  fprintf (file, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START,
	   crtl->args.pretend_args_size);
}
1066
1067 /* Return true if REGNO should be saved in the prologue. */
1068 static bool
1069 prologue_saved_reg_p (unsigned regno)
1070 {
1071 gcc_assert (GP_REG_P (regno));
1072
1073 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
1074 return true;
1075
1076 if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
1077 return true;
1078
1079 if (regno == PIC_OFFSET_TABLE_REGNUM && crtl->uses_pic_offset_table)
1080 return true;
1081
1082 if (regno == RA_REGNO && df_regs_ever_live_p (RA_REGNO))
1083 return true;
1084
1085 return false;
1086 }
1087
1088 /* Implement TARGET_CAN_ELIMINATE. */
1089 static bool
1090 nios2_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1091 {
1092 if (to == STACK_POINTER_REGNUM)
1093 return !frame_pointer_needed;
1094 return true;
1095 }
1096
/* Implement INITIAL_ELIMINATION_OFFSET macro.  Return the offset to
   add when replacing a reference to register FROM with register TO.  */
int
nios2_initial_elimination_offset (int from, int to)
{
  int offset;

  /* Make sure the cached frame layout is up to date.  */
  nios2_compute_frame_layout ();

  /* Set OFFSET to the offset from the stack pointer.  */
  switch (from)
    {
    case FRAME_POINTER_REGNUM:
      /* The soft frame pointer is args_size above the stack pointer.  */
      offset = cfun->machine->args_size;
      break;

    case ARG_POINTER_REGNUM:
      /* The argument pointer is total_size above the stack pointer,
	 minus any pretend (stdarg register save) arguments.  */
      offset = cfun->machine->total_size;
      offset -= crtl->args.pretend_args_size;
      break;

    default:
      gcc_unreachable ();
    }

  /* If we are asked for the frame pointer offset, then adjust OFFSET
     by the offset from the frame pointer to the stack pointer.  */
  if (to == HARD_FRAME_POINTER_REGNUM)
    offset -= (cfun->machine->save_regs_offset
	       + cfun->machine->fp_save_offset);

  return offset;
}
1129
/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
nios2_can_use_return_insn (void)
{
  int total_frame_size;

  /* The frame layout is only final after reload, and profiled
     functions need a real epilogue around the _mcount call.  */
  if (!reload_completed || crtl->profile)
    return 0;

  total_frame_size = nios2_compute_frame_layout ();

  /* If CDX is available, check if we can return using a
     single pop.n instruction.  The save mask must include bit 31
     (the return-address register) and no low registers, and the
     save area must be close to the stack pointer.  */
  if (TARGET_HAS_CDX
      && !frame_pointer_needed
      && cfun->machine->save_regs_offset <= 60
      && (cfun->machine->save_mask & 0x80000000) != 0
      && (cfun->machine->save_mask & 0xffff) == 0
      && crtl->args.pretend_args_size == 0)
    return true;

  /* Otherwise only a completely empty frame allows a bare return.  */
  return total_frame_size == 0;
}
1155
1156 \f
/* Check and signal some warnings/errors on FPU insn options.  */
static void
nios2_custom_check_insns (void)
{
  unsigned int i, j;
  bool errors = false;

  /* If any enabled custom insn is double-precision, then every insn
     flagged as required for double precision must also be enabled.
     One pass over the requirements suffices, hence the break out of
     the outer loop after the first enabled double insn is found.  */
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    if (N2FPU_ENABLED_P (i) && N2FPU_DOUBLE_P (i))
      {
	for (j = 0; j < ARRAY_SIZE (nios2_fpu_insn); j++)
	  if (N2FPU_DOUBLE_REQUIRED_P (j) && ! N2FPU_ENABLED_P (j))
	    {
	      error ("switch %<-mcustom-%s%> is required for double "
		     "precision floating point", N2FPU_NAME (j));
	      errors = true;
	    }
	break;
      }

  /* Warn if the user has certain exotic operations that won't get used
     without -funsafe-math-optimizations.  See expand_builtin () in
     builtins.c.  */
  if (!flag_unsafe_math_optimizations)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-funsafe-math-optimizations is specified", N2FPU_NAME (i));

  /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
     get used without -ffinite-math-only.  See fold_builtin_fmin_fmax ()
     in builtins.c.  */
  if (!flag_finite_math_only)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-ffinite-math-only is specified", N2FPU_NAME (i));

  /* Warn if the user is trying to use a custom rounding instruction
     that won't get used without -fno-math-errno.  See
     expand_builtin_int_roundingfn_2 () in builtins.c.  */
  if (flag_errno_math)
    for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
      if (N2FPU_ENABLED_P (i) && N2FPU_NO_ERRNO_P (i))
	warning (0, "switch %<-mcustom-%s%> has no effect unless "
		 "-fno-math-errno is specified", N2FPU_NAME (i));

  /* Inconsistent custom-code assignments cannot be compiled around,
     so they are fatal.  */
  if (errors || custom_code_conflict)
    fatal_error (input_location,
		 "conflicting use of -mcustom switches, target attributes, "
		 "and/or __builtin_custom_ functions");
}
1209
/* Record that FPU operation CODE is implemented by custom instruction
   number N.  An existing assignment is only replaced when OVERRIDE_P
   is true.  The (N, CODE) pair is always registered so conflicting
   uses of custom code N can be diagnosed later.  */
static void
nios2_set_fpu_custom_code (enum n2fpu_code code, int n, bool override_p)
{
  if (override_p || N2FPU_N (code) == -1)
    N2FPU_N (code) = n;
  nios2_register_custom_code (n, CCS_FPU, (int) code);
}
1217
/* Type to represent a standard FPU config.  */
struct nios2_fpu_config
{
  const char *name;		/* Config name, e.g. "60-1".  */
  bool set_sp_constants;	/* Set flag_single_precision_constant?  */
  int code[n2fpu_code_num];	/* Custom insn number per FPU op, -1 if
				   the op is not part of this config.  */
};

/* Table of the standard configs selectable with -mcustom-fpu-cfg=.  */
#define NIOS2_FPU_CONFIG_NUM 3
static struct nios2_fpu_config custom_fpu_config[NIOS2_FPU_CONFIG_NUM];
1228
/* Fill in the table of standard FPU configurations that can be
   selected with -mcustom-fpu-cfg=.  */
static void
nios2_init_fpu_configs (void)
{
  struct nios2_fpu_config* cfg;
  int i = 0;
  /* Advance to the next table entry; the memset of -1 marks every
     operation's custom code as unassigned by default.  */
#define NEXT_FPU_CONFIG \
  do { \
    cfg = &custom_fpu_config[i++]; \
    memset (cfg, -1, sizeof (struct nios2_fpu_config));\
  } while (0)

  NEXT_FPU_CONFIG;
  cfg->name = "60-1";
  cfg->set_sp_constants = true;
  cfg->code[n2fpu_fmuls] = 252;
  cfg->code[n2fpu_fadds] = 253;
  cfg->code[n2fpu_fsubs] = 254;

  NEXT_FPU_CONFIG;
  cfg->name = "60-2";
  cfg->set_sp_constants = true;
  cfg->code[n2fpu_fmuls] = 252;
  cfg->code[n2fpu_fadds] = 253;
  cfg->code[n2fpu_fsubs] = 254;
  cfg->code[n2fpu_fdivs] = 255;

  NEXT_FPU_CONFIG;
  cfg->name = "72-3";
  cfg->set_sp_constants = true;
  cfg->code[n2fpu_floatus] = 243;
  cfg->code[n2fpu_fixsi] = 244;
  cfg->code[n2fpu_floatis] = 245;
  cfg->code[n2fpu_fcmpgts] = 246;
  cfg->code[n2fpu_fcmples] = 249;
  cfg->code[n2fpu_fcmpeqs] = 250;
  cfg->code[n2fpu_fcmpnes] = 251;
  cfg->code[n2fpu_fmuls] = 252;
  cfg->code[n2fpu_fadds] = 253;
  cfg->code[n2fpu_fsubs] = 254;
  cfg->code[n2fpu_fdivs] = 255;

#undef NEXT_FPU_CONFIG
  /* Every table slot must have been initialized above.  */
  gcc_assert (i == NIOS2_FPU_CONFIG_NUM);
}
1273
/* Look up a standard FPU config by name.  CFGNAME is the name to
   match; ENDP, if non-NULL, marks the end of the name substring within
   CFGNAME.  Returns NULL when nothing matches.  Note that with ENDP
   only the leading ENDP - CFGNAME characters are compared, so a config
   whose name merely begins with that substring will match.  */
static struct nios2_fpu_config *
nios2_match_custom_fpu_cfg (const char *cfgname, const char *endp)
{
  int i;
  for (i = 0; i < NIOS2_FPU_CONFIG_NUM; i++)
    {
      bool match = !(endp != NULL
		     ? strncmp (custom_fpu_config[i].name, cfgname,
				endp - cfgname)
		     : strcmp (custom_fpu_config[i].name, cfgname));
      if (match)
	return &custom_fpu_config[i];
    }
  return NULL;
}
1289
/* Use CFGNAME to lookup FPU config, ENDP if not NULL marks end of string.
   OVERRIDE is true if loaded config codes should overwrite current state.  */
static void
nios2_handle_custom_fpu_cfg (const char *cfgname, const char *endp,
			     bool override)
{
  struct nios2_fpu_config *cfg = nios2_match_custom_fpu_cfg (cfgname, endp);
  if (cfg)
    {
      unsigned int i;
      /* Install every custom code the config assigns; entries of -1
	 mean the operation is not part of this config.  */
      for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
	if (cfg->code[i] >= 0)
	  nios2_set_fpu_custom_code ((enum n2fpu_code) i, cfg->code[i],
				     override);
      if (cfg->set_sp_constants)
	flag_single_precision_constant = 1;
    }
  else
    warning (0, "ignoring unrecognized switch %<-mcustom-fpu-cfg%> "
	     "value %<%s%>", cfgname);

  /* Guard against errors in the standard configurations.  */
  nios2_custom_check_insns ();
}
1314
/* Check individual FPU insn options, and register custom code.
   FPU_INSN_INDEX identifies the operation in the nios2_fpu_insn
   table; its -mcustom-<insn>= value is diagnosed if out of range.  */
static void
nios2_handle_custom_fpu_insn_option (int fpu_insn_index)
{
  int param = N2FPU_N (fpu_insn_index);

  if (0 <= param && param <= 255)
    nios2_register_custom_code (param, CCS_FPU, fpu_insn_index);

  /* Valid values are 0-255, but also allow -1 so that the
     -mno-custom-<opt> switches work.  */
  else if (param != -1)
    error ("switch %<-mcustom-%s%> value %d must be between 0 and 255",
	   N2FPU_NAME (fpu_insn_index), param);
}
1330
/* Allocate a chunk of memory for per-function machine-dependent data.
   The storage is garbage-collected and zero-initialized.  */
static struct machine_function *
nios2_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
1337
/* Implement TARGET_OPTION_OVERRIDE.  Validate the command-line options
   and set up derived target state.  */
static void
nios2_option_override (void)
{
  unsigned int i;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Check for unsupported options.  */
  if (flag_pic && !TARGET_LINUX_ABI)
    sorry ("position-independent code requires the Linux ABI");
  if (flag_pic && stack_limit_rtx
      && GET_CODE (stack_limit_rtx) == SYMBOL_REF)
    sorry ("PIC support for -fstack-limit-symbol");

  /* Function to allocate machine-dependent function status.  */
  init_machine_status = &nios2_init_machine_status;

  /* Size threshold for placing objects in small-data sections;
     an explicit -G on the command line overrides the default.  */
  nios2_section_threshold
    = (global_options_set.x_g_switch_value
       ? g_switch_value : NIOS2_DEFAULT_GVALUE);

  if (nios2_gpopt_option == gpopt_unspecified)
    {
      /* Default to -mgpopt unless -fpic or -fPIC.  */
      if (flag_pic)
	nios2_gpopt_option = gpopt_none;
      else
	nios2_gpopt_option = gpopt_local;
    }

  /* If we don't have mul, we don't have mulx either!  */
  if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
    target_flags &= ~MASK_HAS_MULX;

  /* Optional BMX and CDX instructions only make sense for R2.  */
  if (!TARGET_ARCH_R2)
    {
      if (TARGET_HAS_BMX)
	error ("BMX instructions are only supported with R2 architecture");
      if (TARGET_HAS_CDX)
	error ("CDX instructions are only supported with R2 architecture");
    }

  /* R2 is little-endian only.  */
  if (TARGET_ARCH_R2 && TARGET_BIG_ENDIAN)
    error ("R2 architecture is little-endian only");

  /* Initialize default FPU configurations.  */
  nios2_init_fpu_configs ();

  /* Set up default handling for floating point custom instructions.

     Putting things in this order means that the -mcustom-fpu-cfg=
     switch will always be overridden by individual -mcustom-fadds=
     switches, regardless of the order in which they were specified
     on the command line.

     This behavior of prioritization of individual -mcustom-<insn>=
     options before the -mcustom-fpu-cfg= switch is maintained for
     compatibility.  */
  if (nios2_custom_fpu_cfg_string && *nios2_custom_fpu_cfg_string)
    nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string, NULL, false);

  /* Handle options for individual FPU insns.  */
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    nios2_handle_custom_fpu_insn_option (i);

  /* Diagnose inconsistencies among the accumulated FPU settings.  */
  nios2_custom_check_insns ();

  /* Save the initial options in case the user does function specific
     options.  */
  target_option_default_node = target_option_current_node
    = build_target_option_node (&global_options);
}
1415
1416 \f
1417 /* Return true if CST is a constant within range of movi/movui/movhi. */
1418 static bool
1419 nios2_simple_const_p (const_rtx cst)
1420 {
1421 if (!CONST_INT_P (cst))
1422 return false;
1423 HOST_WIDE_INT val = INTVAL (cst);
1424 return SMALL_INT (val) || SMALL_INT_UNSIGNED (val) || UPPER16_INT (val);
1425 }
1426
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */
static bool
nios2_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
		 int outer_code ATTRIBUTE_UNUSED,
		 int opno ATTRIBUTE_UNUSED,
		 int *total, bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  switch (code)
    {
      case CONST_INT:
	if (INTVAL (x) == 0)
	  {
	    /* Zero is free.  */
	    *total = COSTS_N_INSNS (0);
	    return true;
	  }
	else if (nios2_simple_const_p (x))
	  {
	    /* Loadable with a single movi/movui/movhi.  */
	    *total = COSTS_N_INSNS (2);
	    return true;
	  }
	else
	  {
	    /* Needs a multi-instruction sequence to materialize.  */
	    *total = COSTS_N_INSNS (4);
	    return true;
	  }

      /* Symbolic constants all require address materialization.  */
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
      case CONST_DOUBLE:
	{
	  *total = COSTS_N_INSNS (4);
	  return true;
	}

      case AND:
	{
	  /* Recognize 'nor' insn pattern.  */
	  if (GET_CODE (XEXP (x, 0)) == NOT
	      && GET_CODE (XEXP (x, 1)) == NOT)
	    {
	      *total = COSTS_N_INSNS (1);
	      return true;
	    }
	  return false;
	}

      case MULT:
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}
      case SIGN_EXTEND:
	{
	  *total = COSTS_N_INSNS (3);
	  return false;
	}
      case ZERO_EXTEND:
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}

      case ZERO_EXTRACT:
	/* BMX provides a single-insn extract.  */
	if (TARGET_HAS_BMX)
	  {
	    *total = COSTS_N_INSNS (1);
	    return true;
	  }
	return false;

      default:
	return false;
    }
}
1506
1507 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1508 static reg_class_t
1509 nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t regclass)
1510 {
1511 return regclass == NO_REGS ? GENERAL_REGS : regclass;
1512 }
1513
/* Emit a call to __tls_get_addr.  TI is the argument to this function.
   The result REG (the first return-value register) is returned.  */

/* Cached SYMBOL_REF for __tls_get_addr, created on first use.  */
static GTY(()) rtx nios2_tls_symbol;

static rtx
nios2_call_tls_get_addr (rtx ti)
{
  rtx arg = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
  rtx ret = gen_rtx_REG (Pmode, FIRST_RETVAL_REGNO);
  rtx fn;
  rtx_insn *insn;

  if (!nios2_tls_symbol)
    nios2_tls_symbol = init_one_libfunc ("__tls_get_addr");

  /* Pass TI in the first argument register and call indirectly
     through the libfunc symbol.  */
  emit_move_insn (arg, ti);
  fn = gen_rtx_MEM (QImode, nios2_tls_symbol);
  insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
  /* Mark the call const, and record the registers it uses so the
     argument/result moves are not deleted as dead.  */
  RTL_CONST_CALL_P (insn) = 1;
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);

  return ret;
}
1539
/* Return true for large offsets requiring hiadj/lo relocation pairs.
   UNSPEC identifies the relocation kind.  */
static bool
nios2_large_offset_p (int unspec)
{
  /* Only meaningful for unspecs that name a relocation.  */
  gcc_assert (nios2_unspec_reloc_name (unspec) != NULL);

  if (flag_pic == 2
      /* FIXME: TLS GOT offset relocations will eventually also get this
	 treatment, after binutils support for those are also completed.  */
      && (unspec == UNSPEC_PIC_SYM || unspec == UNSPEC_PIC_CALL_SYM))
    return true;

  /* 'gotoff' offsets are always hiadj/lo.  */
  if (unspec == UNSPEC_PIC_GOTOFF_SYM)
    return true;

  return false;
}
1558
1559 /* Return true for conforming unspec relocations. Also used in
1560 constraints.md and predicates.md. */
1561 bool
1562 nios2_unspec_reloc_p (rtx op)
1563 {
1564 return (GET_CODE (op) == CONST
1565 && GET_CODE (XEXP (op, 0)) == UNSPEC
1566 && ! nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1567 }
1568
1569 static bool
1570 nios2_large_unspec_reloc_p (rtx op)
1571 {
1572 return (GET_CODE (op) == CONST
1573 && GET_CODE (XEXP (op, 0)) == UNSPEC
1574 && nios2_large_offset_p (XINT (XEXP (op, 0), 1)));
1575 }
1576
/* Helper to generate unspec constant: wrap LOC in an UNSPEC of kind
   UNSPEC inside a CONST wrapper.  */
static rtx
nios2_unspec_offset (rtx loc, int unspec)
{
  return gen_rtx_CONST (Pmode, gen_rtx_UNSPEC (Pmode, gen_rtvec (1, loc),
					       unspec));
}
1584
/* Generate GOT pointer based address with large offset.  OFFSET is an
   unspec constant; TMP is a scratch register to hold it, or NULL to
   allocate a fresh pseudo.  */
static rtx
nios2_large_got_address (rtx offset, rtx tmp)
{
  if (!tmp)
    tmp = gen_reg_rtx (Pmode);
  emit_move_insn (tmp, offset);
  return gen_rtx_PLUS (Pmode, tmp, pic_offset_table_rtx);
}
1594
/* Generate a GOT pointer based address for LOC, using relocation
   kind UNSPEC.  Also marks the function as using the GOT pointer.  */
static rtx
nios2_got_address (rtx loc, int unspec)
{
  rtx offset = nios2_unspec_offset (loc, unspec);
  crtl->uses_pic_offset_table = 1;

  /* Large offsets must first be materialized in a register.  */
  if (nios2_large_offset_p (unspec))
    return force_reg (Pmode, nios2_large_got_address (offset, NULL_RTX));

  return gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
}
1607
/* Generate the code to access LOC, a thread local SYMBOL_REF.  The
   return value will be a valid address and move_operand (either a REG
   or a LO_SUM).  */
static rtx
nios2_legitimize_tls_address (rtx loc)
{
  rtx tmp, mem, tp;
  enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);

  switch (model)
    {
    case TLS_MODEL_GLOBAL_DYNAMIC:
      /* Pass the GOT-based GD descriptor to __tls_get_addr.  */
      tmp = gen_reg_rtx (Pmode);
      emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_GD));
      return nios2_call_tls_get_addr (tmp);

    case TLS_MODEL_LOCAL_DYNAMIC:
      /* Call __tls_get_addr for the module base, then add the
	 module-relative (LDO) offset of LOC.  */
      tmp = gen_reg_rtx (Pmode);
      emit_move_insn (tmp, nios2_got_address (loc, UNSPEC_ADD_TLS_LDM));
      return gen_rtx_PLUS (Pmode, nios2_call_tls_get_addr (tmp),
			   nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LDO));

    case TLS_MODEL_INITIAL_EXEC:
      /* Load the offset from the GOT and add the thread pointer.  */
      tmp = gen_reg_rtx (Pmode);
      mem = gen_const_mem (Pmode, nios2_got_address (loc, UNSPEC_LOAD_TLS_IE));
      emit_move_insn (tmp, mem);
      tp = gen_rtx_REG (Pmode, TP_REGNO);
      return gen_rtx_PLUS (Pmode, tp, tmp);

    case TLS_MODEL_LOCAL_EXEC:
      /* The LE offset is a link-time constant added directly to the
	 thread pointer.  */
      tp = gen_rtx_REG (Pmode, TP_REGNO);
      return gen_rtx_PLUS (Pmode, tp,
			   nios2_unspec_offset (loc, UNSPEC_ADD_TLS_LE));
    default:
      gcc_unreachable ();
    }
}
1645
1646 /* Divide Support
1647
1648 If -O3 is used, we want to output a table lookup for
1649 divides between small numbers (both num and den >= 0
1650 and < 0x10). The overhead of this method in the worst
1651 case is 40 bytes in the text section (10 insns) and
1652 256 bytes in the data section. Additional divides do
1653 not incur additional penalties in the data section.
1654
1655 Code speed is improved for small divides by about 5x
   when using this method in the worst case (~9 cycles
1657 vs ~45). And in the worst case divides not within the
1658 table are penalized by about 10% (~5 cycles vs ~45).
1659 However in the typical case the penalty is not as bad
1660 because doing the long divide in only 45 cycles is
1661 quite optimistic.
1662
1663 ??? would be nice to have some benchmarks other
1664 than Dhrystone to back this up.
1665
1666 This bit of expansion is to create this instruction
1667 sequence as rtl.
1668 or $8, $4, $5
1669 slli $9, $4, 4
1670 cmpgeui $3, $8, 16
1671 beq $3, $0, .L3
1672 or $10, $9, $5
1673 add $12, $11, divide_table
1674 ldbu $2, 0($12)
1675 br .L1
1676 .L3:
1677 call slow_div
1678 .L1:
1679 # continue here with result in $2
1680
1681 ??? Ideally I would like the libcall block to contain all
1682 of this code, but I don't know how to do that. What it
1683 means is that if the divide can be eliminated, it may not
1684 completely disappear.
1685
1686 ??? The __divsi3_table label should ideally be moved out
1687 of this block and into a global. If it is placed into the
1688 sdata section we can save even more cycles by doing things
1689 gp relative. */
/* Expand the division OPERANDS[0] = OPERANDS[1] / OPERANDS[2] as a
   fast table lookup for small operands with a libcall fallback; see
   the large comment above for the rationale and generated sequence.  */
void
nios2_emit_expensive_div (rtx *operands, machine_mode mode)
{
  rtx or_result, shift_left_result;
  rtx lookup_value;
  rtx_code_label *lab1, *lab3;
  rtx_insn *insns;
  rtx libfunc;
  rtx final_result;
  rtx_insn *tmp;
  rtx table;

  /* It may look a little generic, but only SImode is supported for now.  */
  gcc_assert (mode == SImode);
  libfunc = optab_libfunc (sdiv_optab, SImode);

  lab1 = gen_label_rtx ();
  lab3 = gen_label_rtx ();

  /* OR the operands together: if any bit above bit 3 is set (checked
     as unsigned), at least one operand is outside the table's 0..15
     range, including all negative values.  */
  or_result = expand_simple_binop (SImode, IOR,
				   operands[1], operands[2],
				   0, 0, OPTAB_LIB_WIDEN);

  emit_cmp_and_jump_insns (or_result, GEN_INT (15), GTU, 0,
			   GET_MODE (or_result), 0, lab3);
  JUMP_LABEL (get_last_insn ()) = lab3;

  /* Fast path: index the byte table with (num << 4) | den.  */
  shift_left_result = expand_simple_binop (SImode, ASHIFT,
					   operands[1], GEN_INT (4),
					   0, 0, OPTAB_LIB_WIDEN);

  lookup_value = expand_simple_binop (SImode, IOR,
				      shift_left_result, operands[2],
				      0, 0, OPTAB_LIB_WIDEN);
  table = gen_rtx_PLUS (SImode, lookup_value,
			gen_rtx_SYMBOL_REF (SImode, "__divsi3_table"));
  convert_move (operands[0], gen_rtx_MEM (QImode, table), 1);

  /* Skip over the slow path.  */
  tmp = emit_jump_insn (gen_jump (lab1));
  JUMP_LABEL (tmp) = lab1;
  emit_barrier ();

  /* Slow path: an ordinary division libcall, wrapped in a libcall
     block so the whole sequence can be optimized as a unit.  */
  emit_label (lab3);
  LABEL_NUSES (lab3) = 1;

  start_sequence ();
  final_result = emit_library_call_value (libfunc, NULL_RTX,
					  LCT_CONST, SImode, 2,
					  operands[1], SImode,
					  operands[2], SImode);

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], final_result,
		      gen_rtx_DIV (SImode, operands[1], operands[2]));

  emit_label (lab1);
  LABEL_NUSES (lab1) = 1;
}
1749
1750 \f
1751 /* Branches and compares. */
1752
1753 /* Return in *ALT_CODE and *ALT_OP, an alternate equivalent constant
1754 comparison, e.g. >= 1 into > 0. */
1755 static void
1756 nios2_alternate_compare_const (enum rtx_code code, rtx op,
1757 enum rtx_code *alt_code, rtx *alt_op,
1758 machine_mode mode)
1759 {
1760 gcc_assert (CONST_INT_P (op));
1761
1762 HOST_WIDE_INT opval = INTVAL (op);
1763 enum rtx_code scode = signed_condition (code);
1764 bool dec_p = (scode == LT || scode == GE);
1765
1766 if (code == EQ || code == NE)
1767 {
1768 *alt_code = code;
1769 *alt_op = op;
1770 return;
1771 }
1772
1773 *alt_op = (dec_p
1774 ? gen_int_mode (opval - 1, mode)
1775 : gen_int_mode (opval + 1, mode));
1776
1777 /* The required conversion between [>,>=] and [<,<=] is captured
1778 by a reverse + swap of condition codes. */
1779 *alt_code = reverse_condition (swap_condition (code));
1780
1781 {
1782 /* Test if the incremented/decremented value crosses the over/underflow
1783 boundary. Supposedly, such boundary cases should already be transformed
1784 into always-true/false or EQ conditions, so use an assertion here. */
1785 unsigned HOST_WIDE_INT alt_opval = INTVAL (*alt_op);
1786 if (code == scode)
1787 alt_opval ^= (1 << (GET_MODE_BITSIZE (mode) - 1));
1788 alt_opval &= GET_MODE_MASK (mode);
1789 gcc_assert (dec_p ? alt_opval != GET_MODE_MASK (mode) : alt_opval != 0);
1790 }
1791 }
1792
1793 /* Return true if the constant comparison is supported by nios2. */
1794 static bool
1795 nios2_valid_compare_const_p (enum rtx_code code, rtx op)
1796 {
1797 gcc_assert (CONST_INT_P (op));
1798 switch (code)
1799 {
1800 case EQ: case NE: case GE: case LT:
1801 return SMALL_INT (INTVAL (op));
1802 case GEU: case LTU:
1803 return SMALL_INT_UNSIGNED (INTVAL (op));
1804 default:
1805 return false;
1806 }
1807 }
1808
/* Checks if the FPU comparison in *CMP, *OP1, and *OP2 can be supported in
   the current configuration.  Perform modifications if MODIFY_P is true.
   Returns true if FPU compare can be done.  */

bool
nios2_validate_fpu_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2,
			    bool modify_p)
{
  bool rev_p = false;
  enum rtx_code code = GET_CODE (*cmp);

  /* If this condition has no enabled custom compare insn, try the
     swapped condition (e.g. a < b as b > a) instead.  */
  if (!nios2_fpu_compare_enabled (code, mode))
    {
      code = swap_condition (code);
      if (nios2_fpu_compare_enabled (code, mode))
	rev_p = true;
      else
	return false;
    }

  if (modify_p)
    {
      if (rev_p)
	{
	  /* Exchange the operands to match the swapped condition.  */
	  rtx tmp = *op1;
	  *op1 = *op2;
	  *op2 = tmp;
	}
      /* Both operands must be in registers.  */
      *op1 = force_reg (mode, *op1);
      *op2 = force_reg (mode, *op2);
      *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
    }
  return true;
}
1843
/* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
   nios2 supported form.  Returns true if success.  */
bool
nios2_validate_compare (machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
{
  enum rtx_code code = GET_CODE (*cmp);
  enum rtx_code alt_code;
  rtx alt_op2;

  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
    return nios2_validate_fpu_compare (mode, cmp, op1, op2, true);

  if (CONST_INT_P (*op2) && *op2 != const0_rtx)
    {
      /* Create alternate constant compare.  */
      nios2_alternate_compare_const (code, *op2, &alt_code, &alt_op2, mode);

      /* If alternate op2 is zero (0), we can use it directly, possibly
	 swapping the compare code.  */
      if (alt_op2 == const0_rtx)
	{
	  code = alt_code;
	  *op2 = alt_op2;
	  goto check_rebuild_cmp;
	}

      /* Check if either constant compare can be used.  */
      if (nios2_valid_compare_const_p (code, *op2))
	return true;
      else if (nios2_valid_compare_const_p (alt_code, alt_op2))
	{
	  code = alt_code;
	  *op2 = alt_op2;
	  goto rebuild_cmp;
	}

      /* We have to force op2 into a register now.  Try to pick one
	 with a lower cost.  */
      if (! nios2_simple_const_p (*op2)
	  && nios2_simple_const_p (alt_op2))
	{
	  code = alt_code;
	  *op2 = alt_op2;
	}
      *op2 = force_reg (mode, *op2);
    }
  else if (!reg_or_0_operand (*op2, mode))
    *op2 = force_reg (mode, *op2);

 check_rebuild_cmp:
  /* GT/GTU/LE/LEU are not directly available; rewrite them by
     swapping the operands and the condition.  */
  if (code == GT || code == GTU || code == LE || code == LEU)
    {
      rtx t = *op1; *op1 = *op2; *op2 = t;
      code = swap_condition (code);
    }
 rebuild_cmp:
  *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
  return true;
}
1903
1904
1905 /* Addressing Modes. */
1906
1907 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1908 static bool
1909 nios2_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1910 {
1911 rtx base, offset;
1912 split_const (x, &base, &offset);
1913 return GET_CODE (base) != SYMBOL_REF || !SYMBOL_REF_TLS_MODEL (base);
1914 }
1915
1916 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
1917 static bool
1918 nios2_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1919 {
1920 return nios2_legitimate_constant_p (mode, x) == false;
1921 }
1922
/* Return true if register REGNO is a valid base register.
   STRICT_P is true if REG_OK_STRICT is in effect.  */

bool
nios2_regno_ok_for_base_p (int regno, bool strict_p)
{
  if (!HARD_REGISTER_NUM_P (regno))
    {
      /* Any pseudo is acceptable when not being strict.  */
      if (!strict_p)
	return true;

      /* Under strict checking, a pseudo is only OK once it has been
	 assigned a hard register.  */
      if (!reg_renumber)
	return false;

      regno = reg_renumber[regno];
    }

  /* The fake registers will be eliminated to either the stack or
     hard frame pointer, both of which are usually valid base registers.
     Reload deals with the cases where the eliminated form isn't valid.  */
  return (GP_REG_P (regno)
	  || regno == FRAME_POINTER_REGNUM
	  || regno == ARG_POINTER_REGNUM);
}
1947
/* Return true if OFFSET is permitted in a load/store address expression.
   Normally any 16-bit value is permitted, but on R2 if we may be emitting
   the IO forms of these instructions we must restrict the offset to fit
   in a 12-bit field instead.  */

static bool
nios2_valid_addr_offset_p (rtx offset)
{
  return (CONST_INT_P (offset)
	  && ((TARGET_ARCH_R2 && (TARGET_BYPASS_CACHE
				  || TARGET_BYPASS_CACHE_VOLATILE))
	      /* R2 io-form loads/stores only have a 12-bit offset.  */
	      ? SMALL_INT12 (INTVAL (offset))
	      : SMALL_INT (INTVAL (offset))));
}
1962
/* Return true if the address expression formed by BASE + OFFSET is
   valid.  OFFSET may be NULL_RTX for a plain register address.
   STRICT_P is true if REG_OK_STRICT is in effect.  */
static bool
nios2_valid_addr_expr_p (rtx base, rtx offset, bool strict_p)
{
  /* In non-strict mode, also accept a SUBREG of a register as the
     base.  */
  if (!strict_p && GET_CODE (base) == SUBREG)
    base = SUBREG_REG (base);
  return (REG_P (base)
	  && nios2_regno_ok_for_base_p (REGNO (base), strict_p)
	  && (offset == NULL_RTX
	      || nios2_valid_addr_offset_p (offset)
	      || nios2_unspec_reloc_p (offset)));
}
1976
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  The supported forms are
   GP-relative symbolic addresses, register indirect, and register
   plus displacement.  */
static bool
nios2_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			    rtx operand, bool strict_p)
{
  switch (GET_CODE (operand))
    {
      /* Direct.  */
    case SYMBOL_REF:
      /* TLS symbols always need legitimization first.  */
      if (SYMBOL_REF_TLS_MODEL (operand))
	return false;

      /* Else, fall through.  */
    case CONST:
      /* Symbolic addresses are only directly usable when they are
	 addressable relative to the global pointer.  */
      if (gprel_constant_p (operand))
	return true;

      /* Else, fall through.  */
    case LABEL_REF:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

      /* Register indirect.  */
    case REG:
      return nios2_regno_ok_for_base_p (REGNO (operand), strict_p);

      /* Register indirect with displacement.  */
    case PLUS:
      {
	rtx op0 = XEXP (operand, 0);
	rtx op1 = XEXP (operand, 1);

	/* Either operand may serve as the base register.  */
	return (nios2_valid_addr_expr_p (op0, op1, strict_p)
		|| nios2_valid_addr_expr_p (op1, op0, strict_p));
      }

    default:
      break;
    }
  return false;
}
2019
/* Return true if SECTION is a small section name: ".sbss" or
   ".sdata", either exactly or followed by a dotted suffix such as
   ".sbss.foo".  */
static bool
nios2_small_section_name_p (const char *section)
{
  static const char *const small_names[] = { ".sbss", ".sdata" };
  unsigned int i;

  for (i = 0; i < sizeof (small_names) / sizeof (small_names[0]); i++)
    {
      size_t len = strlen (small_names[i]);
      /* Accept the bare name or the name followed by a '.'.  */
      if (strncmp (section, small_names[i], len) == 0
	  && (section[len] == '\0' || section[len] == '.'))
	return true;
    }
  return false;
}
2029
2030 /* Return true if EXP should be placed in the small data section. */
2031 static bool
2032 nios2_in_small_data_p (const_tree exp)
2033 {
2034 /* We want to merge strings, so we never consider them small data. */
2035 if (TREE_CODE (exp) == STRING_CST)
2036 return false;
2037
2038 if (TREE_CODE (exp) == VAR_DECL)
2039 {
2040 if (DECL_SECTION_NAME (exp))
2041 {
2042 const char *section = DECL_SECTION_NAME (exp);
2043 if (nios2_small_section_name_p (section))
2044 return true;
2045 }
2046 else
2047 {
2048 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
2049
2050 /* If this is an incomplete type with size 0, then we can't put it
2051 in sdata because it might be too big when completed. */
2052 if (size > 0
2053 && (unsigned HOST_WIDE_INT) size <= nios2_section_threshold)
2054 return true;
2055 }
2056 }
2057
2058 return false;
2059 }
2060
/* Return true if symbol is in small data section, i.e. whether a
   GP-relative access may be used for SYM.  SYM must be a SYMBOL_REF.  */

static bool
nios2_symbol_ref_in_small_data_p (rtx sym)
{
  tree decl;

  gcc_assert (GET_CODE (sym) == SYMBOL_REF);
  decl = SYMBOL_REF_DECL (sym);

  /* TLS variables are not accessed through the GP.  */
  if (SYMBOL_REF_TLS_MODEL (sym) != 0)
    return false;

  /* On Nios II R2, there is no GP-relative relocation that can be
     used with "io" instructions.  So, if we are implicitly generating
     those instructions, we cannot emit GP-relative accesses.  */
  if (TARGET_ARCH_R2
      && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
    return false;

  /* If the user has explicitly placed the symbol in a small data section
     via an attribute, generate gp-relative addressing even if the symbol
     is external, weak, or larger than we'd automatically put in the
     small data section.  OTOH, if the symbol is located in some
     non-small-data section, we can't use gp-relative accesses on it
     unless the user has requested gpopt_data or gpopt_all.  */

  switch (nios2_gpopt_option)
    {
    case gpopt_none:
      /* Don't generate a gp-relative addressing mode if that's been
	 disabled.  */
      return false;

    case gpopt_local:
      /* Use GP-relative addressing for small data symbols that are
	 not external or weak or uninitialized common, plus any symbols
	 that have explicitly been placed in a small data section.  */
      if (decl && DECL_SECTION_NAME (decl))
	return nios2_small_section_name_p (DECL_SECTION_NAME (decl));
      return (SYMBOL_REF_SMALL_P (sym)
	      && !SYMBOL_REF_EXTERNAL_P (sym)
	      && !(decl && DECL_WEAK (decl))
	      && !(decl && DECL_COMMON (decl)
		   && (DECL_INITIAL (decl) == NULL
		       || (!in_lto_p
			   && DECL_INITIAL (decl) == error_mark_node))));

    case gpopt_global:
      /* Use GP-relative addressing for small data symbols, even if
	 they are external or weak.  Note that SYMBOL_REF_SMALL_P
	 is also true of symbols that have explicitly been placed
	 in a small data section.  */
      return SYMBOL_REF_SMALL_P (sym);

    case gpopt_data:
      /* Use GP-relative addressing for all data symbols regardless
	 of the object size, but not for code symbols.  This option
	 is equivalent to the user asserting that the entire data
	 section is accessible from the GP.  */
      return !SYMBOL_REF_FUNCTION_P (sym);

    case gpopt_all:
      /* Use GP-relative addressing for everything, including code.
	 Effectively, the user has asserted that the entire program
	 fits within the 64K range of the GP offset.  */
      return true;

    default:
      /* We shouldn't get here.  */
      return false;
    }
}
2135
2136 /* Implement TARGET_SECTION_TYPE_FLAGS. */
2137
2138 static unsigned int
2139 nios2_section_type_flags (tree decl, const char *name, int reloc)
2140 {
2141 unsigned int flags;
2142
2143 flags = default_section_type_flags (decl, name, reloc);
2144
2145 if (nios2_small_section_name_p (name))
2146 flags |= SECTION_SMALL;
2147
2148 return flags;
2149 }
2150
2151 /* Return true if SYMBOL_REF X binds locally. */
2152
2153 static bool
2154 nios2_symbol_binds_local_p (const_rtx x)
2155 {
2156 return (SYMBOL_REF_DECL (x)
2157 ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
2158 : SYMBOL_REF_LOCAL_P (x));
2159 }
2160
2161 /* Position independent code related. */
2162
/* Emit code to load the PIC register.  Uses the fixed temporary register
   as scratch for the GOT base computation, then adds it into
   pic_offset_table_rtx.  */
static void
nios2_load_pic_register (void)
{
  rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);

  emit_insn (gen_load_got_register (pic_offset_table_rtx, tmp));
  emit_insn (gen_add3_insn (pic_offset_table_rtx, pic_offset_table_rtx, tmp));
}
2172
2173 /* Generate a PIC address as a MEM rtx. */
2174 static rtx
2175 nios2_load_pic_address (rtx sym, int unspec, rtx tmp)
2176 {
2177 if (flag_pic == 2
2178 && GET_CODE (sym) == SYMBOL_REF
2179 && nios2_symbol_binds_local_p (sym))
2180 /* Under -fPIC, generate a GOTOFF address for local symbols. */
2181 {
2182 rtx offset = nios2_unspec_offset (sym, UNSPEC_PIC_GOTOFF_SYM);
2183 crtl->uses_pic_offset_table = 1;
2184 return nios2_large_got_address (offset, tmp);
2185 }
2186
2187 return gen_const_mem (Pmode, nios2_got_address (sym, unspec));
2188 }
2189
2190 /* Nonzero if the constant value X is a legitimate general operand
2191 when generating PIC code. It is given that flag_pic is on and
2192 that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
2193 bool
2194 nios2_legitimate_pic_operand_p (rtx x)
2195 {
2196 if (nios2_large_unspec_reloc_p (x))
2197 return true;
2198
2199 return ! (GET_CODE (x) == SYMBOL_REF
2200 || GET_CODE (x) == LABEL_REF || GET_CODE (x) == CONST);
2201 }
2202
2203 /* Return TRUE if X is a thread-local symbol. */
2204 static bool
2205 nios2_tls_symbol_p (rtx x)
2206 {
2207 return (targetm.have_tls && GET_CODE (x) == SYMBOL_REF
2208 && SYMBOL_REF_TLS_MODEL (x) != 0);
2209 }
2210
/* Legitimize addresses that are CONSTANT_P expressions.  Splits ADDR
   into a symbolic base plus constant offset, rewrites the base for TLS
   or PIC as needed, and re-applies the offset.  Returns ADDR unchanged
   when no rewriting is required.  */
static rtx
nios2_legitimize_constant_address (rtx addr)
{
  rtx base, offset;
  split_const (addr, &base, &offset);

  if (nios2_tls_symbol_p (base))
    base = nios2_legitimize_tls_address (base);
  else if (flag_pic)
    base = nios2_load_pic_address (base, UNSPEC_PIC_SYM, NULL_RTX);
  else
    return addr;

  if (offset != const0_rtx)
    {
      gcc_assert (can_create_pseudo_p ());
      /* Offsets that do not fit a 16-bit immediate must be loaded into
	 a register first.  */
      return gen_rtx_PLUS (Pmode, force_reg (Pmode, base),
			   (CONST_INT_P (offset)
			    ? (SMALL_INT (INTVAL (offset))
			       ? offset : force_reg (Pmode, offset))
			    : offset));
    }
  return base;
}
2236
/* Implement TARGET_LEGITIMIZE_ADDRESS.  Constants are handled by
   nios2_legitimize_constant_address; the remaining case folds a constant
   addend into a small-offset TLS LE unspec.  */
static rtx
nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED)
{
  if (CONSTANT_P (x))
    return nios2_legitimize_constant_address (x);

  /* For the TLS LE (Local Exec) model, the compiler may try to
     combine constant offsets with unspec relocs, creating address RTXs
     looking like this:
     (plus:SI (reg:SI 23 r23)
	      (const:SI
		(plus:SI
		  (unspec:SI [(symbol_ref:SI ("var"))] UNSPEC_ADD_TLS_LE)
		  (const_int 48 [0x30]))))

     This usually happens when 'var' is a thread-local struct variable,
     and access of a field in var causes the addend.

     We typically want this combining, so transform the above into this
     form, which is allowed:
     (plus:SI (reg:SI 23 r23)
	      (const:SI
		(unspec:SI
		  [(const:SI
		     (plus:SI (symbol_ref:SI ("var"))
			      (const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))

     Which will be output as '%tls_le(var+48)(r23)' in assembly.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST)
    {
      rtx unspec, offset;
      split_const (XEXP (x, 1), &unspec, &offset);
      /* Only small-offset unspecs can absorb the addend this way.  */
      if (GET_CODE (unspec) == UNSPEC
	  && !nios2_large_offset_p (XINT (unspec, 1))
	  && offset != const0_rtx)
	{
	  rtx reg = force_reg (Pmode, XEXP (x, 0));
	  /* Copy before editing: the original unspec may be shared.  */
	  unspec = copy_rtx (unspec);
	  XVECEXP (unspec, 0, 0)
	    = plus_constant (Pmode, XVECEXP (unspec, 0, 0), INTVAL (offset));
	  x = gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
	}
    }

  return x;
}
2286
/* Undo address legitimization: strip PIC/TLS relocation unspec wrappers
   so that X refers to the original constant again (the
   TARGET_DELEGITIMIZE_ADDRESS hook, judging by its name — used for
   debug output and dumps).  */
static rtx
nios2_delegitimize_address (rtx x)
{
  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (x, 0), 1))
	{
	case UNSPEC_PIC_SYM:
	case UNSPEC_PIC_CALL_SYM:
	case UNSPEC_PIC_GOTOFF_SYM:
	case UNSPEC_ADD_TLS_GD:
	case UNSPEC_ADD_TLS_LDM:
	case UNSPEC_LOAD_TLS_IE:
	case UNSPEC_ADD_TLS_LE:
	  /* The unspec's sole operand is the wrapped constant.  */
	  x = XVECEXP (XEXP (x, 0), 0, 0);
	  gcc_assert (CONSTANT_P (x));
	  break;
	}
    }
  return x;
}
2310
/* Main expander function for RTL moves.  Returns true if the move was
   fully expanded here; returns false (possibly after rewriting
   operands[]) to let the caller emit a simple move pattern.  */
bool
nios2_emit_move_sequence (rtx *operands, machine_mode mode)
{
  rtx to = operands[0];
  rtx from = operands[1];

  /* Memory-to-memory moves are not supported; force the source into a
     register first.  */
  if (!register_operand (to, mode) && !reg_or_0_operand (from, mode))
    {
      gcc_assert (can_create_pseudo_p ());
      from = copy_to_mode_reg (mode, from);
    }

  if (CONSTANT_P (from))
    {
      if (CONST_INT_P (from))
	{
	  /* Integers needing more than one instruction are built as
	     high part + signed low adjustment.  */
	  if (!SMALL_INT (INTVAL (from))
	      && !SMALL_INT_UNSIGNED (INTVAL (from))
	      && !UPPER16_INT (INTVAL (from)))
	    {
	      /* Round so that the low 16 bits act as a signed addend.  */
	      HOST_WIDE_INT high = (INTVAL (from) + 0x8000) & ~0xffff;
	      HOST_WIDE_INT low = INTVAL (from) & 0xffff;
	      emit_move_insn (to, gen_int_mode (high, SImode));
	      emit_insn (gen_add2_insn (to, gen_int_mode (low, HImode)));
	      set_unique_reg_note (get_last_insn (), REG_EQUAL,
				   copy_rtx (from));
	      return true;
	    }
	}
      else if (!gprel_constant_p (from))
	{
	  /* Symbolic constants: legitimize for PIC/TLS, then emit a
	     HIGH/LO_SUM pair if still constant.  */
	  if (!nios2_large_unspec_reloc_p (from))
	    from = nios2_legitimize_constant_address (from);
	  if (CONSTANT_P (from))
	    {
	      emit_insn (gen_rtx_SET (to,
				      gen_rtx_HIGH (Pmode, copy_rtx (from))));
	      emit_insn (gen_rtx_SET (to, gen_rtx_LO_SUM (Pmode, to, from)));
	      set_unique_reg_note (get_last_insn (), REG_EQUAL,
				   copy_rtx (operands[1]));
	      return true;
	    }
	}
    }

  /* Let the caller finish with the (possibly rewritten) operands.  */
  operands[0] = to;
  operands[1] = from;
  return false;
}
2361
/* The function with address *ADDR is being called.  If the address
   needs to be loaded from the GOT, emit the instruction to do so and
   update *ADDR to point to the rtx for the loaded value.
   If REG != NULL_RTX, it is used as the target/scratch register in the
   GOT address calculation.  */
void
nios2_adjust_call_address (rtx *call_op, rtx reg)
{
  /* Step inside a MEM wrapper to the bare address.  */
  if (MEM_P (*call_op))
    call_op = &XEXP (*call_op, 0);

  rtx addr = *call_op;
  if (flag_pic && CONSTANT_P (addr))
    {
      rtx tmp = reg ? reg : NULL_RTX;
      if (!reg)
	reg = gen_reg_rtx (Pmode);
      addr = nios2_load_pic_address (addr, UNSPEC_PIC_CALL_SYM, tmp);
      emit_insn (gen_rtx_SET (reg, addr));
      *call_op = reg;
    }
}
2384
2385 \f
2386 /* Output assembly language related definitions. */
2387
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P.  Only '.' and '!' are
   recognized punctuation codes (16-bit instruction suffixes).  */
static bool
nios2_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '.':
    case '!':
      return true;
    default:
      return false;
    }
}
2394

/* Print the operand OP to file stream FILE modified by LETTER.
   LETTER can be one of:

     i: print i/hi/ui suffixes (used for mov instruction variants),
	when OP is the appropriate immediate operand.

     u: like 'i', except without "ui" suffix case (used for cmpgeu/cmpltu)

     o: print "io" if OP needs volatile access (due to TARGET_BYPASS_CACHE
	or TARGET_BYPASS_CACHE_VOLATILE).

     x: print i/hi/ci/chi suffixes for the and instruction,
	when OP is the appropriate immediate operand.

     z: prints the third register immediate operand in assembly
	instructions.  Outputs const0_rtx as the 'zero' register
	instead of '0'.

     y: same as 'z', but specifically for logical instructions,
	where the processing of immediates is slightly different.

     H: for %hiadj
     L: for %lo
     D: for the upper 32-bits of a 64-bit double value
     R: prints reverse condition.
     A: prints (reg) operand for ld[s]ex and st[s]ex.

     .: print .n suffix for 16-bit instructions.
     !: print r.n suffix for 16-bit instructions.  Used for jmpr.n.
*/
static void
nios2_print_operand (FILE *file, rtx op, int letter)
{

  /* First take care of the format letters that just insert a string
     into the output stream.  */
  switch (letter)
    {
    case '.':
      /* A 2-byte attr length identifies a 16-bit (CDX) encoding.  */
      if (current_output_insn && get_attr_length (current_output_insn) == 2)
	fprintf (file, ".n");
      return;

    case '!':
      if (current_output_insn && get_attr_length (current_output_insn) == 2)
	fprintf (file, "r.n");
      return;

    case 'x':
      if (CONST_INT_P (op))
	{
	  HOST_WIDE_INT val = INTVAL (op);
	  HOST_WIDE_INT low = val & 0xffff;
	  HOST_WIDE_INT high = (val >> 16) & 0xffff;

	  if (val != 0)
	    {
	      if (high != 0)
		{
		  if (low != 0)
		    {
		      /* Both halves nonzero: only the R2 andci/andchi
			 complemented forms can encode this.  */
		      gcc_assert (TARGET_ARCH_R2);
		      if (high == 0xffff)
			fprintf (file, "c");
		      else if (low == 0xffff)
			fprintf (file, "ch");
		      else
			gcc_unreachable ();
		    }
		  else
		    fprintf (file, "h");
		}
	      fprintf (file, "i");
	    }
	}
      return;

    case 'u':
    case 'i':
      if (CONST_INT_P (op))
	{
	  HOST_WIDE_INT val = INTVAL (op);
	  HOST_WIDE_INT low = val & 0xffff;
	  HOST_WIDE_INT high = (val >> 16) & 0xffff;
	  if (val != 0)
	    {
	      if (low == 0 && high != 0)
		fprintf (file, "h");
	      /* Low half with bit 15 set needs the unsigned form, except
		 for 'u' which has no such variant.  */
	      else if (high == 0 && (low & 0x8000) != 0 && letter != 'u')
		fprintf (file, "u");
	    }
	}
      if (CONSTANT_P (op) && op != const0_rtx)
	fprintf (file, "i");
      return;

    case 'o':
      if (GET_CODE (op) == MEM
	  && ((MEM_VOLATILE_P (op) && TARGET_BYPASS_CACHE_VOLATILE)
	      || TARGET_BYPASS_CACHE))
	{
	  /* IO instructions have no 16-bit encoding.  */
	  gcc_assert (current_output_insn
		      && get_attr_length (current_output_insn) == 4);
	  fprintf (file, "io");
	}
      return;

    default:
      break;
    }

  /* Handle comparison operator names.  */
  if (comparison_operator (op, VOIDmode))
    {
      enum rtx_code cond = GET_CODE (op);
      if (letter == 0)
	{
	  fprintf (file, "%s", GET_RTX_NAME (cond));
	  return;
	}
      if (letter == 'R')
	{
	  fprintf (file, "%s", GET_RTX_NAME (reverse_condition (cond)));
	  return;
	}
    }

  /* Now handle the cases where we actually need to format an operand.  */
  switch (GET_CODE (op))
    {
    case REG:
      if (letter == 0 || letter == 'z' || letter == 'y')
	{
	  fprintf (file, "%s", reg_names[REGNO (op)]);
	  return;
	}
      else if (letter == 'D')
	{
	  /* High word of a register pair.  */
	  fprintf (file, "%s", reg_names[REGNO (op)+1]);
	  return;
	}
      break;

    case CONST_INT:
      {
	rtx int_rtx = op;
	HOST_WIDE_INT val = INTVAL (int_rtx);
	HOST_WIDE_INT low = val & 0xffff;
	HOST_WIDE_INT high = (val >> 16) & 0xffff;

	if (letter == 'y')
	  {
	    if (val == 0)
	      fprintf (file, "zero");
	    else
	      {
		if (high != 0)
		  {
		    if (low != 0)
		      {
			gcc_assert (TARGET_ARCH_R2);
			if (high == 0xffff)
			  /* andci.  */
			  int_rtx = gen_int_mode (low, SImode);
			else if (low == 0xffff)
			  /* andchi.  */
			  int_rtx = gen_int_mode (high, SImode);
			else
			  gcc_unreachable ();
		      }
		    else
		      /* andhi.  */
		      int_rtx = gen_int_mode (high, SImode);
		  }
		else
		  /* andi.  */
		  int_rtx = gen_int_mode (low, SImode);
		output_addr_const (file, int_rtx);
	      }
	    return;
	  }
	else if (letter == 'z')
	  {
	    if (val == 0)
	      fprintf (file, "zero");
	    else
	      {
		if (low == 0 && high != 0)
		  int_rtx = gen_int_mode (high, SImode);
		else if (low != 0)
		  {
		    gcc_assert (high == 0 || high == 0xffff);
		    int_rtx = gen_int_mode (low, high == 0 ? SImode : HImode);
		  }
		else
		  gcc_unreachable ();
		output_addr_const (file, int_rtx);
	      }
	    return;
	  }
      }

      /* Else, fall through.  */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      if (letter == 0 || letter == 'z')
	{
	  output_addr_const (file, op);
	  return;
	}
      else if (letter == 'H' || letter == 'L')
	{
	  /* %hiadj(...) / %lo(...), possibly wrapping a named
	     relocation for large-offset unspecs.  */
	  fprintf (file, "%%");
	  if (GET_CODE (op) == CONST
	      && GET_CODE (XEXP (op, 0)) == UNSPEC)
	    {
	      rtx unspec = XEXP (op, 0);
	      int unspec_reloc = XINT (unspec, 1);
	      gcc_assert (nios2_large_offset_p (unspec_reloc));
	      fprintf (file, "%s_", nios2_unspec_reloc_name (unspec_reloc));
	      op = XVECEXP (unspec, 0, 0);
	    }
	  fprintf (file, letter == 'H' ? "hiadj(" : "lo(");
	  output_addr_const (file, op);
	  fprintf (file, ")");
	  return;
	}
      break;

    case SUBREG:
    case MEM:
      if (letter == 'A')
	{
	  /* Address of '(reg)' form, with no index.  */
	  fprintf (file, "(%s)", reg_names[REGNO (XEXP (op, 0))]);
	  return;
	}
      if (letter == 0)
	{
	  output_address (VOIDmode, op);
	  return;
	}
      break;

    case CODE_LABEL:
      if (letter == 0)
	{
	  output_addr_const (file, op);
	  return;
	}
      break;

    default:
      break;
    }

  output_operand_lossage ("Unsupported operand for code '%c'", letter);
  gcc_unreachable ();
}
2658
2659 /* Return true if this is a GP-relative accessible reference. */
2660 bool
2661 gprel_constant_p (rtx op)
2662 {
2663 if (GET_CODE (op) == SYMBOL_REF
2664 && nios2_symbol_ref_in_small_data_p (op))
2665 return true;
2666 else if (GET_CODE (op) == CONST
2667 && GET_CODE (XEXP (op, 0)) == PLUS)
2668 return gprel_constant_p (XEXP (XEXP (op, 0), 0));
2669
2670 return false;
2671 }
2672
/* Return the name string for a supported unspec reloc offset, or NULL
   if UNSPEC has no printable relocation name.  The names correspond to
   the assembler's %reloc(...) operators.  */
static const char *
nios2_unspec_reloc_name (int unspec)
{
  switch (unspec)
    {
    case UNSPEC_PIC_SYM:
      return "got";
    case UNSPEC_PIC_CALL_SYM:
      return "call";
    case UNSPEC_PIC_GOTOFF_SYM:
      return "gotoff";
    case UNSPEC_LOAD_TLS_IE:
      return "tls_ie";
    case UNSPEC_ADD_TLS_LE:
      return "tls_le";
    case UNSPEC_ADD_TLS_GD:
      return "tls_gd";
    case UNSPEC_ADD_TLS_LDM:
      return "tls_ldm";
    case UNSPEC_ADD_TLS_LDO:
      return "tls_ldo";
    default:
      return NULL;
    }
}
2699
2700 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
2701 static bool
2702 nios2_output_addr_const_extra (FILE *file, rtx op)
2703 {
2704 const char *name;
2705 gcc_assert (GET_CODE (op) == UNSPEC);
2706
2707 /* Support for printing out const unspec relocations. */
2708 name = nios2_unspec_reloc_name (XINT (op, 1));
2709 if (name)
2710 {
2711 fprintf (file, "%%%s(", name);
2712 output_addr_const (file, XVECEXP (op, 0, 0));
2713 fprintf (file, ")");
2714 return true;
2715 }
2716 return false;
2717 }
2718
/* Implement TARGET_PRINT_OPERAND_ADDRESS.  Addresses are printed in the
   assembler's "offset(basereg)" form; GP-relative constants use the
   %gprel(...) operator with the GP register as base.  */
static void
nios2_print_operand_address (FILE *file, machine_mode mode, rtx op)
{
  switch (GET_CODE (op))
    {
    case CONST:
    case CONST_INT:
    case LABEL_REF:
    case CONST_DOUBLE:
    case SYMBOL_REF:
      if (gprel_constant_p (op))
	{
	  fprintf (file, "%%gprel(");
	  output_addr_const (file, op);
	  fprintf (file, ")(%s)", reg_names[GP_REGNO]);
	  return;
	}

      break;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* Either operand may be the base register.  */
	if (REG_P (op0) && CONSTANT_P (op1))
	  {
	    output_addr_const (file, op1);
	    fprintf (file, "(%s)", reg_names[REGNO (op0)]);
	    return;
	  }
	else if (REG_P (op1) && CONSTANT_P (op0))
	  {
	    output_addr_const (file, op0);
	    fprintf (file, "(%s)", reg_names[REGNO (op1)]);
	    return;
	  }
      }
      break;

    case REG:
      /* Plain register indirect: zero displacement.  */
      fprintf (file, "0(%s)", reg_names[REGNO (op)]);
      return;

    case MEM:
      {
	/* Print the address the MEM refers to.  */
	rtx base = XEXP (op, 0);
	nios2_print_operand_address (file, mode, base);
	return;
      }
    default:
      break;
    }

  fprintf (stderr, "Missing way to print address\n");
  debug_rtx (op);
  gcc_unreachable ();
}
2778
/* Implement TARGET_ASM_OUTPUT_DWARF_DTPREL.  Emit a 4-byte DTP-relative
   reference to X using the %tls_ldo relocation; only SIZE == 4 is
   supported.  */
static void
nios2_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  gcc_assert (size == 4);
  fprintf (file, "\t.4byte\t%%tls_ldo(");
  output_addr_const (file, x);
  fprintf (file, ")");
}
2788
/* Implement TARGET_ASM_FILE_END.  */

static void
nios2_asm_file_end (void)
{
  /* The Nios II Linux stack is mapped non-executable by default, so add a
     .note.GNU-stack section for switching to executable stacks only when
     trampolines are generated.  */
  if (TARGET_LINUX_ABI && trampolines_created)
    file_end_indicate_exec_stack ();
}
2800
/* Implement TARGET_ASM_FUNCTION_PROLOGUE.  Emits no code; only dumps the
   computed frame layout as asm comments when verbose assembly output is
   requested.  */
static void
nios2_asm_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  if (flag_verbose_asm || flag_debug_asm)
    {
      nios2_compute_frame_layout ();
      nios2_dump_frame_layout (file);
    }
}
2811
2812 /* Emit assembly of custom FPU instructions. */
2813 const char *
2814 nios2_fpu_insn_asm (enum n2fpu_code code)
2815 {
2816 static char buf[256];
2817 const char *op1, *op2, *op3;
2818 int ln = 256, n = 0;
2819
2820 int N = N2FPU_N (code);
2821 int num_operands = N2FPU (code).num_operands;
2822 const char *insn_name = N2FPU_NAME (code);
2823 tree ftype = nios2_ftype (N2FPU_FTCODE (code));
2824 machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
2825 machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
2826
2827 /* Prepare X register for DF input operands. */
2828 if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
2829 n = snprintf (buf, ln, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
2830 N2FPU_N (n2fpu_fwrx));
2831
2832 if (src_mode == SFmode)
2833 {
2834 if (dst_mode == VOIDmode)
2835 {
2836 /* The fwry case. */
2837 op1 = op3 = "zero";
2838 op2 = "%0";
2839 num_operands -= 1;
2840 }
2841 else
2842 {
2843 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2844 op2 = "%1";
2845 op3 = (num_operands == 2 ? "zero" : "%2");
2846 }
2847 }
2848 else if (src_mode == DFmode)
2849 {
2850 if (dst_mode == VOIDmode)
2851 {
2852 /* The fwrx case. */
2853 op1 = "zero";
2854 op2 = "%0";
2855 op3 = "%D0";
2856 num_operands -= 1;
2857 }
2858 else
2859 {
2860 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2861 op2 = (num_operands == 2 ? "%1" : "%2");
2862 op3 = (num_operands == 2 ? "%D1" : "%D2");
2863 }
2864 }
2865 else if (src_mode == VOIDmode)
2866 {
2867 /* frdxlo, frdxhi, frdy cases. */
2868 gcc_assert (dst_mode == SFmode);
2869 op1 = "%0";
2870 op2 = op3 = "zero";
2871 }
2872 else if (src_mode == SImode)
2873 {
2874 /* Conversion operators. */
2875 gcc_assert (num_operands == 2);
2876 op1 = (dst_mode == DFmode ? "%D0" : "%0");
2877 op2 = "%1";
2878 op3 = "zero";
2879 }
2880 else
2881 gcc_unreachable ();
2882
2883 /* Main instruction string. */
2884 n += snprintf (buf + n, ln - n, "custom\t%d, %s, %s, %s # %s %%0%s%s",
2885 N, op1, op2, op3, insn_name,
2886 (num_operands >= 2 ? ", %1" : ""),
2887 (num_operands == 3 ? ", %2" : ""));
2888
2889 /* Extraction of Y register for DF results. */
2890 if (dst_mode == DFmode)
2891 snprintf (buf + n, ln - n, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
2892 N2FPU_N (n2fpu_frdy));
2893 return buf;
2894 }
2895
2896 \f
2897
2898 /* Function argument related. */
2899
/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
nios2_function_arg (cumulative_args_t cum_v, machine_mode mode,
		    const_tree type ATTRIBUTE_UNUSED,
		    bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  rtx return_rtx = NULL_RTX;

  /* Arguments go in r4..r7 while registers remain; NULL means "push on
     the stack".  */
  if (cum->regs_used < NUM_ARG_REGS)
    return_rtx = gen_rtx_REG (mode, FIRST_ARG_REGNO + cum->regs_used);

  return return_rtx;
}
2926
2927 /* Return number of bytes, at the beginning of the argument, that must be
2928 put in registers. 0 is the argument is entirely in registers or entirely
2929 in memory. */
2930
2931 static int
2932 nios2_arg_partial_bytes (cumulative_args_t cum_v,
2933 machine_mode mode, tree type ATTRIBUTE_UNUSED,
2934 bool named ATTRIBUTE_UNUSED)
2935 {
2936 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2937 HOST_WIDE_INT param_size;
2938
2939 if (mode == BLKmode)
2940 {
2941 param_size = int_size_in_bytes (type);
2942 gcc_assert (param_size >= 0);
2943 }
2944 else
2945 param_size = GET_MODE_SIZE (mode);
2946
2947 /* Convert to words (round up). */
2948 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
2949
2950 if (cum->regs_used < NUM_ARG_REGS
2951 && cum->regs_used + param_size > NUM_ARG_REGS)
2952 return (NUM_ARG_REGS - cum->regs_used) * UNITS_PER_WORD;
2953
2954 return 0;
2955 }
2956
2957 /* Update the data in CUM to advance over an argument of mode MODE
2958 and data type TYPE; TYPE is null for libcalls where that information
2959 may not be available. */
2960
2961 static void
2962 nios2_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2963 const_tree type ATTRIBUTE_UNUSED,
2964 bool named ATTRIBUTE_UNUSED)
2965 {
2966 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2967 HOST_WIDE_INT param_size;
2968
2969 if (mode == BLKmode)
2970 {
2971 param_size = int_size_in_bytes (type);
2972 gcc_assert (param_size >= 0);
2973 }
2974 else
2975 param_size = GET_MODE_SIZE (mode);
2976
2977 /* Convert to words (round up). */
2978 param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
2979
2980 if (cum->regs_used + param_size > NUM_ARG_REGS)
2981 cum->regs_used = NUM_ARG_REGS;
2982 else
2983 cum->regs_used += param_size;
2984 }
2985
/* Return the direction (upward/downward) in which an argument of MODE
   and TYPE is padded within its stack slot or register.  */
enum direction
nios2_function_arg_padding (machine_mode mode, const_tree type)
{
  /* On little-endian targets, the first byte of every stack argument
     is passed in the first byte of the stack slot.  */
  if (!BYTES_BIG_ENDIAN)
    return upward;

  /* Otherwise, integral types are padded downward: the last byte of a
     stack argument is passed in the last byte of the stack slot.  */
  if (type != 0
      ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
      : GET_MODE_CLASS (mode) == MODE_INT)
    return downward;

  /* Arguments smaller than a stack slot are padded downward.  */
  if (mode != BLKmode)
    return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY) ? upward : downward;

  return ((int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT))
	  ? upward : downward);
}
3008
/* Return the padding direction for a BLKmode value of TYPE being loaded
   into registers; same rule as for stack arguments.  */
enum direction
nios2_block_reg_padding (machine_mode mode, tree type,
			 int first ATTRIBUTE_UNUSED)
{
  return nios2_function_arg_padding (mode, type);
}
3015
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.
   On Nios II, we handle this by a library call.  */
static void
nios2_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx ctx_reg = force_reg (Pmode, cxt);
  rtx addr = force_reg (Pmode, XEXP (m_tramp, 0));

  /* __trampoline_setup (addr, fnaddr, static_chain) fills in the
     trampoline at ADDR.  */
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
		     LCT_NORMAL, VOIDmode, 3, addr, Pmode, fnaddr, Pmode,
		     ctx_reg, Pmode);
}
3031
/* Implement TARGET_FUNCTION_VALUE.  Scalar return values live in the
   first return-value register.  */
static rtx
nios2_function_value (const_tree ret_type, const_tree fn ATTRIBUTE_UNUSED,
		      bool outgoing ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (TYPE_MODE (ret_type), FIRST_RETVAL_REGNO);
}
3039
/* Implement TARGET_LIBCALL_VALUE.  Library calls return in the same
   register as ordinary functions.  */
static rtx
nios2_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, FIRST_RETVAL_REGNO);
}
3046
/* Implement TARGET_FUNCTION_VALUE_REGNO_P.  Only the first return-value
   register ever holds a function result.  */
static bool
nios2_function_value_regno_p (const unsigned int regno)
{
  return regno == FIRST_RETVAL_REGNO;
}
3053
3054 /* Implement TARGET_RETURN_IN_MEMORY. */
3055 static bool
3056 nios2_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3057 {
3058 return (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3059 || int_size_in_bytes (type) == -1);
3060 }
3061
/* TODO: It may be possible to eliminate the copyback and implement
   own va_arg type.  */
/* Implement TARGET_SETUP_INCOMING_VARARGS: spill the unnamed-argument
   registers to the stack so va_arg can find them, and report the
   pretend-args size.  */
static void
nios2_setup_incoming_varargs (cumulative_args_t cum_v,
			      machine_mode mode, tree type,
			      int *pretend_size, int second_time)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  CUMULATIVE_ARGS local_cum;
  cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
  int regs_to_push;
  int pret_size;

  cfun->machine->uses_anonymous_args = 1;
  /* Advance past the last named argument on a local copy so CUM itself
     is not disturbed.  */
  local_cum = *cum;
  nios2_function_arg_advance (local_cum_v, mode, type, true);

  regs_to_push = NUM_ARG_REGS - local_cum.regs_used;

  /* If we can use CDX stwm to push the arguments on the stack,
     nios2_expand_prologue will do that instead.  */
  if (!TARGET_HAS_CDX && !second_time && regs_to_push > 0)
    {
      rtx ptr = virtual_incoming_args_rtx;
      rtx mem = gen_rtx_MEM (BLKmode, ptr);
      /* Blockages keep the scheduler from moving other code across the
	 register save.  */
      emit_insn (gen_blockage ());
      move_block_from_reg (local_cum.regs_used + FIRST_ARG_REGNO, mem,
			   regs_to_push);
      emit_insn (gen_blockage ());
    }

  pret_size = regs_to_push * UNITS_PER_WORD;
  if (pret_size)
    *pretend_size = pret_size;
}
3097
3098 \f
3099
/* Init FPU builtins.  Registers one __builtin_custom_<name> function per
   entry in nios2_fpu_insn, with function codes starting at START_CODE.  */
static void
nios2_init_fpu_builtins (int start_code)
{
  tree fndecl;
  char builtin_name[64] = "__builtin_custom_";
  unsigned int i, n = strlen ("__builtin_custom_");

  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    {
      /* Append the insn name to the common prefix.  */
      snprintf (builtin_name + n, sizeof (builtin_name) - n,
		"%s", N2FPU_NAME (i));
      fndecl =
	add_builtin_function (builtin_name, nios2_ftype (N2FPU_FTCODE (i)),
			      start_code + i, BUILT_IN_MD, NULL, NULL_TREE);
      nios2_register_builtin_fndecl (start_code + i, fndecl);
    }
}
3118
/* Helper function for expanding FPU builtins.  EXP is the CALL_EXPR,
   CODE the FPU builtin code, and TARGET a suggested result rtx.  Returns
   the rtx holding the result, or const0_rtx for void builtins.  */
static rtx
nios2_expand_fpu_builtin (tree exp, unsigned int code, rtx target)
{
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  enum insn_code icode = N2FPU_ICODE (code);
  int nargs, argno, opno = 0;
  int num_operands = N2FPU (code).num_operands;
  machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
  bool has_target_p = (dst_mode != VOIDmode);

  /* An N of -1 means the corresponding -mcustom-<name> switch was not
     given, so the instruction has no assigned custom opcode.  */
  if (N2FPU_N (code) < 0)
    fatal_error (input_location,
		 "Cannot call %<__builtin_custom_%s%> without specifying switch"
		 " %<-mcustom-%s%>", N2FPU_NAME (code), N2FPU_NAME (code));
  if (has_target_p)
    create_output_operand (&ops[opno++], target, dst_mode);
  else
    /* Subtract away the count of the VOID return, mainly for fwrx/fwry.  */
    num_operands -= 1;
  nargs = call_expr_nargs (exp);
  for (argno = 0; argno < nargs; argno++)
    {
      tree arg = CALL_EXPR_ARG (exp, argno);
      create_input_operand (&ops[opno++], expand_normal (arg),
			    TYPE_MODE (TREE_TYPE (arg)));
    }
  if (!maybe_expand_insn (icode, num_operands, ops))
    {
      error ("invalid argument to built-in function");
      return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
    }
  return has_target_p ? ops[0].value : const0_rtx;
}
3153
/* Nios II has custom instruction built-in functions of the forms:
   __builtin_custom_n
   __builtin_custom_nX
   __builtin_custom_nXX
   __builtin_custom_Xn
   __builtin_custom_XnX
   __builtin_custom_XnXX

   where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
   Therefore with 0-1 return values, and 0-2 arguments, we have a
   total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
*/
#define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
/* Suffix strings ("n", "in", "fnf", ...) for each custom builtin; the
   longest form is "XnXX" (4 chars), hence 5 bytes with terminator.  */
static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
3168
/* Create and register the NUM_CUSTOM_BUILTINS __builtin_custom_XnXX
   builtins, assigning consecutive function codes beginning at
   START_CODE and recording each name suffix in custom_builtin_name.  */
static void
nios2_init_custom_builtins (int start_code)
{
  tree builtin_ftype, ret_type, fndecl;
  char builtin_name[32] = "__builtin_custom_";
  int n = strlen ("__builtin_custom_");
  int builtin_code = 0;
  int lhs, rhs1, rhs2;

  /* Operand-type table; index 0 means "operand absent".  */
  struct { tree type; const char *c; } op[4];
  /* z */ op[0].c = "";  op[0].type = NULL_TREE;
  /* f */ op[1].c = "f"; op[1].type = float_type_node;
  /* i */ op[2].c = "i"; op[2].type = integer_type_node;
  /* p */ op[3].c = "p"; op[3].type = ptr_type_node;

  /* We enumerate through the possible operand types to create all the
     __builtin_custom_XnXX function tree types.  Note that these may slightly
     overlap with the function types created for other fixed builtins.  */

  for (lhs = 0; lhs < 4; lhs++)
    for (rhs1 = 0; rhs1 < 4; rhs1++)
      for (rhs2 = 0; rhs2 < 4; rhs2++)
	{
	  /* A second argument is only valid when a first one exists.  */
	  if (rhs1 == 0 && rhs2 != 0)
	    continue;
	  ret_type = (op[lhs].type ? op[lhs].type : void_type_node);
	  /* Every form takes the opcode (an int) as its first argument.  */
	  builtin_ftype
	    = build_function_type_list (ret_type, integer_type_node,
					op[rhs1].type, op[rhs2].type,
					NULL_TREE);
	  snprintf (builtin_name + n, 32 - n, "%sn%s%s",
		    op[lhs].c, op[rhs1].c, op[rhs2].c);
	  /* Save copy of parameter string into custom_builtin_name[].  */
	  strncpy (custom_builtin_name[builtin_code], builtin_name + n, 5);
	  fndecl =
	    add_builtin_function (builtin_name, builtin_ftype,
				  start_code + builtin_code,
				  BUILT_IN_MD, NULL, NULL_TREE);
	  nios2_register_builtin_fndecl (start_code + builtin_code, fndecl);
	  builtin_code += 1;
	}
}
3211
/* Helper function for expanding custom builtins.  EXP is the CALL_EXPR,
   INDEX is the offset into custom_builtin_name[] (used only for
   diagnostics), and TARGET is a suggested destination.  Emits an
   UNSPEC_VOLATILE carrying the opcode and up to two operands, and
   returns the result rtx (const0_rtx for the void "nXX" forms).  */
static rtx
nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
{
  bool has_target_p = (TREE_TYPE (exp) != void_type_node);
  machine_mode tmode = VOIDmode;
  int nargs, argno;
  rtx value, insn, unspec_args[3];
  tree arg;

  /* XnXX form.  */
  if (has_target_p)
    {
      tmode = TYPE_MODE (TREE_TYPE (exp));
      /* Need a fresh register destination unless TARGET is already a
	 register of the right mode.  */
      if (!target || GET_MODE (target) != tmode
	  || !REG_P (target))
	target = gen_reg_rtx (tmode);
    }

  nargs = call_expr_nargs (exp);
  for (argno = 0; argno < nargs; argno++)
    {
      arg = CALL_EXPR_ARG (exp, argno);
      value = expand_normal (arg);
      unspec_args[argno] = value;
      if (argno == 0)
	{
	  /* First argument is the custom instruction opcode, which must
	     be a compile-time constant 0-255.  */
	  if (!custom_insn_opcode (value, VOIDmode))
	    error ("custom instruction opcode must be compile time "
		   "constant in the range 0-255 for __builtin_custom_%s",
		   custom_builtin_name[index]);
	}
      else
	/* For other arguments, force into a register.  */
	unspec_args[argno] = force_reg (TYPE_MODE (TREE_TYPE (arg)),
					unspec_args[argno]);
    }
  /* Fill remaining unspec operands with zero.  */
  for (; argno < 3; argno++)
    unspec_args[argno] = const0_rtx;

  insn = (has_target_p
	  ? gen_rtx_SET (target,
			 gen_rtx_UNSPEC_VOLATILE (tmode,
						  gen_rtvec_v (3, unspec_args),
						  UNSPECV_CUSTOM_XNXX))
	  : gen_rtx_UNSPEC_VOLATILE (VOIDmode, gen_rtvec_v (3, unspec_args),
				     UNSPECV_CUSTOM_NXX));
  emit_insn (insn);
  return has_target_p ? target : const0_rtx;
}
3263
3264
3265 \f
3266
3267 /* Main definition of built-in functions. Nios II has a small number of fixed
3268 builtins, plus a large number of FPU insn builtins, and builtins for
3269 generating custom instructions. */
3270
/* Descriptor for one fixed builtin: its insn pattern, the minimum
   architecture revision that provides it, its prototype code, and its
   user-visible name.  */
struct nios2_builtin_desc
{
  enum insn_code icode;		/* Matching insn pattern.  */
  enum nios2_arch_type arch;	/* Minimum arch (R1 or R2).  */
  enum nios2_ftcode ftype;	/* Function prototype code.  */
  const char *name;		/* "__builtin_..." name.  */
};

/* X-macro list of all fixed builtins; expanded below to build both the
   function-code enum and the descriptor table, keeping them in sync.  */
#define N2_BUILTINS	\
  N2_BUILTIN_DEF (sync, R1, N2_FTYPE_VOID_VOID)		\
  N2_BUILTIN_DEF (ldbio, R1, N2_FTYPE_SI_CVPTR)		\
  N2_BUILTIN_DEF (ldbuio, R1, N2_FTYPE_UI_CVPTR)	\
  N2_BUILTIN_DEF (ldhio, R1, N2_FTYPE_SI_CVPTR)		\
  N2_BUILTIN_DEF (ldhuio, R1, N2_FTYPE_UI_CVPTR)	\
  N2_BUILTIN_DEF (ldwio, R1, N2_FTYPE_SI_CVPTR)		\
  N2_BUILTIN_DEF (stbio, R1, N2_FTYPE_VOID_VPTR_SI)	\
  N2_BUILTIN_DEF (sthio, R1, N2_FTYPE_VOID_VPTR_SI)	\
  N2_BUILTIN_DEF (stwio, R1, N2_FTYPE_VOID_VPTR_SI)	\
  N2_BUILTIN_DEF (rdctl, R1, N2_FTYPE_SI_SI)		\
  N2_BUILTIN_DEF (wrctl, R1, N2_FTYPE_VOID_SI_SI)	\
  N2_BUILTIN_DEF (rdprs, R1, N2_FTYPE_SI_SI_SI)		\
  N2_BUILTIN_DEF (flushd, R1, N2_FTYPE_VOID_VPTR)	\
  N2_BUILTIN_DEF (flushda, R1, N2_FTYPE_VOID_VPTR)	\
  N2_BUILTIN_DEF (wrpie, R2, N2_FTYPE_SI_SI)		\
  N2_BUILTIN_DEF (eni, R2, N2_FTYPE_VOID_SI)		\
  N2_BUILTIN_DEF (ldex, R2, N2_FTYPE_SI_CVPTR)		\
  N2_BUILTIN_DEF (ldsex, R2, N2_FTYPE_SI_CVPTR)		\
  N2_BUILTIN_DEF (stex, R2, N2_FTYPE_SI_VPTR_SI)	\
  N2_BUILTIN_DEF (stsex, R2, N2_FTYPE_SI_VPTR_SI)

/* Function codes for the fixed builtins, NIOS2_BUILTIN_<name>.  */
enum nios2_builtin_code {
#define N2_BUILTIN_DEF(name, arch, ftype) NIOS2_BUILTIN_ ## name,
  N2_BUILTINS
#undef N2_BUILTIN_DEF
  NUM_FIXED_NIOS2_BUILTINS
};

/* Descriptor table, indexed by nios2_builtin_code.  */
static const struct nios2_builtin_desc nios2_builtins[] = {
#define N2_BUILTIN_DEF(name, arch, ftype)		\
  { CODE_FOR_ ## name, ARCH_ ## arch, ftype, "__builtin_" #name },
  N2_BUILTINS
#undef N2_BUILTIN_DEF
};

/* Start/ends of FPU/custom insn builtin index ranges.  */
static unsigned int nios2_fpu_builtin_base;
static unsigned int nios2_custom_builtin_base;
static unsigned int nios2_custom_builtin_end;
3319
3320 /* Implement TARGET_INIT_BUILTINS. */
3321 static void
3322 nios2_init_builtins (void)
3323 {
3324 unsigned int i;
3325
3326 /* Initialize fixed builtins. */
3327 for (i = 0; i < ARRAY_SIZE (nios2_builtins); i++)
3328 {
3329 const struct nios2_builtin_desc *d = &nios2_builtins[i];
3330 tree fndecl =
3331 add_builtin_function (d->name, nios2_ftype (d->ftype), i,
3332 BUILT_IN_MD, NULL, NULL);
3333 nios2_register_builtin_fndecl (i, fndecl);
3334 }
3335
3336 /* Initialize FPU builtins. */
3337 nios2_fpu_builtin_base = ARRAY_SIZE (nios2_builtins);
3338 nios2_init_fpu_builtins (nios2_fpu_builtin_base);
3339
3340 /* Initialize custom insn builtins. */
3341 nios2_custom_builtin_base
3342 = nios2_fpu_builtin_base + ARRAY_SIZE (nios2_fpu_insn);
3343 nios2_custom_builtin_end
3344 = nios2_custom_builtin_base + NUM_CUSTOM_BUILTINS;
3345 nios2_init_custom_builtins (nios2_custom_builtin_base);
3346 }
3347
/* Array of fndecls for TARGET_BUILTIN_DECL, covering all three builtin
   ranges: fixed, FPU, and custom.  */
#define NIOS2_NUM_BUILTINS \
  (ARRAY_SIZE (nios2_builtins) + ARRAY_SIZE (nios2_fpu_insn) + NUM_CUSTOM_BUILTINS)
static GTY(()) tree nios2_builtin_decls[NIOS2_NUM_BUILTINS];

/* Record FNDECL as the decl for builtin function code CODE, for later
   retrieval by nios2_builtin_decl.  */
static void
nios2_register_builtin_fndecl (unsigned code, tree fndecl)
{
  nios2_builtin_decls[code] = fndecl;
}
3358
3359 /* Implement TARGET_BUILTIN_DECL. */
3360 static tree
3361 nios2_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3362 {
3363 gcc_assert (nios2_custom_builtin_end == ARRAY_SIZE (nios2_builtin_decls));
3364
3365 if (code >= nios2_custom_builtin_end)
3366 return error_mark_node;
3367
3368 if (code >= nios2_fpu_builtin_base
3369 && code < nios2_custom_builtin_base
3370 && ! N2FPU_ENABLED_P (code - nios2_fpu_builtin_base))
3371 return error_mark_node;
3372
3373 return nios2_builtin_decls[code];
3374 }
3375
3376 \f
3377 /* Low-level built-in expand routine. */
3378 static rtx
3379 nios2_expand_builtin_insn (const struct nios2_builtin_desc *d, int n,
3380 struct expand_operand *ops, bool has_target_p)
3381 {
3382 if (maybe_expand_insn (d->icode, n, ops))
3383 return has_target_p ? ops[0].value : const0_rtx;
3384 else
3385 {
3386 error ("invalid argument to built-in function %s", d->name);
3387 return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
3388 }
3389 }
3390
/* Expand ldio/stio and ldex/ldsex/stex/stsex form load-store
   instruction builtins.  EXP is the CALL_EXPR, TARGET a suggested
   destination, D the builtin's descriptor.  Store-form insns are
   distinguished by their first operand allowing memory.  */
static rtx
nios2_expand_ldst_builtin (tree exp, rtx target,
			   const struct nios2_builtin_desc *d)
{
  bool has_target_p;
  rtx addr, mem, val;
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  machine_mode mode = insn_data[d->icode].operand[0].mode;

  /* The first argument is always the memory address.  */
  addr = expand_normal (CALL_EXPR_ARG (exp, 0));
  mem = gen_rtx_MEM (mode, addr);

  if (insn_data[d->icode].operand[0].allows_mem)
    {
      /* stxio/stex/stsex.  */
      val = expand_normal (CALL_EXPR_ARG (exp, 1));
      /* Narrow a constant value to the access mode, then load it into
	 a register so the subreg below is valid.  */
      if (CONST_INT_P (val))
	val = force_reg (mode, gen_int_mode (INTVAL (val), mode));
      val = simplify_gen_subreg (mode, val, GET_MODE (val), 0);
      create_output_operand (&ops[0], mem, mode);
      create_input_operand (&ops[1], val, mode);
      if (insn_data[d->icode].n_operands == 3)
	{
	  /* stex/stsex status value, returned as result of function.  */
	  create_output_operand (&ops[2], target, mode);
	  has_target_p = true;
	}
      else
	has_target_p = false;
    }
  else
    {
      /* ldxio.  */
      create_output_operand (&ops[0], target, mode);
      create_input_operand (&ops[1], mem, mode);
      has_target_p = true;
    }
  return nios2_expand_builtin_insn (d, insn_data[d->icode].n_operands, ops,
				    has_target_p);
}
3433
3434 /* Expand rdctl/wrctl builtins. */
3435 static rtx
3436 nios2_expand_rdwrctl_builtin (tree exp, rtx target,
3437 const struct nios2_builtin_desc *d)
3438 {
3439 bool has_target_p = (insn_data[d->icode].operand[0].predicate
3440 == register_operand);
3441 rtx ctlcode = expand_normal (CALL_EXPR_ARG (exp, 0));
3442 struct expand_operand ops[MAX_RECOG_OPERANDS];
3443 if (!rdwrctl_operand (ctlcode, VOIDmode))
3444 {
3445 error ("Control register number must be in range 0-31 for %s",
3446 d->name);
3447 return has_target_p ? gen_reg_rtx (SImode) : const0_rtx;
3448 }
3449 if (has_target_p)
3450 {
3451 create_output_operand (&ops[0], target, SImode);
3452 create_integer_operand (&ops[1], INTVAL (ctlcode));
3453 }
3454 else
3455 {
3456 rtx val = expand_normal (CALL_EXPR_ARG (exp, 1));
3457 create_integer_operand (&ops[0], INTVAL (ctlcode));
3458 create_input_operand (&ops[1], val, SImode);
3459 }
3460 return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
3461 }
3462
3463 static rtx
3464 nios2_expand_rdprs_builtin (tree exp, rtx target,
3465 const struct nios2_builtin_desc *d)
3466 {
3467 rtx reg = expand_normal (CALL_EXPR_ARG (exp, 0));
3468 rtx imm = expand_normal (CALL_EXPR_ARG (exp, 1));
3469 struct expand_operand ops[MAX_RECOG_OPERANDS];
3470
3471 if (!rdwrctl_operand (reg, VOIDmode))
3472 {
3473 error ("Register number must be in range 0-31 for %s",
3474 d->name);
3475 return gen_reg_rtx (SImode);
3476 }
3477
3478 if (!rdprs_dcache_operand (imm, VOIDmode))
3479 {
3480 error ("The immediate value must fit into a %d-bit integer for %s",
3481 (TARGET_ARCH_R2) ? 12 : 16, d->name);
3482 return gen_reg_rtx (SImode);
3483 }
3484
3485 create_output_operand (&ops[0], target, SImode);
3486 create_input_operand (&ops[1], reg, SImode);
3487 create_integer_operand (&ops[2], INTVAL (imm));
3488
3489 return nios2_expand_builtin_insn (d, 3, ops, true);
3490 }
3491
3492 static rtx
3493 nios2_expand_cache_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3494 const struct nios2_builtin_desc *d)
3495 {
3496 rtx mem, addr;
3497 struct expand_operand ops[MAX_RECOG_OPERANDS];
3498
3499 addr = expand_normal (CALL_EXPR_ARG (exp, 0));
3500 mem = gen_rtx_MEM (SImode, addr);
3501
3502 create_input_operand (&ops[0], mem, SImode);
3503
3504 return nios2_expand_builtin_insn (d, 1, ops, false);
3505 }
3506
3507 static rtx
3508 nios2_expand_wrpie_builtin (tree exp, rtx target,
3509 const struct nios2_builtin_desc *d)
3510 {
3511 rtx val;
3512 struct expand_operand ops[MAX_RECOG_OPERANDS];
3513
3514 val = expand_normal (CALL_EXPR_ARG (exp, 0));
3515 create_input_operand (&ops[1], val, SImode);
3516 create_output_operand (&ops[0], target, SImode);
3517
3518 return nios2_expand_builtin_insn (d, 2, ops, true);
3519 }
3520
3521 static rtx
3522 nios2_expand_eni_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
3523 const struct nios2_builtin_desc *d)
3524 {
3525 rtx imm = expand_normal (CALL_EXPR_ARG (exp, 0));
3526 struct expand_operand ops[MAX_RECOG_OPERANDS];
3527
3528 if (INTVAL (imm) != 0 && INTVAL (imm) != 1)
3529 {
3530 error ("The ENI instruction operand must be either 0 or 1");
3531 return const0_rtx;
3532 }
3533 create_integer_operand (&ops[0], INTVAL (imm));
3534
3535 return nios2_expand_builtin_insn (d, 1, ops, false);
3536 }
3537
/* Implement TARGET_EXPAND_BUILTIN.  Expand an expression EXP that calls
   a built-in function, with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		      machine_mode mode ATTRIBUTE_UNUSED,
		      int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Function codes below nios2_fpu_builtin_base are the fixed builtins;
     the two ranges above it are FPU and custom builtins, handled by the
     helpers at the bottom.  */
  if (fcode < nios2_fpu_builtin_base)
    {
      const struct nios2_builtin_desc *d = &nios2_builtins[fcode];

      /* R2-only builtins cannot be expanded for an R1 target.  */
      if (d->arch > nios2_arch_option)
	{
	  error ("Builtin function %s requires Nios II R%d",
		 d->name, (int) d->arch);
	  /* Given it is invalid, just generate a normal call.  */
	  return expand_call (exp, target, ignore);
	}

      switch (fcode)
	{
	case NIOS2_BUILTIN_sync:
	  emit_insn (gen_sync ());
	  return const0_rtx;

	case NIOS2_BUILTIN_ldbio:
	case NIOS2_BUILTIN_ldbuio:
	case NIOS2_BUILTIN_ldhio:
	case NIOS2_BUILTIN_ldhuio:
	case NIOS2_BUILTIN_ldwio:
	case NIOS2_BUILTIN_stbio:
	case NIOS2_BUILTIN_sthio:
	case NIOS2_BUILTIN_stwio:
	case NIOS2_BUILTIN_ldex:
	case NIOS2_BUILTIN_ldsex:
	case NIOS2_BUILTIN_stex:
	case NIOS2_BUILTIN_stsex:
	  return nios2_expand_ldst_builtin (exp, target, d);

	case NIOS2_BUILTIN_rdctl:
	case NIOS2_BUILTIN_wrctl:
	  return nios2_expand_rdwrctl_builtin (exp, target, d);

	case NIOS2_BUILTIN_rdprs:
	  return nios2_expand_rdprs_builtin (exp, target, d);

	case NIOS2_BUILTIN_flushd:
	case NIOS2_BUILTIN_flushda:
	  return nios2_expand_cache_builtin (exp, target, d);

	case NIOS2_BUILTIN_wrpie:
	  return nios2_expand_wrpie_builtin (exp, target, d);

	case NIOS2_BUILTIN_eni:
	  return nios2_expand_eni_builtin (exp, target, d);

	default:
	  gcc_unreachable ();
	}
    }
  else if (fcode < nios2_custom_builtin_base)
    /* FPU builtin range.  */
    return nios2_expand_fpu_builtin (exp, fcode - nios2_fpu_builtin_base,
				     target);
  else if (fcode < nios2_custom_builtin_end)
    /* Custom insn builtin range.  */
    return nios2_expand_custom_builtin (exp, fcode - nios2_custom_builtin_base,
					target);
  else
    gcc_unreachable ();
}
3616
/* Implement TARGET_INIT_LIBFUNCS.  Register out-of-line library
   routines for the word-size atomic/sync operations.  */
static void ATTRIBUTE_UNUSED
nios2_init_libfuncs (void)
{
  init_sync_libfuncs (UNITS_PER_WORD);
}
3623
3624 \f
3625
/* Register a custom code use, and signal error if a conflict was found.
   N is the custom instruction number (0-255), STATUS says whether the
   use comes from an FPU option (CCS_FPU) or a __builtin_custom_* call
   (CCS_BUILTIN_CALL), and INDEX identifies which FPU insn or builtin.  */
static void
nios2_register_custom_code (unsigned int N, enum nios2_ccs_code status,
			    int index)
{
  gcc_assert (N <= 255);

  if (status == CCS_FPU)
    {
      /* Two different FPU insns must not share one custom code.  */
      if (custom_code_status[N] == CCS_FPU && index != custom_code_index[N])
	{
	  custom_code_conflict = true;
	  error ("switch %<-mcustom-%s%> conflicts with switch %<-mcustom-%s%>",
		 N2FPU_NAME (custom_code_index[N]), N2FPU_NAME (index));
	}
      else if (custom_code_status[N] == CCS_BUILTIN_CALL)
	{
	  custom_code_conflict = true;
	  error ("call to %<__builtin_custom_%s%> conflicts with switch "
		 "%<-mcustom-%s%>", custom_builtin_name[custom_code_index[N]],
		 N2FPU_NAME (index));
	}
    }
  else if (status == CCS_BUILTIN_CALL)
    {
      if (custom_code_status[N] == CCS_FPU)
	{
	  custom_code_conflict = true;
	  error ("call to %<__builtin_custom_%s%> conflicts with switch "
		 "%<-mcustom-%s%>", custom_builtin_name[index],
		 N2FPU_NAME (custom_code_index[N]));
	}
      else
	{
	  /* Note that code conflicts between different __builtin_custom_xnxx
	     calls are not checked.  */
	}
    }
  else
    gcc_unreachable ();

  /* Record the new use even when a conflict has been diagnosed.  */
  custom_code_status[N] = status;
  custom_code_index[N] = index;
}
3670
3671 /* Mark a custom code as not in use. */
3672 static void
3673 nios2_deregister_custom_code (unsigned int N)
3674 {
3675 if (N <= 255)
3676 {
3677 custom_code_status[N] = CCS_UNUSED;
3678 custom_code_index[N] = 0;
3679 }
3680 }
3681
/* Target attributes can affect per-function option state, so we need to
   save/restore the custom code tracking info using the
   TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks.  */

/* Implement TARGET_OPTION_SAVE: snapshot the per-FPU-insn custom code
   assignments and the custom code tracking arrays into PTR.  */
static void
nios2_option_save (struct cl_target_option *ptr,
		   struct gcc_options *opts ATTRIBUTE_UNUSED)
{
  unsigned int i;
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    ptr->saved_fpu_custom_code[i] = N2FPU_N (i);
  memcpy (ptr->saved_custom_code_status, custom_code_status,
	  sizeof (custom_code_status));
  memcpy (ptr->saved_custom_code_index, custom_code_index,
	  sizeof (custom_code_index));
}
3698
/* Implement TARGET_OPTION_RESTORE: the inverse of nios2_option_save,
   reloading the custom code tracking state from PTR.  */
static void
nios2_option_restore (struct gcc_options *opts ATTRIBUTE_UNUSED,
		      struct cl_target_option *ptr)
{
  unsigned int i;
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    N2FPU_N (i) = ptr->saved_fpu_custom_code[i];
  memcpy (custom_code_status, ptr->saved_custom_code_status,
	  sizeof (custom_code_status));
  memcpy (custom_code_index, ptr->saved_custom_code_index,
	  sizeof (custom_code_index));
}
3711
/* Inner function to process the attribute((target(...))), take an argument and
   set the current options from the argument.  If we have a list, recursively
   go over the list.  Returns true on success, false after diagnosing a
   bad argument.  Recognized options are "custom-fpu-cfg=<cfg>",
   "custom-<insn>=<N>", and "no-custom-<insn>".  */

static bool
nios2_valid_target_attribute_rec (tree args)
{
  if (TREE_CODE (args) == TREE_LIST)
    {
      /* Process every list element; remember any failure but keep
	 going so all errors get reported.  */
      bool ret = true;
      for (; args; args = TREE_CHAIN (args))
	if (TREE_VALUE (args)
	    && !nios2_valid_target_attribute_rec (TREE_VALUE (args)))
	  ret = false;
      return ret;
    }
  else if (TREE_CODE (args) == STRING_CST)
    {
      /* Parse a comma-separated option string, destructively splitting
	 a stack copy of it in place.  */
      char *argstr = ASTRDUP (TREE_STRING_POINTER (args));
      while (argstr && *argstr != '\0')
	{
	  bool no_opt = false, end_p = false;
	  char *eq = NULL, *p;
	  while (ISSPACE (*argstr))
	    argstr++;
	  p = argstr;
	  /* Find the end of this option (',' or NUL) and the first '='
	     within it.  */
	  while (*p != '\0' && *p != ',')
	    {
	      if (!eq && *p == '=')
		eq = p;
	      ++p;
	    }
	  if (*p == '\0')
	    end_p = true;
	  else
	    *p = '\0';
	  if (eq) *eq = '\0';

	  if (!strncmp (argstr, "no-", 3))
	    {
	      no_opt = true;
	      argstr += 3;
	    }
	  if (!strncmp (argstr, "custom-fpu-cfg", 14))
	    {
	      char *end_eq = p;
	      if (no_opt)
		{
		  error ("custom-fpu-cfg option does not support %<no-%>");
		  return false;
		}
	      if (!eq)
		{
		  error ("custom-fpu-cfg option requires configuration"
			 " argument");
		  return false;
		}
	      /* Increment and skip whitespace.  */
	      while (ISSPACE (*(++eq))) ;
	      /* Decrement and skip to before any trailing whitespace.  */
	      while (ISSPACE (*(--end_eq))) ;

	      nios2_handle_custom_fpu_cfg (eq, end_eq + 1, true);
	    }
	  else if (!strncmp (argstr, "custom-", 7))
	    {
	      /* Look the insn name up in the FPU insn table.  */
	      int code = -1;
	      unsigned int i;
	      for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
		if (!strncmp (argstr + 7, N2FPU_NAME (i),
			      strlen (N2FPU_NAME (i))))
		  {
		    /* Found insn.  */
		    code = i;
		    break;
		  }
	      if (code >= 0)
		{
		  if (no_opt)
		    {
		      if (eq)
			{
			  error ("%<no-custom-%s%> does not accept arguments",
				 N2FPU_NAME (code));
			  return false;
			}
		      /* Disable option by setting to -1.  */
		      nios2_deregister_custom_code (N2FPU_N (code));
		      N2FPU_N (code) = -1;
		    }
		  else
		    {
		      char *t;
		      if (eq)
			while (ISSPACE (*(++eq))) ;
		      if (!eq || eq == p)
			{
			  error ("%<custom-%s=%> requires argument",
				 N2FPU_NAME (code));
			  return false;
			}
		      /* The argument must consist solely of digits
			 (whitespace allowed).  */
		      for (t = eq; t != p; ++t)
			{
			  if (ISSPACE (*t))
			    continue;
			  if (!ISDIGIT (*t))
			    {
			      error ("`custom-%s=' argument requires "
				     "numeric digits", N2FPU_NAME (code));
			      return false;
			    }
			}
		      /* Set option to argument.  */
		      N2FPU_N (code) = atoi (eq);
		      nios2_handle_custom_fpu_insn_option (code);
		    }
		}
	      else
		{
		  error ("%<custom-%s=%> is not recognized as FPU instruction",
			 argstr + 7);
		  return false;
		}
	    }
	  else
	    {
	      error ("%<%s%> is unknown", argstr);
	      return false;
	    }

	  /* Advance past the ',' to the next option, if any.  */
	  if (end_p)
	    break;
	  else
	    argstr = p + 1;
	}
      return true;
    }
  else
    gcc_unreachable ();
}
3852
3853 /* Return a TARGET_OPTION_NODE tree of the target options listed or NULL. */
3854
3855 static tree
3856 nios2_valid_target_attribute_tree (tree args)
3857 {
3858 if (!nios2_valid_target_attribute_rec (args))
3859 return NULL_TREE;
3860 nios2_custom_check_insns ();
3861 return build_target_option_node (&global_options);
3862 }
3863
/* Hook to validate attribute((target("string"))).  Parses the attribute
   ARGS against the global options, attaches the resulting option nodes
   to FNDECL, and then restores the previous global state.  Returns
   false when the attribute string was invalid.  */

static bool
nios2_valid_target_attribute_p (tree fndecl, tree ARG_UNUSED (name),
				tree args, int ARG_UNUSED (flags))
{
  struct cl_target_option cur_target;
  bool ret = true;
  tree old_optimize = build_optimization_node (&global_options);
  tree new_target, new_optimize;
  tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);

  /* If the function changed the optimization levels as well as setting target
     options, start with the optimizations specified.  */
  if (func_optimize && func_optimize != old_optimize)
    cl_optimization_restore (&global_options,
			     TREE_OPTIMIZATION (func_optimize));

  /* The target attributes may also change some optimization flags, so update
     the optimization options if necessary.  */
  cl_target_option_save (&cur_target, &global_options);
  new_target = nios2_valid_target_attribute_tree (args);
  new_optimize = build_optimization_node (&global_options);

  if (!new_target)
    ret = false;

  else if (fndecl)
    {
      /* Attach the parsed target (and, if changed, optimization)
	 option nodes to the function declaration.  */
      DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;

      if (old_optimize != new_optimize)
	DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
    }

  /* Undo the temporary changes made to the global option state.  */
  cl_target_option_restore (&global_options, &cur_target);

  if (old_optimize != new_optimize)
    cl_optimization_restore (&global_options,
			     TREE_OPTIMIZATION (old_optimize));
  return ret;
}
3906
/* Remember the last target of nios2_set_current_function.  */
static GTY(()) tree nios2_previous_fndecl;

/* Establish appropriate back-end context for processing the function
   FNDECL.  The argument might be NULL to indicate processing at top
   level, outside of any function scope.  Switches the global target
   option state when FNDECL carries target attributes different from
   the previously-selected function's.  */
static void
nios2_set_current_function (tree fndecl)
{
  tree old_tree = (nios2_previous_fndecl
		   ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl)
		   : NULL_TREE);

  tree new_tree = (fndecl
		   ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
		   : NULL_TREE);

  if (fndecl && fndecl != nios2_previous_fndecl)
    {
      nios2_previous_fndecl = fndecl;
      /* Nothing to do when both functions use the same options.  */
      if (old_tree == new_tree)
	;

      else if (new_tree)
	{
	  /* Switch to the new function's target options.  */
	  cl_target_option_restore (&global_options,
				    TREE_TARGET_OPTION (new_tree));
	  target_reinit ();
	}

      else if (old_tree)
	{
	  /* The new function has no attribute; go back to the
	     currently-selected default options.  */
	  struct cl_target_option *def
	    = TREE_TARGET_OPTION (target_option_current_node);

	  cl_target_option_restore (&global_options, def);
	  target_reinit ();
	}
    }
}
3947
3948 /* Hook to validate the current #pragma GCC target and set the FPU custom
3949 code option state. If ARGS is NULL, then POP_TARGET is used to reset
3950 the options. */
3951 static bool
3952 nios2_pragma_target_parse (tree args, tree pop_target)
3953 {
3954 tree cur_tree;
3955 if (! args)
3956 {
3957 cur_tree = ((pop_target)
3958 ? pop_target
3959 : target_option_default_node);
3960 cl_target_option_restore (&global_options,
3961 TREE_TARGET_OPTION (cur_tree));
3962 }
3963 else
3964 {
3965 cur_tree = nios2_valid_target_attribute_tree (args);
3966 if (!cur_tree)
3967 return false;
3968 }
3969
3970 target_option_current_node = cur_tree;
3971 return true;
3972 }
3973
/* Implement TARGET_MERGE_DECL_ATTRIBUTES.
   We are just using this hook to add some additional error checking to
   the default behavior.  GCC does not provide a target hook for merging
   the target options, and only correctly handles merging empty vs non-empty
   option data; see merge_decls() in c-decl.c.
   So here we require either that at least one of the decls has empty
   target options, or that the target options/data be identical.  */
static tree
nios2_merge_decl_attributes (tree olddecl, tree newdecl)
{
  tree oldopts = lookup_attribute ("target", DECL_ATTRIBUTES (olddecl));
  tree newopts = lookup_attribute ("target", DECL_ATTRIBUTES (newdecl));
  if (newopts && oldopts && newopts != oldopts)
    {
      tree oldtree = DECL_FUNCTION_SPECIFIC_TARGET (olddecl);
      tree newtree = DECL_FUNCTION_SPECIFIC_TARGET (newdecl);
      /* Both decls carry target options: they must agree bitwise.  */
      if (oldtree && newtree && oldtree != newtree)
	{
	  struct cl_target_option *olddata = TREE_TARGET_OPTION (oldtree);
	  struct cl_target_option *newdata = TREE_TARGET_OPTION (newtree);
	  if (olddata != newdata
	      && memcmp (olddata, newdata, sizeof (struct cl_target_option)))
	    error ("%qE redeclared with conflicting %qs attributes",
		   DECL_NAME (newdecl), "target");
	}
    }
  /* Fall back to the generic attribute merge.  */
  return merge_attributes (DECL_ATTRIBUTES (olddecl),
			   DECL_ATTRIBUTES (newdecl));
}
4003
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  Emit to FILE the assembly for
   a vcall thunk: adjust the incoming "this" pointer by DELTA (and, if
   nonzero, by the vtable entry at VCALL_OFFSET), then tail-call
   FUNCTION.  */
static void
nios2_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			   HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			   tree function)
{
  rtx this_rtx, funexp;
  rtx_insn *insn;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  if (flag_pic)
    nios2_load_pic_register ();

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $5.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO + 1);
  else
    this_rtx = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);

  /* Add DELTA to THIS_RTX.  */
  nios2_emit_add_constant (this_rtx, delta);

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      tmp = gen_rtx_REG (Pmode, 2);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
      nios2_emit_add_constant (tmp, vcall_offset);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (gen_add2_insn (this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  /* Function address needs to be constructed under PIC,
     provide r2 to use here.  */
  nios2_adjust_call_address (&funexp, gen_rtx_REG (Pmode, 2));
  insn = emit_call_insn (gen_sibcall_internal (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
4070
4071
4072 /* Utility function to break a memory address into
4073 base register + constant offset. Return false if something
4074 unexpected is seen. */
4075 static bool
4076 split_mem_address (rtx addr, rtx *base_reg, rtx *offset)
4077 {
4078 if (REG_P (addr))
4079 {
4080 *base_reg = addr;
4081 *offset = const0_rtx;
4082 return true;
4083 }
4084 else if (GET_CODE (addr) == PLUS)
4085 {
4086 *base_reg = XEXP (addr, 0);
4087 *offset = XEXP (addr, 1);
4088 return true;
4089 }
4090 return false;
4091 }
4092
/* Splits out the operands of an ALU insn, places them in *LHS, *RHS1, *RHS2.
   INSN must be a single SET.  For a unary source, *RHS2 is left
   untouched, so callers must not consult it in that case.  */
static void
split_alu_insn (rtx_insn *insn, rtx *lhs, rtx *rhs1, rtx *rhs2)
{
  rtx pat = PATTERN (insn);
  gcc_assert (GET_CODE (pat) == SET);
  *lhs = SET_DEST (pat);
  *rhs1 = XEXP (SET_SRC (pat), 0);
  /* Only non-unary rtx codes have a second operand.  */
  if (GET_RTX_CLASS (GET_CODE (SET_SRC (pat))) != RTX_UNARY)
    *rhs2 = XEXP (SET_SRC (pat), 1);
  return;
}
4105
4106 /* Returns true if OP is a REG and assigned a CDX reg. */
4107 static bool
4108 cdxreg (rtx op)
4109 {
4110 return REG_P (op) && (!reload_completed || CDX_REG_P (REGNO (op)));
4111 }
4112
4113 /* Returns true if OP is within range of CDX addi.n immediates. */
4114 static bool
4115 cdx_add_immed (rtx op)
4116 {
4117 if (CONST_INT_P (op))
4118 {
4119 HOST_WIDE_INT ival = INTVAL (op);
4120 return ival <= 128 && ival > 0 && (ival & (ival - 1)) == 0;
4121 }
4122 return false;
4123 }
4124
4125 /* Returns true if OP is within range of CDX andi.n immediates. */
4126 static bool
4127 cdx_and_immed (rtx op)
4128 {
4129 if (CONST_INT_P (op))
4130 {
4131 HOST_WIDE_INT ival = INTVAL (op);
4132 return (ival == 1 || ival == 2 || ival == 3 || ival == 4
4133 || ival == 8 || ival == 0xf || ival == 0x10
4134 || ival == 0x1f || ival == 0x20
4135 || ival == 0x3f || ival == 0x7f
4136 || ival == 0x80 || ival == 0xff || ival == 0x7ff
4137 || ival == 0xff00 || ival == 0xffff);
4138 }
4139 return false;
4140 }
4141
4142 /* Returns true if OP is within range of CDX movi.n immediates. */
4143 static bool
4144 cdx_mov_immed (rtx op)
4145 {
4146 if (CONST_INT_P (op))
4147 {
4148 HOST_WIDE_INT ival = INTVAL (op);
4149 return ((ival >= 0 && ival <= 124)
4150 || ival == 0xff || ival == -2 || ival == -1);
4151 }
4152 return false;
4153 }
4154
4155 /* Returns true if OP is within range of CDX slli.n/srli.n immediates. */
4156 static bool
4157 cdx_shift_immed (rtx op)
4158 {
4159 if (CONST_INT_P (op))
4160 {
4161 HOST_WIDE_INT ival = INTVAL (op);
4162 return (ival == 1 || ival == 2 || ival == 3 || ival == 8
4163 || ival == 12 || ival == 16 || ival == 24
4164 || ival == 31);
4165 }
4166 return false;
4167 }
4168
4169
4170
/* Classification of different kinds of add instructions.  The first
   six kinds are narrow 2-byte CDX forms; the last two are the normal
   4-byte R1/R2 forms.  */
enum nios2_add_insn_kind {
  nios2_add_n_kind,
  nios2_addi_n_kind,
  nios2_subi_n_kind,
  nios2_spaddi_n_kind,
  nios2_spinci_n_kind,
  nios2_spdeci_n_kind,
  nios2_add_kind,
  nios2_addi_kind
};

/* Assembler mnemonic for each add kind; indexed by
   enum nios2_add_insn_kind, so the order must match the enum.  */
static const char *nios2_add_insn_names[] = {
  "add.n", "addi.n", "subi.n", "spaddi.n", "spinci.n", "spdeci.n",
  "add", "addi" };
/* Whether each add kind is a narrow 2-byte CDX instruction; also
   indexed by enum nios2_add_insn_kind.  */
static bool nios2_add_insn_narrow[] = {
  true, true, true, true, true, true,
  false, false};
4189
/* Function to classify kinds of add instruction patterns.  The
   classification is made purely from the operands of the computation
   LHS = RHS1 + RHS2; narrow CDX kinds are detected first, falling
   back to the generic add/addi forms.  */
static enum nios2_add_insn_kind
nios2_add_insn_classify (rtx_insn *insn ATTRIBUTE_UNUSED,
			 rtx lhs, rtx rhs1, rtx rhs2)
{
  if (TARGET_HAS_CDX)
    {
      if (cdxreg (lhs) && cdxreg (rhs1))
	{
	  if (cdxreg (rhs2))
	    return nios2_add_n_kind;
	  if (CONST_INT_P (rhs2))
	    {
	      HOST_WIDE_INT ival = INTVAL (rhs2);
	      /* A positive immediate maps to addi.n; a negative one
		 maps to subi.n with the negated value, provided the
		 magnitude is an encodable addi.n immediate.  */
	      if (ival > 0 && cdx_add_immed (rhs2))
		return nios2_addi_n_kind;
	      if (ival < 0 && cdx_add_immed (GEN_INT (-ival)))
		return nios2_subi_n_kind;
	    }
	}
      else if (rhs1 == stack_pointer_rtx
	       && CONST_INT_P (rhs2))
	{
	  /* SP-relative forms: the offset must be a multiple of 4
	     whose scaled value fits in 7 bits.  */
	  HOST_WIDE_INT imm7 = INTVAL (rhs2) >> 2;
	  HOST_WIDE_INT rem = INTVAL (rhs2) & 3;
	  if (rem == 0 && (imm7 & ~0x7f) == 0)
	    {
	      if (cdxreg (lhs))
		return nios2_spaddi_n_kind;
	      if (lhs == stack_pointer_rtx)
		return nios2_spinci_n_kind;
	    }
	  /* SP decrement: retry the range check with the negated
	     offset.  */
	  imm7 = -INTVAL(rhs2) >> 2;
	  rem = -INTVAL (rhs2) & 3;
	  if (lhs == stack_pointer_rtx
	      && rem == 0 && (imm7 & ~0x7f) == 0)
	    return nios2_spdeci_n_kind;
	}
    }
  /* Not a narrow CDX form; a register or zero second operand selects
     add, any other immediate selects addi.  */
  return ((REG_P (rhs2) || rhs2 == const0_rtx)
	  ? nios2_add_kind : nios2_addi_kind);
}
4232
4233 /* Emit assembly language for the different kinds of add instructions. */
4234 const char*
4235 nios2_add_insn_asm (rtx_insn *insn, rtx *operands)
4236 {
4237 static char buf[256];
4238 int ln = 256;
4239 enum nios2_add_insn_kind kind
4240 = nios2_add_insn_classify (insn, operands[0], operands[1], operands[2]);
4241 if (kind == nios2_subi_n_kind)
4242 snprintf (buf, ln, "subi.n\t%%0, %%1, %d", (int) -INTVAL (operands[2]));
4243 else if (kind == nios2_spaddi_n_kind)
4244 snprintf (buf, ln, "spaddi.n\t%%0, %%2");
4245 else if (kind == nios2_spinci_n_kind)
4246 snprintf (buf, ln, "spinci.n\t%%2");
4247 else if (kind == nios2_spdeci_n_kind)
4248 snprintf (buf, ln, "spdeci.n\t%d", (int) -INTVAL (operands[2]));
4249 else
4250 snprintf (buf, ln, "%s\t%%0, %%1, %%z2", nios2_add_insn_names[(int)kind]);
4251 return buf;
4252 }
4253
/* This routine, which the default "length" attribute computation is
   based on, encapsulates information about all the cases where CDX
   provides a narrow 2-byte instruction form.  Returns true if INSN
   can be emitted as a narrow instruction; only meaningful after
   reload has completed, when hard register assignments are known.  */
bool
nios2_cdx_narrow_form_p (rtx_insn *insn)
{
  rtx pat, lhs, rhs1, rhs2;
  enum attr_type type;
  if (!TARGET_HAS_CDX)
    return false;
  type = get_attr_type (insn);
  pat = PATTERN (insn);
  gcc_assert (reload_completed);
  switch (type)
    {
    case TYPE_CONTROL:
      if (GET_CODE (pat) == SIMPLE_RETURN)
	return true;
      /* Peel off wrappers to get at the interesting part of the
	 pattern: first element of a PARALLEL, then the SET source.  */
      if (GET_CODE (pat) == PARALLEL)
	pat = XVECEXP (pat, 0, 0);
      if (GET_CODE (pat) == SET)
	pat = SET_SRC (pat);
      if (GET_CODE (pat) == IF_THEN_ELSE)
	{
	  /* Conditional branch patterns; for these we
	     only check the comparison to find beqz.n/bnez.n cases.
	     For the 'nios2_cbranch' pattern, we cannot also check
	     the branch range here.  That will be done at the md
	     pattern "length" attribute computation.  */
	  rtx cmp = XEXP (pat, 0);
	  return ((GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
		  && cdxreg (XEXP (cmp, 0))
		  && XEXP (cmp, 1) == const0_rtx);
	}
      if (GET_CODE (pat) == TRAP_IF)
	/* trap.n is always usable.  */
	return true;
      if (GET_CODE (pat) == CALL)
	pat = XEXP (XEXP (pat, 0), 0);
      if (REG_P (pat))
	/* Control instructions taking a register operand are indirect
	   jumps and calls.  The CDX instructions have a 5-bit register
	   field so any reg is valid.  */
	return true;
      else
	{
	  gcc_assert (!insn_variable_length_p (insn));
	  return false;
	}
    case TYPE_ADD:
      {
	enum nios2_add_insn_kind kind;
	split_alu_insn (insn, &lhs, &rhs1, &rhs2);
	kind = nios2_add_insn_classify (insn, lhs, rhs1, rhs2);
	return nios2_add_insn_narrow[(int)kind];
      }
    case TYPE_LD:
      {
	bool ret;
	/* REM tracks the low alignment bits of the offset, which must
	   be zero for the scaled narrow offset encodings.  */
	HOST_WIDE_INT offset, rem = 0;
	rtx addr, reg = SET_DEST (pat), mem = SET_SRC (pat);
	if (GET_CODE (mem) == SIGN_EXTEND)
	  /* No CDX form for sign-extended load.  */
	  return false;
	if (GET_CODE (mem) == ZERO_EXTEND)
	  /* The load alternatives in the zero_extend* patterns.  */
	  mem = XEXP (mem, 0);
	if (MEM_P (mem))
	  {
	    /* ldxio.  */
	    if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
		|| TARGET_BYPASS_CACHE)
	      return false;
	    addr = XEXP (mem, 0);
	    /* GP-based references are never narrow.  */
	    if (gprel_constant_p (addr))
	      return false;
	    ret = split_mem_address (addr, &rhs1, &rhs2);
	    gcc_assert (ret);
	  }
	else
	  return false;

	offset = INTVAL (rhs2);
	if (GET_MODE (mem) == SImode)
	  {
	    rem = offset & 3;
	    offset >>= 2;
	    /* ldwsp.n case.  */
	    if (rtx_equal_p (rhs1, stack_pointer_rtx)
		&& rem == 0 && (offset & ~0x1f) == 0)
	      return true;
	  }
	else if (GET_MODE (mem) == HImode)
	  {
	    rem = offset & 1;
	    offset >>= 1;
	  }
	/* ldbu.n, ldhu.n, ldw.n cases.  */
	return (cdxreg (reg) && cdxreg (rhs1)
		&& rem == 0 && (offset & ~0xf) == 0);
      }
    case TYPE_ST:
      if (GET_CODE (pat) == PARALLEL)
	/* stex, stsex.  */
	return false;
      else
	{
	  bool ret;
	  HOST_WIDE_INT offset, rem = 0;
	  rtx addr, reg = SET_SRC (pat), mem = SET_DEST (pat);
	  if (!MEM_P (mem))
	    return false;
	  /* stxio.  */
	  if ((MEM_VOLATILE_P (mem) && TARGET_BYPASS_CACHE_VOLATILE)
	      || TARGET_BYPASS_CACHE)
	    return false;
	  addr = XEXP (mem, 0);
	  /* GP-based references are never narrow.  */
	  if (gprel_constant_p (addr))
	    return false;
	  ret = split_mem_address (addr, &rhs1, &rhs2);
	  gcc_assert (ret);
	  offset = INTVAL (rhs2);
	  if (GET_MODE (mem) == SImode)
	    {
	      rem = offset & 3;
	      offset >>= 2;
	      /* stwsp.n case.  */
	      if (rtx_equal_p (rhs1, stack_pointer_rtx)
		  && rem == 0 && (offset & ~0x1f) == 0)
		return true;
	      /* stwz.n case.  */
	      else if (reg == const0_rtx && cdxreg (rhs1)
		       && rem == 0 && (offset & ~0x3f) == 0)
		return true;
	    }
	  else if (GET_MODE (mem) == HImode)
	    {
	      rem = offset & 1;
	      offset >>= 1;
	    }
	  else
	    {
	      gcc_assert (GET_MODE (mem) == QImode);
	      /* stbz.n case.  */
	      if (reg == const0_rtx && cdxreg (rhs1)
		  && (offset & ~0x3f) == 0)
		return true;
	    }

	  /* stbu.n, sthu.n, stw.n cases.  */
	  return (cdxreg (reg) && cdxreg (rhs1)
		  && rem == 0 && (offset & ~0xf) == 0);
	}
    case TYPE_MOV:
      lhs = SET_DEST (pat);
      rhs1 = SET_SRC (pat);
      if (CONST_INT_P (rhs1))
	return (cdxreg (lhs) && cdx_mov_immed (rhs1));
      gcc_assert (REG_P (lhs) && REG_P (rhs1));
      return true;

    case TYPE_AND:
      /* Some zero_extend* alternatives are and insns.  */
      if (GET_CODE (SET_SRC (pat)) == ZERO_EXTEND)
	return (cdxreg (SET_DEST (pat))
		&& cdxreg (XEXP (SET_SRC (pat), 0)));
      split_alu_insn (insn, &lhs, &rhs1, &rhs2);
      if (CONST_INT_P (rhs2))
	return (cdxreg (lhs) && cdxreg (rhs1) && cdx_and_immed (rhs2));
      return (cdxreg (lhs) && cdxreg (rhs2)
	      && (!reload_completed || rtx_equal_p (lhs, rhs1)));

    case TYPE_OR:
    case TYPE_XOR:
      /* Note the two-address limitation for CDX form.  */
      split_alu_insn (insn, &lhs, &rhs1, &rhs2);
      return (cdxreg (lhs) && cdxreg (rhs2)
	      && (!reload_completed || rtx_equal_p (lhs, rhs1)));

    case TYPE_SUB:
      split_alu_insn (insn, &lhs, &rhs1, &rhs2);
      return (cdxreg (lhs) && cdxreg (rhs1) && cdxreg (rhs2));

    case TYPE_NEG:
    case TYPE_NOT:
      split_alu_insn (insn, &lhs, &rhs1, NULL);
      return (cdxreg (lhs) && cdxreg (rhs1));

    case TYPE_SLL:
    case TYPE_SRL:
      /* Narrow shifts take either an immediate count, or a register
	 count in two-address form.  */
      split_alu_insn (insn, &lhs, &rhs1, &rhs2);
      return (cdxreg (lhs)
	      && ((cdxreg (rhs1) && cdx_shift_immed (rhs2))
		  || (cdxreg (rhs2)
		      && (!reload_completed || rtx_equal_p (lhs, rhs1)))));
    case TYPE_NOP:
    case TYPE_PUSH:
    case TYPE_POP:
      return true;
    default:
      break;
    }
  return false;
}
4460
/* Main function to implement the pop_operation predicate that
   check pop.n insn pattern integrity.  The CDX pop.n patterns mostly
   hardcode the restored registers, so the main checking is for the
   SP offsets.  OP is the PARALLEL body of a candidate pop.n insn:
   element 0 must be a RETURN, element 1 the SP adjustment, and the
   remaining elements register restores from consecutive, descending
   word-aligned SP offsets ending just below the SP adjustment.  */
bool
pop_operation_p (rtx op)
{
  int i;
  HOST_WIDE_INT last_offset = -1, len = XVECLEN (op, 0);
  rtx base_reg, offset;

  if (len < 3 /* At least has a return, SP-update, and RA restore.  */
      || GET_CODE (XVECEXP (op, 0, 0)) != RETURN
      || !base_reg_adjustment_p (XVECEXP (op, 0, 1), &base_reg, &offset)
      || !rtx_equal_p (base_reg, stack_pointer_rtx)
      || !CONST_INT_P (offset)
      || (INTVAL (offset) & 3) != 0)
    return false;

  /* Walk the restores backwards; the last element must be at the
     lowest offset, and each earlier element 4 bytes higher.  */
  for (i = len - 1; i > 1; i--)
    {
      rtx set = XVECEXP (op, 0, i);
      rtx curr_base_reg, curr_offset;

      if (GET_CODE (set) != SET || !MEM_P (SET_SRC (set))
	  || !split_mem_address (XEXP (SET_SRC (set), 0),
				 &curr_base_reg, &curr_offset)
	  || !rtx_equal_p (base_reg, curr_base_reg)
	  || !CONST_INT_P (curr_offset))
	return false;
      if (i == len - 1)
	{
	  /* The starting (lowest) offset must be word-aligned and
	     within the pop.n encodable range.  */
	  last_offset = INTVAL (curr_offset);
	  if ((last_offset & 3) != 0 || last_offset > 60)
	    return false;
	}
      else
	{
	  /* Subsequent restores must be exactly 4 bytes apart.  */
	  last_offset += 4;
	  if (INTVAL (curr_offset) != last_offset)
	    return false;
	}
    }
  /* The SP adjustment must consume exactly through the topmost
     restored slot.  */
  if (last_offset < 0 || last_offset + 4 != INTVAL (offset))
    return false;

  return true;
}
4509
4510
/* Masks of registers that are valid for CDX ldwm/stwm instructions.
   The instruction can encode subsets drawn from either R2-R13 or
   R14-R23 + FP + RA.  */
#define CDX_LDSTWM_VALID_REGS_0 0x00003ffc
#define CDX_LDSTWM_VALID_REGS_1 0x90ffc000

/* Check that REGNO is usable in the ldwm/stwm register set *REGSET,
   and update *REGSET.  On the first call (*REGSET == 0), select and
   record whichever of the two valid register sets contains REGNO; on
   subsequent calls, just test membership in the previously selected
   set.  Returns false if REGNO belongs to neither set, or does not
   belong to the set already chosen.  */
static bool
nios2_ldstwm_regset_p (unsigned int regno, unsigned int *regset)
{
  /* Use an unsigned constant for the shift: CDX_LDSTWM_VALID_REGS_1
     includes RA (regno 31), and 1 << 31 on a signed int is undefined
     behavior.  */
  unsigned int mask = 1u << regno;
  if (*regset == 0)
    {
      if (CDX_LDSTWM_VALID_REGS_0 & mask)
	*regset = CDX_LDSTWM_VALID_REGS_0;
      else if (CDX_LDSTWM_VALID_REGS_1 & mask)
	*regset = CDX_LDSTWM_VALID_REGS_1;
      else
	return false;
      return true;
    }
  else
    return (*regset & mask) != 0;
}
4533
/* Main function to implement ldwm_operation/stwm_operation
   predicates that check ldwm/stwm insn pattern integrity.  OP is the
   PARALLEL body; LOAD_P selects load (ldwm) versus store (stwm)
   semantics.  Verifies that every element is a well-formed SET using
   a single common base register, that all registers come from one
   valid CDX register set, and that register numbers are strictly
   monotonic in the direction implied by the writeback or the first
   memory offset.  */
bool
ldstwm_operation_p (rtx op, bool load_p)
{
  int start, i, end = XVECLEN (op, 0) - 1, last_regno = -1;
  unsigned int regset = 0;
  rtx base_reg, offset;
  rtx first_elt = XVECEXP (op, 0, 0);
  bool inc_p = true;
  /* WB_P is true if element 0 is a base-register writeback update.  */
  bool wb_p = base_reg_adjustment_p (first_elt, &base_reg, &offset);
  /* A trailing RETURN (ldwm used as pop) is skipped.  */
  if (GET_CODE (XVECEXP (op, 0, end)) == RETURN)
    end--;
  start = wb_p ? 1 : 0;
  for (i = start; i <= end; i++)
    {
      int regno;
      rtx reg, mem, elt = XVECEXP (op, 0, i);
      /* Return early if not a SET at all.  */
      if (GET_CODE (elt) != SET)
	return false;
      reg = load_p ? SET_DEST (elt) : SET_SRC (elt);
      mem = load_p ? SET_SRC (elt) : SET_DEST (elt);
      if (!REG_P (reg) || !MEM_P (mem))
	return false;
      regno = REGNO (reg);
      if (!nios2_ldstwm_regset_p (regno, &regset))
	return false;
      /* If no writeback to determine direction, use offset of first MEM.  */
      if (wb_p)
	inc_p = INTVAL (offset) > 0;
      else if (i == start)
	{
	  rtx first_base, first_offset;
	  if (!split_mem_address (XEXP (mem, 0),
				  &first_base, &first_offset))
	    return false;
	  if (!REG_P (first_base) || !CONST_INT_P (first_offset))
	    return false;
	  base_reg = first_base;
	  inc_p = INTVAL (first_offset) >= 0;
	}
      /* Ensure that the base register is not loaded into.  */
      if (load_p && regno == (int) REGNO (base_reg))
	return false;
      /* Check for register order inc/dec integrity.  */
      if (last_regno >= 0)
	{
	  if (inc_p && last_regno >= regno)
	    return false;
	  if (!inc_p && last_regno <= regno)
	    return false;
	}
      last_regno = regno;
    }
  return true;
}
4591
4592 /* Helper for nios2_ldst_parallel, for generating a parallel vector
4593 SET element. */
4594 static rtx
4595 gen_ldst (bool load_p, int regno, rtx base_mem, int offset)
4596 {
4597 rtx reg = gen_rtx_REG (SImode, regno);
4598 rtx mem = adjust_address_nv (base_mem, SImode, offset);
4599 return gen_rtx_SET (load_p ? reg : mem,
4600 load_p ? mem : reg);
4601 }
4602
/* A general routine for creating the body RTL pattern of
   ldwm/stwm/push.n/pop.n insns.
   LOAD_P: true/false for load/store direction.
   REG_INC_P: whether registers are incrementing/decrementing in the
   *RTL vector* (not necessarily the order defined in the ISA specification).
   OFFSET_INC_P: Same as REG_INC_P, but for the memory offset order.
   BASE_MEM: starting MEM.
   BASE_UPDATE: amount to update base register; zero means no writeback.
   REGMASK: register mask to load/store.
   RET_P: true if to tag a (return) element at the end.

   Note that this routine does not do any checking. It's the job of the
   caller to do the right thing, and the insn patterns to do the
   safe-guarding.  */
static rtx
nios2_ldst_parallel (bool load_p, bool reg_inc_p, bool offset_inc_p,
		     rtx base_mem, int base_update,
		     unsigned HOST_WIDE_INT regmask, bool ret_p)
{
  rtvec p;
  /* I indexes the next vector slot to fill; B is the slot reserved
     for the base-register update, which is slot 0 unless a (return)
     element occupies it.  */
  int regno, b = 0, i = 0, n = 0, len = popcount_hwi (regmask);
  if (ret_p) len++, i++, b++;
  if (base_update != 0) len++, i++;
  p = rtvec_alloc (len);
  /* Walk register numbers in the requested direction, emitting one
     SET per mask bit at consecutive 4-byte offsets from BASE_MEM.  */
  for (regno = (reg_inc_p ? 0 : 31);
       regno != (reg_inc_p ? 32 : -1);
       regno += (reg_inc_p ? 1 : -1))
    if ((regmask & (1 << regno)) != 0)
      {
	int offset = (offset_inc_p ? 4 : -4) * n++;
	RTVEC_ELT (p, i++) = gen_ldst (load_p, regno, base_mem, offset);
      }
  if (ret_p)
    RTVEC_ELT (p, 0) = ret_rtx;
  if (base_update != 0)
    {
      rtx reg, offset;
      if (!split_mem_address (XEXP (base_mem, 0), &reg, &offset))
	gcc_unreachable ();
      RTVEC_ELT (p, b) =
	gen_rtx_SET (reg, plus_constant (Pmode, reg, base_update));
    }
  return gen_rtx_PARALLEL (VOIDmode, p);
}
4647
4648 /* CDX ldwm/stwm peephole optimization pattern related routines. */
4649
4650 /* Data structure and sorting function for ldwm/stwm peephole optimizers. */
4651 struct ldstwm_operand
4652 {
4653 int offset; /* Offset from base register. */
4654 rtx reg; /* Register to store at this offset. */
4655 rtx mem; /* Original mem. */
4656 bool bad; /* True if this load/store can't be combined. */
4657 bool rewrite; /* True if we should rewrite using scratch. */
4658 };
4659
4660 static int
4661 compare_ldstwm_operands (const void *arg1, const void *arg2)
4662 {
4663 const struct ldstwm_operand *op1 = (const struct ldstwm_operand *) arg1;
4664 const struct ldstwm_operand *op2 = (const struct ldstwm_operand *) arg2;
4665 if (op1->bad)
4666 return op2->bad ? 0 : 1;
4667 else if (op2->bad)
4668 return -1;
4669 else
4670 return op1->offset - op2->offset;
4671 }
4672
4673 /* Helper function: return true if a load/store using REGNO with address
4674 BASEREG and offset OFFSET meets the constraints for a 2-byte CDX ldw.n,
4675 stw.n, ldwsp.n, or stwsp.n instruction. */
4676 static bool
4677 can_use_cdx_ldstw (int regno, int basereg, int offset)
4678 {
4679 if (CDX_REG_P (regno) && CDX_REG_P (basereg)
4680 && (offset & 0x3) == 0 && 0 <= offset && offset < 0x40)
4681 return true;
4682 else if (basereg == SP_REGNO
4683 && offset >= 0 && offset < 0x80 && (offset & 0x3) == 0)
4684 return true;
4685 return false;
4686 }
4687
/* This function is called from peephole2 optimizers to try to merge
   a series of individual loads and stores into a ldwm or stwm.  It
   can also rewrite addresses inside the individual loads and stores
   using a common base register using a scratch register and smaller
   offsets if that allows them to use CDX ldw.n or stw.n instructions
   instead of 4-byte loads or stores.
   N is the number of insns we are trying to merge.  SCRATCH is non-null
   if there is a scratch register available.  The OPERANDS array contains
   alternating REG (even) and MEM (odd) operands.
   Returns true (and emits replacement insns) only when the rewrite
   saves code size; returns false, emitting nothing, otherwise.  */
bool
gen_ldstwm_peep (bool load_p, int n, rtx scratch, rtx *operands)
{
  /* CDX ldwm/stwm instructions allow a maximum of 12 registers to be
     specified.  */
#define MAX_LDSTWM_OPS 12
  struct ldstwm_operand sort[MAX_LDSTWM_OPS];
  int basereg = -1;
  int baseoffset;
  int i, m, lastoffset, lastreg;
  unsigned int regmask = 0, usemask = 0, regset;
  bool needscratch;
  int newbasereg;
  int nbytes;

  if (!TARGET_HAS_CDX)
    return false;
  if (n < 2 || n > MAX_LDSTWM_OPS)
    return false;

  /* Check all the operands for validity and initialize the sort array.
     The places where we return false here are all situations that aren't
     expected to ever happen -- invalid patterns, invalid registers, etc.  */
  for (i = 0; i < n; i++)
    {
      rtx base, offset;
      rtx reg = operands[i];
      rtx mem = operands[i + n];
      int r, o, regno;
      bool bad = false;

      if (!REG_P (reg) || !MEM_P (mem))
	return false;

      regno = REGNO (reg);
      if (regno > 31)
	return false;
      /* A register loaded twice in the same sequence cannot be
	 merged.  */
      if (load_p && (regmask & (1 << regno)) != 0)
	return false;
      regmask |= 1 << regno;

      if (!split_mem_address (XEXP (mem, 0), &base, &offset))
	return false;
      r = REGNO (base);
      o = INTVAL (offset);

      /* The first mem establishes the common base register; any mem
	 using a different base is marked "bad" and sorted last.  */
      if (basereg == -1)
	basereg = r;
      else if (r != basereg)
	bad = true;
      usemask |= 1 << r;

      sort[i].bad = bad;
      sort[i].rewrite = false;
      sort[i].offset = o;
      sort[i].reg = reg;
      sort[i].mem = mem;
    }

  /* If we are doing a series of register loads, we can't safely reorder
     them if any of the regs used in addr expressions are also being set.  */
  if (load_p && (regmask & usemask))
    return false;

  /* Sort the array by increasing mem offset order, then check that
     offsets are valid and register order matches mem order.  At the
     end of this loop, m is the number of loads/stores we will try to
     combine; the rest are leftovers.  */
  qsort (sort, n, sizeof (struct ldstwm_operand), compare_ldstwm_operands);

  /* ldwm/stwm address their memory at offset 0, so a nonzero starting
     offset forces use of the scratch register as a rebased pointer.  */
  baseoffset = sort[0].offset;
  needscratch = baseoffset != 0;
  if (needscratch && !scratch)
    return false;

  lastreg = regmask = regset = 0;
  lastoffset = baseoffset;
  for (m = 0; m < n && !sort[m].bad; m++)
    {
      int thisreg = REGNO (sort[m].reg);
      /* Mergeable entries must have consecutive 4-byte offsets,
	 ascending register numbers, and all come from one valid CDX
	 register set.  */
      if (sort[m].offset != lastoffset
	  || (m > 0 && lastreg >= thisreg)
	  || !nios2_ldstwm_regset_p (thisreg, &regset))
	break;
      lastoffset += 4;
      lastreg = thisreg;
      regmask |= (1 << thisreg);
    }

  /* For loads, make sure we are not overwriting the scratch reg.
     The peephole2 pattern isn't supposed to match unless the register is
     unused all the way through, so this isn't supposed to happen anyway.  */
  if (load_p
      && needscratch
      && ((1 << REGNO (scratch)) & regmask) != 0)
    return false;
  newbasereg = needscratch ? (int) REGNO (scratch) : basereg;

  /* We may be able to combine only the first m of the n total loads/stores
     into a single instruction.  If m < 2, there's no point in emitting
     a ldwm/stwm at all, but we might be able to do further optimizations
     if we have a scratch.  We will count the instruction lengths of the
     old and new patterns and store the savings in nbytes.  */
  if (m < 2)
    {
      if (!needscratch)
	return false;
      m = 0;
      nbytes = 0;
    }
  else
    nbytes = -4;	/* Size of ldwm/stwm.  */
  if (needscratch)
    {
      int bo = baseoffset > 0 ? baseoffset : -baseoffset;
      /* The scratch setup costs an add: 2 bytes if it can be a CDX
	 addi.n/subi.n (power-of-two magnitude <= 128), else 4.  */
      if (CDX_REG_P (newbasereg)
	  && CDX_REG_P (basereg)
	  && bo <= 128 && bo > 0 && (bo & (bo - 1)) == 0)
	nbytes -= 2;	/* Size of addi.n/subi.n.  */
      else
	nbytes -= 4;	/* Size of non-CDX addi.  */
    }

  /* Count the size of the input load/store instructions being replaced.  */
  for (i = 0; i < m; i++)
    if (can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset))
      nbytes += 2;
    else
      nbytes += 4;

  /* We may also be able to save a bit if we can rewrite non-CDX
     load/stores that can't be combined into the ldwm/stwm into CDX
     load/stores using the scratch reg.  For example, this might happen
     if baseoffset is large, by bringing in the offsets in the load/store
     instructions within the range that fits in the CDX instruction.  */
  if (needscratch && CDX_REG_P (newbasereg))
    for (i = m; i < n && !sort[i].bad; i++)
      if (!can_use_cdx_ldstw (REGNO (sort[i].reg), basereg, sort[i].offset)
	  && can_use_cdx_ldstw (REGNO (sort[i].reg), newbasereg,
				sort[i].offset - baseoffset))
	{
	  sort[i].rewrite = true;
	  nbytes += 2;
	}

  /* Are we good to go?  */
  if (nbytes <= 0)
    return false;

  /* Emit the scratch load.  */
  if (needscratch)
    emit_insn (gen_rtx_SET (scratch, XEXP (sort[0].mem, 0)));

  /* Emit the ldwm/stwm insn.  */
  if (m > 0)
    {
      rtvec p = rtvec_alloc (m);
      for (i = 0; i < m; i++)
	{
	  int offset = sort[i].offset;
	  rtx mem, reg = sort[i].reg;
	  rtx base_reg = gen_rtx_REG (Pmode, newbasereg);
	  if (needscratch)
	    offset -= baseoffset;
	  mem = gen_rtx_MEM (SImode, plus_constant (Pmode, base_reg, offset));
	  if (load_p)
	    RTVEC_ELT (p, i) = gen_rtx_SET (reg, mem);
	  else
	    RTVEC_ELT (p, i) = gen_rtx_SET (mem, reg);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }

  /* Emit any leftover load/stores as individual instructions, doing
     the previously-noted rewrites to use the scratch reg.  */
  for (i = m; i < n; i++)
    {
      rtx reg = sort[i].reg;
      rtx mem = sort[i].mem;
      if (sort[i].rewrite)
	{
	  int offset = sort[i].offset - baseoffset;
	  mem = gen_rtx_MEM (SImode, plus_constant (Pmode, scratch, offset));
	}
      if (load_p)
	emit_move_insn (reg, mem);
      else
	emit_move_insn (mem, reg);
    }
  return true;
}
4888
/* Implement TARGET_MACHINE_DEPENDENT_REORG:
   We use this hook when emitting CDX code to enforce the 4-byte
   alignment requirement for labels that are used as the targets of
   jmpi instructions.  CDX code can otherwise contain a mix of 16-bit
   and 32-bit instructions aligned on any 16-bit boundary, but functions
   and jmpi labels have to be 32-bit aligned because of the way the address
   is encoded in the instruction.  */

/* Per-label alignment (log2) computed by nios2_reorg, indexed by
   label number biased by min_labelno; consumed by nios2_label_align.  */
static unsigned char *label_align;
static int min_labelno, max_labelno;

static void
nios2_reorg (void)
{
  bool changed = true;
  rtx_insn *insn;

  if (!TARGET_HAS_CDX)
    return;

  /* Initialize the data structures.  */
  if (label_align)
    free (label_align);
  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align = XCNEWVEC (unsigned char, max_labelno - min_labelno + 1);

  /* Iterate on inserting alignment and adjusting branch lengths until
     no more changes.  */
  while (changed)
    {
      changed = false;
      shorten_branches (get_insns ());

      for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	if (JUMP_P (insn) && insn_variable_length_p (insn))
	  {
	    rtx label = JUMP_LABEL (insn);
	    /* We use the current fact that all cases of 'jmpi'
	       doing the actual branch in the machine description
	       has a computed length of 6 or 8.  Length 4 and below
	       are all PC-relative 'br' branches without the jump-align
	       problem.  */
	    if (label && LABEL_P (label) && get_attr_length (insn) > 4)
	      {
		int index = CODE_LABEL_NUMBER (label) - min_labelno;
		/* Force 4-byte (2^2) alignment on the jmpi target;
		   any new alignment requires re-running
		   shorten_branches.  */
		if (label_align[index] != 2)
		  {
		    label_align[index] = 2;
		    changed = true;
		  }
	      }
	  }
    }
}
4944
4945 /* Implement LABEL_ALIGN, using the information gathered in nios2_reorg. */
4946 int
4947 nios2_label_align (rtx label)
4948 {
4949 int n = CODE_LABEL_NUMBER (label);
4950
4951 if (label_align && n >= min_labelno && n <= max_labelno)
4952 return MAX (label_align[n - min_labelno], align_labels_log);
4953 return align_labels_log;
4954 }
4955
/* Implement ADJUST_REG_ALLOC_ORDER.  We use the default ordering
   for R1 and non-CDX R2 code; for CDX we tweak thing to prefer
   the registers that can be used as operands to instructions that
   have 3-bit register fields.  Overwrites the global reg_alloc_order
   array when CDX is enabled; the table below must cover every
   register up to FIRST_PSEUDO_REGISTER.  */
void
nios2_adjust_reg_alloc_order (void)
{
  const int cdx_reg_alloc_order[] =
    {
      /* Call-clobbered GPRs within CDX 3-bit encoded range.  */
      2, 3, 4, 5, 6, 7,
      /* Call-saved GPRs within CDX 3-bit encoded range.  */
      16, 17,
      /* Other call-clobbered GPRs.  */
      8, 9, 10, 11, 12, 13, 14, 15,
      /* Other call-saved GPRs.  RA placed first since it is always saved.  */
      31, 18, 19, 20, 21, 22, 23, 28,
      /* Fixed GPRs, not used by the register allocator.  */
      0, 1, 24, 25, 26, 27, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39
  };

  if (TARGET_HAS_CDX)
    memcpy (reg_alloc_order, cdx_reg_alloc_order,
	    sizeof (int) * FIRST_PSEUDO_REGISTER);
}
4981
4982 \f
/* Initialize the GCC target structure.  Each pair below overrides a
   default target hook with the nios2-specific implementation defined
   earlier in this file.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p

#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags

/* Machine-specific builtin support.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS nios2_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN nios2_expand_builtin
#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL nios2_builtin_decl

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE nios2_can_eliminate

/* Calling-convention hooks.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG nios2_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT nios2_trampoline_init

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE nios2_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE nios2_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY nios2_return_in_memory

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

/* Addressing and constant legitimization hooks.  */
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS nios2_delegitimize_address

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS nios2_rtx_costs

/* TLS support is only available for the Linux ABI.  */
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS TARGET_LINUX_ABI

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem

#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel

/* Assembly output hooks.  */
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P nios2_print_operand_punct_valid_p

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND nios2_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA nios2_output_addr_const_extra

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END nios2_asm_file_end

/* Option-handling and target-attribute hooks.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE nios2_option_override

#undef TARGET_OPTION_SAVE
#define TARGET_OPTION_SAVE nios2_option_save

#undef TARGET_OPTION_RESTORE
#define TARGET_OPTION_RESTORE nios2_option_restore

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function

#undef TARGET_OPTION_VALID_ATTRIBUTE_P
#define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p

#undef TARGET_OPTION_PRAGMA_PARSE
#define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse

#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK nios2_asm_output_mi_thunk

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG nios2_reorg

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-nios2.h"