c6243b4c 1/* Xstormy16 target functions.
6fb5fa3c 2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
192997cf 3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4b58290f
GK
4 Contributed by Red Hat, Inc.
5
5ab9749e 6 This file is part of GCC.
4b58290f 7
5ab9749e
NC
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
4b58290f 12
5ab9749e
NC
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
4b58290f 17
5ab9749e
NC
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
4b58290f
GK
21
22#include "config.h"
23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
4b58290f
GK
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
4b58290f
GK
29#include "insn-config.h"
30#include "conditions.h"
31#include "insn-flags.h"
32#include "output.h"
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
718f9c0f 36#include "diagnostic-core.h"
4b58290f
GK
37#include "obstack.h"
38#include "tree.h"
39#include "expr.h"
40#include "optabs.h"
4b58290f
GK
41#include "except.h"
42#include "function.h"
43#include "target.h"
44#include "target-def.h"
45#include "tm_p.h"
f1e639b1 46#include "langhooks.h"
726a989a 47#include "gimple.h"
f84fe9b6 48#include "df.h"
6b1ce545 49#include "reload.h"
d6b5193b 50#include "ggc.h"
4b58290f 51
51c16b7e
SB
52static rtx emit_addhi3_postreload (rtx, rtx, rtx);
53static void xstormy16_asm_out_constructor (rtx, int);
54static void xstormy16_asm_out_destructor (rtx, int);
55static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
56 HOST_WIDE_INT, tree);
4b58290f 57
51c16b7e
SB
58static void xstormy16_init_builtins (void);
59static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
68f932c4 60static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
f40751dd 61static int xstormy16_address_cost (rtx, bool);
586de218 62static bool xstormy16_return_in_memory (const_tree, const_tree);
3d4b192a 63
d6b5193b
RS
64static GTY(()) section *bss100_section;
65
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
                     int opno ATTRIBUTE_UNUSED, int *total,
                     bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST_INT:
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1) / 2;
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (35 + 6);
      return true;
    case DIV:
      *total = COSTS_N_INSNS (51 - 6);
      return true;

    default:
      return false;
    }
}
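
/* Illustrative only, not part of the port: a standalone mock of the
   CONST_INT cost buckets above, using GCC's definition of
   COSTS_N_INSNS (N) as (N) * 4.  The function and driver names are
   hypothetical.

     #include <stdio.h>

     #define COSTS_N_INSNS(n) ((n) * 4)

     static int
     const_int_cost (long v)
     {
       if (v >= 0 && v < 16)
         return COSTS_N_INSNS (1) / 2;   // cheapest: 0..15
       else if (v >= 0 && v < 256)
         return COSTS_N_INSNS (1);       // 0..255
       else
         return COSTS_N_INSNS (2);       // everything else
     }

     int
     main (void)
     {
       long tests[] = { 7, 100, 4096, -1 };
       for (unsigned i = 0; i < sizeof tests / sizeof *tests; i++)
         printf ("cost(%ld) = %d\n", tests[i], const_int_cost (tests[i]));
       return 0;
     }
   */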
104
dcefdf67 105static int
f40751dd 106xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
dcefdf67 107{
a21eaf5e 108 return (CONST_INT_P (x) ? 2
dcefdf67
RH
109 : GET_CODE (x) == PLUS ? 7
110 : 5);
111}
3c50106f 112
6b1ce545
AS
113/* Worker function for TARGET_MEMORY_MOVE_COST. */
114
115static int
116xstormy16_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
117 bool in)
118{
119 return (5 + memory_move_secondary_cost (mode, rclass, in));
120}
121
/* Branches are handled as follows:

   1. HImode compare-and-branches.  The machine supports these
      natively, so the appropriate pattern is emitted directly.

   2. SImode EQ and NE.  These are emitted as pairs of HImode
      compare-and-branches.

   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
      of a SImode subtract followed by a branch (not a compare-and-branch),
      like this:
      sub
      sbc
      blt

   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
      sub
      sbc
      blt
      or
      bne.  */

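/* Illustrative only, not part of the port: a standalone C model of the
   sub/sbc sequence described in point 3 above, for the unsigned (LTU)
   case.  Two 16-bit subtracts propagate a borrow; the conditional
   branch that follows tests that borrow.

     #include <stdint.h>
     #include <stdio.h>

     static int
     si_ltu (uint32_t a, uint32_t b)        // is a < b, unsigned?
     {
       uint16_t a_lo = a & 0xffff, a_hi = a >> 16;
       uint16_t b_lo = b & 0xffff, b_hi = b >> 16;

       // "sub": low words; borrow out if b_lo is larger.
       unsigned borrow = b_lo > a_lo;

       // "sbc": high words minus the incoming borrow; the final borrow
       // is exactly the LTU result that the branch then tests.
       borrow = (uint32_t) b_hi + borrow > a_hi;

       return borrow;
     }

     int
     main (void)
     {
       printf ("%d %d %d\n",
               si_ltu (0x00010000, 0x00020000),   // 1
               si_ltu (0x0002ffff, 0x00020000),   // 0
               si_ltu (5, 5));                    // 0
       return 0;
     }
   */
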
144/* Emit a branch of kind CODE to location LOC. */
145
146void
f90b7a5a 147xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
4b58290f 148{
4b58290f
GK
149 rtx condition_rtx, loc_ref, branch, cy_clobber;
150 rtvec vec;
151 enum machine_mode mode;
5ab9749e 152
4b58290f 153 mode = GET_MODE (op0);
4718bfd8 154 gcc_assert (mode == HImode || mode == SImode);
4b58290f
GK
155
156 if (mode == SImode
157 && (code == GT || code == LE || code == GTU || code == LEU))
158 {
159 int unsigned_p = (code == GTU || code == LEU);
160 int gt_p = (code == GT || code == GTU);
cd4c46f3 161 rtx lab = NULL_RTX;
5ab9749e 162
4b58290f
GK
163 if (gt_p)
164 lab = gen_label_rtx ();
f90b7a5a 165 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
4b58290f
GK
166 /* This should be generated as a comparison against the temporary
167 created by the previous insn, but reload can't handle that. */
f90b7a5a 168 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
4b58290f
GK
169 if (gt_p)
170 emit_label (lab);
171 return;
172 }
5ab9749e 173 else if (mode == SImode
4b58290f
GK
174 && (code == NE || code == EQ)
175 && op1 != const0_rtx)
176 {
f90b7a5a 177 rtx op0_word, op1_word;
cd4c46f3 178 rtx lab = NULL_RTX;
4b58290f
GK
179 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
180 int i;
5ab9749e 181
4b58290f
GK
182 if (code == EQ)
183 lab = gen_label_rtx ();
5ab9749e 184
4b58290f
GK
185 for (i = 0; i < num_words - 1; i++)
186 {
f90b7a5a
PB
187 op0_word = simplify_gen_subreg (word_mode, op0, mode,
188 i * UNITS_PER_WORD);
189 op1_word = simplify_gen_subreg (word_mode, op1, mode,
190 i * UNITS_PER_WORD);
191 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
4b58290f 192 }
f90b7a5a
PB
193 op0_word = simplify_gen_subreg (word_mode, op0, mode,
194 i * UNITS_PER_WORD);
195 op1_word = simplify_gen_subreg (word_mode, op1, mode,
196 i * UNITS_PER_WORD);
197 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
4b58290f
GK
198
199 if (code == EQ)
200 emit_label (lab);
201 return;
202 }
203
204 /* We can't allow reload to try to generate any reload after a branch,
205 so when some register must match we must make the temporary ourselves. */
206 if (mode != HImode)
207 {
208 rtx tmp;
209 tmp = gen_reg_rtx (mode);
210 emit_move_insn (tmp, op0);
211 op0 = tmp;
212 }
213
1c563bed 214 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
4b58290f
GK
215 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
216 branch = gen_rtx_SET (VOIDmode, pc_rtx,
217 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
218 loc_ref, pc_rtx));
219
b72bbbcb 220 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
4b58290f
GK
221
222 if (mode == HImode)
223 vec = gen_rtvec (2, branch, cy_clobber);
224 else if (code == NE || code == EQ)
225 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
226 else
227 {
228 rtx sub;
229#if 0
230 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
231#else
232 sub = gen_rtx_CLOBBER (SImode, op0);
233#endif
234 vec = gen_rtvec (3, branch, sub, cy_clobber);
235 }
236
237 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
238}
239
240/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
241 the arithmetic operation. Most of the work is done by
c6243b4c 242 xstormy16_expand_arith. */
4b58290f
GK
243
244void
51c16b7e 245xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
d40ba0b6 246 rtx dest)
4b58290f
GK
247{
248 rtx op0 = XEXP (comparison, 0);
249 rtx op1 = XEXP (comparison, 1);
2f937369 250 rtx seq, last_insn;
4b58290f 251 rtx compare;
5ab9749e 252
4b58290f 253 start_sequence ();
d40ba0b6 254 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
2f937369 255 seq = get_insns ();
4b58290f 256 end_sequence ();
2f937369 257
4718bfd8 258 gcc_assert (INSN_P (seq));
2f937369
DM
259
260 last_insn = seq;
261 while (NEXT_INSN (last_insn) != NULL_RTX)
262 last_insn = NEXT_INSN (last_insn);
263
264 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
4b58290f
GK
265 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
266 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
267 emit_insn (seq);
268}
269
270
271/* Return the string to output a conditional branch to LABEL, which is
272 the operand number of the label.
273
274 OP is the conditional expression, or NULL for branch-always.
275
5e7a8ee0 276 REVERSED is nonzero if we should reverse the sense of the comparison.
4b58290f
GK
277
278 INSN is the insn. */
279
280char *
51c16b7e 281xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
4b58290f
GK
282{
283 static char string[64];
284 int need_longbranch = (op != NULL_RTX
285 ? get_attr_length (insn) == 8
286 : get_attr_length (insn) == 4);
287 int really_reversed = reversed ^ need_longbranch;
288 const char *ccode;
0a2aaacc 289 const char *templ;
4b58290f
GK
290 const char *operands;
291 enum rtx_code code;
5ab9749e 292
4b58290f
GK
293 if (! op)
294 {
295 if (need_longbranch)
296 ccode = "jmpf";
297 else
298 ccode = "br";
299 sprintf (string, "%s %s", ccode, label);
300 return string;
301 }
302
303 code = GET_CODE (op);
304
a21eaf5e 305 if (! REG_P (XEXP (op, 0)))
4b58290f
GK
306 {
307 code = swap_condition (code);
308 operands = "%3,%2";
309 }
310 else
311 operands = "%2,%3";
312
313 /* Work out which way this really branches. */
314 if (really_reversed)
315 code = reverse_condition (code);
316
317 switch (code)
318 {
319 case EQ: ccode = "z"; break;
320 case NE: ccode = "nz"; break;
321 case GE: ccode = "ge"; break;
322 case LT: ccode = "lt"; break;
323 case GT: ccode = "gt"; break;
324 case LE: ccode = "le"; break;
325 case GEU: ccode = "nc"; break;
326 case LTU: ccode = "c"; break;
327 case GTU: ccode = "hi"; break;
328 case LEU: ccode = "ls"; break;
5ab9749e 329
4b58290f 330 default:
4718bfd8 331 gcc_unreachable ();
4b58290f
GK
332 }
333
334 if (need_longbranch)
0a2aaacc 335 templ = "b%s %s,.+8 | jmpf %s";
4b58290f 336 else
0a2aaacc
KG
337 templ = "b%s %s,%s";
338 sprintf (string, templ, ccode, operands, label);
5ab9749e 339
4b58290f
GK
340 return string;
341}
342
343/* Return the string to output a conditional branch to LABEL, which is
344 the operand number of the label, but suitable for the tail of a
345 SImode branch.
346
347 OP is the conditional expression (OP is never NULL_RTX).
348
5e7a8ee0 349 REVERSED is nonzero if we should reverse the sense of the comparison.
4b58290f
GK
350
351 INSN is the insn. */
352
353char *
51c16b7e 354xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
4b58290f
GK
355{
356 static char string[64];
357 int need_longbranch = get_attr_length (insn) >= 8;
358 int really_reversed = reversed ^ need_longbranch;
359 const char *ccode;
0a2aaacc 360 const char *templ;
4b58290f
GK
361 char prevop[16];
362 enum rtx_code code;
5ab9749e 363
4b58290f
GK
364 code = GET_CODE (op);
365
366 /* Work out which way this really branches. */
367 if (really_reversed)
368 code = reverse_condition (code);
369
370 switch (code)
371 {
372 case EQ: ccode = "z"; break;
373 case NE: ccode = "nz"; break;
374 case GE: ccode = "ge"; break;
375 case LT: ccode = "lt"; break;
376 case GEU: ccode = "nc"; break;
377 case LTU: ccode = "c"; break;
378
379 /* The missing codes above should never be generated. */
380 default:
4718bfd8 381 gcc_unreachable ();
4b58290f
GK
382 }
383
384 switch (code)
385 {
386 case EQ: case NE:
387 {
388 int regnum;
5ab9749e 389
a21eaf5e 390 gcc_assert (REG_P (XEXP (op, 0)));
5ab9749e 391
4b58290f
GK
392 regnum = REGNO (XEXP (op, 0));
393 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
394 }
395 break;
396
397 case GE: case LT: case GEU: case LTU:
398 strcpy (prevop, "sbc %2,%3");
399 break;
400
401 default:
4718bfd8 402 gcc_unreachable ();
4b58290f
GK
403 }
404
405 if (need_longbranch)
0a2aaacc 406 templ = "%s | b%s .+6 | jmpf %s";
4b58290f 407 else
0a2aaacc
KG
408 templ = "%s | b%s %s";
409 sprintf (string, templ, prevop, ccode, label);
5ab9749e 410
4b58290f
GK
411 return string;
412}
413\f
414/* Many machines have some registers that cannot be copied directly to or from
415 memory or even from other types of registers. An example is the `MQ'
416 register, which on most machines, can only be copied to or from general
417 registers, but not memory. Some machines allow copying all registers to and
418 from memory, but require a scratch register for stores to some memory
419 locations (e.g., those with symbolic address on the RT, and those with
981f6289 420 certain symbolic address on the SPARC when compiling PIC). In some cases,
4b58290f
GK
421 both an intermediate and a scratch register are required.
422
423 You should define these macros to indicate to the reload phase that it may
424 need to allocate at least one register for a reload in addition to the
425 register to contain the data. Specifically, if copying X to a register
0a2aaacc 426 RCLASS in MODE requires an intermediate register, you should define
4b58290f
GK
427 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
428 whose registers can be used as intermediate registers or scratch registers.
429
0a2aaacc 430 If copying a register RCLASS in MODE to X requires an intermediate or scratch
4b58290f
GK
431 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
432 largest register class required. If the requirements for input and output
433 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
434 instead of defining both macros identically.
435
436 The values returned by these macros are often `GENERAL_REGS'. Return
437 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
0a2aaacc 438 to or from a register of RCLASS in MODE without requiring a scratch register.
4b58290f
GK
439 Do not define this macro if it would always return `NO_REGS'.
440
441 If a scratch register is required (either with or without an intermediate
442 register), you should define patterns for `reload_inM' or `reload_outM', as
443 required.  These patterns, which will normally be implemented with a
444 `define_expand', should be similar to the `movM' patterns, except that
445 operand 2 is the scratch register.
446
447 Define constraints for the reload register and scratch register that contain
448 a single register class. If the original reload register (whose class is
0a2aaacc 449 RCLASS) can meet the constraint given in the pattern, the value returned by
4b58290f
GK
450 these macros is used for the class of the scratch register. Otherwise, two
451 additional reload registers are required. Their classes are obtained from
452 the constraints in the insn pattern.
453
454 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
455 either be in a hard register or in memory. Use `true_regnum' to find out;
456 it will return -1 if the pseudo is in memory and the hard register number if
457 it is in a register.
458
459 These macros should not be used in the case where a particular class of
460 registers can only be copied to memory and not to another class of
461 registers. In that case, secondary reload registers are not needed and
462 would not be helpful. Instead, a stack location must be used to perform the
e03f5d43 463 copy and the `movM' pattern should use memory as an intermediate storage.
4b58290f
GK
464 This case often occurs between floating-point and general registers. */
465
466enum reg_class
0a2aaacc 467xstormy16_secondary_reload_class (enum reg_class rclass,
a21eaf5e 468 enum machine_mode mode ATTRIBUTE_UNUSED,
51c16b7e 469 rtx x)
4b58290f
GK
470{
471 /* This chip has the interesting property that only the first eight
472 registers can be moved to/from memory. */
a21eaf5e
NC
473 if ((MEM_P (x)
474 || ((GET_CODE (x) == SUBREG || REG_P (x))
4b58290f
GK
475 && (true_regnum (x) == -1
476 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
0a2aaacc 477 && ! reg_class_subset_p (rclass, EIGHT_REGS))
4b58290f
GK
478 return EIGHT_REGS;
479
4b58290f
GK
480 return NO_REGS;
481}
482
ef795fc2
AS
483/* Worker function for TARGET_PREFERRED_RELOAD_CLASS
484 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
485
486static reg_class_t
487xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
4b58290f 488{
a21eaf5e 489 if (rclass == GENERAL_REGS && MEM_P (x))
4b58290f
GK
490 return EIGHT_REGS;
491
0a2aaacc 492 return rclass;
4b58290f
GK
493}
494
54e9a19d
DD
495/* Predicate for symbols and addresses that reflect special 8-bit
496 addressing. */
5ab9749e 497
54e9a19d
DD
498int
499xstormy16_below100_symbol (rtx x,
500 enum machine_mode mode ATTRIBUTE_UNUSED)
501{
502 if (GET_CODE (x) == CONST)
503 x = XEXP (x, 0);
a21eaf5e 504 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
54e9a19d 505 x = XEXP (x, 0);
2f806f3b 506
54e9a19d 507 if (GET_CODE (x) == SYMBOL_REF)
2f806f3b
NC
508 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
509
a21eaf5e 510 if (CONST_INT_P (x))
54e9a19d
DD
511 {
512 HOST_WIDE_INT i = INTVAL (x);
a21eaf5e 513
54e9a19d
DD
514 if ((i >= 0x0000 && i <= 0x00ff)
515 || (i >= 0x7f00 && i <= 0x7fff))
516 return 1;
517 }
518 return 0;
519}
520
54e9a19d
DD
521/* Likewise, but only for non-volatile MEMs, for patterns where the
522 MEM will get split into smaller sized accesses. */
5ab9749e 523
54e9a19d
DD
524int
525xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
526{
a21eaf5e 527 if (MEM_P (x) && MEM_VOLATILE_P (x))
54e9a19d
DD
528 return 0;
529 return xstormy16_below100_operand (x, mode);
530}
531
54e9a19d
DD
532/* Expand an 8-bit IOR. This either detects the one case we can
533 actually do, or uses a 16-bit IOR. */
5ab9749e 534
54e9a19d
DD
535void
536xstormy16_expand_iorqi3 (rtx *operands)
537{
538 rtx in, out, outsub, val;
539
540 out = operands[0];
541 in = operands[1];
542 val = operands[2];
543
544 if (xstormy16_onebit_set_operand (val, QImode))
545 {
546 if (!xstormy16_below100_or_register (in, QImode))
547 in = copy_to_mode_reg (QImode, in);
548 if (!xstormy16_below100_or_register (out, QImode))
549 out = gen_reg_rtx (QImode);
550 emit_insn (gen_iorqi3_internal (out, in, val));
551 if (out != operands[0])
552 emit_move_insn (operands[0], out);
553 return;
554 }
555
a21eaf5e 556 if (! REG_P (in))
54e9a19d 557 in = copy_to_mode_reg (QImode, in);
a21eaf5e
NC
558
559 if (! REG_P (val) && ! CONST_INT_P (val))
54e9a19d 560 val = copy_to_mode_reg (QImode, val);
a21eaf5e
NC
561
562 if (! REG_P (out))
54e9a19d
DD
563 out = gen_reg_rtx (QImode);
564
565 in = simplify_gen_subreg (HImode, in, QImode, 0);
566 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
a21eaf5e
NC
567
568 if (! CONST_INT_P (val))
54e9a19d
DD
569 val = simplify_gen_subreg (HImode, val, QImode, 0);
570
571 emit_insn (gen_iorhi3 (outsub, in, val));
572
573 if (out != operands[0])
574 emit_move_insn (operands[0], out);
575}
576
5ab9749e
NC
577/* Expand an 8-bit AND. This either detects the one case we can
578 actually do, or uses a 16-bit AND. */
579
54e9a19d
DD
580void
581xstormy16_expand_andqi3 (rtx *operands)
582{
583 rtx in, out, outsub, val;
584
585 out = operands[0];
586 in = operands[1];
587 val = operands[2];
588
589 if (xstormy16_onebit_clr_operand (val, QImode))
590 {
591 if (!xstormy16_below100_or_register (in, QImode))
592 in = copy_to_mode_reg (QImode, in);
593 if (!xstormy16_below100_or_register (out, QImode))
594 out = gen_reg_rtx (QImode);
595 emit_insn (gen_andqi3_internal (out, in, val));
596 if (out != operands[0])
597 emit_move_insn (operands[0], out);
598 return;
599 }
600
a21eaf5e 601 if (! REG_P (in))
54e9a19d 602 in = copy_to_mode_reg (QImode, in);
a21eaf5e
NC
603
604 if (! REG_P (val) && ! CONST_INT_P (val))
54e9a19d 605 val = copy_to_mode_reg (QImode, val);
a21eaf5e
NC
606
607 if (! REG_P (out))
54e9a19d
DD
608 out = gen_reg_rtx (QImode);
609
610 in = simplify_gen_subreg (HImode, in, QImode, 0);
611 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
a21eaf5e
NC
612
613 if (! CONST_INT_P (val))
54e9a19d
DD
614 val = simplify_gen_subreg (HImode, val, QImode, 0);
615
616 emit_insn (gen_andhi3 (outsub, in, val));
617
618 if (out != operands[0])
619 emit_move_insn (operands[0], out);
620}
621
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)                         \
  (CONST_INT_P (X)                                                      \
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)                       \
  (CONST_INT_P (X)                                                      \
   && INTVAL (X) + (OFFSET) >= 0                                        \
   && INTVAL (X) + (OFFSET) < 0x8000                                    \
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))

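/* Illustrative only, not part of the port: standalone predicates
   mirroring the two macros above -- the signed 12-bit displacement
   accepted in a register-plus-offset address, and the two absolute
   windows (0x0000-0x00FF and 0x7F00-0x7FFF) accepted as constant
   addresses.

     #include <stdio.h>

     static int
     valid_index_offset (long v)       // LEGITIMATE_ADDRESS_INTEGER_P
     {
       return (unsigned long) (v + 2048) < 4096;
     }

     static int
     valid_const_address (long v)      // LEGITIMATE_ADDRESS_CONST_INT_P
     {
       return v >= 0 && v < 0x8000 && (v < 0x100 || v >= 0x7F00);
     }

     int
     main (void)
     {
       printf ("%d %d %d %d\n",
               valid_const_address (0x00ff),   // 1: below-0x100 window
               valid_const_address (0x1000),   // 0: ordinary address
               valid_const_address (0x7f42),   // 1: 0x7F00-0x7FFF window
               valid_index_offset (-2048));    // 1: fits the 12-bit range
       return 0;
     }
   */
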
d634083b 632bool
51c16b7e 633xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
c6c3dba9 634 rtx x, bool strict)
4b58290f
GK
635{
636 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
a21eaf5e 637 return true;
4b58290f
GK
638
639 if (GET_CODE (x) == PLUS
640 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
813ab1d7
NC
641 {
642 x = XEXP (x, 0);
643 /* PR 31232: Do not allow INT+INT as an address. */
a21eaf5e
NC
644 if (CONST_INT_P (x))
645 return false;
813ab1d7 646 }
5ab9749e 647
a21eaf5e 648 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
f3922fd2 649 || GET_CODE (x) == POST_INC
4b58290f
GK
650 || GET_CODE (x) == PRE_DEC)
651 x = XEXP (x, 0);
5ab9749e 652
a21eaf5e
NC
653 if (REG_P (x)
654 && REGNO_OK_FOR_BASE_P (REGNO (x))
4b58290f 655 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
a21eaf5e 656 return true;
54e9a19d 657
f84fe9b6 658 if (xstormy16_below100_symbol (x, mode))
a21eaf5e 659 return true;
5ab9749e 660
a21eaf5e 661 return false;
4b58290f
GK
662}
663
192997cf 664/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
5ab9749e 665
4b58290f
GK
666 On this chip, this is true if the address is valid with an offset
667 of 0 but not of 6, because in that case it cannot be used as an
668 address for DImode or DFmode, or if the address is a post-increment
669 or pre-decrement address. */
5ab9749e 670
192997cf
AS
671static bool
672xstormy16_mode_dependent_address_p (const_rtx x)
4b58290f
GK
673{
674 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
675 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
192997cf 676 return true;
5ab9749e 677
4b58290f
GK
678 if (GET_CODE (x) == PLUS
679 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
680 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
192997cf 681 return true;
4b58290f 682
b9a76028 683 /* Auto-increment addresses are now treated generically in recog.c. */
192997cf 684 return false;
4b58290f
GK
685}
686
4b58290f 687int
51c16b7e 688short_memory_operand (rtx x, enum machine_mode mode)
4b58290f
GK
689{
690 if (! memory_operand (x, mode))
691 return 0;
692 return (GET_CODE (XEXP (x, 0)) != PLUS);
693}
694
fae778eb 695/* Splitter for the 'move' patterns, for modes not directly implemented
4b58290f
GK
696 by hardware. Emit insns to copy a value of mode MODE from SRC to
697 DEST.
698
5ab9749e 699 This function is only called when reload_completed. */
4b58290f 700
5ab9749e 701void
51c16b7e 702xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
4b58290f
GK
703{
704 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
705 int direction, end, i;
706 int src_modifies = 0;
707 int dest_modifies = 0;
708 int src_volatile = 0;
709 int dest_volatile = 0;
710 rtx mem_operand;
7c87e9f9 711 rtx auto_inc_reg_rtx = NULL_RTX;
5ab9749e 712
4b58290f 713 /* Check initial conditions. */
4718bfd8
NS
714 gcc_assert (reload_completed
715 && mode != QImode && mode != HImode
716 && nonimmediate_operand (dest, mode)
717 && general_operand (src, mode));
4b58290f
GK
718
719 /* This case is not supported below, and shouldn't be generated. */
a21eaf5e 720 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
4b58290f
GK
721
722 /* This case is very very bad after reload, so trap it now. */
4718bfd8 723 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
4b58290f
GK
724
725 /* The general idea is to copy by words, offsetting the source and
726 destination. Normally the least-significant word will be copied
5ab9749e 727 first, but for pre-dec operations it's better to copy the
4b58290f 728 most-significant word first. Only one operand can be a pre-dec
5ab9749e 729 or post-inc operand.
4b58290f
GK
730
731 It's also possible that the copy overlaps so that the direction
732 must be reversed. */
733 direction = 1;
5ab9749e 734
a21eaf5e 735 if (MEM_P (dest))
4b58290f
GK
736 {
737 mem_operand = XEXP (dest, 0);
738 dest_modifies = side_effects_p (mem_operand);
7c87e9f9
CM
739 if (auto_inc_p (mem_operand))
740 auto_inc_reg_rtx = XEXP (mem_operand, 0);
4b58290f
GK
741 dest_volatile = MEM_VOLATILE_P (dest);
742 if (dest_volatile)
743 {
744 dest = copy_rtx (dest);
745 MEM_VOLATILE_P (dest) = 0;
746 }
747 }
a21eaf5e 748 else if (MEM_P (src))
4b58290f
GK
749 {
750 mem_operand = XEXP (src, 0);
751 src_modifies = side_effects_p (mem_operand);
7c87e9f9
CM
752 if (auto_inc_p (mem_operand))
753 auto_inc_reg_rtx = XEXP (mem_operand, 0);
4b58290f
GK
754 src_volatile = MEM_VOLATILE_P (src);
755 if (src_volatile)
756 {
757 src = copy_rtx (src);
758 MEM_VOLATILE_P (src) = 0;
759 }
760 }
761 else
762 mem_operand = NULL_RTX;
763
764 if (mem_operand == NULL_RTX)
765 {
a21eaf5e
NC
766 if (REG_P (src)
767 && REG_P (dest)
4b58290f
GK
768 && reg_overlap_mentioned_p (dest, src)
769 && REGNO (dest) > REGNO (src))
770 direction = -1;
771 }
772 else if (GET_CODE (mem_operand) == PRE_DEC
5ab9749e 773 || (GET_CODE (mem_operand) == PLUS
4b58290f
GK
774 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
775 direction = -1;
a21eaf5e 776 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
4b58290f
GK
777 {
778 int regno;
5ab9749e 779
a21eaf5e 780 gcc_assert (REG_P (dest));
4b58290f 781 regno = REGNO (dest);
5ab9749e 782
4718bfd8
NS
783 gcc_assert (refers_to_regno_p (regno, regno + num_words,
784 mem_operand, 0));
5ab9749e 785
4b58290f
GK
786 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
787 direction = -1;
788 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
789 mem_operand, 0))
790 direction = 1;
791 else
792 /* This means something like
793 (set (reg:DI r0) (mem:DI (reg:HI r1)))
794 which we'd need to support by doing the set of the second word
795 last. */
4718bfd8 796 gcc_unreachable ();
4b58290f
GK
797 }
798
799 end = direction < 0 ? -1 : num_words;
800 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
801 {
7c87e9f9
CM
802 rtx w_src, w_dest, insn;
803
4b58290f
GK
804 if (src_modifies)
805 w_src = gen_rtx_MEM (word_mode, mem_operand);
806 else
807 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
808 if (src_volatile)
809 MEM_VOLATILE_P (w_src) = 1;
810 if (dest_modifies)
811 w_dest = gen_rtx_MEM (word_mode, mem_operand);
812 else
5ab9749e 813 w_dest = simplify_gen_subreg (word_mode, dest, mode,
4b58290f
GK
814 i * UNITS_PER_WORD);
815 if (dest_volatile)
816 MEM_VOLATILE_P (w_dest) = 1;
5ab9749e 817
4b58290f 818 /* The simplify_subreg calls must always be able to simplify. */
4718bfd8
NS
819 gcc_assert (GET_CODE (w_src) != SUBREG
820 && GET_CODE (w_dest) != SUBREG);
5ab9749e 821
7c87e9f9
CM
822 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
823 if (auto_inc_reg_rtx)
824 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
825 auto_inc_reg_rtx,
826 REG_NOTES (insn));
4b58290f
GK
827 }
828}
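
/* Illustrative only, not part of the port: the copy-order decision made
   above, shown on plain arrays.  The real code chooses the direction
   from register numbers and pre-decrement addresses; here overlapping
   pointers stand in for that, which is an intentional simplification.

     #include <stdio.h>

     static void
     copy_words (unsigned short *dest, const unsigned short *src, int num_words)
     {
       // Copy high-to-low when the destination starts above an
       // overlapping source, so no word is clobbered before it is read.
       int direction = (dest > src && dest < src + num_words) ? -1 : 1;
       int i   = direction < 0 ? num_words - 1 : 0;
       int end = direction < 0 ? -1 : num_words;

       for (; i != end; i += direction)
         dest[i] = src[i];
     }

     int
     main (void)
     {
       unsigned short regs[6] = { 1, 2, 3, 4, 0, 0 };
       copy_words (regs + 2, regs, 4);          // overlapping copy
       for (int i = 0; i < 6; i++)
         printf ("%u ", regs[i]);               // prints: 1 2 1 2 3 4
       putchar ('\n');
       return 0;
     }
   */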
829
830/* Expander for the 'move' patterns. Emit insns to copy a value of
831 mode MODE from SRC to DEST. */
832
5ab9749e 833void
51c16b7e 834xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
4b58290f 835{
a21eaf5e 836 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
f3922fd2
DD
837 {
838 rtx pmv = XEXP (dest, 0);
839 rtx dest_reg = XEXP (pmv, 0);
840 rtx dest_mod = XEXP (pmv, 1);
841 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
b72bbbcb 842 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
5ab9749e 843
f3922fd2
DD
844 dest = gen_rtx_MEM (mode, dest_reg);
845 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
846 }
a21eaf5e 847 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
f3922fd2
DD
848 {
849 rtx pmv = XEXP (src, 0);
850 rtx src_reg = XEXP (pmv, 0);
851 rtx src_mod = XEXP (pmv, 1);
852 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
b72bbbcb 853 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
5ab9749e 854
f3922fd2
DD
855 src = gen_rtx_MEM (mode, src_reg);
856 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
857 }
5ab9749e 858
4b58290f
GK
859 /* There are only limited immediate-to-memory move instructions. */
860 if (! reload_in_progress
861 && ! reload_completed
a21eaf5e
NC
862 && MEM_P (dest)
863 && (! CONST_INT_P (XEXP (dest, 0))
c6243b4c 864 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
54e9a19d 865 && ! xstormy16_below100_operand (dest, mode)
a21eaf5e 866 && ! REG_P (src)
4b58290f
GK
867 && GET_CODE (src) != SUBREG)
868 src = copy_to_mode_reg (mode, src);
869
870 /* Don't emit something we would immediately split. */
871 if (reload_completed
872 && mode != HImode && mode != QImode)
873 {
c6243b4c 874 xstormy16_split_move (mode, dest, src);
4b58290f
GK
875 return;
876 }
5ab9749e 877
4b58290f
GK
878 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
879}
4b58290f
GK
880\f
881/* Stack Layout:
882
883 The stack is laid out as follows:
884
885SP->
886FP-> Local variables
887 Register save area (up to 4 words)
888 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
889
890AP-> Return address (two words)
891 9th procedure parameter word
892 10th procedure parameter word
893 ...
894 last procedure parameter word
895
896 The frame pointer location is tuned to make it most likely that all
897 parameters and local variables can be accessed using a load-indexed
898 instruction. */
899
900/* A structure to describe the layout. */
c6243b4c 901struct xstormy16_stack_layout
4b58290f
GK
902{
903 /* Size of the topmost three items on the stack. */
904 int locals_size;
905 int register_save_size;
906 int stdarg_save_size;
907 /* Sum of the above items. */
908 int frame_size;
909 /* Various offsets. */
910 int first_local_minus_ap;
911 int sp_minus_fp;
912 int fp_minus_ap;
913};
914
915/* Does REGNO need to be saved? */
916#define REG_NEEDS_SAVE(REGNUM, IFUN) \
6fb5fa3c 917 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
4b58290f 918 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
d40ba0b6 919 && (REGNUM != CARRY_REGNUM) \
6fb5fa3c 920 && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
4b58290f
GK
921
922/* Compute the stack layout. */
5ab9749e
NC
923
924struct xstormy16_stack_layout
51c16b7e 925xstormy16_compute_stack_layout (void)
4b58290f 926{
c6243b4c 927 struct xstormy16_stack_layout layout;
4b58290f 928 int regno;
c6243b4c 929 const int ifun = xstormy16_interrupt_function_p ();
4b58290f
GK
930
931 layout.locals_size = get_frame_size ();
5ab9749e 932
4b58290f
GK
933 layout.register_save_size = 0;
934 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
935 if (REG_NEEDS_SAVE (regno, ifun))
936 layout.register_save_size += UNITS_PER_WORD;
5ab9749e 937
e3b5732b 938 if (cfun->stdarg)
4b58290f
GK
939 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
940 else
941 layout.stdarg_save_size = 0;
5ab9749e
NC
942
943 layout.frame_size = (layout.locals_size
944 + layout.register_save_size
4b58290f 945 + layout.stdarg_save_size);
5ab9749e 946
38173d38 947 if (crtl->args.size <= 2048 && crtl->args.size != -1)
4b58290f 948 {
5ab9749e 949 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
38173d38 950 + crtl->args.size <= 2048)
b72bbbcb 951 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
4b58290f 952 else
38173d38 953 layout.fp_minus_ap = 2048 - crtl->args.size;
4b58290f
GK
954 }
955 else
5ab9749e 956 layout.fp_minus_ap = (layout.stdarg_save_size
4b58290f 957 + layout.register_save_size
b72bbbcb 958 - INCOMING_FRAME_SP_OFFSET);
5ab9749e 959 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
4b58290f
GK
960 - layout.fp_minus_ap);
961 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
962 return layout;
963}
964
7b5cbb57
AS
965/* Worker function for TARGET_CAN_ELIMINATE. */
966
967static bool
968xstormy16_can_eliminate (const int from, const int to)
969{
970 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
971 ? ! frame_pointer_needed
972 : true);
973}
974
4b58290f 975/* Determine how all the special registers get eliminated. */
5ab9749e 976
4b58290f 977int
51c16b7e 978xstormy16_initial_elimination_offset (int from, int to)
4b58290f 979{
c6243b4c 980 struct xstormy16_stack_layout layout;
4b58290f 981 int result;
5ab9749e 982
c6243b4c 983 layout = xstormy16_compute_stack_layout ();
4b58290f
GK
984
985 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
986 result = layout.sp_minus_fp - layout.locals_size;
987 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
b72bbbcb 988 result = - layout.locals_size;
4b58290f 989 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
b72bbbcb 990 result = - layout.fp_minus_ap;
4b58290f 991 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
b72bbbcb 992 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
4b58290f 993 else
4718bfd8 994 gcc_unreachable ();
4b58290f
GK
995
996 return result;
997}
998
999static rtx
51c16b7e 1000emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
4b58290f
GK
1001{
1002 rtx set, clobber, insn;
5ab9749e 1003
4b58290f 1004 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
b72bbbcb 1005 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
4b58290f
GK
1006 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1007 return insn;
1008}
1009
41441dc7
NB
1010/* Called after register allocation to add any instructions needed for
1011 the prologue. Using a prologue insn is favored compared to putting
1012 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1013 since it allows the scheduler to intermix instructions with the
1014 saves of the caller saved registers. In some cases, it might be
1015 necessary to emit a barrier instruction as the last insn to prevent
1016 such scheduling.
4b58290f
GK
1017
1018 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1019 so that the debug info generation code can handle them properly. */
5ab9749e 1020
4b58290f 1021void
51c16b7e 1022xstormy16_expand_prologue (void)
4b58290f 1023{
c6243b4c 1024 struct xstormy16_stack_layout layout;
4b58290f
GK
1025 int regno;
1026 rtx insn;
1027 rtx mem_push_rtx;
c6243b4c 1028 const int ifun = xstormy16_interrupt_function_p ();
5ab9749e 1029
4b58290f
GK
1030 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1031 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
5ab9749e 1032
c6243b4c 1033 layout = xstormy16_compute_stack_layout ();
4b58290f 1034
f3922fd2 1035 if (layout.locals_size >= 32768)
ab532386 1036 error ("local variable memory requirements exceed capacity");
f3922fd2 1037
4b58290f
GK
1038 /* Save the argument registers if necessary. */
1039 if (layout.stdarg_save_size)
5ab9749e 1040 for (regno = FIRST_ARGUMENT_REGISTER;
4b58290f
GK
1041 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1042 regno++)
1043 {
f3922fd2 1044 rtx dwarf;
4b58290f 1045 rtx reg = gen_rtx_REG (HImode, regno);
f3922fd2 1046
4b58290f
GK
1047 insn = emit_move_insn (mem_push_rtx, reg);
1048 RTX_FRAME_RELATED_P (insn) = 1;
f3922fd2
DD
1049
1050 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
5ab9749e 1051
f3922fd2
DD
1052 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1053 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1054 reg);
1055 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1056 plus_constant (stack_pointer_rtx,
1057 GET_MODE_SIZE (Pmode)));
f1cb6795 1058 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
f3922fd2
DD
1059 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1060 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
4b58290f 1061 }
5ab9749e 1062
4b58290f
GK
1063 /* Push each of the registers to save. */
1064 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1065 if (REG_NEEDS_SAVE (regno, ifun))
1066 {
f3922fd2 1067 rtx dwarf;
4b58290f 1068 rtx reg = gen_rtx_REG (HImode, regno);
f3922fd2 1069
4b58290f
GK
1070 insn = emit_move_insn (mem_push_rtx, reg);
1071 RTX_FRAME_RELATED_P (insn) = 1;
f3922fd2
DD
1072
1073 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
5ab9749e 1074
f3922fd2
DD
1075 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1076 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1077 reg);
1078 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1079 plus_constant (stack_pointer_rtx,
1080 GET_MODE_SIZE (Pmode)));
f1cb6795 1081 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
f3922fd2
DD
1082 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1083 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
4b58290f
GK
1084 }
1085
1086 /* It's just possible that the SP here might be what we need for
b1c9bc51 1087 the new FP... */
4b58290f 1088 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
b72bbbcb
DD
1089 {
1090 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1091 RTX_FRAME_RELATED_P (insn) = 1;
1092 }
4b58290f
GK
1093
1094 /* Allocate space for local variables. */
1095 if (layout.locals_size)
1096 {
1097 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1098 GEN_INT (layout.locals_size));
1099 RTX_FRAME_RELATED_P (insn) = 1;
1100 }
1101
1102 /* Set up the frame pointer, if required. */
1103 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1104 {
1105 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
b72bbbcb 1106 RTX_FRAME_RELATED_P (insn) = 1;
6208b55d 1107
4b58290f 1108 if (layout.sp_minus_fp)
b72bbbcb
DD
1109 {
1110 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1111 hard_frame_pointer_rtx,
1112 GEN_INT (- layout.sp_minus_fp));
1113 RTX_FRAME_RELATED_P (insn) = 1;
1114 }
4b58290f
GK
1115 }
1116}
1117
1118/* Do we need an epilogue at all? */
5ab9749e 1119
4b58290f 1120int
51c16b7e 1121direct_return (void)
4b58290f 1122{
5ab9749e 1123 return (reload_completed
2fd7ba4a
NC
1124 && xstormy16_compute_stack_layout ().frame_size == 0
1125 && ! xstormy16_interrupt_function_p ());
4b58290f
GK
1126}
1127
41441dc7 1128/* Called after register allocation to add any instructions needed for
e03f5d43 1129 the epilogue. Using an epilogue insn is favored compared to putting
41441dc7
NB
1130 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1131 since it allows the scheduler to intermix instructions with the
1132 saves of the caller saved registers. In some cases, it might be
1133 necessary to emit a barrier instruction as the last insn to prevent
1134 such scheduling. */
4b58290f
GK
1135
1136void
51c16b7e 1137xstormy16_expand_epilogue (void)
4b58290f 1138{
c6243b4c 1139 struct xstormy16_stack_layout layout;
a21eaf5e 1140 rtx mem_pop_rtx;
4b58290f 1141 int regno;
c6243b4c 1142 const int ifun = xstormy16_interrupt_function_p ();
5ab9749e 1143
4b58290f
GK
1144 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1145 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
5ab9749e 1146
c6243b4c 1147 layout = xstormy16_compute_stack_layout ();
4b58290f
GK
1148
1149 /* Pop the stack for the locals. */
1150 if (layout.locals_size)
e2470e1b
GK
1151 {
1152 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1153 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1154 else
b72bbbcb
DD
1155 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1156 GEN_INT (- layout.locals_size));
e2470e1b 1157 }
4b58290f
GK
1158
1159 /* Restore any call-saved registers. */
1160 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1161 if (REG_NEEDS_SAVE (regno, ifun))
b72bbbcb 1162 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
5ab9749e 1163
4b58290f
GK
1164 /* Pop the stack for the stdarg save area. */
1165 if (layout.stdarg_save_size)
b72bbbcb
DD
1166 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1167 GEN_INT (- layout.stdarg_save_size));
4b58290f
GK
1168
1169 /* Return. */
1170 if (ifun)
1171 emit_jump_insn (gen_return_internal_interrupt ());
1172 else
1173 emit_jump_insn (gen_return_internal ());
1174}
1175
1176int
51c16b7e 1177xstormy16_epilogue_uses (int regno)
4b58290f
GK
1178{
1179 if (reload_completed && call_used_regs[regno])
1180 {
c6243b4c 1181 const int ifun = xstormy16_interrupt_function_p ();
4b58290f
GK
1182 return REG_NEEDS_SAVE (regno, ifun);
1183 }
1184 return 0;
1185}
14b56832
DD
1186
1187void
51c16b7e 1188xstormy16_function_profiler (void)
14b56832
DD
1189{
1190 sorry ("function_profiler support");
1191}
4b58290f 1192\f
bf425ddd
NF
1193/* Update CUM to advance past an argument in the argument list. The
1194 values MODE, TYPE and NAMED describe that argument. Once this is
1195 done, the variable CUM is suitable for analyzing the *following*
1196 argument with `TARGET_FUNCTION_ARG', etc.
4b58290f
GK
1197
1198 This function need not do anything if the argument in question was
1199 passed on the stack. The compiler knows how to track the amount of
1200 stack space used for arguments without any special help. However,
c6243b4c 1201 it makes life easier for xstormy16_build_va_list if it does update
4b58290f 1202 the word count. */
5ab9749e 1203
bf425ddd 1204static void
d5cc9181 1205xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
bf425ddd 1206 const_tree type, bool named ATTRIBUTE_UNUSED)
4b58290f 1207{
d5cc9181
JR
1208 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1209
4b58290f
GK
1210 /* If an argument would otherwise be passed partially in registers,
1211 and partially on the stack, the whole of it is passed on the
1212 stack. */
bf425ddd
NF
1213 if (*cum < NUM_ARGUMENT_REGISTERS
1214 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1215 *cum = NUM_ARGUMENT_REGISTERS;
5ab9749e 1216
bf425ddd 1217 *cum += XSTORMY16_WORD_SIZE (type, mode);
4b58290f
GK
1218}
1219
bf425ddd 1220static rtx
d5cc9181 1221xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
bf425ddd 1222 const_tree type, bool named ATTRIBUTE_UNUSED)
06d22853 1223{
d5cc9181
JR
1224 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1225
06d22853
DD
1226 if (mode == VOIDmode)
1227 return const0_rtx;
fe984136 1228 if (targetm.calls.must_pass_in_stack (mode, type)
f1cb6795 1229 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
5ab9749e 1230 return NULL_RTX;
f1cb6795 1231 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
06d22853
DD
1232}
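
/* Illustrative only, not part of the port: the "entirely in registers
   or entirely on the stack" rule implemented by the two functions
   above, on plain integers.  NUM_ARGUMENT_REGISTERS is defined in the
   port headers, not in this file; the value 6 below is a placeholder.

     #include <stdio.h>

     #define NUM_ARG_WORDS 6                     // placeholder

     // Returns the first register-word index used for an argument of
     // WORDS words, or -1 if it goes on the stack; *CUM is advanced the
     // way xstormy16_function_arg_advance advances it.
     static int
     assign_arg (int *cum, int words)
     {
       int first = *cum;

       if (first + words > NUM_ARG_WORDS)        // would straddle or overflow
         {
           if (first < NUM_ARG_WORDS)
             *cum = NUM_ARG_WORDS;               // burn the remaining registers
           *cum += words;
           return -1;                            // passed on the stack
         }

       *cum += words;
       return first;
     }

     int
     main (void)
     {
       int cum = 0;
       printf ("%d ", assign_arg (&cum, 2));     // 0: register words 0-1
       printf ("%d ", assign_arg (&cum, 1));     // 2
       printf ("%d ", assign_arg (&cum, 4));     // -1: would straddle
       printf ("%d\n", assign_arg (&cum, 1));    // -1: registers used up
       return 0;
     }
   */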
1233
4b58290f
GK
1234/* Build the va_list type.
1235
1236 For this chip, va_list is a record containing a counter and a pointer.
1237 The counter is of type 'int' and indicates how many bytes
1238 have been used to date. The pointer indicates the stack position
5ab9749e 1239 for arguments that have not been passed in registers.
4b58290f
GK
1240 To keep the layout nice, the pointer is first in the structure. */
1241
37cd4bca
NC
1242static tree
1243xstormy16_build_builtin_va_list (void)
4b58290f
GK
1244{
1245 tree f_1, f_2, record, type_decl;
1246
f1e639b1 1247 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4c4bde29
AH
1248 type_decl = build_decl (BUILTINS_LOCATION,
1249 TYPE_DECL, get_identifier ("__va_list_tag"), record);
4b58290f 1250
4c4bde29
AH
1251 f_1 = build_decl (BUILTINS_LOCATION,
1252 FIELD_DECL, get_identifier ("base"),
4b58290f 1253 ptr_type_node);
4c4bde29
AH
1254 f_2 = build_decl (BUILTINS_LOCATION,
1255 FIELD_DECL, get_identifier ("count"),
4b58290f
GK
1256 unsigned_type_node);
1257
1258 DECL_FIELD_CONTEXT (f_1) = record;
1259 DECL_FIELD_CONTEXT (f_2) = record;
1260
0fd2eac2 1261 TYPE_STUB_DECL (record) = type_decl;
4b58290f
GK
1262 TYPE_NAME (record) = type_decl;
1263 TYPE_FIELDS (record) = f_1;
910ad8de 1264 DECL_CHAIN (f_1) = f_2;
4b58290f
GK
1265
1266 layout_type (record);
1267
1268 return record;
1269}
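
/* Illustrative only: the C equivalent of the record built above.  The
   compiler tags the record __va_list_tag; the struct name below is
   just for the sketch.  The pointer comes first, and the counter is
   built with unsigned_type_node.

     struct xstormy16_va_list_sketch
     {
       void *base;       // stack position of the unnamed arguments
       unsigned count;   // bytes of argument space used so far
     };
   */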
1270
5e7a8ee0 1271/* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
4b58290f
GK
1272 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1273 variable to initialize. NEXTARG is the machine independent notion of the
1274 'next' argument after the variable arguments. */
5ab9749e 1275
d7bd8aeb 1276static void
51c16b7e 1277xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
4b58290f
GK
1278{
1279 tree f_base, f_count;
1280 tree base, count;
f84fe9b6 1281 tree t,u;
4b58290f 1282
c6243b4c 1283 if (xstormy16_interrupt_function_p ())
4b58290f 1284 error ("cannot use va_start in interrupt function");
5ab9749e 1285
4b58290f 1286 f_base = TYPE_FIELDS (va_list_type_node);
910ad8de 1287 f_count = DECL_CHAIN (f_base);
5ab9749e 1288
47a25a46
RG
1289 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1290 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1291 NULL_TREE);
4b58290f
GK
1292
1293 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
b72bbbcb 1294 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
f84fe9b6 1295 u = fold_convert (TREE_TYPE (count), u);
5d49b6a7 1296 t = fold_build_pointer_plus (t, u);
726a989a 1297 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
4b58290f
GK
1298 TREE_SIDE_EFFECTS (t) = 1;
1299 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1300
5ab9749e 1301 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
47a25a46 1302 build_int_cst (NULL_TREE,
38173d38 1303 crtl->args.info * UNITS_PER_WORD));
4b58290f
GK
1304 TREE_SIDE_EFFECTS (t) = 1;
1305 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1306}
1307
1308/* Implement the stdarg/varargs va_arg macro. VALIST is the variable
cf4c092e
CM
1309 of type va_list as a tree, TYPE is the type passed to va_arg.
1310 Note: This algorithm is documented in stormy-abi. */
5ab9749e 1311
5d47df87 1312static tree
726a989a
RB
1313xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1314 gimple_seq *post_p ATTRIBUTE_UNUSED)
4b58290f
GK
1315{
1316 tree f_base, f_count;
1317 tree base, count;
5d47df87
RH
1318 tree count_tmp, addr, t;
1319 tree lab_gotaddr, lab_fromstack;
06d22853 1320 int size, size_of_reg_args, must_stack;
5d47df87
RH
1321 tree size_tree;
1322
4b58290f 1323 f_base = TYPE_FIELDS (va_list_type_node);
910ad8de 1324 f_count = DECL_CHAIN (f_base);
5ab9749e 1325
47a25a46
RG
1326 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1327 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1328 NULL_TREE);
4b58290f 1329
fe984136 1330 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
4b58290f 1331 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
5d47df87 1332 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
5ab9749e 1333
cf4c092e 1334 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
4b58290f 1335
5d47df87 1336 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
4c4bde29
AH
1337 lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
1338 lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
5d47df87 1339 addr = create_tmp_var (ptr_type_node, NULL);
cf4c092e 1340
06d22853
DD
1341 if (!must_stack)
1342 {
5d47df87
RH
1343 tree r;
1344
1345 t = fold_convert (TREE_TYPE (count), size_tree);
47a25a46 1346 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
5d47df87 1347 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
47a25a46
RG
1348 t = build2 (GT_EXPR, boolean_type_node, t, r);
1349 t = build3 (COND_EXPR, void_type_node, t,
1350 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1351 NULL_TREE);
5d47df87 1352 gimplify_and_add (t, pre_p);
f84fe9b6 1353
5d49b6a7 1354 t = fold_build_pointer_plus (base, count_tmp);
726a989a 1355 gimplify_assign (addr, t, pre_p);
5d47df87 1356
47a25a46 1357 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
5d47df87
RH
1358 gimplify_and_add (t, pre_p);
1359
47a25a46 1360 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
5d47df87 1361 gimplify_and_add (t, pre_p);
06d22853 1362 }
5ab9749e 1363
4b58290f
GK
1364 /* Arguments larger than a word might need to skip over some
1365 registers, since arguments are either passed entirely in
1366 registers or entirely on the stack. */
06d22853
DD
1367 size = PUSH_ROUNDING (int_size_in_bytes (type));
1368 if (size > 2 || size < 0 || must_stack)
4b58290f 1369 {
5d47df87
RH
1370 tree r, u;
1371
1372 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
726a989a 1373 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
5d47df87
RH
1374
1375 t = fold_convert (TREE_TYPE (count), r);
47a25a46
RG
1376 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1377 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
5d47df87 1378 gimplify_and_add (t, pre_p);
4b58290f
GK
1379 }
1380
5d47df87 1381 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
b72bbbcb 1382 + INCOMING_FRAME_SP_OFFSET);
5d47df87 1383 t = fold_convert (TREE_TYPE (count), t);
47a25a46
RG
1384 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1385 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1386 fold_convert (TREE_TYPE (count), size_tree));
f84fe9b6
NC
1387 t = fold_convert (TREE_TYPE (t), fold (t));
1388 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
5d49b6a7 1389 t = fold_build_pointer_plus (base, t);
726a989a 1390 gimplify_assign (addr, t, pre_p);
5d47df87 1391
47a25a46 1392 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
5d47df87 1393 gimplify_and_add (t, pre_p);
4b58290f 1394
5d47df87 1395 t = fold_convert (TREE_TYPE (count), size_tree);
47a25a46 1396 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
726a989a 1397 gimplify_assign (count, t, pre_p);
5ab9749e 1398
5d47df87 1399 addr = fold_convert (build_pointer_type (type), addr);
d6e9821f 1400 return build_va_arg_indirect_ref (addr);
4b58290f
GK
1401}
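
/* Illustrative only, not part of the port: the va_arg address choice
   made above, written as ordinary C over the two-field va_list
   sketched earlier.  NUM_ARGUMENT_REGISTERS and INCOMING_FRAME_SP_OFFSET
   come from the port headers; the values below are placeholders, and
   the one-word edge case handled by the "size > 2" test above is
   folded into the stack path here.

     #include <stddef.h>

     #define UNITS_PER_WORD     2
     #define REG_AREA_BYTES     (6 * UNITS_PER_WORD)   // placeholder
     #define INCOMING_SP_OFFSET 4                      // placeholder

     struct va_list_sketch { char *base; unsigned count; };

     static void *
     va_arg_sketch (struct va_list_sketch *ap, size_t size, int must_stack)
     {
       size_t rounded = (size + UNITS_PER_WORD - 1)
                        & ~(size_t) (UNITS_PER_WORD - 1);
       void *addr;

       if (!must_stack && ap->count + rounded <= REG_AREA_BYTES)
         addr = ap->base + ap->count;          // register save area
       else
         {
           long off;

           if (ap->count < REG_AREA_BYTES)
             ap->count = REG_AREA_BYTES;       // skip unused argument registers
           // Stack arguments sit on the other side of the saved return
           // address, hence the offset is applied negatively to BASE.
           off = (long) ap->count - (REG_AREA_BYTES + INCOMING_SP_OFFSET)
                 + (long) rounded;
           addr = ap->base - off;
         }

       ap->count += rounded;
       return addr;
     }
   */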
1402
/* Worker function for TARGET_TRAMPOLINE_INIT.  */

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
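
/* Illustrative only, not part of the port: the four 16-bit words the
   code above stores into the trampoline, computed with plain integer
   arithmetic.  The two opcode constants are copied verbatim from the
   function; STATIC_CHAIN_REGNUM lives in the port headers, so the 1
   below is only a placeholder.  Presumably the first pair loads the
   static chain register and the second pair is a far jump to FNADDR.

     #include <stdint.h>
     #include <stdio.h>

     #define STATIC_CHAIN_REGNUM 1              // placeholder

     static void
     build_trampoline (uint16_t tramp[4], uint16_t chain, uint32_t fnaddr)
     {
       tramp[0] = 0x3130 | STATIC_CHAIN_REGNUM;
       tramp[1] = chain;
       tramp[2] = (fnaddr & 0xFF) | 0x0200;
       tramp[3] = fnaddr >> 8;
     }

     int
     main (void)
     {
       uint16_t t[4];
       build_trampoline (t, 0xBEEF, 0x012345);
       printf ("%04x %04x %04x %04x\n", t[0], t[1], t[2], t[3]);
       return 0;
     }
   */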
1436
998871e9 1437/* Worker function for TARGET_FUNCTION_VALUE. */
bd5bd7ac 1438
998871e9
AS
1439static rtx
1440xstormy16_function_value (const_tree valtype,
1441 const_tree func ATTRIBUTE_UNUSED,
1442 bool outgoing ATTRIBUTE_UNUSED)
4b58290f
GK
1443{
1444 enum machine_mode mode;
1445 mode = TYPE_MODE (valtype);
1446 PROMOTE_MODE (mode, 0, valtype);
1447 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1448}
1449
998871e9
AS
1450/* Worker function for TARGET_LIBCALL_VALUE. */
1451
1452static rtx
1453xstormy16_libcall_value (enum machine_mode mode,
1454 const_rtx fun ATTRIBUTE_UNUSED)
1455{
1456 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1457}
1458
1459/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1460
1461static bool
1462xstormy16_function_value_regno_p (const unsigned int regno)
1463{
1464 return (regno == RETURN_VALUE_REGNUM);
1465}
1466
52560c7b
GK
1467/* A C compound statement that outputs the assembler code for a thunk function,
1468 used to implement C++ virtual function calls with multiple inheritance. The
1469 thunk acts as a wrapper around a virtual function, adjusting the implicit
1470 object parameter before handing control off to the real function.
1471
1472 First, emit code to add the integer DELTA to the location that contains the
1473 incoming first argument. Assume that this argument contains a pointer, and
1474 is the one used to pass the `this' pointer in C++. This is the incoming
1475 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1476 addition must preserve the values of all other incoming arguments.
1477
1478 After the addition, emit code to jump to FUNCTION, which is a
1479 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1480 the return address. Hence returning from FUNCTION will return to whoever
1481 called the current `thunk'.
1482
1483 The effect must be as if @var{function} had been called directly
1484 with the adjusted first argument. This macro is responsible for
1485 emitting all of the code for a thunk function;
1486 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1487 not invoked.
1488
1489 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1490 extracted from it.) It might possibly be useful on some targets, but
1491 probably not. */
1492
c590b625 1493static void
51c16b7e
SB
1494xstormy16_asm_output_mi_thunk (FILE *file,
1495 tree thunk_fndecl ATTRIBUTE_UNUSED,
1496 HOST_WIDE_INT delta,
1497 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1498 tree function)
52560c7b
GK
1499{
1500 int regnum = FIRST_ARGUMENT_REGISTER;
5ab9749e 1501
52560c7b 1502 /* There might be a hidden first argument for a returned structure. */
61f71b34 1503 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
52560c7b 1504 regnum += 1;
5ab9749e 1505
eb0424da 1506 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
52560c7b
GK
1507 fputs ("\tjmpf ", file);
1508 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1509 putc ('\n', file);
1510}
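
/* Illustrative only, not part of the port: what the emitted thunk does,
   expressed in C -- add DELTA to the incoming object pointer, then
   transfer control to the real function ("add rN,#delta" followed by
   "jmpf function" above).  All names and the DELTA value here are
   hypothetical.

     #include <stdio.h>

     #define DELTA 4                       // hypothetical this-adjustment

     static void
     real_method (void *self, int x)
     {
       printf ("real_method(%p, %d)\n", self, x);
     }

     static void
     thunk (void *self, int x)
     {
       // The real thunk is a pure jump, not a call; a tail call is the
       // closest C equivalent.
       real_method ((char *) self + DELTA, x);
     }

     int
     main (void)
     {
       char object[16] = { 0 };
       thunk (object, 42);
       return 0;
     }
   */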
1511
54e9a19d
DD
1512/* The purpose of this function is to override the default behavior of
1513 BSS objects. Normally, they go into .bss or .sbss via ".common"
1514 directives, but we need to override that and put them in
1515 .bss_below100. We can't just use a section override (like we do
1516 for .data_below100), because that makes them initialized rather
1517 than uninitialized. */
5ab9749e 1518
54e9a19d
DD
1519void
1520xstormy16_asm_output_aligned_common (FILE *stream,
2f806f3b 1521 tree decl,
54e9a19d
DD
1522 const char *name,
1523 int size,
1524 int align,
1525 int global)
1526{
038eab67 1527 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
2f806f3b 1528 rtx symbol;
5ab9749e 1529
2f806f3b 1530 if (mem != NULL_RTX
a21eaf5e 1531 && MEM_P (mem)
2f806f3b
NC
1532 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1533 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
54e9a19d 1534 {
2f806f3b
NC
1535 const char *name2;
1536 int p2align = 0;
1537
d6b5193b 1538 switch_to_section (bss100_section);
2f806f3b
NC
1539
1540 while (align > 8)
54e9a19d 1541 {
2f806f3b
NC
1542 align /= 2;
1543 p2align ++;
54e9a19d 1544 }
54e9a19d 1545
2f806f3b
NC
1546 name2 = default_strip_name_encoding (name);
1547 if (global)
1548 fprintf (stream, "\t.globl\t%s\n", name2);
1549 if (p2align)
1550 fprintf (stream, "\t.p2align %d\n", p2align);
1551 fprintf (stream, "\t.type\t%s, @object\n", name2);
1552 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1553 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1554 return;
54e9a19d
DD
1555 }
1556
1557 if (!global)
1558 {
1559 fprintf (stream, "\t.local\t");
1560 assemble_name (stream, name);
1561 fprintf (stream, "\n");
1562 }
1563 fprintf (stream, "\t.comm\t");
1564 assemble_name (stream, name);
43f51151 1565 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
54e9a19d
DD
1566}
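/* Illustrative sketch (not part of the original source): for a global,
   byte-aligned below-100 object `flags' of size 2, the branch above
   emits roughly

       .section ".bss_below100","aw",@nobits
       .globl  flags
       .type   flags, @object
       .size   flags, 2
   flags:
       .space  2

   while ordinary BSS objects fall through to the usual .local/.comm
   directives below.  */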
1567
d6b5193b
RS
1568/* Implement TARGET_ASM_INIT_SECTIONS. */
1569
1570static void
1571xstormy16_asm_init_sections (void)
1572{
1573 bss100_section
1574 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1575 output_section_asm_op,
1576 "\t.section \".bss_below100\",\"aw\",@nobits");
1577}
1578
54e9a19d
DD
1579/* Mark symbols with the "below100" attribute so that we can use the
1580 special addressing modes for them. */
1581
1582static void
2f806f3b 1583xstormy16_encode_section_info (tree decl, rtx r, int first)
54e9a19d 1584{
e5eb9a52
NC
1585 default_encode_section_info (decl, r, first);
1586
2f806f3b 1587 if (TREE_CODE (decl) == VAR_DECL
54e9a19d
DD
1588 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1589 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1590 {
2f806f3b 1591 rtx symbol = XEXP (r, 0);
5ab9749e 1592
2f806f3b
NC
1593 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1594 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
54e9a19d
DD
1595 }
1596}
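/* Illustrative sketch (not part of the original source): a user asks
   for the special addressing modes with, e.g.,

       volatile unsigned char port __attribute__ ((below100));

   after which the SYMBOL_REF for `port' carries
   SYMBOL_FLAG_XSTORMY16_BELOW100.  */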
1597
6208b55d 1598#undef TARGET_ASM_CONSTRUCTOR
5ab9749e 1599#define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
6208b55d 1600#undef TARGET_ASM_DESTRUCTOR
5ab9749e
NC
1601#define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1602
1603/* Output constructors and destructors. Just like
1604 default_named_section_asm_out_* but don't set the sections writable. */
43898541
GK
1605
1606static void
51c16b7e 1607xstormy16_asm_out_destructor (rtx symbol, int priority)
43898541
GK
1608{
1609 const char *section = ".dtors";
1610 char buf[16];
1611
71cc389b 1612 /* ??? This only works reliably with the GNU linker. */
43898541
GK
1613 if (priority != DEFAULT_INIT_PRIORITY)
1614 {
1615 sprintf (buf, ".dtors.%.5u",
1616 /* Invert the numbering so the linker puts us in the proper
1617 order; constructors are run from right to left, and the
1618 linker sorts in increasing order. */
1619 MAX_INIT_PRIORITY - priority);
1620 section = buf;
1621 }
1622
d6b5193b 1623 switch_to_section (get_section (section, 0, NULL));
43898541
GK
1624 assemble_align (POINTER_SIZE);
1625 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1626}
1627
1628static void
51c16b7e 1629xstormy16_asm_out_constructor (rtx symbol, int priority)
43898541
GK
1630{
1631 const char *section = ".ctors";
1632 char buf[16];
1633
71cc389b 1634 /* ??? This only works reliably with the GNU linker. */
43898541
GK
1635 if (priority != DEFAULT_INIT_PRIORITY)
1636 {
1637 sprintf (buf, ".ctors.%.5u",
1638 /* Invert the numbering so the linker puts us in the proper
1639 order; constructors are run from right to left, and the
1640 linker sorts in increasing order. */
1641 MAX_INIT_PRIORITY - priority);
1642 section = buf;
1643 }
1644
d6b5193b 1645 switch_to_section (get_section (section, 0, NULL));
43898541
GK
1646 assemble_align (POINTER_SIZE);
1647 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1648}
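/* Illustrative sketch (not part of the original source), assuming
   MAX_INIT_PRIORITY is 65535: a constructor with priority 101 lands in
   section ".ctors.65434" (65535 - 101), so the linker's ascending sort
   produces the required execution order.  */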
4b58290f 1649\f
43070a6e 1650/* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
5ab9749e 1651
43070a6e
AS
1652 Print a memory address as an operand to reference that memory location. */
1653
1654static void
51c16b7e 1655xstormy16_print_operand_address (FILE *file, rtx address)
4b58290f
GK
1656{
1657 HOST_WIDE_INT offset;
1658 int pre_dec, post_inc;
1659
1660 /* There are a few easy cases. */
a21eaf5e 1661 if (CONST_INT_P (address))
4b58290f
GK
1662 {
1663 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1664 return;
1665 }
5ab9749e 1666
a21eaf5e 1667 if (CONSTANT_P (address) || LABEL_P (address))
4b58290f
GK
1668 {
1669 output_addr_const (file, address);
1670 return;
1671 }
4b58290f 1672
5ab9749e
NC
1673 /* Otherwise, it's hopefully something of the form
1674 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
4b58290f
GK
1675 if (GET_CODE (address) == PLUS)
1676 {
a21eaf5e 1677 gcc_assert (CONST_INT_P (XEXP (address, 1)));
4b58290f
GK
1678 offset = INTVAL (XEXP (address, 1));
1679 address = XEXP (address, 0);
1680 }
1681 else
1682 offset = 0;
1683
1684 pre_dec = (GET_CODE (address) == PRE_DEC);
1685 post_inc = (GET_CODE (address) == POST_INC);
1686 if (pre_dec || post_inc)
1687 address = XEXP (address, 0);
5ab9749e 1688
a21eaf5e 1689 gcc_assert (REG_P (address));
4b58290f
GK
1690
1691 fputc ('(', file);
1692 if (pre_dec)
1693 fputs ("--", file);
1694 fputs (reg_names [REGNO (address)], file);
1695 if (post_inc)
1696 fputs ("++", file);
1697 if (offset != 0)
4a0a75dd 1698 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
4b58290f
GK
1699 fputc (')', file);
1700}
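/* Illustrative sketch (not part of the original source), using the
   usual register names: (plus:HI (reg:HI r2) (const_int 10)) prints as
   "(r2,10)", (post_inc:HI (reg:HI r3)) as "(r3++)" and
   (pre_dec:HI (reg:HI r4)) as "(--r4)".  */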
1701
43070a6e 1702/* Worker function for TARGET_PRINT_OPERAND.
5ab9749e 1703
43070a6e
AS
1704 Print an operand to an assembler instruction. */
1705
1706static void
51c16b7e 1707xstormy16_print_operand (FILE *file, rtx x, int code)
4b58290f
GK
1708{
1709 switch (code)
1710 {
1711 case 'B':
 1712 /* There is either one bit set, or one bit clear, in X.
 1713 Print the number of that bit, preceded by '#'. */
1714 {
54e9a19d 1715 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
e9818db2
GK
1716 HOST_WIDE_INT xx = 1;
1717 HOST_WIDE_INT l;
4b58290f 1718
a21eaf5e 1719 if (CONST_INT_P (x))
4b58290f
GK
1720 xx = INTVAL (x);
1721 else
9e637a26 1722 output_operand_lossage ("'B' operand is not constant");
5ab9749e 1723
54e9a19d
DD
1724 /* GCC sign-extends masks with the MSB set, so we have to
1725 detect all the cases that differ only in sign extension
1726 beyond the bits we care about. Normally, the predicates
1727 and constraints ensure that we have the right values. This
1728 works correctly for valid masks. */
1729 if (bits_set[xx & 7] <= 1)
1730 {
1731 /* Remove sign extension bits. */
1732 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1733 xx &= 0xff;
1734 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1735 xx &= 0xffff;
1736 l = exact_log2 (xx);
1737 }
1738 else
1739 {
1740 /* Add sign extension bits. */
1741 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1742 xx |= ~(HOST_WIDE_INT)0xff;
1743 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1744 xx |= ~(HOST_WIDE_INT)0xffff;
1745 l = exact_log2 (~xx);
1746 }
1747
4b58290f 1748 if (l == -1)
9e637a26 1749 output_operand_lossage ("'B' operand has multiple bits set");
5ab9749e 1750
4a0a75dd 1751 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
4b58290f
GK
1752 return;
1753 }
1754
1755 case 'C':
1756 /* Print the symbol without a surrounding @fptr(). */
1757 if (GET_CODE (x) == SYMBOL_REF)
1758 assemble_name (file, XSTR (x, 0));
a21eaf5e 1759 else if (LABEL_P (x))
2f0b7af6 1760 output_asm_label (x);
4b58290f 1761 else
c6243b4c 1762 xstormy16_print_operand_address (file, x);
4b58290f
GK
1763 return;
1764
1765 case 'o':
1766 case 'O':
5ab9749e 1767 /* Print the immediate operand less one, preceded by '#'.
4b58290f
GK
1768 For 'O', negate it first. */
1769 {
e9818db2 1770 HOST_WIDE_INT xx = 0;
5ab9749e 1771
a21eaf5e 1772 if (CONST_INT_P (x))
4b58290f
GK
1773 xx = INTVAL (x);
1774 else
9e637a26 1775 output_operand_lossage ("'o' operand is not constant");
5ab9749e 1776
4b58290f
GK
1777 if (code == 'O')
1778 xx = -xx;
5ab9749e 1779
4a0a75dd 1780 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
4b58290f
GK
1781 return;
1782 }
1783
54e9a19d
DD
1784 case 'b':
1785 /* Print the shift mask for bp/bn. */
1786 {
1787 HOST_WIDE_INT xx = 1;
1788 HOST_WIDE_INT l;
1789
a21eaf5e 1790 if (CONST_INT_P (x))
54e9a19d
DD
1791 xx = INTVAL (x);
1792 else
9e637a26 1793 output_operand_lossage ("'b' operand is not constant");
5ab9749e 1794
54e9a19d 1795 l = 7 - xx;
5ab9749e 1796
54e9a19d
DD
1797 fputs (IMMEDIATE_PREFIX, file);
1798 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1799 return;
1800 }
1801
4b58290f
GK
1802 case 0:
1803 /* Handled below. */
1804 break;
5ab9749e 1805
4b58290f 1806 default:
c6243b4c 1807 output_operand_lossage ("xstormy16_print_operand: unknown code");
4b58290f
GK
1808 return;
1809 }
1810
1811 switch (GET_CODE (x))
1812 {
1813 case REG:
1814 fputs (reg_names [REGNO (x)], file);
1815 break;
1816
1817 case MEM:
c6243b4c 1818 xstormy16_print_operand_address (file, XEXP (x, 0));
4b58290f
GK
1819 break;
1820
1821 default:
1822 /* Some kind of constant or label; an immediate operand,
1823 so prefix it with '#' for the assembler. */
1824 fputs (IMMEDIATE_PREFIX, file);
1825 output_addr_const (file, x);
1826 break;
1827 }
1828
1829 return;
1830}
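/* Illustrative sketch (not part of the original source), assuming
   IMMEDIATE_PREFIX is "#": for a CONST_INT operand of 8, "%B" prints
   "#3" (the index of the single set bit), "%o" prints "#7" (the value
   less one) and "%O" prints "#-9" (the negated value less one); for a
   CONST_INT of 2, "%b" prints "#5" (7 - 2).  */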
4b58290f
GK
1831\f
1832/* Expander for the `casesi' pattern.
1833 INDEX is the index of the switch statement.
1834 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1835 to the first table entry.
1836 RANGE is the number of table entries.
1837 TABLE is an ADDR_VEC that is the jump table.
1838 DEFAULT_LABEL is the address to branch to if INDEX is outside the
5ab9749e 1839 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
4b58290f 1840
5ab9749e 1841void
51c16b7e
SB
1842xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1843 rtx table, rtx default_label)
4b58290f
GK
1844{
1845 HOST_WIDE_INT range_i = INTVAL (range);
1846 rtx int_index;
1847
1848 /* This code uses 'br', so it can deal only with tables of size up to
1849 8192 entries. */
1850 if (range_i >= 8192)
5ab9749e 1851 sorry ("switch statement of size %lu entries too large",
4b58290f
GK
1852 (unsigned long) range_i);
1853
4192f0d2 1854 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
4b58290f
GK
1855 OPTAB_LIB_WIDEN);
1856 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
40c13662 1857 default_label);
4b58290f 1858 int_index = gen_lowpart_common (HImode, index);
a556fd39 1859 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
4b58290f
GK
1860 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1861}
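/* Illustrative sketch (not part of the original source): the expansion
   above subtracts LOWER_BOUND from the index, branches to DEFAULT_LABEL
   if the result is above RANGE (unsigned), shifts the remainder left by
   two to scale it to the table entries (presumably four-byte `jmpf'
   insns, see below) and then performs the pc-relative table jump.  */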
1862
1863/* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1864 instructions, without label or alignment or any other special
1865 constructs. We know that the previous instruction will be the
1866 `tablejump_pcrel' output above.
1867
1868 TODO: it might be nice to output 'br' instructions if they could
1869 all reach. */
1870
1871void
51c16b7e 1872xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
5ab9749e 1873{
4b58290f 1874 int vlen, idx;
5ab9749e 1875
d6b5193b 1876 switch_to_section (current_function_section ());
4b58290f
GK
1877
1878 vlen = XVECLEN (table, 0);
1879 for (idx = 0; idx < vlen; idx++)
1880 {
1881 fputs ("\tjmpf ", file);
2f0b7af6 1882 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
4b58290f
GK
1883 fputc ('\n', file);
1884 }
1885}
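/* Illustrative sketch (not part of the original source): a three-entry
   table therefore comes out simply as

       jmpf .L4
       jmpf .L7
       jmpf .L9

   (with whatever code labels the table actually references), with no
   label or alignment in front of it.  */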
4b58290f
GK
1886\f
1887/* Expander for the `call' patterns.
a21eaf5e
NC
1888 RETVAL is the RTL for the return register or NULL for void functions.
1889 DEST is the function to call, expressed as a MEM.
1890 COUNTER is ignored. */
4b58290f 1891
5ab9749e 1892void
51c16b7e 1893xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
4b58290f
GK
1894{
1895 rtx call, temp;
1896 enum machine_mode mode;
1897
a21eaf5e 1898 gcc_assert (MEM_P (dest));
4b58290f
GK
1899 dest = XEXP (dest, 0);
1900
a21eaf5e 1901 if (! CONSTANT_P (dest) && ! REG_P (dest))
4b58290f 1902 dest = force_reg (Pmode, dest);
5ab9749e 1903
4b58290f
GK
1904 if (retval == NULL)
1905 mode = VOIDmode;
1906 else
1907 mode = GET_MODE (retval);
1908
1909 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1910 counter);
1911 if (retval)
1912 call = gen_rtx_SET (VOIDmode, retval, call);
5ab9749e 1913
4b58290f
GK
1914 if (! CONSTANT_P (dest))
1915 {
1916 temp = gen_reg_rtx (HImode);
1917 emit_move_insn (temp, const0_rtx);
1918 }
1919 else
1920 temp = const0_rtx;
5ab9749e
NC
1921
1922 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
4b58290f
GK
1923 gen_rtx_USE (VOIDmode, temp)));
1924 emit_call_insn (call);
1925}
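/* Illustrative sketch (not part of the original source): for a direct
   call to `f' whose HImode result is wanted, the expander above emits
   roughly

       (parallel [(set <retval> (call (mem (symbol_ref "f")) <counter>))
                  (use (const_int 0))])

   whereas an indirect call first forces the address into a register and
   places a freshly zeroed HImode register, rather than const0_rtx,
   inside the USE.  */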
1926\f
1927/* Expanders for multiword computational operations. */
1928
1929/* Expander for arithmetic operations; emit insns to compute
1930
1931 (set DEST (CODE:MODE SRC0 SRC1))
5ab9749e 1932
d40ba0b6
NC
1933 When CODE is COMPARE, a branch template is generated
1934 (this saves duplicating code in xstormy16_split_cbranch). */
4b58290f 1935
5ab9749e 1936void
51c16b7e 1937xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
d40ba0b6 1938 rtx dest, rtx src0, rtx src1)
4b58290f
GK
1939{
1940 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1941 int i;
1942 int firstloop = 1;
1943
1944 if (code == NEG)
9be13211 1945 emit_move_insn (src0, const0_rtx);
5ab9749e 1946
4b58290f
GK
1947 for (i = 0; i < num_words; i++)
1948 {
1949 rtx w_src0, w_src1, w_dest;
1950 rtx insn;
5ab9749e
NC
1951
1952 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
9be13211 1953 i * UNITS_PER_WORD);
4b58290f
GK
1954 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1955 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1956
1957 switch (code)
1958 {
1959 case PLUS:
1960 if (firstloop
a21eaf5e
NC
1961 && CONST_INT_P (w_src1)
1962 && INTVAL (w_src1) == 0)
4b58290f 1963 continue;
5ab9749e 1964
4b58290f 1965 if (firstloop)
d40ba0b6 1966 insn = gen_addchi4 (w_dest, w_src0, w_src1);
4b58290f 1967 else
d40ba0b6 1968 insn = gen_addchi5 (w_dest, w_src0, w_src1);
4b58290f
GK
1969 break;
1970
1971 case NEG:
1972 case MINUS:
1973 case COMPARE:
1974 if (code == COMPARE && i == num_words - 1)
1975 {
1976 rtx branch, sub, clobber, sub_1;
5ab9749e
NC
1977
1978 sub_1 = gen_rtx_MINUS (HImode, w_src0,
b72bbbcb 1979 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
4b58290f
GK
1980 sub = gen_rtx_SET (VOIDmode, w_dest,
1981 gen_rtx_MINUS (HImode, sub_1, w_src1));
b72bbbcb 1982 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
4b58290f
GK
1983 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1984 gen_rtx_IF_THEN_ELSE (VOIDmode,
1985 gen_rtx_EQ (HImode,
1986 sub_1,
1987 w_src1),
1988 pc_rtx,
1989 pc_rtx));
1990 insn = gen_rtx_PARALLEL (VOIDmode,
1991 gen_rtvec (3, branch, sub, clobber));
1992 }
1993 else if (firstloop
1994 && code != COMPARE
a21eaf5e
NC
1995 && CONST_INT_P (w_src1)
1996 && INTVAL (w_src1) == 0)
4b58290f
GK
1997 continue;
1998 else if (firstloop)
d40ba0b6 1999 insn = gen_subchi4 (w_dest, w_src0, w_src1);
4b58290f 2000 else
d40ba0b6 2001 insn = gen_subchi5 (w_dest, w_src0, w_src1);
4b58290f
GK
2002 break;
2003
2004 case IOR:
2005 case XOR:
2006 case AND:
a21eaf5e 2007 if (CONST_INT_P (w_src1)
4b58290f
GK
2008 && INTVAL (w_src1) == -(code == AND))
2009 continue;
5ab9749e 2010
1c563bed 2011 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
0f4c242b 2012 w_src0, w_src1));
4b58290f
GK
2013 break;
2014
2015 case NOT:
2016 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2017 break;
2018
2019 default:
4718bfd8 2020 gcc_unreachable ();
4b58290f 2021 }
5ab9749e 2022
4b58290f
GK
2023 firstloop = 0;
2024 emit (insn);
2025 }
f3cd0185
DD
2026
2027 /* If we emit nothing, try_split() will think we failed. So emit
2028 something that does nothing and can be optimized away. */
2029 if (firstloop)
2030 emit (gen_nop ());
4b58290f
GK
2031}
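/* Illustrative sketch (not part of the original source): an SImode
   addition is split into two HImode word insns, roughly

       low word:   addchi4  -- plain add, producing the carry
       high word:  addchi5  -- add that also consumes the incoming carry

   and a constant-zero low word of SRC1 is simply skipped on the first
   iteration.  */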
2032
4b58290f 2033/* The shift operations are split at output time for constant values;
5ab9749e 2034 variable-width shifts get handed off to a library routine.
4b58290f
GK
2035
2036 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2037 SIZE_R will be a CONST_INT, X will be a hard register. */
2038
5ab9749e 2039const char *
51c16b7e
SB
2040xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2041 rtx x, rtx size_r, rtx temp)
4b58290f
GK
2042{
2043 HOST_WIDE_INT size;
2044 const char *r0, *r1, *rt;
2045 static char r[64];
2046
a21eaf5e
NC
2047 gcc_assert (CONST_INT_P (size_r)
2048 && REG_P (x)
2049 && mode == SImode);
2050
4b58290f
GK
2051 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2052
2053 if (size == 0)
2054 return "";
2055
2056 r0 = reg_names [REGNO (x)];
2057 r1 = reg_names [REGNO (x) + 1];
4b58290f
GK
2058
2059 /* For shifts of size 1, we can use the rotate instructions. */
2060 if (size == 1)
2061 {
2062 switch (code)
2063 {
2064 case ASHIFT:
2065 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2066 break;
2067 case ASHIFTRT:
2068 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2069 break;
2070 case LSHIFTRT:
2071 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2072 break;
2073 default:
4718bfd8 2074 gcc_unreachable ();
4b58290f
GK
2075 }
2076 return r;
2077 }
5ab9749e 2078
4b58290f
GK
2079 /* For large shifts, there are easy special cases. */
2080 if (size == 16)
2081 {
2082 switch (code)
2083 {
2084 case ASHIFT:
2085 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2086 break;
2087 case ASHIFTRT:
2088 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2089 break;
2090 case LSHIFTRT:
2091 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2092 break;
2093 default:
4718bfd8 2094 gcc_unreachable ();
4b58290f
GK
2095 }
2096 return r;
2097 }
2098 if (size > 16)
2099 {
2100 switch (code)
2101 {
2102 case ASHIFT:
5ab9749e 2103 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
4b58290f
GK
2104 r1, r0, r0, r1, (int) size - 16);
2105 break;
2106 case ASHIFTRT:
5ab9749e 2107 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
4b58290f
GK
2108 r0, r1, r1, r0, (int) size - 16);
2109 break;
2110 case LSHIFTRT:
5ab9749e 2111 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
4b58290f
GK
2112 r0, r1, r1, r0, (int) size - 16);
2113 break;
2114 default:
4718bfd8 2115 gcc_unreachable ();
4b58290f
GK
2116 }
2117 return r;
2118 }
2119
2120 /* For the rest, we have to do more work. In particular, we
2121 need a temporary. */
5766e0ef 2122 rt = reg_names [REGNO (temp)];
4b58290f
GK
2123 switch (code)
2124 {
2125 case ASHIFT:
5ab9749e
NC
2126 sprintf (r,
2127 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2128 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
4b58290f
GK
2129 r1, rt);
2130 break;
2131 case ASHIFTRT:
5ab9749e
NC
2132 sprintf (r,
2133 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2134 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
4b58290f
GK
2135 r0, rt);
2136 break;
2137 case LSHIFTRT:
5ab9749e
NC
2138 sprintf (r,
2139 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2140 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
4b58290f
GK
2141 r0, rt);
2142 break;
2143 default:
4718bfd8 2144 gcc_unreachable ();
4b58290f
GK
2145 }
2146 return r;
2147}
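/* Illustrative sketch (not part of the original source): for an SImode
   value in the register pair r0/r1 the templates above give, e.g.,

       ASHIFT by 1:   shl r0,#1 | rlc r1,#1
       ASHIFT by 16:  mov r1,r0 | mov r0,#0
       ASHIFT by 20:  mov r1,r0 | mov r0,#0 | shl r1,#4

   while the mid-range shift counts (2..15) route the bits crossing the
   word boundary through the register named by TEMP.  */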
2148\f
2149/* Attribute handling. */
2150
2151/* Return nonzero if the function is an interrupt function. */
5ab9749e 2152
4b58290f 2153int
51c16b7e 2154xstormy16_interrupt_function_p (void)
4b58290f
GK
2155{
2156 tree attributes;
5ab9749e 2157
4b58290f
GK
2158 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2159 any functions are declared, which is demonstrably wrong, but
2160 it is worked around here. FIXME. */
2161 if (!cfun)
2162 return 0;
2163
2164 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2165 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2166}
2167
5ab9749e
NC
2168#undef TARGET_ATTRIBUTE_TABLE
2169#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2170
51c16b7e
SB
2171static tree xstormy16_handle_interrupt_attribute
2172 (tree *, tree, tree, int, bool *);
54e9a19d
DD
2173static tree xstormy16_handle_below100_attribute
2174 (tree *, tree, tree, int, bool *);
51c16b7e 2175
c6243b4c 2176static const struct attribute_spec xstormy16_attribute_table[] =
91d231cb 2177{
62d784f7
KT
2178 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2179 affects_type_identity. */
2180 { "interrupt", 0, 0, false, true, true,
2181 xstormy16_handle_interrupt_attribute , false },
2182 { "BELOW100", 0, 0, false, false, false,
2183 xstormy16_handle_below100_attribute, false },
2184 { "below100", 0, 0, false, false, false,
2185 xstormy16_handle_below100_attribute, false },
2186 { NULL, 0, 0, false, false, false, NULL, false }
91d231cb
JM
2187};
2188
2189/* Handle an "interrupt" attribute;
2190 arguments as in struct attribute_spec.handler. */
5ab9749e 2191
91d231cb 2192static tree
51c16b7e
SB
2193xstormy16_handle_interrupt_attribute (tree *node, tree name,
2194 tree args ATTRIBUTE_UNUSED,
2195 int flags ATTRIBUTE_UNUSED,
2196 bool *no_add_attrs)
4b58290f 2197{
91d231cb
JM
2198 if (TREE_CODE (*node) != FUNCTION_TYPE)
2199 {
29d08eba
JM
2200 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2201 name);
91d231cb
JM
2202 *no_add_attrs = true;
2203 }
4b58290f 2204
91d231cb 2205 return NULL_TREE;
4b58290f 2206}
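/* Illustrative sketch (not part of the original source): the attribute
   attaches to a function type, e.g.

       void timer_isr (void) __attribute__ ((interrupt));

   which makes xstormy16_interrupt_function_p return nonzero while that
   function is being compiled.  */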
54e9a19d
DD
2207
 2208/* Handle a "below100" attribute;
2209 arguments as in struct attribute_spec.handler. */
5ab9749e 2210
54e9a19d
DD
2211static tree
2212xstormy16_handle_below100_attribute (tree *node,
2213 tree name ATTRIBUTE_UNUSED,
2214 tree args ATTRIBUTE_UNUSED,
2215 int flags ATTRIBUTE_UNUSED,
2216 bool *no_add_attrs)
2217{
2218 if (TREE_CODE (*node) != VAR_DECL
2219 && TREE_CODE (*node) != POINTER_TYPE
2220 && TREE_CODE (*node) != TYPE_DECL)
2221 {
5c498b10
DD
2222 warning (OPT_Wattributes,
2223 "%<__BELOW100__%> attribute only applies to variables");
54e9a19d
DD
2224 *no_add_attrs = true;
2225 }
2226 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2227 {
2228 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2229 {
5c498b10 2230 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
ab532386 2231 "with auto storage class");
54e9a19d
DD
2232 *no_add_attrs = true;
2233 }
2234 }
5ab9749e 2235
54e9a19d
DD
2236 return NULL_TREE;
2237}
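/* Illustrative sketch (not part of the original source): the attribute
   is accepted only on variables with static storage duration, e.g.

       static unsigned char flag __attribute__ ((BELOW100));

   an automatic variable draws the warning above and the attribute is
   dropped.  */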
3d4b192a 2238\f
5ab9749e
NC
2239#undef TARGET_INIT_BUILTINS
2240#define TARGET_INIT_BUILTINS xstormy16_init_builtins
2241#undef TARGET_EXPAND_BUILTIN
2242#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2243
2244static struct
2245{
2246 const char * name;
2247 int md_code;
2248 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2249 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2250}
2251 s16builtins[] =
2252{
3d4b192a
DD
2253 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2254 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2255 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2256 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
5ab9749e 2257 { NULL, 0, NULL, NULL }
3d4b192a
DD
2258};
2259
2260static void
51c16b7e 2261xstormy16_init_builtins (void)
3d4b192a 2262{
2c67cf6e
NF
2263 tree args[2], ret_type, arg = NULL_TREE, ftype;
2264 int i, a, n_args;
3d4b192a
DD
2265
2266 ret_type = void_type_node;
2267
5ab9749e 2268 for (i = 0; s16builtins[i].name; i++)
3d4b192a 2269 {
2c67cf6e
NF
2270 n_args = strlen (s16builtins[i].arg_types) - 1;
2271
2272 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2273
15f072f9 2274 for (a = n_args - 1; a >= 0; a--)
2c67cf6e
NF
2275 args[a] = NULL_TREE;
2276
2277 for (a = n_args; a >= 0; a--)
3d4b192a
DD
2278 {
2279 switch (s16builtins[i].arg_types[a])
2280 {
2281 case 's': arg = short_integer_type_node; break;
2282 case 'S': arg = short_unsigned_type_node; break;
2283 case 'l': arg = long_integer_type_node; break;
2284 case 'L': arg = long_unsigned_type_node; break;
4718bfd8 2285 default: gcc_unreachable ();
3d4b192a
DD
2286 }
2287 if (a == 0)
2288 ret_type = arg;
2289 else
2c67cf6e 2290 args[a-1] = arg;
3d4b192a 2291 }
15f072f9 2292 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2c67cf6e 2293 add_builtin_function (s16builtins[i].name, ftype,
15f072f9 2294 i, BUILT_IN_MD, NULL, NULL_TREE);
3d4b192a
DD
2295 }
2296}
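/* Illustrative sketch (not part of the original source): per the table
   above, "sls" describes a builtin returning short and taking (long,
   short) arguments, so user code can write, e.g.,

       short q = __sdivlh (num, den);
       short r = __smodlh (num, den);

   Both map onto CODE_FOR_sdivlh; the arg_ops strings "rt01" and "tr01"
   merely swap which insn operand is handed back as the result.  */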
2297
2298static rtx
f84fe9b6
NC
2299xstormy16_expand_builtin (tree exp, rtx target,
2300 rtx subtarget ATTRIBUTE_UNUSED,
2301 enum machine_mode mode ATTRIBUTE_UNUSED,
2302 int ignore ATTRIBUTE_UNUSED)
3d4b192a
DD
2303{
2304 rtx op[10], args[10], pat, copyto[10], retval = 0;
2305 tree fndecl, argtree;
2306 int i, a, o, code;
2307
2308 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2309 argtree = TREE_OPERAND (exp, 1);
2310 i = DECL_FUNCTION_CODE (fndecl);
2311 code = s16builtins[i].md_code;
2312
2313 for (a = 0; a < 10 && argtree; a++)
2314 {
f1cb6795 2315 args[a] = expand_normal (TREE_VALUE (argtree));
3d4b192a
DD
2316 argtree = TREE_CHAIN (argtree);
2317 }
2318
2319 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2320 {
2321 char ao = s16builtins[i].arg_ops[o];
2322 char c = insn_data[code].operand[o].constraint[0];
f1cb6795 2323 enum machine_mode omode;
3d4b192a
DD
2324
2325 copyto[o] = 0;
2326
f1cb6795 2327 omode = (enum machine_mode) insn_data[code].operand[o].mode;
3d4b192a
DD
2328 if (ao == 'r')
2329 op[o] = target ? target : gen_reg_rtx (omode);
2330 else if (ao == 't')
2331 op[o] = gen_reg_rtx (omode);
2332 else
2333 op[o] = args[(int) hex_value (ao)];
2334
2335 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2336 {
2337 if (c == '+' || c == '=')
2338 {
2339 copyto[o] = op[o];
2340 op[o] = gen_reg_rtx (omode);
2341 }
2342 else
2343 op[o] = copy_to_mode_reg (omode, op[o]);
2344 }
2345
2346 if (ao == 'r')
2347 retval = op[o];
2348 }
2349
2350 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2351 op[5], op[6], op[7], op[8], op[9]);
2352 emit_insn (pat);
2353
2354 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2355 if (copyto[o])
2356 {
2357 emit_move_insn (copyto[o], op[o]);
2358 if (op[o] == retval)
2359 retval = copyto[o];
2360 }
2361
2362 return retval;
2363}
54e9a19d 2364\f
54e9a19d
DD
2365/* Look for combinations of insns that can be converted to BN or BP
2366 opcodes. This is, unfortunately, too complex to do with MD
2367 patterns. */
5ab9749e 2368
54e9a19d
DD
2369static void
2370combine_bnp (rtx insn)
2371{
f99652b5
NC
2372 int insn_code, regno, need_extend;
2373 unsigned int mask;
f1cb6795 2374 rtx cond, reg, and_insn, load, qireg, mem;
54e9a19d 2375 enum machine_mode load_mode = QImode;
f99652b5
NC
2376 enum machine_mode and_mode = QImode;
2377 rtx shift = NULL_RTX;
54e9a19d
DD
2378
2379 insn_code = recog_memoized (insn);
2380 if (insn_code != CODE_FOR_cbranchhi
2381 && insn_code != CODE_FOR_cbranchhi_neg)
2382 return;
2383
2384 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2385 cond = XEXP (cond, 1); /* if */
2386 cond = XEXP (cond, 0); /* cond */
2387 switch (GET_CODE (cond))
2388 {
2389 case NE:
2390 case EQ:
2391 need_extend = 0;
2392 break;
2393 case LT:
2394 case GE:
2395 need_extend = 1;
2396 break;
2397 default:
2398 return;
2399 }
2400
2401 reg = XEXP (cond, 0);
a21eaf5e 2402 if (! REG_P (reg))
54e9a19d
DD
2403 return;
2404 regno = REGNO (reg);
2405 if (XEXP (cond, 1) != const0_rtx)
2406 return;
2407 if (! find_regno_note (insn, REG_DEAD, regno))
2408 return;
2409 qireg = gen_rtx_REG (QImode, regno);
2410
2411 if (need_extend)
2412 {
569b7f6a 2413 /* LT and GE conditionals should have a sign extend before
54e9a19d 2414 them. */
15f072f9
NC
2415 for (and_insn = prev_real_insn (insn);
2416 and_insn != NULL_RTX;
f1cb6795 2417 and_insn = prev_real_insn (and_insn))
54e9a19d 2418 {
f1cb6795 2419 int and_code = recog_memoized (and_insn);
f99652b5 2420
54e9a19d 2421 if (and_code == CODE_FOR_extendqihi2
f1cb6795
JR
2422 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2423 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
f99652b5 2424 break;
5ab9749e 2425
54e9a19d 2426 if (and_code == CODE_FOR_movhi_internal
f1cb6795 2427 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
54e9a19d
DD
2428 {
2429 /* This is for testing bit 15. */
f1cb6795 2430 and_insn = insn;
54e9a19d
DD
2431 break;
2432 }
2433
f1cb6795 2434 if (reg_mentioned_p (reg, and_insn))
54e9a19d 2435 return;
f99652b5 2436
f1cb6795
JR
2437 if (GET_CODE (and_insn) != NOTE
2438 && GET_CODE (and_insn) != INSN)
54e9a19d
DD
2439 return;
2440 }
2441 }
2442 else
2443 {
2444 /* EQ and NE conditionals have an AND before them. */
15f072f9
NC
2445 for (and_insn = prev_real_insn (insn);
2446 and_insn != NULL_RTX;
f1cb6795 2447 and_insn = prev_real_insn (and_insn))
54e9a19d 2448 {
f1cb6795
JR
2449 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2450 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2451 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
f99652b5 2452 break;
5ab9749e 2453
f1cb6795 2454 if (reg_mentioned_p (reg, and_insn))
54e9a19d 2455 return;
f99652b5 2456
f1cb6795
JR
2457 if (GET_CODE (and_insn) != NOTE
2458 && GET_CODE (and_insn) != INSN)
54e9a19d
DD
2459 return;
2460 }
f99652b5 2461
f1cb6795 2462 if (and_insn)
f99652b5 2463 {
aabcd309 2464 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
f99652b5
NC
2465 followed by an AND like this:
2466
2467 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2468 (clobber (reg:BI carry))]
2469
2470 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
5ab9749e 2471
f99652b5 2472 Attempt to detect this here. */
f1cb6795
JR
2473 for (shift = prev_real_insn (and_insn); shift;
2474 shift = prev_real_insn (shift))
f99652b5
NC
2475 {
2476 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2477 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2478 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2479 break;
5ab9749e 2480
f99652b5
NC
2481 if (reg_mentioned_p (reg, shift)
2482 || (GET_CODE (shift) != NOTE
2483 && GET_CODE (shift) != INSN))
2484 {
2485 shift = NULL_RTX;
2486 break;
2487 }
2488 }
2489 }
54e9a19d 2490 }
15f072f9
NC
2491
2492 if (and_insn == NULL_RTX)
54e9a19d
DD
2493 return;
2494
f1cb6795 2495 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
f99652b5
NC
2496 load;
2497 load = prev_real_insn (load))
54e9a19d
DD
2498 {
2499 int load_code = recog_memoized (load);
f99652b5 2500
54e9a19d 2501 if (load_code == CODE_FOR_movhi_internal
f99652b5
NC
2502 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2503 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2504 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
54e9a19d
DD
2505 {
2506 load_mode = HImode;
2507 break;
2508 }
2509
2510 if (load_code == CODE_FOR_movqi_internal
f99652b5
NC
2511 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2512 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
54e9a19d
DD
2513 {
2514 load_mode = QImode;
2515 break;
2516 }
f99652b5
NC
2517
2518 if (load_code == CODE_FOR_zero_extendqihi2
2519 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2520 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2521 {
2522 load_mode = QImode;
2523 and_mode = HImode;
2524 break;
2525 }
2526
54e9a19d
DD
2527 if (reg_mentioned_p (reg, load))
2528 return;
f99652b5 2529
54e9a19d
DD
2530 if (GET_CODE (load) != NOTE
2531 && GET_CODE (load) != INSN)
2532 return;
2533 }
2534 if (!load)
2535 return;
2536
f99652b5
NC
2537 mem = SET_SRC (PATTERN (load));
2538
2539 if (need_extend)
54e9a19d 2540 {
f99652b5
NC
2541 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2542
2543 /* If the mem includes a zero-extend operation and we are
2544 going to generate a sign-extend operation then move the
2545 mem inside the zero-extend. */
2546 if (GET_CODE (mem) == ZERO_EXTEND)
2547 mem = XEXP (mem, 0);
54e9a19d
DD
2548 }
2549 else
f99652b5 2550 {
f1cb6795
JR
2551 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2552 load_mode))
f99652b5
NC
2553 return;
2554
f1cb6795 2555 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
f99652b5
NC
2556
2557 if (shift)
2558 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2559 }
54e9a19d 2560
54e9a19d
DD
2561 if (load_mode == HImode)
2562 {
2563 rtx addr = XEXP (mem, 0);
f99652b5 2564
54e9a19d
DD
2565 if (! (mask & 0xff))
2566 {
2567 addr = plus_constant (addr, 1);
2568 mask >>= 8;
2569 }
2570 mem = gen_rtx_MEM (QImode, addr);
2571 }
2572
2573 if (need_extend)
2574 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2575 else
f99652b5
NC
2576 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2577
54e9a19d
DD
2578 INSN_CODE (insn) = -1;
2579 delete_insn (load);
f99652b5 2580
f1cb6795
JR
2581 if (and_insn != insn)
2582 delete_insn (and_insn);
f99652b5
NC
2583
2584 if (shift != NULL_RTX)
2585 delete_insn (shift);
54e9a19d
DD
2586}
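/* Illustrative sketch (not part of the original source): in the EQ/NE
   case the pass above matches a sequence of the shape

       load a below-100 byte or word into REG
       AND REG with a single-bit constant
       conditional branch on REG == 0 / REG != 0

   and rewrites the branch condition to test
   (and (mem <below100>) (mask)) directly, deleting the load, the AND
   and any interposed right shift so that the branch can later be
   emitted as a single bn/bp instruction.  */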
2587
2588static void
2589xstormy16_reorg (void)
2590{
2591 rtx insn;
2592
2593 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2594 {
2595 if (! JUMP_P (insn))
2596 continue;
2597 combine_bnp (insn);
2598 }
2599}
7e43c821 2600\f
78bc94a2
KH
2601/* Worker function for TARGET_RETURN_IN_MEMORY. */
2602
7e43c821 2603static bool
586de218 2604xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7e43c821 2605{
586de218 2606 const HOST_WIDE_INT size = int_size_in_bytes (type);
78bc94a2 2607 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
7e43c821 2608}
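/* Illustrative sketch (not part of the original source), assuming
   UNITS_PER_WORD == 2 and NUM_ARGUMENT_REGISTERS == 6: aggregates
   larger than 12 bytes, or of variable size (-1), are returned in
   memory; anything up to 12 bytes comes back in registers.  */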
4b58290f 2609\f
5ab9749e 2610#undef TARGET_ASM_ALIGNED_HI_OP
301d03af 2611#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
5ab9749e 2612#undef TARGET_ASM_ALIGNED_SI_OP
301d03af 2613#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
5ab9749e 2614#undef TARGET_ENCODE_SECTION_INFO
54e9a19d 2615#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
301d03af 2616
5ab9749e 2617/* Select_section doesn't handle .bss_below100. */
434aeebb
RS
2618#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2619#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2620
5ab9749e 2621#undef TARGET_ASM_OUTPUT_MI_THUNK
c590b625 2622#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
5ab9749e 2623#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3961e8fe 2624#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
c590b625 2625
43070a6e
AS
2626#undef TARGET_PRINT_OPERAND
2627#define TARGET_PRINT_OPERAND xstormy16_print_operand
2628#undef TARGET_PRINT_OPERAND_ADDRESS
2629#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2630
6b1ce545
AS
2631#undef TARGET_MEMORY_MOVE_COST
2632#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
5ab9749e 2633#undef TARGET_RTX_COSTS
3c50106f 2634#define TARGET_RTX_COSTS xstormy16_rtx_costs
5ab9749e 2635#undef TARGET_ADDRESS_COST
dcefdf67 2636#define TARGET_ADDRESS_COST xstormy16_address_cost
3c50106f 2637
5ab9749e 2638#undef TARGET_BUILD_BUILTIN_VA_LIST
f2f61ee7 2639#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
5ab9749e 2640#undef TARGET_EXPAND_BUILTIN_VA_START
d7bd8aeb 2641#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
5ab9749e 2642#undef TARGET_GIMPLIFY_VA_ARG_EXPR
f84fe9b6 2643#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
37cd4bca 2644
cde0f3fd
PB
2645#undef TARGET_PROMOTE_FUNCTION_MODE
2646#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5ab9749e 2647#undef TARGET_PROMOTE_PROTOTYPES
586de218 2648#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
7e43c821 2649
bf425ddd
NF
2650#undef TARGET_FUNCTION_ARG
2651#define TARGET_FUNCTION_ARG xstormy16_function_arg
2652#undef TARGET_FUNCTION_ARG_ADVANCE
2653#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2654
5ab9749e 2655#undef TARGET_RETURN_IN_MEMORY
7e43c821 2656#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
998871e9
AS
2657#undef TARGET_FUNCTION_VALUE
2658#define TARGET_FUNCTION_VALUE xstormy16_function_value
2659#undef TARGET_LIBCALL_VALUE
2660#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2661#undef TARGET_FUNCTION_VALUE_REGNO_P
2662#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
7e43c821 2663
5ab9749e 2664#undef TARGET_MACHINE_DEPENDENT_REORG
54e9a19d
DD
2665#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2666
ef795fc2
AS
2667#undef TARGET_PREFERRED_RELOAD_CLASS
2668#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2669#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2670#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2671
c6c3dba9
PB
2672#undef TARGET_LEGITIMATE_ADDRESS_P
2673#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
192997cf
AS
2674#undef TARGET_MODE_DEPENDENT_ADDRESS_P
2675#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
c6c3dba9 2676
7b5cbb57
AS
2677#undef TARGET_CAN_ELIMINATE
2678#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2679
fb8d0fac
RH
2680#undef TARGET_TRAMPOLINE_INIT
2681#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2682
4b58290f 2683struct gcc_target targetm = TARGET_INITIALIZER;
d6b5193b
RS
2684
2685#include "gt-stormy16.h"