/* Xstormy16 target functions.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "toplev.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "target.h"
#include "target-def.h"
#include "tm_p.h"
#include "langhooks.h"
#include "tree-gimple.h"
#include "ggc.h"

static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                           HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int *);
static int xstormy16_address_cost (rtx);
static bool xstormy16_return_in_memory (const_tree, const_tree);

/* Define the information needed to generate branch and scc insns.  This is
   stored from the compare operation.  */
struct rtx_def * xstormy16_compare_op0;
struct rtx_def * xstormy16_compare_op1;

static GTY(()) section *bss100_section;

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
                     int *total)
{
  switch (code)
    {
    case CONST_INT:
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1) / 2;
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (35 + 6);
      return true;
    case DIV:
      *total = COSTS_N_INSNS (51 - 6);
      return true;

    default:
      return false;
    }
}

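/* Return a cost estimate for address X: a constant address is the
   cheapest (2), a base-plus-offset address the most expensive (7),
   and anything else, typically a plain register, costs 5.  */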
static int
xstormy16_address_cost (rtx x)
{
  return (GET_CODE (x) == CONST_INT ? 2
          : GET_CODE (x) == PLUS ? 7
          : 5);
}

/* Branches are handled as follows:

   1. HImode compare-and-branches.  The machine supports these
      natively, so the appropriate pattern is emitted directly.

   2. SImode EQ and NE.  These are emitted as pairs of HImode
      compare-and-branches.

   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
      of a SImode subtract followed by a branch (not a compare-and-branch),
      like this:
      sub
      sbc
      blt

   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
      sub
      sbc
      blt
      or
      bne
*/

/* Emit a branch of kind CODE to location LOC.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
{
  rtx op0 = xstormy16_compare_op0;
  rtx op1 = xstormy16_compare_op1;
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
        lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
         created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
      if (gt_p)
        emit_label (lab);
      return;
    }
  else if (mode == SImode
           && (code == NE || code == EQ)
           && op1 != const0_rtx)
    {
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
        lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
        {
          xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
                                                       i * UNITS_PER_WORD);
          xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
                                                       i * UNITS_PER_WORD);
          xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
        }
      xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
                                                   i * UNITS_PER_WORD);
      xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
                                                   i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, loc);

      if (code == EQ)
        emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                              loc_ref, pc_rtx));

  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}

/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
                         rtx dest, rtx carry)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}


/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = (op != NULL_RTX
                         ? get_attr_length (insn) == 8
                         : get_attr_length (insn) == 4);
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *template;
  const char *operands;
  enum rtx_code code;

  if (! op)
    {
      if (need_longbranch)
        ccode = "jmpf";
      else
        ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  if (GET_CODE (XEXP (op, 0)) != REG)
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:  ccode = "z";   break;
    case NE:  ccode = "nz";  break;
    case GE:  ccode = "ge";  break;
    case LT:  ccode = "lt";  break;
    case GT:  ccode = "gt";  break;
    case LE:  ccode = "le";  break;
    case GEU: ccode = "nc";  break;
    case LTU: ccode = "c";   break;
    case GTU: ccode = "hi";  break;
    case LEU: ccode = "ls";  break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    template = "b%s %s,.+8 | jmpf %s";
  else
    template = "b%s %s,%s";
  sprintf (string, template, ccode, operands, label);

  return string;
}

/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *template;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:  ccode = "z";   break;
    case NE:  ccode = "nz";  break;
    case GE:  ccode = "ge";  break;
    case LT:  ccode = "lt";  break;
    case GEU: ccode = "nc";  break;
    case LTU: ccode = "c";   break;

      /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  switch (code)
    {
    case EQ: case NE:
      {
        int regnum;

        gcc_assert (GET_CODE (XEXP (op, 0)) == REG);

        regnum = REGNO (XEXP (op, 0));
        sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    template = "%s | b%s .+6 | jmpf %s";
  else
    template = "%s | b%s %s";
  sprintf (string, template, prevop, ccode, label);

  return string;
}

/* Many machines have some registers that cannot be copied directly to or from
   memory or even from other types of registers.  An example is the `MQ'
   register, which on most machines, can only be copied to or from general
   registers, but not memory.  Some machines allow copying all registers to and
   from memory, but require a scratch register for stores to some memory
   locations (e.g., those with symbolic address on the RT, and those with
   certain symbolic address on the SPARC when compiling PIC).  In some cases,
   both an intermediate and a scratch register are required.

   You should define these macros to indicate to the reload phase that it may
   need to allocate at least one register for a reload in addition to the
   register to contain the data.  Specifically, if copying X to a register
   CLASS in MODE requires an intermediate register, you should define
   `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
   whose registers can be used as intermediate registers or scratch registers.

   If copying a register CLASS in MODE to X requires an intermediate or scratch
   register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
   largest register class required.  If the requirements for input and output
   reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
   instead of defining both macros identically.

   The values returned by these macros are often `GENERAL_REGS'.  Return
   `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
   to or from a register of CLASS in MODE without requiring a scratch register.
   Do not define this macro if it would always return `NO_REGS'.

   If a scratch register is required (either with or without an intermediate
   register), you should define patterns for `reload_inM' or `reload_outM', as
   required.  These patterns, which will normally be implemented with a
   `define_expand', should be similar to the `movM' patterns, except that
   operand 2 is the scratch register.

   Define constraints for the reload register and scratch register that contain
   a single register class.  If the original reload register (whose class is
   CLASS) can meet the constraint given in the pattern, the value returned by
   these macros is used for the class of the scratch register.  Otherwise, two
   additional reload registers are required.  Their classes are obtained from
   the constraints in the insn pattern.

   X might be a pseudo-register or a `subreg' of a pseudo-register, which could
   either be in a hard register or in memory.  Use `true_regnum' to find out;
   it will return -1 if the pseudo is in memory and the hard register number if
   it is in a register.

   These macros should not be used in the case where a particular class of
   registers can only be copied to memory and not to another class of
   registers.  In that case, secondary reload registers are not needed and
   would not be helpful.  Instead, a stack location must be used to perform the
   copy and the `movM' pattern should use memory as an intermediate storage.
   This case often occurs between floating-point and general registers.  */

enum reg_class
xstormy16_secondary_reload_class (enum reg_class class,
                                  enum machine_mode mode,
                                  rtx x)
{
  /* This chip has the interesting property that only the first eight
     registers can be moved to/from memory.  */
  if ((GET_CODE (x) == MEM
       || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
           && (true_regnum (x) == -1
               || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
      && ! reg_class_subset_p (class, EIGHT_REGS))
    return EIGHT_REGS;

  /* When reloading a PLUS, the carry register will be required
     unless the inc or dec instructions can be used.  */
  if (xstormy16_carry_plus_operand (x, mode))
    return CARRY_REGS;

  return NO_REGS;
}

/* Recognize a PLUS that needs the carry register.  */
int
xstormy16_carry_plus_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (x) == PLUS
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
}

/* Detect and error out on out-of-range constants for movhi.  */
int
xs_hi_general_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if ((GET_CODE (x) == CONST_INT)
      && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
    error ("constant halfword load operand out of range");
  return general_operand (x, mode);
}

/* Detect and error out on out-of-range constants for addhi and subhi.  */
int
xs_hi_nonmemory_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if ((GET_CODE (x) == CONST_INT)
      && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
    error ("constant arithmetic operand out of range");
  return nonmemory_operand (x, mode);
}

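/* Return the preferred reload class for X.  Memory can only be
   accessed through the first eight registers, so prefer EIGHT_REGS
   over GENERAL_REGS when X is a MEM.  */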
enum reg_class
xstormy16_preferred_reload_class (rtx x, enum reg_class class)
{
  if (class == GENERAL_REGS
      && GET_CODE (x) == MEM)
    return EIGHT_REGS;

  return class;
}

/* Predicate for symbols and addresses that reflect special 8-bit
   addressing.  */
int
xstormy16_below100_symbol (rtx x,
                           enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    x = XEXP (x, 0);

  if (GET_CODE (x) == SYMBOL_REF)
    return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;

  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT i = INTVAL (x);
      if ((i >= 0x0000 && i <= 0x00ff)
          || (i >= 0x7f00 && i <= 0x7fff))
        return 1;
    }
  return 0;
}

/* Likewise, but only for non-volatile MEMs, for patterns where the
   MEM will get split into smaller sized accesses.  */
int
xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
{
  if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
    return 0;
  return xstormy16_below100_operand (x, mode);
}

/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  */
void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

/* Likewise, for AND.  */
void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
 (GET_CODE (X) == CONST_INT \
  && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
 (GET_CODE (X) == CONST_INT \
  && INTVAL (X) + (OFFSET) >= 0 \
  && INTVAL (X) + (OFFSET) < 0x8000 \
  && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))

int
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                                rtx x, int strict)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return 1;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    x = XEXP (x, 0);

  if ((GET_CODE (x) == PRE_MODIFY
       && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return 1;

  if (xstormy16_below100_symbol (x, mode))
    return 1;

  return 0;
}

/* Return nonzero if memory address X (an RTX) can have different
   meanings depending on the machine mode of the memory reference it
   is used for or if the address is valid for some modes but not
   others.

   Autoincrement and autodecrement addresses typically have mode-dependent
   effects because the amount of the increment or decrement is the size of the
   operand being addressed.  Some machines have other mode-dependent addresses.
   Many RISC machines have no mode-dependent addresses.

   You may assume that ADDR is a valid address for the machine.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */
int
xstormy16_mode_dependent_address_p (rtx x)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return 1;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return 1;

  if (GET_CODE (x) == PLUS)
    x = XEXP (x, 0);

  /* Auto-increment addresses are now treated generically in recog.c.  */

  return 0;
}

/* A C expression that defines the optional machine-dependent constraint
   letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
   types of operands, usually memory references, for the target machine.
   Normally this macro will not be defined.  If it is required for a particular
   target machine, it should return 1 if VALUE corresponds to the operand type
   represented by the constraint letter C.  If C is not defined as an extra
   constraint, the value returned should be 0 regardless of VALUE.  */
int
xstormy16_extra_constraint_p (rtx x, int c)
{
  switch (c)
    {
      /* 'Q' is for pushes.  */
    case 'Q':
      return (GET_CODE (x) == MEM
              && GET_CODE (XEXP (x, 0)) == POST_INC
              && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);

      /* 'R' is for pops.  */
    case 'R':
      return (GET_CODE (x) == MEM
              && GET_CODE (XEXP (x, 0)) == PRE_DEC
              && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);

      /* 'S' is for immediate memory addresses.  */
    case 'S':
      return (GET_CODE (x) == MEM
              && GET_CODE (XEXP (x, 0)) == CONST_INT
              && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));

      /* 'T' is for Rx.  */
    case 'T':
      /* Not implemented yet.  */
      return 0;

      /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
         for allocating a scratch register for 32-bit shifts.  */
    case 'U':
      return (GET_CODE (x) == CONST_INT
              && (INTVAL (x) < 2 || INTVAL (x) > 15));

      /* 'Z' is for CONST_INT value zero.  This is for adding zero to
         a register in addhi3, which would otherwise require a carry.  */
    case 'Z':
      return (GET_CODE (x) == CONST_INT
              && (INTVAL (x) == 0));

    case 'W':
      return xstormy16_below100_operand (x, GET_MODE (x));

    default:
      return 0;
    }
}

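/* Return nonzero if X is a memory operand whose address is a plain
   base with no offset (i.e. not a PLUS).  */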
int
short_memory_operand (rtx x, enum machine_mode mode)
{
  if (! memory_operand (x, mode))
    return 0;
  return (GET_CODE (XEXP (x, 0)) != PLUS);
}

/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.
   */

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
              && mode != QImode && mode != HImode
              && nonimmediate_operand (dest, mode)
              && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (GET_CODE (dest) == MEM)
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
        {
          dest = copy_rtx (dest);
          MEM_VOLATILE_P (dest) = 0;
        }
    }
  else if (GET_CODE (src) == MEM)
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
        {
          src = copy_rtx (src);
          MEM_VOLATILE_P (src) = 0;
        }
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      if (GET_CODE (src) == REG
          && GET_CODE (dest) == REG
          && reg_overlap_mentioned_p (dest, src)
          && REGNO (dest) > REGNO (src))
        direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
           || (GET_CODE (mem_operand) == PLUS
               && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    direction = -1;
  else if (GET_CODE (src) == MEM
           && reg_overlap_mentioned_p (dest, src))
    {
      int regno;

      gcc_assert (GET_CODE (dest) == REG);
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
                                     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
        direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
                                  mem_operand, 0))
        direction = 1;
      else
        /* This means something like
           (set (reg:DI r0) (mem:DI (reg:HI r1)))
           which we'd need to support by doing the set of the second word
           last.  */
        gcc_unreachable ();
    }

  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      if (src_modifies)
        w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
        MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
        w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_dest = simplify_gen_subreg (word_mode, dest, mode,
                                      i * UNITS_PER_WORD);
      if (dest_volatile)
        MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
                  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
        REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
                                            auto_inc_reg_rtx,
                                            REG_NOTES (insn));
    }
}

/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
{
  if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && GET_CODE (dest) == MEM
      && (GET_CODE (XEXP (dest, 0)) != CONST_INT
          || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && GET_CODE (src) != REG
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}

/* Stack Layout:

   The stack is laid out as follows:

SP->
FP->    Local variables
        Register save area (up to 4 words)
        Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

AP->    Return address (two words)
        9th procedure parameter word
        10th procedure parameter word
        ...
        last procedure parameter word

  The frame pointer location is tuned to make it most likely that all
  parameters and local variables can be accessed using a load-indexed
  instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};

/* Does REGNO need to be saved?  */
#define REG_NEEDS_SAVE(REGNUM, IFUN) \
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
       && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS) \
       && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))

/* Compute the stack layout.  */
struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  if (current_function_stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
                       + layout.register_save_size
                       + layout.stdarg_save_size);

  if (current_function_args_size <= 2048 && current_function_args_size != -1)
    {
      if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
          + current_function_args_size <= 2048)
        layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
      else
        layout.fp_minus_ap = 2048 - current_function_args_size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
                          + layout.register_save_size
                          + INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
                        - layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}

/* Determine how all the special registers get eliminated.  */
int
xstormy16_initial_elimination_offset (int from, int to)
{
  struct xstormy16_stack_layout layout;
  int result;

  layout = xstormy16_compute_stack_layout ();

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = layout.sp_minus_fp - layout.locals_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = -layout.locals_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = -layout.fp_minus_ap;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = -(layout.sp_minus_fp + layout.fp_minus_ap);
  else
    gcc_unreachable ();

  return result;
}

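/* Emit DEST = SRC0 + SRC1 as a PARALLEL of the HImode addition and
   the carry-register clobber, suitable for use after reload by the
   prologue and epilogue expanders below.  */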
static rtx
emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
{
  rtx set, clobber, insn;

  set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
  insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
  return insn;
}

/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */
void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
         regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
         regno++)
      {
        rtx dwarf;
        rtx reg = gen_rtx_REG (HImode, regno);

        insn = emit_move_insn (mem_push_rtx, reg);
        RTX_FRAME_RELATED_P (insn) = 1;

        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
                                             reg);
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
                                             plus_constant (stack_pointer_rtx,
                                                            GET_MODE_SIZE (Pmode)));
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                              dwarf,
                                              REG_NOTES (insn));
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
        rtx dwarf;
        rtx reg = gen_rtx_REG (HImode, regno);

        insn = emit_move_insn (mem_push_rtx, reg);
        RTX_FRAME_RELATED_P (insn) = 1;

        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
                                             reg);
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
                                             plus_constant (stack_pointer_rtx,
                                                            GET_MODE_SIZE (Pmode)));
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                              dwarf,
                                              REG_NOTES (insn));
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);

      if (layout.sp_minus_fp)
        emit_addhi3_postreload (hard_frame_pointer_rtx,
                                hard_frame_pointer_rtx,
                                GEN_INT (-layout.sp_minus_fp));
    }
}

/* Do we need an epilogue at all?  */
int
direct_return (void)
{
  return (reload_completed
          && xstormy16_compute_stack_layout ().frame_size == 0);
}

/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx, insn;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
        emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
        {
          insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                         GEN_INT (- layout.locals_size));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }

  /* Restore any call-saved registers.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
        rtx dwarf;

        insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
        RTX_FRAME_RELATED_P (insn) = 1;
        dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
                             plus_constant (stack_pointer_rtx,
                                            -GET_MODE_SIZE (Pmode)));
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                              dwarf,
                                              REG_NOTES (insn));
      }

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                     GEN_INT (- layout.stdarg_save_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Return.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}

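/* Return nonzero if the epilogue uses register REGNO: after reload,
   any call-used register that REG_NEEDS_SAVE says was saved will be
   restored there.  */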
int
xstormy16_epilogue_uses (int regno)
{
  if (reload_completed && call_used_regs[regno])
    {
      const int ifun = xstormy16_interrupt_function_p ();
      return REG_NEEDS_SAVE (regno, ifun);
    }
  return 0;
}

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}


/* Return an updated summarizer variable CUM to advance past an
   argument in the argument list.  The values MODE, TYPE and NAMED
   describe that argument.  Once this is done, the variable CUM is
   suitable for analyzing the *following* argument with
   `FUNCTION_ARG', etc.

   This function need not do anything if the argument in question was
   passed on the stack.  The compiler knows how to track the amount of
   stack space used for arguments without any special help.  However,
   it makes life easier for xstormy16_build_va_list if it does update
   the word count.  */
CUMULATIVE_ARGS
xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
                                tree type, int named ATTRIBUTE_UNUSED)
{
  /* If an argument would otherwise be passed partially in registers,
     and partially on the stack, the whole of it is passed on the
     stack.  */
  if (cum < NUM_ARGUMENT_REGISTERS
      && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    cum = NUM_ARGUMENT_REGISTERS;

  cum += XSTORMY16_WORD_SIZE (type, mode);

  return cum;
}

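/* Return the register (number CUM + 2) in which to pass the argument
   of mode MODE and type TYPE, or zero if it must be passed on the
   stack.  */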
rtx
xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
                        tree type, int named ATTRIBUTE_UNUSED)
{
  if (mode == VOIDmode)
    return const0_rtx;
  if (targetm.calls.must_pass_in_stack (mode, type)
      || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    return 0;
  return gen_rtx_REG (mode, cum + 2);
}

/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
                    ptr_type_node);
  f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
                    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_1;
  TREE_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}

/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */
void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  t = build2 (PLUS_EXPR, TREE_TYPE (base), t,
              build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET));
  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (count), count,
              build_int_cst (NULL_TREE,
                             current_function_args_info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}

/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_expand_builtin_va_arg (tree valist, tree type, tree *pre_p,
                                 tree *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label ();
  lab_fromstack = create_artificial_label ();
  addr = create_tmp_var (ptr_type_node, NULL);

  if (!must_stack)
    {
      tree r;

      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
                  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
                  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = fold_convert (ptr_type_node, count_tmp);
      t = build2 (PLUS_EXPR, ptr_type_node, base, t);
      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (GIMPLE_MODIFY_STMT, void_type_node, count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
                - INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
              fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (base), fold (t));
  t = build2 (MINUS_EXPR, TREE_TYPE (base), base, t);
  t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (count), count, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}

1484/* Initialize the variable parts of a trampoline. ADDR is an RTX for
1485 the address of the trampoline; FNADDR is an RTX for the address of
1486 the nested function; STATIC_CHAIN is an RTX for the static chain
1487 value that should be passed to the function when it is called. */
1488void
51c16b7e 1489xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
4b58290f
GK
1490{
1491 rtx reg_addr = gen_reg_rtx (Pmode);
1492 rtx temp = gen_reg_rtx (HImode);
1493 rtx reg_fnaddr = gen_reg_rtx (HImode);
1494 rtx reg_addr_mem;
1495
e2470e1b 1496 reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);
4b58290f
GK
1497
1498 emit_move_insn (reg_addr, addr);
1499 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1500 emit_move_insn (reg_addr_mem, temp);
e2470e1b 1501 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
4b58290f
GK
1502 emit_move_insn (temp, static_chain);
1503 emit_move_insn (reg_addr_mem, temp);
e2470e1b 1504 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
4b58290f
GK
1505 emit_move_insn (reg_fnaddr, fnaddr);
1506 emit_move_insn (temp, reg_fnaddr);
1507 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1508 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1509 emit_move_insn (reg_addr_mem, temp);
e2470e1b 1510 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
4b58290f
GK
1511 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1512 emit_move_insn (reg_addr_mem, reg_fnaddr);
1513}
1514
bd5bd7ac
KH
1515/* Worker function for FUNCTION_VALUE. */
1516
4b58290f 1517rtx
586de218 1518xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
4b58290f
GK
1519{
1520 enum machine_mode mode;
1521 mode = TYPE_MODE (valtype);
1522 PROMOTE_MODE (mode, 0, valtype);
1523 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1524}
1525
52560c7b
GK
1526/* A C compound statement that outputs the assembler code for a thunk function,
1527 used to implement C++ virtual function calls with multiple inheritance. The
1528 thunk acts as a wrapper around a virtual function, adjusting the implicit
1529 object parameter before handing control off to the real function.
1530
1531 First, emit code to add the integer DELTA to the location that contains the
1532 incoming first argument. Assume that this argument contains a pointer, and
1533 is the one used to pass the `this' pointer in C++. This is the incoming
1534 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1535 addition must preserve the values of all other incoming arguments.
1536
1537 After the addition, emit code to jump to FUNCTION, which is a
1538 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1539 the return address. Hence returning from FUNCTION will return to whoever
1540 called the current `thunk'.
1541
1542 The effect must be as if @var{function} had been called directly
1543 with the adjusted first argument. This macro is responsible for
1544 emitting all of the code for a thunk function;
1545 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1546 not invoked.
1547
1548 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1549 extracted from it.) It might possibly be useful on some targets, but
1550 probably not. */
1551
c590b625 1552static void
51c16b7e
SB
1553xstormy16_asm_output_mi_thunk (FILE *file,
1554 tree thunk_fndecl ATTRIBUTE_UNUSED,
1555 HOST_WIDE_INT delta,
1556 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1557 tree function)
52560c7b
GK
1558{
1559 int regnum = FIRST_ARGUMENT_REGISTER;
1560
1561 /* There might be a hidden first argument for a returned structure. */
61f71b34 1562 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
52560c7b
GK
1563 regnum += 1;
1564
eb0424da 1565 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
52560c7b
GK
1566 fputs ("\tjmpf ", file);
1567 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1568 putc ('\n', file);
1569}
1570
54e9a19d
DD
1571/* The purpose of this function is to override the default behavior of
1572 BSS objects. Normally, they go into .bss or .sbss via ".common"
1573 directives, but we need to override that and put them in
1574 .bss_below100. We can't just use a section override (like we do
1575 for .data_below100), because that makes them initialized rather
1576 than uninitialized. */
1577void
1578xstormy16_asm_output_aligned_common (FILE *stream,
2f806f3b 1579 tree decl,
54e9a19d
DD
1580 const char *name,
1581 int size,
1582 int align,
1583 int global)
1584{
2f806f3b
NC
1585 rtx mem = DECL_RTL (decl);
1586 rtx symbol;
1587
1588 if (mem != NULL_RTX
1589 && GET_CODE (mem) == MEM
1590 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1591 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
54e9a19d 1592 {
2f806f3b
NC
1593 const char *name2;
1594 int p2align = 0;
1595
d6b5193b 1596 switch_to_section (bss100_section);
2f806f3b
NC
1597
1598 while (align > 8)
54e9a19d 1599 {
2f806f3b
NC
1600 align /= 2;
1601 p2align ++;
54e9a19d 1602 }
54e9a19d 1603
2f806f3b
NC
1604 name2 = default_strip_name_encoding (name);
1605 if (global)
1606 fprintf (stream, "\t.globl\t%s\n", name2);
1607 if (p2align)
1608 fprintf (stream, "\t.p2align %d\n", p2align);
1609 fprintf (stream, "\t.type\t%s, @object\n", name2);
1610 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1611 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1612 return;
54e9a19d
DD
1613 }
1614
1615 if (!global)
1616 {
1617 fprintf (stream, "\t.local\t");
1618 assemble_name (stream, name);
1619 fprintf (stream, "\n");
1620 }
1621 fprintf (stream, "\t.comm\t");
1622 assemble_name (stream, name);
43f51151 1623 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
54e9a19d
DD
1624}
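/* A sketch of the two output forms above, assuming a 4-byte global
   `buf' with 16-bit (2-byte) alignment.  With the below100 flag set it
   is placed in .bss_below100 as

       .globl  buf
       .p2align 1
       .type   buf, @object
       .size   buf, 4
   buf:
       .space  4

   while an ordinary object falls through to the common path:

       .comm   buf,4,2

   These fragments only illustrate the fprintf calls above; the exact
   directives depend on the target configuration.  */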
1625
d6b5193b
RS
1626/* Implement TARGET_ASM_INIT_SECTIONS. */
1627
1628static void
1629xstormy16_asm_init_sections (void)
1630{
1631 bss100_section
1632 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1633 output_section_asm_op,
1634 "\t.section \".bss_below100\",\"aw\",@nobits");
1635}
1636
54e9a19d
DD
1637/* Mark symbols with the "below100" attribute so that we can use the
1638 special addressing modes for them. */
1639
1640static void
2f806f3b 1641xstormy16_encode_section_info (tree decl, rtx r, int first)
54e9a19d 1642{
e5eb9a52
NC
1643 default_encode_section_info (decl, r, first);
1644
2f806f3b 1645 if (TREE_CODE (decl) == VAR_DECL
54e9a19d
DD
1646 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1647 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1648 {
2f806f3b
NC
1649 rtx symbol = XEXP (r, 0);
1650
1651 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1652 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
54e9a19d
DD
1653 }
1654}
1655
43898541
GK
1656/* Output constructors and destructors. Just like
1657 default_named_section_asm_out_* but don't set the sections writable. */
6208b55d 1658#undef TARGET_ASM_CONSTRUCTOR
c6243b4c 1659#define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
6208b55d 1660#undef TARGET_ASM_DESTRUCTOR
c6243b4c 1661#define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
43898541
GK
1662
1663static void
51c16b7e 1664xstormy16_asm_out_destructor (rtx symbol, int priority)
43898541
GK
1665{
1666 const char *section = ".dtors";
1667 char buf[16];
1668
71cc389b 1669 /* ??? This only works reliably with the GNU linker. */
43898541
GK
1670 if (priority != DEFAULT_INIT_PRIORITY)
1671 {
1672 sprintf (buf, ".dtors.%.5u",
1673 /* Invert the numbering so the linker puts us in the proper
1674 order; constructors are run from right to left, and the
1675 linker sorts in increasing order. */
1676 MAX_INIT_PRIORITY - priority);
1677 section = buf;
1678 }
1679
d6b5193b 1680 switch_to_section (get_section (section, 0, NULL));
43898541
GK
1681 assemble_align (POINTER_SIZE);
1682 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1683}
1684
1685static void
51c16b7e 1686xstormy16_asm_out_constructor (rtx symbol, int priority)
43898541
GK
1687{
1688 const char *section = ".ctors";
1689 char buf[16];
1690
71cc389b 1691 /* ??? This only works reliably with the GNU linker. */
43898541
GK
1692 if (priority != DEFAULT_INIT_PRIORITY)
1693 {
1694 sprintf (buf, ".ctors.%.5u",
1695 /* Invert the numbering so the linker puts us in the proper
1696 order; constructors are run from right to left, and the
1697 linker sorts in increasing order. */
1698 MAX_INIT_PRIORITY - priority);
1699 section = buf;
1700 }
1701
d6b5193b 1702 switch_to_section (get_section (section, 0, NULL));
43898541
GK
1703 assemble_align (POINTER_SIZE);
1704 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1705}
4b58290f
GK
1706\f
1707/* Print a memory address as an operand to reference that memory location. */
1708void
51c16b7e 1709xstormy16_print_operand_address (FILE *file, rtx address)
4b58290f
GK
1710{
1711 HOST_WIDE_INT offset;
1712 int pre_dec, post_inc;
1713
1714 /* There are a few easy cases. */
1715 if (GET_CODE (address) == CONST_INT)
1716 {
1717 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1718 return;
1719 }
1720
1721 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1722 {
1723 output_addr_const (file, address);
1724 return;
1725 }
1726
 1727 /* Otherwise, it must be a register, possibly wrapped in PRE_DEC or
 1728 POST_INC, and possibly inside a PLUS with a CONST_INT offset, e.g.
 1729 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1730
1731 if (GET_CODE (address) == PLUS)
1732 {
4718bfd8 1733 gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
4b58290f
GK
1734 offset = INTVAL (XEXP (address, 1));
1735 address = XEXP (address, 0);
1736 }
1737 else
1738 offset = 0;
1739
1740 pre_dec = (GET_CODE (address) == PRE_DEC);
1741 post_inc = (GET_CODE (address) == POST_INC);
1742 if (pre_dec || post_inc)
1743 address = XEXP (address, 0);
1744
4718bfd8 1745 gcc_assert (GET_CODE (address) == REG);
4b58290f
GK
1746
1747 fputc ('(', file);
1748 if (pre_dec)
1749 fputs ("--", file);
1750 fputs (reg_names [REGNO (address)], file);
1751 if (post_inc)
1752 fputs ("++", file);
1753 if (offset != 0)
4a0a75dd 1754 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
4b58290f
GK
1755 fputc (')', file);
1756}
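/* Examples of the address syntax produced above (illustrative only,
   assuming standard register names):

     (reg:HI r5)                          ->  (r5)
     (plus:HI (reg:HI r5) (const_int 4))  ->  (r5,4)
     (post_inc:HI (reg:HI r5))            ->  (r5++)
     (pre_dec:HI (reg:HI r5))             ->  (--r5)
     (const_int 100)                      ->  100                      */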
1757
e03f5d43 1758/* Print an operand to an assembler instruction. */
4b58290f 1759void
51c16b7e 1760xstormy16_print_operand (FILE *file, rtx x, int code)
4b58290f
GK
1761{
1762 switch (code)
1763 {
1764 case 'B':
1765 /* There is either one bit set, or one bit clear, in X.
 1766 Print the number of that bit, preceded by '#'. */
1767 {
54e9a19d 1768 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
e9818db2
GK
1769 HOST_WIDE_INT xx = 1;
1770 HOST_WIDE_INT l;
4b58290f
GK
1771
1772 if (GET_CODE (x) == CONST_INT)
1773 xx = INTVAL (x);
1774 else
9e637a26 1775 output_operand_lossage ("'B' operand is not constant");
4b58290f 1776
54e9a19d
DD
1777 /* GCC sign-extends masks with the MSB set, so we have to
1778 detect all the cases that differ only in sign extension
1779 beyond the bits we care about. Normally, the predicates
1780 and constraints ensure that we have the right values. This
1781 works correctly for valid masks. */
1782 if (bits_set[xx & 7] <= 1)
1783 {
1784 /* Remove sign extension bits. */
1785 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1786 xx &= 0xff;
1787 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1788 xx &= 0xffff;
1789 l = exact_log2 (xx);
1790 }
1791 else
1792 {
1793 /* Add sign extension bits. */
1794 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1795 xx |= ~(HOST_WIDE_INT)0xff;
1796 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1797 xx |= ~(HOST_WIDE_INT)0xffff;
1798 l = exact_log2 (~xx);
1799 }
1800
4b58290f 1801 if (l == -1)
9e637a26 1802 output_operand_lossage ("'B' operand has multiple bits set");
4b58290f 1803
4a0a75dd 1804 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
4b58290f
GK
1805 return;
1806 }
1807
1808 case 'C':
1809 /* Print the symbol without a surrounding @fptr(). */
1810 if (GET_CODE (x) == SYMBOL_REF)
1811 assemble_name (file, XSTR (x, 0));
2f0b7af6
GK
1812 else if (GET_CODE (x) == LABEL_REF)
1813 output_asm_label (x);
4b58290f 1814 else
c6243b4c 1815 xstormy16_print_operand_address (file, x);
4b58290f
GK
1816 return;
1817
1818 case 'o':
1819 case 'O':
1820 /* Print the immediate operand less one, preceded by '#'.
1821 For 'O', negate it first. */
1822 {
e9818db2 1823 HOST_WIDE_INT xx = 0;
4b58290f
GK
1824
1825 if (GET_CODE (x) == CONST_INT)
1826 xx = INTVAL (x);
1827 else
9e637a26 1828 output_operand_lossage ("'o' operand is not constant");
4b58290f
GK
1829
1830 if (code == 'O')
1831 xx = -xx;
1832
4a0a75dd 1833 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
4b58290f
GK
1834 return;
1835 }
1836
54e9a19d
DD
1837 case 'b':
1838 /* Print the shift mask for bp/bn. */
1839 {
1840 HOST_WIDE_INT xx = 1;
1841 HOST_WIDE_INT l;
1842
1843 if (GET_CODE (x) == CONST_INT)
1844 xx = INTVAL (x);
1845 else
9e637a26 1846 output_operand_lossage ("'b' operand is not constant");
54e9a19d
DD
1847
1848 l = 7 - xx;
1849
1850 fputs (IMMEDIATE_PREFIX, file);
1851 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1852 return;
1853 }
1854
4b58290f
GK
1855 case 0:
1856 /* Handled below. */
1857 break;
1858
1859 default:
c6243b4c 1860 output_operand_lossage ("xstormy16_print_operand: unknown code");
4b58290f
GK
1861 return;
1862 }
1863
1864 switch (GET_CODE (x))
1865 {
1866 case REG:
1867 fputs (reg_names [REGNO (x)], file);
1868 break;
1869
1870 case MEM:
c6243b4c 1871 xstormy16_print_operand_address (file, XEXP (x, 0));
4b58290f
GK
1872 break;
1873
1874 default:
1875 /* Some kind of constant or label; an immediate operand,
1876 so prefix it with '#' for the assembler. */
1877 fputs (IMMEDIATE_PREFIX, file);
1878 output_addr_const (file, x);
1879 break;
1880 }
1881
1882 return;
1883}
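/* Illustrative examples of the operand codes handled above, assuming
   IMMEDIATE_PREFIX is "#":

     %B of (const_int 8)    prints  #3   (bit number of the single set bit)
     %b of (const_int 3)    prints  #4   (7 - 3)
     %o of (const_int 5)    prints  #4   (operand less one)
     %O of (const_int -5)   prints  #4   (negated, then less one)
     %C of a SYMBOL_REF     prints the bare name, without @fptr().  */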
1884
1885\f
1886/* Expander for the `casesi' pattern.
1887 INDEX is the index of the switch statement.
1888 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1889 to the first table entry.
1890 RANGE is the number of table entries.
1891 TABLE is an ADDR_VEC that is the jump table.
1892 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1893 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
1894*/
1895
1896void
51c16b7e
SB
1897xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1898 rtx table, rtx default_label)
4b58290f
GK
1899{
1900 HOST_WIDE_INT range_i = INTVAL (range);
1901 rtx int_index;
1902
1903 /* This code uses 'br', so it can deal only with tables of size up to
1904 8192 entries. */
1905 if (range_i >= 8192)
 1906 sorry ("switch statement of %lu entries is too large",
1907 (unsigned long) range_i);
1908
4192f0d2 1909 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
4b58290f
GK
1910 OPTAB_LIB_WIDEN);
1911 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
40c13662 1912 default_label);
4b58290f 1913 int_index = gen_lowpart_common (HImode, index);
a556fd39 1914 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
4b58290f
GK
1915 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1916}
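/* As a sketch, for `switch' bounds [10, 20] the expansion above is:
   subtract 10 from INDEX in SImode, branch to DEFAULT_LABEL if the
   result is above RANGE (unsigned), shift the HImode low part left by
   2 (presumably because each table entry is a 4-byte `jmpf'), and emit
   a `tablejump_pcrel' through TABLE.  */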
1917
1918/* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1919 instructions, without label or alignment or any other special
1920 constructs. We know that the previous instruction will be the
1921 `tablejump_pcrel' output above.
1922
1923 TODO: it might be nice to output 'br' instructions if they could
1924 all reach. */
1925
1926void
51c16b7e 1927xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
4b58290f
GK
1928{
1929 int vlen, idx;
1930
d6b5193b 1931 switch_to_section (current_function_section ());
4b58290f
GK
1932
1933 vlen = XVECLEN (table, 0);
1934 for (idx = 0; idx < vlen; idx++)
1935 {
1936 fputs ("\tjmpf ", file);
2f0b7af6 1937 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
4b58290f
GK
1938 fputc ('\n', file);
1939 }
1940}
1941
1942\f
 1943/* Expander for the `call' patterns.
 1944 RETVAL is the RTL for the register that receives the return value,
 1945 or NULL when the call's result is not used.
 1946 DEST is a MEM whose address is the function being called; if that
 1947 address is neither constant nor already a register, it is forced
 1948 into a register first.
 1949 COUNTER is passed through unchanged as the second operand of the
 1950 CALL rtx that this expander emits.
 1951*/
1952
1953void
51c16b7e 1954xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
4b58290f
GK
1955{
1956 rtx call, temp;
1957 enum machine_mode mode;
1958
4718bfd8 1959 gcc_assert (GET_CODE (dest) == MEM);
4b58290f
GK
1960 dest = XEXP (dest, 0);
1961
1962 if (! CONSTANT_P (dest)
1963 && GET_CODE (dest) != REG)
1964 dest = force_reg (Pmode, dest);
1965
1966 if (retval == NULL)
1967 mode = VOIDmode;
1968 else
1969 mode = GET_MODE (retval);
1970
1971 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1972 counter);
1973 if (retval)
1974 call = gen_rtx_SET (VOIDmode, retval, call);
1975
1976 if (! CONSTANT_P (dest))
1977 {
1978 temp = gen_reg_rtx (HImode);
1979 emit_move_insn (temp, const0_rtx);
1980 }
1981 else
1982 temp = const0_rtx;
1983
1984 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1985 gen_rtx_USE (VOIDmode, temp)));
1986 emit_call_insn (call);
1987}
1988\f
1989/* Expanders for multiword computational operations. */
1990
1991/* Expander for arithmetic operations; emit insns to compute
1992
1993 (set DEST (CODE:MODE SRC0 SRC1))
1994
1995 using CARRY as a temporary. When CODE is COMPARE, a branch
1996 template is generated (this saves duplicating code in
c6243b4c 1997 xstormy16_split_cbranch). */
4b58290f
GK
1998
1999void
51c16b7e
SB
2000xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
2001 rtx dest, rtx src0, rtx src1, rtx carry)
4b58290f
GK
2002{
2003 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
2004 int i;
2005 int firstloop = 1;
2006
2007 if (code == NEG)
9be13211 2008 emit_move_insn (src0, const0_rtx);
4b58290f
GK
2009
2010 for (i = 0; i < num_words; i++)
2011 {
2012 rtx w_src0, w_src1, w_dest;
2013 rtx insn;
2014
9be13211
DD
2015 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
2016 i * UNITS_PER_WORD);
4b58290f
GK
2017 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
2018 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
2019
2020 switch (code)
2021 {
2022 case PLUS:
2023 if (firstloop
2024 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
2025 continue;
2026
2027 if (firstloop)
2028 insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
2029 else
2030 insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
2031 break;
2032
2033 case NEG:
2034 case MINUS:
2035 case COMPARE:
2036 if (code == COMPARE && i == num_words - 1)
2037 {
2038 rtx branch, sub, clobber, sub_1;
2039
2040 sub_1 = gen_rtx_MINUS (HImode, w_src0,
2041 gen_rtx_ZERO_EXTEND (HImode, carry));
2042 sub = gen_rtx_SET (VOIDmode, w_dest,
2043 gen_rtx_MINUS (HImode, sub_1, w_src1));
2044 clobber = gen_rtx_CLOBBER (VOIDmode, carry);
2045 branch = gen_rtx_SET (VOIDmode, pc_rtx,
2046 gen_rtx_IF_THEN_ELSE (VOIDmode,
2047 gen_rtx_EQ (HImode,
2048 sub_1,
2049 w_src1),
2050 pc_rtx,
2051 pc_rtx));
2052 insn = gen_rtx_PARALLEL (VOIDmode,
2053 gen_rtvec (3, branch, sub, clobber));
2054 }
2055 else if (firstloop
2056 && code != COMPARE
2057 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
2058 continue;
2059 else if (firstloop)
2060 insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
2061 else
2062 insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
2063 break;
2064
2065 case IOR:
2066 case XOR:
2067 case AND:
2068 if (GET_CODE (w_src1) == CONST_INT
2069 && INTVAL (w_src1) == -(code == AND))
2070 continue;
2071
1c563bed 2072 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
0f4c242b 2073 w_src0, w_src1));
4b58290f
GK
2074 break;
2075
2076 case NOT:
2077 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2078 break;
2079
2080 default:
4718bfd8 2081 gcc_unreachable ();
4b58290f
GK
2082 }
2083
2084 firstloop = 0;
2085 emit (insn);
2086 }
f3cd0185
DD
2087
2088 /* If we emit nothing, try_split() will think we failed. So emit
2089 something that does nothing and can be optimized away. */
2090 if (firstloop)
2091 emit (gen_nop ());
4b58290f
GK
2092}
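/* As an illustration, an SImode addition expands to two word-sized
   insns, using the pattern names taken above:

       addchi4  (dest.lo, src0.lo, src1.lo, carry)          ; first word
       addchi5  (dest.hi, src0.hi, src1.hi, carry, carry)   ; adds carry in

   where .lo/.hi stand for the word_mode subregs built with
   simplify_gen_subreg.  This is a sketch of the structure, not the
   exact RTL.  */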
2093
4b58290f
GK
2094/* The shift operations are split at output time for constant values;
2095 variable-width shifts get handed off to a library routine.
2096
 2097 Generate an output string to do (set X (CODE:MODE X SIZE_R));
 2098 SIZE_R will be a CONST_INT and X will be a hard register. */
2099
2100const char *
51c16b7e
SB
2101xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2102 rtx x, rtx size_r, rtx temp)
4b58290f
GK
2103{
2104 HOST_WIDE_INT size;
2105 const char *r0, *r1, *rt;
2106 static char r[64];
2107
4718bfd8
NS
2108 gcc_assert (GET_CODE (size_r) == CONST_INT
2109 && GET_CODE (x) == REG && mode == SImode);
4b58290f
GK
2110 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2111
2112 if (size == 0)
2113 return "";
2114
2115 r0 = reg_names [REGNO (x)];
2116 r1 = reg_names [REGNO (x) + 1];
4b58290f
GK
2117
2118 /* For shifts of size 1, we can use the rotate instructions. */
2119 if (size == 1)
2120 {
2121 switch (code)
2122 {
2123 case ASHIFT:
2124 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2125 break;
2126 case ASHIFTRT:
2127 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2128 break;
2129 case LSHIFTRT:
2130 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2131 break;
2132 default:
4718bfd8 2133 gcc_unreachable ();
4b58290f
GK
2134 }
2135 return r;
2136 }
2137
2138 /* For large shifts, there are easy special cases. */
2139 if (size == 16)
2140 {
2141 switch (code)
2142 {
2143 case ASHIFT:
2144 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2145 break;
2146 case ASHIFTRT:
2147 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2148 break;
2149 case LSHIFTRT:
2150 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2151 break;
2152 default:
4718bfd8 2153 gcc_unreachable ();
4b58290f
GK
2154 }
2155 return r;
2156 }
2157 if (size > 16)
2158 {
2159 switch (code)
2160 {
2161 case ASHIFT:
2162 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2163 r1, r0, r0, r1, (int) size - 16);
2164 break;
2165 case ASHIFTRT:
2166 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2167 r0, r1, r1, r0, (int) size - 16);
2168 break;
2169 case LSHIFTRT:
2170 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2171 r0, r1, r1, r0, (int) size - 16);
2172 break;
2173 default:
4718bfd8 2174 gcc_unreachable ();
4b58290f
GK
2175 }
2176 return r;
2177 }
2178
2179 /* For the rest, we have to do more work. In particular, we
2180 need a temporary. */
5766e0ef 2181 rt = reg_names [REGNO (temp)];
4b58290f
GK
2182 switch (code)
2183 {
2184 case ASHIFT:
2185 sprintf (r,
2186 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
804a0655 2187 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
4b58290f
GK
2188 r1, rt);
2189 break;
2190 case ASHIFTRT:
2191 sprintf (r,
2192 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
804a0655 2193 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
4b58290f
GK
2194 r0, rt);
2195 break;
2196 case LSHIFTRT:
2197 sprintf (r,
2198 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
804a0655 2199 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
4b58290f
GK
2200 r0, rt);
2201 break;
2202 default:
4718bfd8 2203 gcc_unreachable ();
4b58290f
GK
2204 }
2205 return r;
2206}
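/* For example (register names assumed, not guaranteed): an ASHIFT of
   the SImode register pair r2/r3 by 4, with temporary r4, yields the
   template

       mov r4,r2 | shl r2,#4 | shl r3,#4 | shr r4,#12 | or r3,r4

   which is what the ASHIFT sprintf above formats.  */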
2207\f
2208/* Attribute handling. */
2209
 2210/* Return nonzero if the current function is an interrupt function. */
2211int
51c16b7e 2212xstormy16_interrupt_function_p (void)
4b58290f
GK
2213{
2214 tree attributes;
2215
2216 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2217 any functions are declared, which is demonstrably wrong, but
2218 it is worked around here. FIXME. */
2219 if (!cfun)
2220 return 0;
2221
2222 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2223 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2224}
2225
91d231cb 2226#undef TARGET_ATTRIBUTE_TABLE
c6243b4c 2227#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
51c16b7e
SB
2228static tree xstormy16_handle_interrupt_attribute
2229 (tree *, tree, tree, int, bool *);
54e9a19d
DD
2230static tree xstormy16_handle_below100_attribute
2231 (tree *, tree, tree, int, bool *);
51c16b7e 2232
c6243b4c 2233static const struct attribute_spec xstormy16_attribute_table[] =
91d231cb
JM
2234{
2235 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
c6243b4c 2236 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
54e9a19d
DD
2237 { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2238 { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
91d231cb
JM
2239 { NULL, 0, 0, false, false, false, NULL }
2240};
2241
2242/* Handle an "interrupt" attribute;
2243 arguments as in struct attribute_spec.handler. */
2244static tree
51c16b7e
SB
2245xstormy16_handle_interrupt_attribute (tree *node, tree name,
2246 tree args ATTRIBUTE_UNUSED,
2247 int flags ATTRIBUTE_UNUSED,
2248 bool *no_add_attrs)
4b58290f 2249{
91d231cb
JM
2250 if (TREE_CODE (*node) != FUNCTION_TYPE)
2251 {
5c498b10 2252 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
2253 IDENTIFIER_POINTER (name));
2254 *no_add_attrs = true;
2255 }
4b58290f 2256
91d231cb 2257 return NULL_TREE;
4b58290f 2258}
54e9a19d
DD
2259
 2260/* Handle a "below100" attribute;
2261 arguments as in struct attribute_spec.handler. */
2262static tree
2263xstormy16_handle_below100_attribute (tree *node,
2264 tree name ATTRIBUTE_UNUSED,
2265 tree args ATTRIBUTE_UNUSED,
2266 int flags ATTRIBUTE_UNUSED,
2267 bool *no_add_attrs)
2268{
2269 if (TREE_CODE (*node) != VAR_DECL
2270 && TREE_CODE (*node) != POINTER_TYPE
2271 && TREE_CODE (*node) != TYPE_DECL)
2272 {
5c498b10
DD
2273 warning (OPT_Wattributes,
2274 "%<__BELOW100__%> attribute only applies to variables");
54e9a19d
DD
2275 *no_add_attrs = true;
2276 }
2277 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2278 {
2279 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2280 {
5c498b10 2281 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
ab532386 2282 "with auto storage class");
54e9a19d
DD
2283 *no_add_attrs = true;
2284 }
2285 }
2286
2287 return NULL_TREE;
2288}
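/* Typical uses of the attributes declared above (illustrative only):

       void timer_isr (void) __attribute__ ((interrupt));
       unsigned char port_flags __attribute__ ((below100));

   The below100 variable must have static storage duration, as enforced
   by the handler above.  */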
3d4b192a
DD
2289\f
2290#undef TARGET_INIT_BUILTINS
2291#define TARGET_INIT_BUILTINS xstormy16_init_builtins
2292#undef TARGET_EXPAND_BUILTIN
2293#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2294
2295static struct {
2296 const char *name;
2297 int md_code;
2298 const char *arg_ops; /* 0..9, t for temp register, r for return value */
2299 const char *arg_types; /* s=short,l=long, upper case for unsigned */
2300} s16builtins[] = {
2301 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2302 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2303 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2304 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2305 { 0, 0, 0, 0 }
2306};
2307
2308static void
51c16b7e 2309xstormy16_init_builtins (void)
3d4b192a
DD
2310{
2311 tree args, ret_type, arg;
2312 int i, a;
2313
2314 ret_type = void_type_node;
2315
2316 for (i=0; s16builtins[i].name; i++)
2317 {
2318 args = void_list_node;
2319 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2320 {
2321 switch (s16builtins[i].arg_types[a])
2322 {
2323 case 's': arg = short_integer_type_node; break;
2324 case 'S': arg = short_unsigned_type_node; break;
2325 case 'l': arg = long_integer_type_node; break;
2326 case 'L': arg = long_unsigned_type_node; break;
4718bfd8 2327 default: gcc_unreachable ();
3d4b192a
DD
2328 }
2329 if (a == 0)
2330 ret_type = arg;
2331 else
2332 args = tree_cons (NULL_TREE, arg, args);
2333 }
c79efc4d
RÁE
2334 add_builtin_function (s16builtins[i].name,
2335 build_function_type (ret_type, args),
2336 i, BUILT_IN_MD, NULL, NULL);
3d4b192a
DD
2337 }
2338}
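/* Given the arg_types strings, the prototypes registered above are,
   as a sketch:

       short          __sdivlh (long, short);
       short          __smodlh (long, short);
       unsigned short __udivlh (unsigned long, unsigned short);
       unsigned short __umodlh (unsigned long, unsigned short);

   Judging by the arg_ops strings, the "div" entry points return insn
   operand 0 (presumably the quotient) and the "mod" entry points
   return operand 1 (presumably the remainder) of the same division
   pattern.  */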
2339
2340static rtx
51c16b7e
SB
2341xstormy16_expand_builtin(tree exp, rtx target,
2342 rtx subtarget ATTRIBUTE_UNUSED,
2343 enum machine_mode mode ATTRIBUTE_UNUSED,
2344 int ignore ATTRIBUTE_UNUSED)
3d4b192a
DD
2345{
2346 rtx op[10], args[10], pat, copyto[10], retval = 0;
2347 tree fndecl, argtree;
2348 int i, a, o, code;
2349
2350 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2351 argtree = TREE_OPERAND (exp, 1);
2352 i = DECL_FUNCTION_CODE (fndecl);
2353 code = s16builtins[i].md_code;
2354
2355 for (a = 0; a < 10 && argtree; a++)
2356 {
2357 args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
2358 argtree = TREE_CHAIN (argtree);
2359 }
2360
2361 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2362 {
2363 char ao = s16builtins[i].arg_ops[o];
2364 char c = insn_data[code].operand[o].constraint[0];
2365 int omode;
2366
2367 copyto[o] = 0;
2368
2369 omode = insn_data[code].operand[o].mode;
2370 if (ao == 'r')
2371 op[o] = target ? target : gen_reg_rtx (omode);
2372 else if (ao == 't')
2373 op[o] = gen_reg_rtx (omode);
2374 else
2375 op[o] = args[(int) hex_value (ao)];
2376
2377 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2378 {
2379 if (c == '+' || c == '=')
2380 {
2381 copyto[o] = op[o];
2382 op[o] = gen_reg_rtx (omode);
2383 }
2384 else
2385 op[o] = copy_to_mode_reg (omode, op[o]);
2386 }
2387
2388 if (ao == 'r')
2389 retval = op[o];
2390 }
2391
2392 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2393 op[5], op[6], op[7], op[8], op[9]);
2394 emit_insn (pat);
2395
2396 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2397 if (copyto[o])
2398 {
2399 emit_move_insn (copyto[o], op[o]);
2400 if (op[o] == retval)
2401 retval = copyto[o];
2402 }
2403
2404 return retval;
2405}
54e9a19d
DD
2406\f
2407
2408/* Look for combinations of insns that can be converted to BN or BP
2409 opcodes. This is, unfortunately, too complex to do with MD
2410 patterns. */
2411static void
2412combine_bnp (rtx insn)
2413{
f99652b5
NC
2414 int insn_code, regno, need_extend;
2415 unsigned int mask;
54e9a19d
DD
2416 rtx cond, reg, and, load, qireg, mem;
2417 enum machine_mode load_mode = QImode;
f99652b5
NC
2418 enum machine_mode and_mode = QImode;
2419 rtx shift = NULL_RTX;
54e9a19d
DD
2420
2421 insn_code = recog_memoized (insn);
2422 if (insn_code != CODE_FOR_cbranchhi
2423 && insn_code != CODE_FOR_cbranchhi_neg)
2424 return;
2425
2426 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2427 cond = XEXP (cond, 1); /* if */
2428 cond = XEXP (cond, 0); /* cond */
2429 switch (GET_CODE (cond))
2430 {
2431 case NE:
2432 case EQ:
2433 need_extend = 0;
2434 break;
2435 case LT:
2436 case GE:
2437 need_extend = 1;
2438 break;
2439 default:
2440 return;
2441 }
2442
2443 reg = XEXP (cond, 0);
2444 if (GET_CODE (reg) != REG)
2445 return;
2446 regno = REGNO (reg);
2447 if (XEXP (cond, 1) != const0_rtx)
2448 return;
2449 if (! find_regno_note (insn, REG_DEAD, regno))
2450 return;
2451 qireg = gen_rtx_REG (QImode, regno);
2452
2453 if (need_extend)
2454 {
569b7f6a 2455 /* LT and GE conditionals should have a sign extend before
54e9a19d
DD
2456 them. */
2457 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2458 {
2459 int and_code = recog_memoized (and);
f99652b5 2460
54e9a19d 2461 if (and_code == CODE_FOR_extendqihi2
f99652b5
NC
2462 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2463 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
2464 break;
54e9a19d
DD
2465
2466 if (and_code == CODE_FOR_movhi_internal
f99652b5 2467 && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
54e9a19d
DD
2468 {
2469 /* This is for testing bit 15. */
2470 and = insn;
2471 break;
2472 }
2473
2474 if (reg_mentioned_p (reg, and))
2475 return;
f99652b5 2476
54e9a19d
DD
2477 if (GET_CODE (and) != NOTE
2478 && GET_CODE (and) != INSN)
2479 return;
2480 }
2481 }
2482 else
2483 {
2484 /* EQ and NE conditionals have an AND before them. */
2485 for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
2486 {
2487 if (recog_memoized (and) == CODE_FOR_andhi3
f99652b5
NC
2488 && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
2489 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
2490 break;
54e9a19d
DD
2491
2492 if (reg_mentioned_p (reg, and))
2493 return;
f99652b5 2494
54e9a19d
DD
2495 if (GET_CODE (and) != NOTE
2496 && GET_CODE (and) != INSN)
2497 return;
2498 }
f99652b5
NC
2499
2500 if (and)
2501 {
aabcd309 2502 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
f99652b5
NC
2503 followed by an AND like this:
2504
2505 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2506 (clobber (reg:BI carry))]
2507
2508 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2509
2510 Attempt to detect this here. */
2511 for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
2512 {
2513 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2514 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2515 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2516 break;
2517
2518 if (reg_mentioned_p (reg, shift)
2519 || (GET_CODE (shift) != NOTE
2520 && GET_CODE (shift) != INSN))
2521 {
2522 shift = NULL_RTX;
2523 break;
2524 }
2525 }
2526 }
54e9a19d
DD
2527 }
2528 if (!and)
2529 return;
2530
f99652b5
NC
2531 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
2532 load;
2533 load = prev_real_insn (load))
54e9a19d
DD
2534 {
2535 int load_code = recog_memoized (load);
f99652b5 2536
54e9a19d 2537 if (load_code == CODE_FOR_movhi_internal
f99652b5
NC
2538 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2539 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2540 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
54e9a19d
DD
2541 {
2542 load_mode = HImode;
2543 break;
2544 }
2545
2546 if (load_code == CODE_FOR_movqi_internal
f99652b5
NC
2547 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2548 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
54e9a19d
DD
2549 {
2550 load_mode = QImode;
2551 break;
2552 }
f99652b5
NC
2553
2554 if (load_code == CODE_FOR_zero_extendqihi2
2555 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2556 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2557 {
2558 load_mode = QImode;
2559 and_mode = HImode;
2560 break;
2561 }
2562
54e9a19d
DD
2563 if (reg_mentioned_p (reg, load))
2564 return;
f99652b5 2565
54e9a19d
DD
2566 if (GET_CODE (load) != NOTE
2567 && GET_CODE (load) != INSN)
2568 return;
2569 }
2570 if (!load)
2571 return;
2572
f99652b5
NC
2573 mem = SET_SRC (PATTERN (load));
2574
2575 if (need_extend)
54e9a19d 2576 {
f99652b5
NC
2577 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2578
 2579 /* If the mem is wrapped in a zero-extend operation and we are
 2580 going to generate a sign-extend operation instead, strip the
 2581 zero-extend and use the mem inside it directly. */
2582 if (GET_CODE (mem) == ZERO_EXTEND)
2583 mem = XEXP (mem, 0);
54e9a19d
DD
2584 }
2585 else
f99652b5
NC
2586 {
2587 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
2588 return;
2589
2590 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));
2591
2592 if (shift)
2593 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2594 }
54e9a19d 2595
54e9a19d
DD
2596 if (load_mode == HImode)
2597 {
2598 rtx addr = XEXP (mem, 0);
f99652b5 2599
54e9a19d
DD
2600 if (! (mask & 0xff))
2601 {
2602 addr = plus_constant (addr, 1);
2603 mask >>= 8;
2604 }
2605 mem = gen_rtx_MEM (QImode, addr);
2606 }
2607
2608 if (need_extend)
2609 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2610 else
f99652b5
NC
2611 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2612
54e9a19d
DD
2613 INSN_CODE (insn) = -1;
2614 delete_insn (load);
f99652b5 2615
54e9a19d
DD
2616 if (and != insn)
2617 delete_insn (and);
f99652b5
NC
2618
2619 if (shift != NULL_RTX)
2620 delete_insn (shift);
54e9a19d
DD
2621}
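/* Sketch of the effect of combine_bnp: for a below100 variable, a
   sequence of the form

       load the variable into a register
       AND the register with a one-bit mask (or sign-extend it)
       conditional branch on the register against zero

   is rewritten so that the branch condition tests the memory operand
   directly, e.g. (and:QI (mem:QI ...) (const_int 16)), and the
   now-redundant load and AND insns are deleted, allowing a single
   bit-test branch (bp/bn) pattern to match.  */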
2622
2623static void
2624xstormy16_reorg (void)
2625{
2626 rtx insn;
2627
2628 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2629 {
2630 if (! JUMP_P (insn))
2631 continue;
2632 combine_bnp (insn);
2633 }
2634}
2635
7e43c821 2636\f
78bc94a2
KH
2637/* Worker function for TARGET_RETURN_IN_MEMORY. */
2638
7e43c821 2639static bool
586de218 2640xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7e43c821 2641{
586de218 2642 const HOST_WIDE_INT size = int_size_in_bytes (type);
78bc94a2 2643 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
7e43c821 2644}
4b58290f 2645\f
301d03af
RS
2646#undef TARGET_ASM_ALIGNED_HI_OP
2647#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2648#undef TARGET_ASM_ALIGNED_SI_OP
2649#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
54e9a19d
DD
2650#undef TARGET_ENCODE_SECTION_INFO
2651#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
301d03af 2652
434aeebb
RS
2653/* select_section doesn't handle .bss_below100. */
2654#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2655#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2656
c590b625
RH
2657#undef TARGET_ASM_OUTPUT_MI_THUNK
2658#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
3961e8fe
RH
2659#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2660#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
c590b625 2661
3c50106f
RH
2662#undef TARGET_RTX_COSTS
2663#define TARGET_RTX_COSTS xstormy16_rtx_costs
dcefdf67
RH
2664#undef TARGET_ADDRESS_COST
2665#define TARGET_ADDRESS_COST xstormy16_address_cost
3c50106f 2666
f2f61ee7
KH
2667#undef TARGET_BUILD_BUILTIN_VA_LIST
2668#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
5d47df87
RH
2669#undef TARGET_GIMPLIFY_VA_ARG_EXPR
2670#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_expand_builtin_va_arg
37cd4bca 2671
7e43c821 2672#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 2673#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
7e43c821 2674#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 2675#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
7e43c821 2676#undef TARGET_PROMOTE_PROTOTYPES
586de218 2677#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
7e43c821 2678
7e43c821
KH
2679#undef TARGET_RETURN_IN_MEMORY
2680#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2681
54e9a19d
DD
2682#undef TARGET_MACHINE_DEPENDENT_REORG
2683#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2684
4b58290f 2685struct gcc_target targetm = TARGET_INITIALIZER;
d6b5193b
RS
2686
2687#include "gt-stormy16.h"