/* Xstormy16 target functions.
   Copyright (C) 1997-2015 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "obstack.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "varasm.h"
#include "calls.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "except.h"
#include "input.h"
#include "function.h"
#include "target.h"
#include "target-def.h"
#include "tm_p.h"
#include "langhooks.h"
#include "hash-table.h"
#include "ggc.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "df.h"
#include "reload.h"
#include "builtins.h"

static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                           HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool xstormy16_return_in_memory (const_tree, const_tree);

static GTY(()) section *bss100_section;

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
                     int opno ATTRIBUTE_UNUSED, int *total,
                     bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST_INT:
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1) / 2;
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (35 + 6);
      return true;
    case DIV:
      *total = COSTS_N_INSNS (51 - 6);
      return true;

    default:
      return false;
    }
}

static int
xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
                        addr_space_t as ATTRIBUTE_UNUSED,
                        bool speed ATTRIBUTE_UNUSED)
{
  return (CONST_INT_P (x) ? 2
          : GET_CODE (x) == PLUS ? 7
          : 5);
}

/* Worker function for TARGET_MEMORY_MOVE_COST.  */

static int
xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
                            bool in)
{
  return (5 + memory_move_secondary_cost (mode, rclass, in));
}

/* Branches are handled as follows:

   1. HImode compare-and-branches.  The machine supports these
      natively, so the appropriate pattern is emitted directly.

   2. SImode EQ and NE.  These are emitted as pairs of HImode
      compare-and-branches.

   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
      of a SImode subtract followed by a branch (not a compare-and-branch),
      like this:
             sub
             sbc
             blt

   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
             sub
             sbc
             blt
             or
             bne.  */

/* Emit a branch of kind CODE to location LOC.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
        lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
         created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
        emit_label (lab);
      return;
    }
  else if (mode == SImode
           && (code == NE || code == EQ)
           && op1 != const0_rtx)
    {
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
        lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
        {
          op0_word = simplify_gen_subreg (word_mode, op0, mode,
                                          i * UNITS_PER_WORD);
          op1_word = simplify_gen_subreg (word_mode, op1, mode,
                                          i * UNITS_PER_WORD);
          xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
        }
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
                                      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
                                      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
        emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                              loc_ref, pc_rtx));

  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}

/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
                         rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx_insn *seq, *last_insn;
  rtx compare;

  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}


/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */
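/* For instance (an illustrative sketch, not output captured from a
   build): a short EQ branch comes out as "bz %2,%3,label", while the
   long form reverses the test and hops over a far jump:
   "bnz %2,%3,.+8 | jmpf label".  */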

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
                             rtx_insn *insn)
{
  static char string[64];
  int need_longbranch = (op != NULL_RTX
                         ? get_attr_length (insn) == 8
                         : get_attr_length (insn) == 4);
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  if (! op)
    {
      if (need_longbranch)
        ccode = "jmpf";
      else
        ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  if (! REG_P (XEXP (op, 0)))
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:  ccode = "z";  break;
    case NE:  ccode = "nz"; break;
    case GE:  ccode = "ge"; break;
    case LT:  ccode = "lt"; break;
    case GT:  ccode = "gt"; break;
    case LE:  ccode = "le"; break;
    case GEU: ccode = "nc"; break;
    case LTU: ccode = "c";  break;
    case GTU: ccode = "hi"; break;
    case LEU: ccode = "ls"; break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}

/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */
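/* As an illustration (a sketch): the tail of a SImode EQ test on the
   register pair r0/r1 comes out as "or r0,r1 | bz label", and an LT
   tail as "sbc %2,%3 | blt label".  */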

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
                             rtx_insn *insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:  ccode = "z";  break;
    case NE:  ccode = "nz"; break;
    case GE:  ccode = "ge"; break;
    case LT:  ccode = "lt"; break;
    case GEU: ccode = "nc"; break;
    case LTU: ccode = "c";  break;

      /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  switch (code)
    {
    case EQ: case NE:
      {
        int regnum;

        gcc_assert (REG_P (XEXP (op, 0)));

        regnum = REGNO (XEXP (op, 0));
        sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}

/* Many machines have some registers that cannot be copied directly to or from
   memory or even from other types of registers.  An example is the `MQ'
   register, which on most machines, can only be copied to or from general
   registers, but not memory.  Some machines allow copying all registers to and
   from memory, but require a scratch register for stores to some memory
   locations (e.g., those with symbolic address on the RT, and those with
   certain symbolic address on the SPARC when compiling PIC).  In some cases,
   both an intermediate and a scratch register are required.

   You should define these macros to indicate to the reload phase that it may
   need to allocate at least one register for a reload in addition to the
   register to contain the data.  Specifically, if copying X to a register
   RCLASS in MODE requires an intermediate register, you should define
   `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
   whose registers can be used as intermediate registers or scratch registers.

   If copying a register RCLASS in MODE to X requires an intermediate or scratch
   register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
   largest register class required.  If the requirements for input and output
   reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
   instead of defining both macros identically.

   The values returned by these macros are often `GENERAL_REGS'.  Return
   `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
   to or from a register of RCLASS in MODE without requiring a scratch register.
   Do not define this macro if it would always return `NO_REGS'.

   If a scratch register is required (either with or without an intermediate
   register), you should define patterns for `reload_inM' or `reload_outM', as
   required.  These patterns, which will normally be implemented with a
   `define_expand', should be similar to the `movM' patterns, except that
   operand 2 is the scratch register.

   Define constraints for the reload register and scratch register that contain
   a single register class.  If the original reload register (whose class is
   RCLASS) can meet the constraint given in the pattern, the value returned by
   these macros is used for the class of the scratch register.  Otherwise, two
   additional reload registers are required.  Their classes are obtained from
   the constraints in the insn pattern.

   X might be a pseudo-register or a `subreg' of a pseudo-register, which could
   either be in a hard register or in memory.  Use `true_regnum' to find out;
   it will return -1 if the pseudo is in memory and the hard register number if
   it is in a register.

   These macros should not be used in the case where a particular class of
   registers can only be copied to memory and not to another class of
   registers.  In that case, secondary reload registers are not needed and
   would not be helpful.  Instead, a stack location must be used to perform the
   copy and the `movM' pattern should use memory as an intermediate storage.
   This case often occurs between floating-point and general registers.  */

enum reg_class
xstormy16_secondary_reload_class (enum reg_class rclass,
                                  machine_mode mode ATTRIBUTE_UNUSED,
                                  rtx x)
{
  /* This chip has the interesting property that only the first eight
     registers can be moved to/from memory.  */
  if ((MEM_P (x)
       || ((GET_CODE (x) == SUBREG || REG_P (x))
           && (true_regnum (x) == -1
               || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
      && ! reg_class_subset_p (rclass, EIGHT_REGS))
    return EIGHT_REGS;

  return NO_REGS;
}

/* Worker function for TARGET_PREFERRED_RELOAD_CLASS
   and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS.  */

static reg_class_t
xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
{
  if (rclass == GENERAL_REGS && MEM_P (x))
    return EIGHT_REGS;

  return rclass;
}

/* Predicate for symbols and addresses that reflect special 8-bit
   addressing.  */

int
xstormy16_below100_symbol (rtx x,
                           machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
    x = XEXP (x, 0);

  if (GET_CODE (x) == SYMBOL_REF)
    return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;

  if (CONST_INT_P (x))
    {
      HOST_WIDE_INT i = INTVAL (x);

      if ((i >= 0x0000 && i <= 0x00ff)
          || (i >= 0x7f00 && i <= 0x7fff))
        return 1;
    }
  return 0;
}

/* Likewise, but only for non-volatile MEMs, for patterns where the
   MEM will get split into smaller sized accesses.  */

int
xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
{
  if (MEM_P (x) && MEM_VOLATILE_P (x))
    return 0;
  return xstormy16_below100_operand (x, mode);
}

/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  */

void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

/* Expand an 8-bit AND.  This either detects the one case we can
   actually do, or uses a 16-bit AND.  */

void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)                         \
  (CONST_INT_P (X)                                                      \
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)                       \
  (CONST_INT_P (X)                                                      \
   && INTVAL (X) + (OFFSET) >= 0                                        \
   && INTVAL (X) + (OFFSET) < 0x8000                                    \
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))

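/* Summarizing the two macros above: a register-relative offset must lie
   in [-2048, 2047], while a bare constant address is legitimate only if
   it falls in [0x0000, 0x00ff] or [0x7f00, 0x7fff].  */
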
bool
xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
                                rtx x, bool strict)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    {
      x = XEXP (x, 0);
      /* PR 31232: Do not allow INT+INT as an address.  */
      if (CONST_INT_P (x))
        return false;
    }

  if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  if (REG_P (x)
      && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return true;

  if (xstormy16_below100_symbol (x, mode))
    return true;

  return false;
}

/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */

static bool
xstormy16_mode_dependent_address_p (const_rtx x,
                                    addr_space_t as ATTRIBUTE_UNUSED)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return true;

  /* Auto-increment addresses are now treated generically in recog.c.  */
  return false;
}

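/* Nonzero if X is a memory operand whose address contains no PLUS,
   i.e. a plain register, an auto-modified register, or a constant
   address with no displacement.  */
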
int
short_memory_operand (rtx x, machine_mode mode)
{
  if (! memory_operand (x, mode))
    return 0;
  return (GET_CODE (XEXP (x, 0)) != PLUS);
}

/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
              && mode != QImode && mode != HImode
              && nonimmediate_operand (dest, mode)
              && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
        {
          dest = copy_rtx (dest);
          MEM_VOLATILE_P (dest) = 0;
        }
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
        {
          src = copy_rtx (src);
          MEM_VOLATILE_P (src) = 0;
        }
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      if (REG_P (src)
          && REG_P (dest)
          && reg_overlap_mentioned_p (dest, src)
          && REGNO (dest) > REGNO (src))
        direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
           || (GET_CODE (mem_operand) == PLUS
               && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
                                     mem_operand, 0));

      if (refers_to_regno_p (regno, mem_operand))
        direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
                                  mem_operand, 0))
        direction = 1;
      else
        /* This means something like
             (set (reg:DI r0) (mem:DI (reg:HI r1)))
           which we'd need to support by doing the set of the second word
           last.  */
        gcc_unreachable ();
    }

  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      if (src_modifies)
        w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
        MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
        w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_dest = simplify_gen_subreg (word_mode, dest, mode,
                                      i * UNITS_PER_WORD);
      if (dest_volatile)
        MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
                  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
        REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
                                            auto_inc_reg_rtx,
                                            REG_NOTES (insn));
    }
}

/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
{
  if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && MEM_P (dest)
      && (! CONST_INT_P (XEXP (dest, 0))
          || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && ! REG_P (src)
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}

/* Stack Layout:

   The stack is laid out as follows:

SP->
FP->    Local variables
        Register save area (up to 4 words)
        Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

AP->    Return address (two words)
        9th procedure parameter word
        10th procedure parameter word
        ...
        last procedure parameter word

   The frame pointer location is tuned to make it most likely that all
   parameters and local variables can be accessed using a load-indexed
   instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};

/* Nonzero if register REGNUM must be saved in the function prologue:
   either it is live and normally call-saved, or (for an interrupt
   function, IFUN) it is a non-fixed call-used register other than the
   carry register and is either live or possibly clobbered by a call
   because the function is not a leaf.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)                                    \
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])           \
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]           \
       && (REGNUM != CARRY_REGNUM)                                      \
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))

/* Compute the stack layout.  */

struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
                       + layout.register_save_size
                       + layout.stdarg_save_size);

  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
          + crtl->args.size <= 2048)
        layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
        layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
                          + layout.register_save_size
                          - INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
                        - layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}

/* Worker function for TARGET_CAN_ELIMINATE.  */

static bool
xstormy16_can_eliminate (const int from, const int to)
{
  return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
          ? ! frame_pointer_needed
          : true);
}

/* Determine how all the special registers get eliminated.  */

int
xstormy16_initial_elimination_offset (int from, int to)
{
  struct xstormy16_stack_layout layout;
  int result;

  layout = xstormy16_compute_stack_layout ();

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = layout.sp_minus_fp - layout.locals_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - layout.locals_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = - layout.fp_minus_ap;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - (layout.sp_minus_fp + layout.fp_minus_ap);
  else
    gcc_unreachable ();

  return result;
}

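/* Emit (set DEST (plus:HI SRC0 SRC1)) wrapped in a PARALLEL with the
   clobber of the carry register that the xstormy16 add patterns carry;
   used by the prologue/epilogue expanders below, which run after
   reload.  */
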
static rtx
emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
{
  rtx set, clobber, insn;

  set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
  insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
  return insn;
}

/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */

void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  if (flag_stack_usage_info)
    current_function_static_stack_size = layout.frame_size;

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
         regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
         regno++)
      {
        rtx dwarf;
        rtx reg = gen_rtx_REG (HImode, regno);

        insn = emit_move_insn (mem_push_rtx, reg);
        RTX_FRAME_RELATED_P (insn) = 1;

        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
                                             reg);
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
                                             plus_constant (Pmode,
                                                            stack_pointer_rtx,
                                                            GET_MODE_SIZE (Pmode)));
        add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
        rtx dwarf;
        rtx reg = gen_rtx_REG (HImode, regno);

        insn = emit_move_insn (mem_push_rtx, reg);
        RTX_FRAME_RELATED_P (insn) = 1;

        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
                                             reg);
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
                                             plus_constant (Pmode,
                                                            stack_pointer_rtx,
                                                            GET_MODE_SIZE (Pmode)));
        add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

      if (layout.sp_minus_fp)
        {
          insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
                                         hard_frame_pointer_rtx,
                                         GEN_INT (- layout.sp_minus_fp));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}

/* Do we need an epilogue at all?  */

int
direct_return (void)
{
  return (reload_completed
          && xstormy16_compute_stack_layout ().frame_size == 0
          && ! xstormy16_interrupt_function_p ());
}

/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_EPILOGUE macro,
   since it allows the scheduler to intermix instructions with the
   restores of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
        emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
        emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                            GEN_INT (- layout.stdarg_save_size));

  /* Return.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}

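/* Worker for EPILOGUE_USES: after reload, report as used any call-used
   register that REG_NEEDS_SAVE says must be saved and restored, which
   can only happen in an interrupt function.  */
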
int
xstormy16_epilogue_uses (int regno)
{
  if (reload_completed && call_used_regs[regno])
    {
      const int ifun = xstormy16_interrupt_function_p ();
      return REG_NEEDS_SAVE (regno, ifun);
    }
  return 0;
}

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}

/* Update CUM to advance past an argument in the argument list.  The
   values MODE, TYPE and NAMED describe that argument.  Once this is
   done, the variable CUM is suitable for analyzing the *following*
   argument with `TARGET_FUNCTION_ARG', etc.

   This function need not do anything if the argument in question was
   passed on the stack.  The compiler knows how to track the amount of
   stack space used for arguments without any special help.  However,
   it makes life easier for xstormy16_build_va_list if it does update
   the word count.  */

static void
xstormy16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
                                const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* If an argument would otherwise be passed partially in registers,
     and partially on the stack, the whole of it is passed on the
     stack.  */
  if (*cum < NUM_ARGUMENT_REGISTERS
      && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    *cum = NUM_ARGUMENT_REGISTERS;

  *cum += XSTORMY16_WORD_SIZE (type, mode);
}

static rtx
xstormy16_function_arg (cumulative_args_t cum_v, machine_mode mode,
                        const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (mode == VOIDmode)
    return const0_rtx;
  if (targetm.calls.must_pass_in_stack (mode, type)
      || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    return NULL_RTX;
  return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
}

/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (BUILTINS_LOCATION,
                          TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_1 = build_decl (BUILTINS_LOCATION,
                    FIELD_DECL, get_identifier ("base"),
                    ptr_type_node);
  f_2 = build_decl (BUILTINS_LOCATION,
                    FIELD_DECL, get_identifier ("count"),
                    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_1;
  DECL_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}

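/* As a C-level sketch (for illustration only; the real type is built
   with the tree machinery above), the resulting record behaves like:

     struct __va_list_tag
     {
       void *base;       -- stack position of non-register arguments
       unsigned count;   -- bytes of argument words used so far
     };  */
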
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t, u;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
              build_int_cst (NULL_TREE,
                             crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}

/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                                gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node);

  if (!must_stack)
    {
      tree r;

      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
                  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
                  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
                + INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
              fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}

/* Worker function for TARGET_TRAMPOLINE_INIT.  */
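
/* A sketch of the four 16-bit words the code below stores (the opcode
   values come straight from the constants used here; the mnemonics are
   an informal reading, not taken from hardware documentation):

     word 0:  0x3130 | STATIC_CHAIN_REGNUM   -- load-immediate opcode
     word 1:  the static chain value itself
     word 2:  0x0200 | (fnaddr & 0xff)       -- far-jump opcode word
     word 3:  fnaddr >> 8                    -- remaining address bits  */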

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}

/* Worker function for TARGET_FUNCTION_VALUE.  */

static rtx
xstormy16_function_value (const_tree valtype,
                          const_tree func ATTRIBUTE_UNUSED,
                          bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode;
  mode = TYPE_MODE (valtype);
  PROMOTE_MODE (mode, 0, valtype);
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Worker function for TARGET_LIBCALL_VALUE.  */

static rtx
xstormy16_libcall_value (machine_mode mode,
                         const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  */

static bool
xstormy16_function_value_regno_p (const unsigned int regno)
{
  return (regno == RETURN_VALUE_REGNUM);
}

/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
                               tree thunk_fndecl ATTRIBUTE_UNUSED,
                               HOST_WIDE_INT delta,
                               HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                               tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}

/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */
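
/* For instance (a sketch of the output, assuming a one-byte global
   "foo" with byte alignment and the below100 flag set):

        .globl  foo
        .type   foo, @object
        .size   foo, 1
   foo:
        .space  1  */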

void
xstormy16_asm_output_aligned_common (FILE *stream,
                                     tree decl,
                                     const char *name,
                                     int size,
                                     int align,
                                     int global)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX
      && MEM_P (mem)
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      while (align > 8)
        {
          align /= 2;
          p2align++;
        }

      name2 = default_strip_name_encoding (name);
      if (global)
        fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
        fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}

/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
                           output_section_asm_op,
                           "\t.section \".bss_below100\",\"aw\",@nobits");
}

/* Mark symbols with the "below100" attribute so that we can use the
   special addressing modes for them.  */

static void
xstormy16_encode_section_info (tree decl, rtx r, int first)
{
  default_encode_section_info (decl, r, first);

  if (TREE_CODE (decl) == VAR_DECL
      && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
          || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
    {
      rtx symbol = XEXP (r, 0);

      gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
      SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
    }
}

#undef TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
#undef TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor

/* Output constructors and destructors.  Just like
   default_named_section_asm_out_* but don't set the sections writable.  */

static void
xstormy16_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}

static void
xstormy16_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}

/* Worker function for TARGET_PRINT_OPERAND_ADDRESS.

   Print a memory address as an operand to reference that memory location.  */
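
/* Examples of the syntax this produces (the register name is
   illustrative): "(r5)" for a plain register, "(r5,12)" with an
   offset, "(--r5)" for pre-decrement and "(r5++)" for post-increment;
   bare constants print as decimal numbers.  */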

static void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (CONST_INT_P (address))
    {
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || LABEL_P (address))
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)).  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (CONST_INT_P (XEXP (address, 1)));
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  gcc_assert (REG_P (address));

  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names[REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}

/* Worker function for TARGET_PRINT_OPERAND.

   Print an operand to an assembler instruction.  */

static void
xstormy16_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'B':
      /* There is either one bit set, or one bit clear, in X.
         Print it preceded by '#'.  */
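      /* For example, 8 (one bit set) and ~8 (one bit clear) both
         print as "#3".  */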
1757 {
1758 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1759 HOST_WIDE_INT xx = 1;
1760 HOST_WIDE_INT l;
1761
1762 if (CONST_INT_P (x))
1763 xx = INTVAL (x);
1764 else
1765 output_operand_lossage ("'B' operand is not constant");
1766
1767 /* GCC sign-extends masks with the MSB set, so we have to
1768 detect all the cases that differ only in sign extension
1769 beyond the bits we care about. Normally, the predicates
1770 and constraints ensure that we have the right values. This
1771 works correctly for valid masks. */
1772 if (bits_set[xx & 7] <= 1)
1773 {
1774 /* Remove sign extension bits. */
1775 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1776 xx &= 0xff;
1777 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1778 xx &= 0xffff;
1779 l = exact_log2 (xx);
1780 }
1781 else
1782 {
1783 /* Add sign extension bits. */
1784 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1785 xx |= ~(HOST_WIDE_INT)0xff;
1786 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1787 xx |= ~(HOST_WIDE_INT)0xffff;
1788 l = exact_log2 (~xx);
1789 }
1790
1791 if (l == -1)
1792 output_operand_lossage ("'B' operand has multiple bits set");
1793
1794 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1795 return;
1796 }
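
/* Two examples of 'B' output: a CONST_INT of 8 (only bit 3 set)
prints as "#3", while a CONST_INT of -17 (the 16-bit mask 0xffef,
with only bit 4 clear) prints as "#4". */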
1797
1798 case 'C':
1799 /* Print the symbol without a surrounding @fptr(). */
1800 if (GET_CODE (x) == SYMBOL_REF)
1801 assemble_name (file, XSTR (x, 0));
1802 else if (LABEL_P (x))
1803 output_asm_label (x);
1804 else
1805 xstormy16_print_operand_address (file, x);
1806 return;
1807
1808 case 'o':
1809 case 'O':
1810 /* Print the immediate operand less one, preceded by '#'.
1811 For 'O', negate it first. */
1812 {
1813 HOST_WIDE_INT xx = 0;
1814
1815 if (CONST_INT_P (x))
1816 xx = INTVAL (x);
1817 else
1818 output_operand_lossage ("'o' operand is not constant");
1819
1820 if (code == 'O')
1821 xx = -xx;
1822
1823 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1824 return;
1825 }
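
/* For example, given a CONST_INT of 5, 'o' prints "#4" and 'O'
prints "#-6". */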
1826
1827 case 'b':
1828 /* Print the shift mask for bp/bn. */
1829 {
1830 HOST_WIDE_INT xx = 1;
1831 HOST_WIDE_INT l;
1832
1833 if (CONST_INT_P (x))
1834 xx = INTVAL (x);
1835 else
1836 output_operand_lossage ("'b' operand is not constant");
1837
1838 l = 7 - xx;
1839
1840 fputs (IMMEDIATE_PREFIX, file);
1841 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1842 return;
1843 }
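
/* For example, under 'b' a CONST_INT of 2 prints as "#5" (7 - 2). */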
1844
1845 case 0:
1846 /* Handled below. */
1847 break;
1848
1849 default:
1850 output_operand_lossage ("xstormy16_print_operand: unknown code");
1851 return;
1852 }
1853
1854 switch (GET_CODE (x))
1855 {
1856 case REG:
1857 fputs (reg_names [REGNO (x)], file);
1858 break;
1859
1860 case MEM:
1861 xstormy16_print_operand_address (file, XEXP (x, 0));
1862 break;
1863
1864 default:
1865 /* Some kind of constant or label; an immediate operand,
1866 so prefix it with '#' for the assembler. */
1867 fputs (IMMEDIATE_PREFIX, file);
1868 output_addr_const (file, x);
1869 break;
1870 }
1871
1872 return;
1873 }
1874 \f
1875 /* Expander for the `casesi' pattern.
1876 INDEX is the index of the switch statement.
1877 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1878 to the first table entry.
1879 RANGE is the number of table entries.
1880 TABLE is an ADDR_VEC that is the jump table.
1881 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1882 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1883
1884 void
1885 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1886 rtx table, rtx default_label)
1887 {
1888 HOST_WIDE_INT range_i = INTVAL (range);
1889 rtx int_index;
1890
1891 /* This code uses 'br', so it can deal only with tables of size up to
1892 8192 entries. */
1893 if (range_i >= 8192)
1894 sorry ("switch statement with %lu entries is too large",
1895 (unsigned long) range_i);
1896
1897 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1898 OPTAB_LIB_WIDEN);
1899 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1900 default_label);
1901 int_index = gen_lowpart_common (HImode, index);
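/* Scale the index by four: each table entry emitted by
xstormy16_output_addr_vec below is a jmpf instruction, which this
scaling assumes occupies four bytes. */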
1902 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1903 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1904 }
1905
1906 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1907 instructions, without label or alignment or any other special
1908 constructs. We know that the previous instruction will be the
1909 `tablejump_pcrel' output above.
1910
1911 TODO: it might be nice to output 'br' instructions if they could
1912 all reach. */
1913
1914 void
1915 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1916 {
1917 int vlen, idx;
1918
1919 switch_to_section (current_function_section ());
1920
1921 vlen = XVECLEN (table, 0);
1922 for (idx = 0; idx < vlen; idx++)
1923 {
1924 fputs ("\tjmpf ", file);
1925 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1926 fputc ('\n', file);
1927 }
1928 }
1929 \f
1930 /* Expander for the `call' patterns.
1931 RETVAL is the RTL for the return register or NULL for void functions.
1932 DEST is the function to call, expressed as a MEM.
1933 COUNTER is ignored. */
1934
1935 void
1936 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1937 {
1938 rtx call, temp;
1939 machine_mode mode;
1940
1941 gcc_assert (MEM_P (dest));
1942 dest = XEXP (dest, 0);
1943
1944 if (! CONSTANT_P (dest) && ! REG_P (dest))
1945 dest = force_reg (Pmode, dest);
1946
1947 if (retval == NULL)
1948 mode = VOIDmode;
1949 else
1950 mode = GET_MODE (retval);
1951
1952 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1953 counter);
1954 if (retval)
1955 call = gen_rtx_SET (VOIDmode, retval, call);
1956
1957 if (! CONSTANT_P (dest))
1958 {
1959 temp = gen_reg_rtx (HImode);
1960 emit_move_insn (temp, const0_rtx);
1961 }
1962 else
1963 temp = const0_rtx;
1964
1965 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1966 gen_rtx_USE (VOIDmode, temp)));
1967 emit_call_insn (call);
1968 }
1969 \f
1970 /* Expanders for multiword computational operations. */
1971
1972 /* Expander for arithmetic operations; emit insns to compute
1973
1974 (set DEST (CODE:MODE SRC0 SRC1))
1975
1976 When CODE is COMPARE, a branch template is generated
1977 (this saves duplicating code in xstormy16_split_cbranch). */
1978
1979 void
1980 xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
1981 rtx dest, rtx src0, rtx src1)
1982 {
1983 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1984 int i;
1985 int firstloop = 1;
1986
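/* NEG is handled as zero minus SRC1: zero out SRC0 here and let the
MINUS code below do the subtraction word by word. */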
1987 if (code == NEG)
1988 emit_move_insn (src0, const0_rtx);
1989
1990 for (i = 0; i < num_words; i++)
1991 {
1992 rtx w_src0, w_src1, w_dest;
1993 rtx insn;
1994
1995 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1996 i * UNITS_PER_WORD);
1997 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1998 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1999
2000 switch (code)
2001 {
2002 case PLUS:
2003 if (firstloop
2004 && CONST_INT_P (w_src1)
2005 && INTVAL (w_src1) == 0)
2006 continue;
2007
2008 if (firstloop)
2009 insn = gen_addchi4 (w_dest, w_src0, w_src1);
2010 else
2011 insn = gen_addchi5 (w_dest, w_src0, w_src1);
2012 break;
2013
2014 case NEG:
2015 case MINUS:
2016 case COMPARE:
2017 if (code == COMPARE && i == num_words - 1)
2018 {
2019 rtx branch, sub, clobber, sub_1;
2020
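/* Build a branch template whose arms are both pc_rtx; presumably
xstormy16_split_cbranch substitutes the real branch target later
(see the function comment above). */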
2021 sub_1 = gen_rtx_MINUS (HImode, w_src0,
2022 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
2023 sub = gen_rtx_SET (VOIDmode, w_dest,
2024 gen_rtx_MINUS (HImode, sub_1, w_src1));
2025 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
2026 branch = gen_rtx_SET (VOIDmode, pc_rtx,
2027 gen_rtx_IF_THEN_ELSE (VOIDmode,
2028 gen_rtx_EQ (HImode,
2029 sub_1,
2030 w_src1),
2031 pc_rtx,
2032 pc_rtx));
2033 insn = gen_rtx_PARALLEL (VOIDmode,
2034 gen_rtvec (3, branch, sub, clobber));
2035 }
2036 else if (firstloop
2037 && code != COMPARE
2038 && CONST_INT_P (w_src1)
2039 && INTVAL (w_src1) == 0)
2040 continue;
2041 else if (firstloop)
2042 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2043 else
2044 insn = gen_subchi5 (w_dest, w_src0, w_src1);
2045 break;
2046
2047 case IOR:
2048 case XOR:
2049 case AND:
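/* Skip words for which the operation is a no-op: OR or XOR with
zero, and AND with all ones; -(code == AND) evaluates to -1 exactly
in the AND case. */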
2050 if (CONST_INT_P (w_src1)
2051 && INTVAL (w_src1) == -(code == AND))
2052 continue;
2053
2054 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
2055 w_src0, w_src1));
2056 break;
2057
2058 case NOT:
2059 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2060 break;
2061
2062 default:
2063 gcc_unreachable ();
2064 }
2065
2066 firstloop = 0;
2067 emit (insn);
2068 }
2069
2070 /* If we emit nothing, try_split() will think we failed. So emit
2071 something that does nothing and can be optimized away. */
2072 if (firstloop)
2073 emit (gen_nop ());
2074 }
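
/* For example, an SImode PLUS expands to a gen_addchi4 insn for the
low word followed by a gen_addchi5 insn for the high word; the
firstloop split above suggests that addchi4 sets the carry while
addchi5 also consumes it. */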
2075
2076 /* The shift operations are split at output time for constant values;
2077 variable-width shifts get handed off to a library routine.
2078
2079 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2080 SIZE_R will be a CONST_INT, X will be a hard register. */
2081
2082 const char *
2083 xstormy16_output_shift (machine_mode mode, enum rtx_code code,
2084 rtx x, rtx size_r, rtx temp)
2085 {
2086 HOST_WIDE_INT size;
2087 const char *r0, *r1, *rt;
2088 static char r[64];
2089
2090 gcc_assert (CONST_INT_P (size_r)
2091 && REG_P (x)
2092 && mode == SImode);
2093
2094 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2095
2096 if (size == 0)
2097 return "";
2098
2099 r0 = reg_names [REGNO (x)];
2100 r1 = reg_names [REGNO (x) + 1];
2101
2102 /* For shifts of size 1, we can use the rotate instructions. */
2103 if (size == 1)
2104 {
2105 switch (code)
2106 {
2107 case ASHIFT:
2108 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2109 break;
2110 case ASHIFTRT:
2111 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2112 break;
2113 case LSHIFTRT:
2114 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2115 break;
2116 default:
2117 gcc_unreachable ();
2118 }
2119 return r;
2120 }
2121
2122 /* For large shifts, there are easy special cases. */
2123 if (size == 16)
2124 {
2125 switch (code)
2126 {
2127 case ASHIFT:
2128 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2129 break;
2130 case ASHIFTRT:
2131 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2132 break;
2133 case LSHIFTRT:
2134 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2135 break;
2136 default:
2137 gcc_unreachable ();
2138 }
2139 return r;
2140 }
2141 if (size > 16)
2142 {
2143 switch (code)
2144 {
2145 case ASHIFT:
2146 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2147 r1, r0, r0, r1, (int) size - 16);
2148 break;
2149 case ASHIFTRT:
2150 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2151 r0, r1, r1, r0, (int) size - 16);
2152 break;
2153 case LSHIFTRT:
2154 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2155 r0, r1, r1, r0, (int) size - 16);
2156 break;
2157 default:
2158 gcc_unreachable ();
2159 }
2160 return r;
2161 }
2162
2163 /* For the rest, we have to do more work. In particular, we
2164 need a temporary. */
2165 rt = reg_names [REGNO (temp)];
2166 switch (code)
2167 {
2168 case ASHIFT:
2169 sprintf (r,
2170 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2171 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2172 r1, rt);
2173 break;
2174 case ASHIFTRT:
2175 sprintf (r,
2176 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2177 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2178 r0, rt);
2179 break;
2180 case LSHIFTRT:
2181 sprintf (r,
2182 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2183 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2184 r0, rt);
2185 break;
2186 default:
2187 gcc_unreachable ();
2188 }
2189 return r;
2190 }
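
/* As a worked example of the final case above: an SImode ASHIFT by 4,
with X in the register pair r2/r3 and TEMP in r4, produces

mov r4,r2 | shl r2,#4 | shl r3,#4 | shr r4,#12 | or r3,r4

which saves the low word, shifts both words, and ORs the four bits
shifted out of the low word into the high word. */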
2191 \f
2192 /* Attribute handling. */
2193
2194 /* Return nonzero if the function is an interrupt function. */
2195
2196 int
2197 xstormy16_interrupt_function_p (void)
2198 {
2199 tree attributes;
2200
2201 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2202 any functions are declared, which is demonstrably wrong, but
2203 it is worked around here. FIXME. */
2204 if (!cfun)
2205 return 0;
2206
2207 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2208 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2209 }
2210
2211 #undef TARGET_ATTRIBUTE_TABLE
2212 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2213
2214 static tree xstormy16_handle_interrupt_attribute
2215 (tree *, tree, tree, int, bool *);
2216 static tree xstormy16_handle_below100_attribute
2217 (tree *, tree, tree, int, bool *);
2218
2219 static const struct attribute_spec xstormy16_attribute_table[] =
2220 {
2221 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2222 affects_type_identity. */
2223 { "interrupt", 0, 0, false, true, true,
2224 xstormy16_handle_interrupt_attribute , false },
2225 { "BELOW100", 0, 0, false, false, false,
2226 xstormy16_handle_below100_attribute, false },
2227 { "below100", 0, 0, false, false, false,
2228 xstormy16_handle_below100_attribute, false },
2229 { NULL, 0, 0, false, false, false, NULL, false }
2230 };
2231
2232 /* Handle an "interrupt" attribute;
2233 arguments as in struct attribute_spec.handler. */
2234
2235 static tree
2236 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2237 tree args ATTRIBUTE_UNUSED,
2238 int flags ATTRIBUTE_UNUSED,
2239 bool *no_add_attrs)
2240 {
2241 if (TREE_CODE (*node) != FUNCTION_TYPE)
2242 {
2243 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2244 name);
2245 *no_add_attrs = true;
2246 }
2247
2248 return NULL_TREE;
2249 }
2250
2251 /* Handle a "below100" attribute;
2252 arguments as in struct attribute_spec.handler. */
2253
2254 static tree
2255 xstormy16_handle_below100_attribute (tree *node,
2256 tree name ATTRIBUTE_UNUSED,
2257 tree args ATTRIBUTE_UNUSED,
2258 int flags ATTRIBUTE_UNUSED,
2259 bool *no_add_attrs)
2260 {
2261 if (TREE_CODE (*node) != VAR_DECL
2262 && TREE_CODE (*node) != POINTER_TYPE
2263 && TREE_CODE (*node) != TYPE_DECL)
2264 {
2265 warning (OPT_Wattributes,
2266 "%<__BELOW100__%> attribute only applies to variables");
2267 *no_add_attrs = true;
2268 }
2269 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2270 {
2271 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2272 {
2273 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2274 "with auto storage class");
2275 *no_add_attrs = true;
2276 }
2277 }
2278
2279 return NULL_TREE;
2280 }
2281 \f
2282 #undef TARGET_INIT_BUILTINS
2283 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2284 #undef TARGET_EXPAND_BUILTIN
2285 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2286
2287 static struct
2288 {
2289 const char * name;
2290 int md_code;
2291 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2292 const char * arg_types; /* s = short, l = long; upper case for unsigned. */
2293 }
2294 s16builtins[] =
2295 {
2296 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2297 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2298 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2299 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2300 { NULL, 0, NULL, NULL }
2301 };
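
/* Decoded per the field comments above, the table gives these C-level
signatures (the first arg_types letter is the return type):

short __sdivlh (long, short);
short __smodlh (long, short);
unsigned short __udivlh (unsigned long, unsigned short);
unsigned short __umodlh (unsigned long, unsigned short);

Presumably operand 0 of the division patterns is the quotient and
operand 1 the remainder, so __sdivlh/__udivlh return the quotient
and __smodlh/__umodlh the remainder. */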
2302
2303 static void
2304 xstormy16_init_builtins (void)
2305 {
2306 tree args[2], ret_type, arg = NULL_TREE, ftype;
2307 int i, a, n_args;
2308
2309 ret_type = void_type_node;
2310
2311 for (i = 0; s16builtins[i].name; i++)
2312 {
2313 n_args = strlen (s16builtins[i].arg_types) - 1;
2314
2315 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2316
2317 for (a = n_args - 1; a >= 0; a--)
2318 args[a] = NULL_TREE;
2319
2320 for (a = n_args; a >= 0; a--)
2321 {
2322 switch (s16builtins[i].arg_types[a])
2323 {
2324 case 's': arg = short_integer_type_node; break;
2325 case 'S': arg = short_unsigned_type_node; break;
2326 case 'l': arg = long_integer_type_node; break;
2327 case 'L': arg = long_unsigned_type_node; break;
2328 default: gcc_unreachable ();
2329 }
2330 if (a == 0)
2331 ret_type = arg;
2332 else
2333 args[a-1] = arg;
2334 }
2335 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2336 add_builtin_function (s16builtins[i].name, ftype,
2337 i, BUILT_IN_MD, NULL, NULL_TREE);
2338 }
2339 }
2340
2341 static rtx
2342 xstormy16_expand_builtin (tree exp, rtx target,
2343 rtx subtarget ATTRIBUTE_UNUSED,
2344 machine_mode mode ATTRIBUTE_UNUSED,
2345 int ignore ATTRIBUTE_UNUSED)
2346 {
2347 rtx op[10], args[10], pat, copyto[10], retval = 0;
2348 tree fndecl, argtree;
2349 int i, a, o, code;
2350
2351 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2352 argtree = TREE_OPERAND (exp, 1);
2353 i = DECL_FUNCTION_CODE (fndecl);
2354 code = s16builtins[i].md_code;
2355
2356 for (a = 0; a < 10 && argtree; a++)
2357 {
2358 args[a] = expand_normal (TREE_VALUE (argtree));
2359 argtree = TREE_CHAIN (argtree);
2360 }
2361
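/* Bind each operand of the md pattern: 'r' maps to the call's return
value (TARGET when available), 't' to a fresh scratch register, and a
digit to the corresponding expanded argument. */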
2362 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2363 {
2364 char ao = s16builtins[i].arg_ops[o];
2365 char c = insn_data[code].operand[o].constraint[0];
2366 machine_mode omode;
2367
2368 copyto[o] = 0;
2369
2370 omode = (machine_mode) insn_data[code].operand[o].mode;
2371 if (ao == 'r')
2372 op[o] = target ? target : gen_reg_rtx (omode);
2373 else if (ao == 't')
2374 op[o] = gen_reg_rtx (omode);
2375 else
2376 op[o] = args[(int) hex_value (ao)];
2377
2378 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2379 {
2380 if (c == '+' || c == '=')
2381 {
2382 copyto[o] = op[o];
2383 op[o] = gen_reg_rtx (omode);
2384 }
2385 else
2386 op[o] = copy_to_mode_reg (omode, op[o]);
2387 }
2388
2389 if (ao == 'r')
2390 retval = op[o];
2391 }
2392
2393 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2394 op[5], op[6], op[7], op[8], op[9]);
2395 emit_insn (pat);
2396
2397 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2398 if (copyto[o])
2399 {
2400 emit_move_insn (copyto[o], op[o]);
2401 if (op[o] == retval)
2402 retval = copyto[o];
2403 }
2404
2405 return retval;
2406 }
2407 \f
2408 /* Look for combinations of insns that can be converted to BN or BP
2409 opcodes. This is, unfortunately, too complex to do with MD
2410 patterns. */
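
/* Illustrative sketch of the EQ/NE case handled below: a sequence like

(set (reg:QI r7) (mem:QI <below-100 address>))
(set (reg:HI r7) (and:HI (reg:HI r7) (const_int 8)))
<conditional branch on (eq (reg:HI r7) (const_int 0))>

is rewritten so that the branch condition itself becomes
(and:QI (mem:QI ...) (const_int 8)), which the cbranch patterns can
then emit as a single bn/bp bit-test branch. */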
2411
2412 static void
2413 combine_bnp (rtx_insn *insn)
2414 {
2415 int insn_code, regno, need_extend;
2416 unsigned int mask;
2417 rtx cond, reg, qireg, mem;
2418 rtx_insn *and_insn, *load;
2419 machine_mode load_mode = QImode;
2420 machine_mode and_mode = QImode;
2421 rtx_insn *shift = NULL;
2422
2423 insn_code = recog_memoized (insn);
2424 if (insn_code != CODE_FOR_cbranchhi
2425 && insn_code != CODE_FOR_cbranchhi_neg)
2426 return;
2427
2428 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2429 cond = XEXP (cond, 1); /* if */
2430 cond = XEXP (cond, 0); /* cond */
2431 switch (GET_CODE (cond))
2432 {
2433 case NE:
2434 case EQ:
2435 need_extend = 0;
2436 break;
2437 case LT:
2438 case GE:
2439 need_extend = 1;
2440 break;
2441 default:
2442 return;
2443 }
2444
2445 reg = XEXP (cond, 0);
2446 if (! REG_P (reg))
2447 return;
2448 regno = REGNO (reg);
2449 if (XEXP (cond, 1) != const0_rtx)
2450 return;
2451 if (! find_regno_note (insn, REG_DEAD, regno))
2452 return;
2453 qireg = gen_rtx_REG (QImode, regno);
2454
2455 if (need_extend)
2456 {
2457 /* LT and GE conditionals should have a sign extend before
2458 them. */
2459 for (and_insn = prev_real_insn (insn);
2460 and_insn != NULL_RTX;
2461 and_insn = prev_real_insn (and_insn))
2462 {
2463 int and_code = recog_memoized (and_insn);
2464
2465 if (and_code == CODE_FOR_extendqihi2
2466 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2467 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2468 break;
2469
2470 if (and_code == CODE_FOR_movhi_internal
2471 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2472 {
2473 /* This is for testing bit 15. */
2474 and_insn = insn;
2475 break;
2476 }
2477
2478 if (reg_mentioned_p (reg, and_insn))
2479 return;
2480
2481 if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
2482 return;
2483 }
2484 }
2485 else
2486 {
2487 /* EQ and NE conditionals have an AND before them. */
2488 for (and_insn = prev_real_insn (insn);
2489 and_insn != NULL_RTX;
2490 and_insn = prev_real_insn (and_insn))
2491 {
2492 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2493 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2494 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2495 break;
2496
2497 if (reg_mentioned_p (reg, and_insn))
2498 return;
2499
2500 if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
2501 return;
2502 }
2503
2504 if (and_insn)
2505 {
2506 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2507 followed by an AND like this:
2508
2509 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2510 (clobber (reg:BI carry))])
2511
2512 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2513
2514 Attempt to detect this here. */
2515 for (shift = prev_real_insn (and_insn); shift;
2516 shift = prev_real_insn (shift))
2517 {
2518 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2519 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2520 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2521 break;
2522
2523 if (reg_mentioned_p (reg, shift)
2524 || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
2525 {
2526 shift = NULL;
2527 break;
2528 }
2529 }
2530 }
2531 }
2532
2533 if (and_insn == NULL_RTX)
2534 return;
2535
2536 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2537 load;
2538 load = prev_real_insn (load))
2539 {
2540 int load_code = recog_memoized (load);
2541
2542 if (load_code == CODE_FOR_movhi_internal
2543 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2544 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2545 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2546 {
2547 load_mode = HImode;
2548 break;
2549 }
2550
2551 if (load_code == CODE_FOR_movqi_internal
2552 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2553 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2554 {
2555 load_mode = QImode;
2556 break;
2557 }
2558
2559 if (load_code == CODE_FOR_zero_extendqihi2
2560 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2561 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2562 {
2563 load_mode = QImode;
2564 and_mode = HImode;
2565 break;
2566 }
2567
2568 if (reg_mentioned_p (reg, load))
2569 return;
2570
2571 if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
2572 return;
2573 }
2574 if (!load)
2575 return;
2576
2577 mem = SET_SRC (PATTERN (load));
2578
2579 if (need_extend)
2580 {
2581 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2582
2583 /* If the mem includes a zero-extend operation and we are
2584 going to generate a sign-extend operation, then strip the
2585 zero-extend and use the memory operand inside it. */
2586 if (GET_CODE (mem) == ZERO_EXTEND)
2587 mem = XEXP (mem, 0);
2588 }
2589 else
2590 {
2591 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2592 load_mode))
2593 return;
2594
2595 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2596
2597 if (shift)
2598 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2599 }
2600
2601 if (load_mode == HImode)
2602 {
2603 rtx addr = XEXP (mem, 0);
2604
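/* Only a QImode access is needed: if the bit lies in the high byte,
address the following byte instead (the target is little-endian) and
shift the mask down to match. */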
2605 if (! (mask & 0xff))
2606 {
2607 addr = plus_constant (Pmode, addr, 1);
2608 mask >>= 8;
2609 }
2610 mem = gen_rtx_MEM (QImode, addr);
2611 }
2612
2613 if (need_extend)
2614 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2615 else
2616 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2617
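/* The branch condition changed, so force INSN to be re-recognized. */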
2618 INSN_CODE (insn) = -1;
2619 delete_insn (load);
2620
2621 if (and_insn != insn)
2622 delete_insn (and_insn);
2623
2624 if (shift != NULL_RTX)
2625 delete_insn (shift);
2626 }
2627
2628 static void
2629 xstormy16_reorg (void)
2630 {
2631 rtx_insn *insn;
2632
2633 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2634 {
2635 if (! JUMP_P (insn))
2636 continue;
2637 combine_bnp (insn);
2638 }
2639 }
2640 \f
2641 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2642
2643 static bool
2644 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2645 {
2646 const HOST_WIDE_INT size = int_size_in_bytes (type);
2647 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2648 }
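
/* With 16-bit words this returns anything larger than the block of
argument registers (UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS bytes) in
memory, as well as anything of variable size, for which
int_size_in_bytes returns -1. */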
2649 \f
2650 #undef TARGET_ASM_ALIGNED_HI_OP
2651 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2652 #undef TARGET_ASM_ALIGNED_SI_OP
2653 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2654 #undef TARGET_ENCODE_SECTION_INFO
2655 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2656
2657 /* select_section doesn't handle .bss_below100, so switchable BSS sections are disabled. */
2658 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2659 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2660
2661 #undef TARGET_ASM_OUTPUT_MI_THUNK
2662 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2663 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2664 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2665
2666 #undef TARGET_PRINT_OPERAND
2667 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2668 #undef TARGET_PRINT_OPERAND_ADDRESS
2669 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address
2670
2671 #undef TARGET_MEMORY_MOVE_COST
2672 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2673 #undef TARGET_RTX_COSTS
2674 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2675 #undef TARGET_ADDRESS_COST
2676 #define TARGET_ADDRESS_COST xstormy16_address_cost
2677
2678 #undef TARGET_BUILD_BUILTIN_VA_LIST
2679 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2680 #undef TARGET_EXPAND_BUILTIN_VA_START
2681 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2682 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2683 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2684
2685 #undef TARGET_PROMOTE_FUNCTION_MODE
2686 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2687 #undef TARGET_PROMOTE_PROTOTYPES
2688 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2689
2690 #undef TARGET_FUNCTION_ARG
2691 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2692 #undef TARGET_FUNCTION_ARG_ADVANCE
2693 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2694
2695 #undef TARGET_RETURN_IN_MEMORY
2696 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2697 #undef TARGET_FUNCTION_VALUE
2698 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2699 #undef TARGET_LIBCALL_VALUE
2700 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2701 #undef TARGET_FUNCTION_VALUE_REGNO_P
2702 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p
2703
2704 #undef TARGET_MACHINE_DEPENDENT_REORG
2705 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2706
2707 #undef TARGET_PREFERRED_RELOAD_CLASS
2708 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2709 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2710 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class
2711
2712 #undef TARGET_LEGITIMATE_ADDRESS_P
2713 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2714 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2715 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p
2716
2717 #undef TARGET_CAN_ELIMINATE
2718 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2719
2720 #undef TARGET_TRAMPOLINE_INIT
2721 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2722
2723 struct gcc_target targetm = TARGET_INITIALIZER;
2724
2725 #include "gt-stormy16.h"