]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/stormy16/stormy16.c
decl.c, [...]: Remove redundant enum from machine_mode.
[thirdparty/gcc.git] / gcc / config / stormy16 / stormy16.c
1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "stringpool.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "expr.h"
43 #include "optabs.h"
44 #include "except.h"
45 #include "hashtab.h"
46 #include "hash-set.h"
47 #include "vec.h"
48 #include "machmode.h"
49 #include "input.h"
50 #include "function.h"
51 #include "target.h"
52 #include "target-def.h"
53 #include "tm_p.h"
54 #include "langhooks.h"
55 #include "hash-table.h"
56 #include "ggc.h"
57 #include "predict.h"
58 #include "dominance.h"
59 #include "cfg.h"
60 #include "cfgrtl.h"
61 #include "cfganal.h"
62 #include "lcm.h"
63 #include "cfgbuild.h"
64 #include "cfgcleanup.h"
65 #include "basic-block.h"
66 #include "tree-ssa-alias.h"
67 #include "internal-fn.h"
68 #include "gimple-fold.h"
69 #include "tree-eh.h"
70 #include "gimple-expr.h"
71 #include "is-a.h"
72 #include "gimple.h"
73 #include "gimplify.h"
74 #include "df.h"
75 #include "reload.h"
76 #include "builtins.h"
77
78 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
79 static void xstormy16_asm_out_constructor (rtx, int);
80 static void xstormy16_asm_out_destructor (rtx, int);
81 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
82 HOST_WIDE_INT, tree);
83
84 static void xstormy16_init_builtins (void);
85 static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
86 static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
87 static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
88 static bool xstormy16_return_in_memory (const_tree, const_tree);
89
90 static GTY(()) section *bss100_section;
91
92 /* Compute a (partial) cost for rtx X. Return true if the complete
93 cost has been computed, and false if subexpressions should be
94 scanned. In either case, *TOTAL contains the cost result. */
95
96 static bool
97 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
98 int opno ATTRIBUTE_UNUSED, int *total,
99 bool speed ATTRIBUTE_UNUSED)
100 {
101 switch (code)
102 {
103 case CONST_INT:
104 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
105 *total = COSTS_N_INSNS (1) / 2;
106 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
107 *total = COSTS_N_INSNS (1);
108 else
109 *total = COSTS_N_INSNS (2);
110 return true;
111
112 case CONST_DOUBLE:
113 case CONST:
114 case SYMBOL_REF:
115 case LABEL_REF:
116 *total = COSTS_N_INSNS (2);
117 return true;
118
119 case MULT:
120 *total = COSTS_N_INSNS (35 + 6);
121 return true;
122 case DIV:
123 *total = COSTS_N_INSNS (51 - 6);
124 return true;
125
126 default:
127 return false;
128 }
129 }
130
131 static int
132 xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
133 addr_space_t as ATTRIBUTE_UNUSED,
134 bool speed ATTRIBUTE_UNUSED)
135 {
136 return (CONST_INT_P (x) ? 2
137 : GET_CODE (x) == PLUS ? 7
138 : 5);
139 }
140
141 /* Worker function for TARGET_MEMORY_MOVE_COST. */
142
143 static int
144 xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
145 bool in)
146 {
147 return (5 + memory_move_secondary_cost (mode, rclass, in));
148 }
149
150 /* Branches are handled as follows:
151
152 1. HImode compare-and-branches. The machine supports these
153 natively, so the appropriate pattern is emitted directly.
154
155 2. SImode EQ and NE. These are emitted as pairs of HImode
156 compare-and-branches.
157
158 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
159 of a SImode subtract followed by a branch (not a compare-and-branch),
160 like this:
161 sub
162 sbc
163 blt
164
165 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
166 sub
167 sbc
168 blt
169 or
170 bne. */
171
172 /* Emit a branch of kind CODE to location LOC. */
173
void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  /* Case 4 of the comment above: SImode GT/LE/GTU/LEU is synthesized
     from two recursive branches (a strict-less-than test plus an
     equality test).  */
  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      /* For GT/GTU the first branch jumps over the second on failure,
	 so it targets a local label rather than LOC.  */
      if (gt_p)
	lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
	 created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
	emit_label (lab);
      return;
    }
  /* Case 2: SImode EQ/NE against a nonzero value is done as a pair of
     word-sized compare-and-branches.  */
  else if (mode == SImode
	   && (code == NE || code == EQ)
	   && op1 != const0_rtx)
    {
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      /* For EQ, the early words branch over the final test when they
	 differ; for NE they can branch straight to LOC.  */
      if (code == EQ)
	lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
	{
	  op0_word = simplify_gen_subreg (word_mode, op0, mode,
					  i * UNITS_PER_WORD);
	  op1_word = simplify_gen_subreg (word_mode, op1, mode,
					  i * UNITS_PER_WORD);
	  xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
	}
      /* The last word uses the requested comparison code directly.  */
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
				      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
				      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
	emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  /* Build the (set pc (if_then_else ...)) branch pattern.  */
  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
					      loc_ref, pc_rtx));

  /* All forms clobber the carry register.  */
  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
267
268 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
269 the arithmetic operation. Most of the work is done by
270 xstormy16_expand_arith. */
271
void
xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
			 rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx_insn *seq, *last_insn;
  rtx compare;

  /* Generate the subtract/compare sequence into a detached sequence
     so its final insn can be patched before it is emitted.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  /* Walk to the last insn of the generated sequence.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Rewrite that insn in place: give its comparison the original
     condition code and point its target at LABEL, turning it into
     the conditional branch.  Then emit the whole sequence.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
297
298
299 /* Return the string to output a conditional branch to LABEL, which is
300 the operand number of the label.
301
302 OP is the conditional expression, or NULL for branch-always.
303
304 REVERSED is nonzero if we should reverse the sense of the comparison.
305
306 INSN is the insn. */
307
308 char *
309 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
310 rtx_insn *insn)
311 {
312 static char string[64];
313 int need_longbranch = (op != NULL_RTX
314 ? get_attr_length (insn) == 8
315 : get_attr_length (insn) == 4);
316 int really_reversed = reversed ^ need_longbranch;
317 const char *ccode;
318 const char *templ;
319 const char *operands;
320 enum rtx_code code;
321
322 if (! op)
323 {
324 if (need_longbranch)
325 ccode = "jmpf";
326 else
327 ccode = "br";
328 sprintf (string, "%s %s", ccode, label);
329 return string;
330 }
331
332 code = GET_CODE (op);
333
334 if (! REG_P (XEXP (op, 0)))
335 {
336 code = swap_condition (code);
337 operands = "%3,%2";
338 }
339 else
340 operands = "%2,%3";
341
342 /* Work out which way this really branches. */
343 if (really_reversed)
344 code = reverse_condition (code);
345
346 switch (code)
347 {
348 case EQ: ccode = "z"; break;
349 case NE: ccode = "nz"; break;
350 case GE: ccode = "ge"; break;
351 case LT: ccode = "lt"; break;
352 case GT: ccode = "gt"; break;
353 case LE: ccode = "le"; break;
354 case GEU: ccode = "nc"; break;
355 case LTU: ccode = "c"; break;
356 case GTU: ccode = "hi"; break;
357 case LEU: ccode = "ls"; break;
358
359 default:
360 gcc_unreachable ();
361 }
362
363 if (need_longbranch)
364 templ = "b%s %s,.+8 | jmpf %s";
365 else
366 templ = "b%s %s,%s";
367 sprintf (string, templ, ccode, operands, label);
368
369 return string;
370 }
371
372 /* Return the string to output a conditional branch to LABEL, which is
373 the operand number of the label, but suitable for the tail of a
374 SImode branch.
375
376 OP is the conditional expression (OP is never NULL_RTX).
377
378 REVERSED is nonzero if we should reverse the sense of the comparison.
379
380 INSN is the insn. */
381
382 char *
383 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
384 rtx_insn *insn)
385 {
386 static char string[64];
387 int need_longbranch = get_attr_length (insn) >= 8;
388 int really_reversed = reversed ^ need_longbranch;
389 const char *ccode;
390 const char *templ;
391 char prevop[16];
392 enum rtx_code code;
393
394 code = GET_CODE (op);
395
396 /* Work out which way this really branches. */
397 if (really_reversed)
398 code = reverse_condition (code);
399
400 switch (code)
401 {
402 case EQ: ccode = "z"; break;
403 case NE: ccode = "nz"; break;
404 case GE: ccode = "ge"; break;
405 case LT: ccode = "lt"; break;
406 case GEU: ccode = "nc"; break;
407 case LTU: ccode = "c"; break;
408
409 /* The missing codes above should never be generated. */
410 default:
411 gcc_unreachable ();
412 }
413
414 switch (code)
415 {
416 case EQ: case NE:
417 {
418 int regnum;
419
420 gcc_assert (REG_P (XEXP (op, 0)));
421
422 regnum = REGNO (XEXP (op, 0));
423 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
424 }
425 break;
426
427 case GE: case LT: case GEU: case LTU:
428 strcpy (prevop, "sbc %2,%3");
429 break;
430
431 default:
432 gcc_unreachable ();
433 }
434
435 if (need_longbranch)
436 templ = "%s | b%s .+6 | jmpf %s";
437 else
438 templ = "%s | b%s %s";
439 sprintf (string, templ, prevop, ccode, label);
440
441 return string;
442 }
443 \f
444 /* Many machines have some registers that cannot be copied directly to or from
445 memory or even from other types of registers. An example is the `MQ'
446 register, which on most machines, can only be copied to or from general
447 registers, but not memory. Some machines allow copying all registers to and
448 from memory, but require a scratch register for stores to some memory
449 locations (e.g., those with symbolic address on the RT, and those with
450 certain symbolic address on the SPARC when compiling PIC). In some cases,
451 both an intermediate and a scratch register are required.
452
453 You should define these macros to indicate to the reload phase that it may
454 need to allocate at least one register for a reload in addition to the
455 register to contain the data. Specifically, if copying X to a register
456 RCLASS in MODE requires an intermediate register, you should define
457 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
458 whose registers can be used as intermediate registers or scratch registers.
459
460 If copying a register RCLASS in MODE to X requires an intermediate or scratch
461 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
462 largest register class required. If the requirements for input and output
463 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
464 instead of defining both macros identically.
465
466 The values returned by these macros are often `GENERAL_REGS'. Return
467 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
468 to or from a register of RCLASS in MODE without requiring a scratch register.
469 Do not define this macro if it would always return `NO_REGS'.
470
471 If a scratch register is required (either with or without an intermediate
472 register), you should define patterns for `reload_inM' or `reload_outM', as
473 required.. These patterns, which will normally be implemented with a
474 `define_expand', should be similar to the `movM' patterns, except that
475 operand 2 is the scratch register.
476
477 Define constraints for the reload register and scratch register that contain
478 a single register class. If the original reload register (whose class is
479 RCLASS) can meet the constraint given in the pattern, the value returned by
480 these macros is used for the class of the scratch register. Otherwise, two
481 additional reload registers are required. Their classes are obtained from
482 the constraints in the insn pattern.
483
484 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
485 either be in a hard register or in memory. Use `true_regnum' to find out;
486 it will return -1 if the pseudo is in memory and the hard register number if
487 it is in a register.
488
489 These macros should not be used in the case where a particular class of
490 registers can only be copied to memory and not to another class of
491 registers. In that case, secondary reload registers are not needed and
492 would not be helpful. Instead, a stack location must be used to perform the
493 copy and the `movM' pattern should use memory as an intermediate storage.
494 This case often occurs between floating-point and general registers. */
495
496 enum reg_class
497 xstormy16_secondary_reload_class (enum reg_class rclass,
498 machine_mode mode ATTRIBUTE_UNUSED,
499 rtx x)
500 {
501 /* This chip has the interesting property that only the first eight
502 registers can be moved to/from memory. */
503 if ((MEM_P (x)
504 || ((GET_CODE (x) == SUBREG || REG_P (x))
505 && (true_regnum (x) == -1
506 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
507 && ! reg_class_subset_p (rclass, EIGHT_REGS))
508 return EIGHT_REGS;
509
510 return NO_REGS;
511 }
512
513 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
514 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
515
516 static reg_class_t
517 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
518 {
519 if (rclass == GENERAL_REGS && MEM_P (x))
520 return EIGHT_REGS;
521
522 return rclass;
523 }
524
525 /* Predicate for symbols and addresses that reflect special 8-bit
526 addressing. */
527
528 int
529 xstormy16_below100_symbol (rtx x,
530 machine_mode mode ATTRIBUTE_UNUSED)
531 {
532 if (GET_CODE (x) == CONST)
533 x = XEXP (x, 0);
534 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
535 x = XEXP (x, 0);
536
537 if (GET_CODE (x) == SYMBOL_REF)
538 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
539
540 if (CONST_INT_P (x))
541 {
542 HOST_WIDE_INT i = INTVAL (x);
543
544 if ((i >= 0x0000 && i <= 0x00ff)
545 || (i >= 0x7f00 && i <= 0x7fff))
546 return 1;
547 }
548 return 0;
549 }
550
551 /* Likewise, but only for non-volatile MEMs, for patterns where the
552 MEM will get split into smaller sized accesses. */
553
554 int
555 xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
556 {
557 if (MEM_P (x) && MEM_VOLATILE_P (x))
558 return 0;
559 return xstormy16_below100_operand (x, mode);
560 }
561
562 /* Expand an 8-bit IOR. This either detects the one case we can
563 actually do, or uses a 16-bit IOR. */
564
void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* The one directly-supported case: VAL sets a single bit, and both
     IN and OUT are below-100 locations or registers.  */
  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* Otherwise, force the operands into registers as needed, widen
     everything to HImode subregs, and use the 16-bit IOR.  */
  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  /* If we substituted a fresh register for OUT, copy the result back
     to the caller's destination.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
606
607 /* Expand an 8-bit AND. This either detects the one case we can
608 actually do, or uses a 16-bit AND. */
609
void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* The one directly-supported case: VAL clears a single bit, and
     both IN and OUT are below-100 locations or registers.  */
  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* Otherwise, force the operands into registers as needed, widen
     everything to HImode subregs, and use the 16-bit AND.  */
  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  /* If we substituted a fresh register for OUT, copy the result back
     to the caller's destination.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
651
/* True if X is a CONST_INT which, after adding OFFSET, lies in the
   range -2048 .. 2047 (i.e. fits in a signed 12-bit value).  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True if X is a CONST_INT which, after adding OFFSET, is a valid
   absolute address: non-negative, below 0x8000, and within either
   the low 0x100 bytes or the 0x7F00..0x7FFF region.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
661
662 bool
663 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
664 rtx x, bool strict)
665 {
666 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
667 return true;
668
669 if (GET_CODE (x) == PLUS
670 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
671 {
672 x = XEXP (x, 0);
673 /* PR 31232: Do not allow INT+INT as an address. */
674 if (CONST_INT_P (x))
675 return false;
676 }
677
678 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
679 || GET_CODE (x) == POST_INC
680 || GET_CODE (x) == PRE_DEC)
681 x = XEXP (x, 0);
682
683 if (REG_P (x)
684 && REGNO_OK_FOR_BASE_P (REGNO (x))
685 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
686 return true;
687
688 if (xstormy16_below100_symbol (x, mode))
689 return true;
690
691 return false;
692 }
693
694 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
695
696 On this chip, this is true if the address is valid with an offset
697 of 0 but not of 6, because in that case it cannot be used as an
698 address for DImode or DFmode, or if the address is a post-increment
699 or pre-decrement address. */
700
static bool
xstormy16_mode_dependent_address_p (const_rtx x,
				    addr_space_t as ATTRIBUTE_UNUSED)
{
  /* A constant address that is valid with offset 0 but not with
     offset 6 cannot reach every word of a DImode/DFmode access.  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return true;

  /* Likewise for a base+offset address whose displacement stops being
     representable once 6 is added to it.  */
  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return true;

  /* Auto-increment addresses are now treated generically in recog.c.  */
  return false;
}
717
718 int
719 short_memory_operand (rtx x, machine_mode mode)
720 {
721 if (! memory_operand (x, mode))
722 return 0;
723 return (GET_CODE (XEXP (x, 0)) != PLUS);
724 }
725
726 /* Splitter for the 'move' patterns, for modes not directly implemented
727 by hardware. Emit insns to copy a value of mode MODE from SRC to
728 DEST.
729
730 This function is only called when reload_completed. */
731
void
xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;	 /* Nonzero if the source address auto-modifies.  */
  int dest_modifies = 0; /* Likewise for the destination address.  */
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;	 /* Address of whichever operand is a MEM.  */
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
	      && mode != QImode && mode != HImode
	      && nonimmediate_operand (dest, mode)
	      && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      /* Strip the volatile flag from the MEM we slice up; it is
	 re-applied to each word-sized piece below.  */
      if (dest_volatile)
	{
	  dest = copy_rtx (dest);
	  MEM_VOLATILE_P (dest) = 0;
	}
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
	{
	  src = copy_rtx (src);
	  MEM_VOLATILE_P (src) = 0;
	}
    }
  else
    mem_operand = NULL_RTX;

  /* Choose the copy direction so that no word is clobbered before it
     has been read.  */
  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register overlap: copy high-to-low when the
	 destination starts above the source.  */
      if (REG_P (src)
	  && REG_P (dest)
	  && reg_overlap_mentioned_p (dest, src)
	  && REGNO (dest) > REGNO (src))
	direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
	   || (GET_CODE (mem_operand) == PLUS
	       && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
				     mem_operand, 0));

      /* If the address uses the low destination word, copy high-to-low;
	 if it uses the high word, copy low-to-high.  */
      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
	direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
				  mem_operand, 0))
	direction = 1;
      else
	/* This means something like
	   (set (reg:DI r0) (mem:DI (reg:HI r1)))
	   which we'd need to support by doing the set of the second word
	   last.  */
	gcc_unreachable ();
    }

  /* Emit one word-sized move per iteration, in the chosen order.  */
  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      /* An auto-modifying address re-uses the same MEM each time; a
	 plain operand is sliced with a subreg at the word offset.  */
      if (src_modifies)
	w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
	MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
	w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_dest = simplify_gen_subreg (word_mode, dest, mode,
				      i * UNITS_PER_WORD);
      if (dest_volatile)
	MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
		  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      /* Record the auto-increment register so later passes see it.  */
      if (auto_inc_reg_rtx)
	REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
					    auto_inc_reg_rtx,
					    REG_NOTES (insn));
    }
}
860
861 /* Expander for the 'move' patterns. Emit insns to copy a value of
862 mode MODE from SRC to DEST. */
863
void
xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
{
  /* A PRE_MODIFY destination address: emit the base-register update
     (which clobbers the carry register) as a separate insn, then use
     the plain base register as the address.  */
  if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  /* Likewise for a PRE_MODIFY source address.  */
  else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions, so
     force other sources through a register first.  */
  if (! reload_in_progress
      && ! reload_completed
      && MEM_P (dest)
      && (! CONST_INT_P (XEXP (dest, 0))
	  || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && ! REG_P (src)
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
911 \f
912 /* Stack Layout:
913
914 The stack is laid out as follows:
915
916 SP->
917 FP-> Local variables
918 Register save area (up to 4 words)
919 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
920
921 AP-> Return address (two words)
922 9th procedure parameter word
923 10th procedure parameter word
924 ...
925 last procedure parameter word
926
927 The frame pointer location is tuned to make it most likely that all
928 parameters and local variables can be accessed using a load-indexed
929 instruction. */
930
/* A structure to describe the stack frame layout computed by
   xstormy16_compute_stack_layout.  All sizes and offsets are in
   bytes.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;		/* Local variables.  */
  int register_save_size;	/* Call-saved registers pushed.  */
  int stdarg_save_size;		/* Argument registers saved for stdarg.  */
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
945
/* Does REGNUM need to be saved in the prologue?  A register is saved
   if it is live and call-saved, or — in an interrupt function (IFUN
   nonzero) — if it is a non-fixed call-clobbered register other than
   the carry flag that is live or that a non-leaf body could clobber
   via a call.  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
952
953 /* Compute the stack layout. */
954
struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  /* One word for each register the prologue must push.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  /* A stdarg function also spills the incoming argument registers.  */
  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
		       + layout.register_save_size
		       + layout.stdarg_save_size);

  /* Place the frame pointer so that as much as possible of both the
     frame and the incoming arguments stays within a 2048-byte reach
     of it.  */
  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
	  + crtl->args.size <= 2048)
	layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
	layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
			  + layout.register_save_size
			  - INCOMING_FRAME_SP_OFFSET);
  /* The remaining offsets follow from the ones fixed above.  */
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
			- layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
995
996 /* Worker function for TARGET_CAN_ELIMINATE. */
997
998 static bool
999 xstormy16_can_eliminate (const int from, const int to)
1000 {
1001 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1002 ? ! frame_pointer_needed
1003 : true);
1004 }
1005
1006 /* Determine how all the special registers get eliminated. */
1007
1008 int
1009 xstormy16_initial_elimination_offset (int from, int to)
1010 {
1011 struct xstormy16_stack_layout layout;
1012 int result;
1013
1014 layout = xstormy16_compute_stack_layout ();
1015
1016 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1017 result = layout.sp_minus_fp - layout.locals_size;
1018 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1019 result = - layout.locals_size;
1020 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1021 result = - layout.fp_minus_ap;
1022 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1023 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
1024 else
1025 gcc_unreachable ();
1026
1027 return result;
1028 }
1029
1030 static rtx
1031 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1032 {
1033 rtx set, clobber, insn;
1034
1035 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1036 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1037 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1038 return insn;
1039 }
1040
1041 /* Called after register allocation to add any instructions needed for
1042 the prologue. Using a prologue insn is favored compared to putting
1043 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1044 since it allows the scheduler to intermix instructions with the
1045 saves of the caller saved registers. In some cases, it might be
1046 necessary to emit a barrier instruction as the last insn to prevent
1047 such scheduling.
1048
1049 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1050 so that the debug info generation code can handle them properly. */
1051
1052 void
1053 xstormy16_expand_prologue (void)
1054 {
1055 struct xstormy16_stack_layout layout;
1056 int regno;
1057 rtx insn;
1058 rtx mem_push_rtx;
1059 const int ifun = xstormy16_interrupt_function_p ();
1060
1061 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1062 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1063
1064 layout = xstormy16_compute_stack_layout ();
1065
1066 if (layout.locals_size >= 32768)
1067 error ("local variable memory requirements exceed capacity");
1068
1069 if (flag_stack_usage_info)
1070 current_function_static_stack_size = layout.frame_size;
1071
1072 /* Save the argument registers if necessary. */
1073 if (layout.stdarg_save_size)
1074 for (regno = FIRST_ARGUMENT_REGISTER;
1075 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1076 regno++)
1077 {
1078 rtx dwarf;
1079 rtx reg = gen_rtx_REG (HImode, regno);
1080
1081 insn = emit_move_insn (mem_push_rtx, reg);
1082 RTX_FRAME_RELATED_P (insn) = 1;
1083
1084 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1085
1086 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1087 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1088 reg);
1089 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1090 plus_constant (Pmode,
1091 stack_pointer_rtx,
1092 GET_MODE_SIZE (Pmode)));
1093 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1094 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1095 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1096 }
1097
1098 /* Push each of the registers to save. */
1099 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1100 if (REG_NEEDS_SAVE (regno, ifun))
1101 {
1102 rtx dwarf;
1103 rtx reg = gen_rtx_REG (HImode, regno);
1104
1105 insn = emit_move_insn (mem_push_rtx, reg);
1106 RTX_FRAME_RELATED_P (insn) = 1;
1107
1108 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1109
1110 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1111 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1112 reg);
1113 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1114 plus_constant (Pmode,
1115 stack_pointer_rtx,
1116 GET_MODE_SIZE (Pmode)));
1117 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1118 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1119 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1120 }
1121
1122 /* It's just possible that the SP here might be what we need for
1123 the new FP... */
1124 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1125 {
1126 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1127 RTX_FRAME_RELATED_P (insn) = 1;
1128 }
1129
1130 /* Allocate space for local variables. */
1131 if (layout.locals_size)
1132 {
1133 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1134 GEN_INT (layout.locals_size));
1135 RTX_FRAME_RELATED_P (insn) = 1;
1136 }
1137
1138 /* Set up the frame pointer, if required. */
1139 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1140 {
1141 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1142 RTX_FRAME_RELATED_P (insn) = 1;
1143
1144 if (layout.sp_minus_fp)
1145 {
1146 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1147 hard_frame_pointer_rtx,
1148 GEN_INT (- layout.sp_minus_fp));
1149 RTX_FRAME_RELATED_P (insn) = 1;
1150 }
1151 }
1152 }
1153
1154 /* Do we need an epilogue at all? */
1155
1156 int
1157 direct_return (void)
1158 {
1159 return (reload_completed
1160 && xstormy16_compute_stack_layout ().frame_size == 0
1161 && ! xstormy16_interrupt_function_p ());
1162 }
1163
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* A pop is a load through (mem:HI (pre_dec:HI sp)) -- the stack
     grows upward, mirroring the post_inc pushes in the prologue.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* When the frame pointer still points just past the locals we
	 can restore SP with a plain move instead of an add.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  Walk the registers in reverse
     order, undoing the prologue's pushes.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (- layout.stdarg_save_size));

  /* Return.  Interrupt handlers use a distinct return insn.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1211
1212 int
1213 xstormy16_epilogue_uses (int regno)
1214 {
1215 if (reload_completed && call_used_regs[regno])
1216 {
1217 const int ifun = xstormy16_interrupt_function_p ();
1218 return REG_NEEDS_SAVE (regno, ifun);
1219 }
1220 return 0;
1221 }
1222
/* Profiling is not implemented for this target; issue a "sorry"
   diagnostic instead of emitting profiler code.  */

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1228 \f
1229 /* Update CUM to advance past an argument in the argument list. The
1230 values MODE, TYPE and NAMED describe that argument. Once this is
1231 done, the variable CUM is suitable for analyzing the *following*
1232 argument with `TARGET_FUNCTION_ARG', etc.
1233
1234 This function need not do anything if the argument in question was
1235 passed on the stack. The compiler knows how to track the amount of
1236 stack space used for arguments without any special help. However,
1237 it makes life easier for xstormy16_build_va_list if it does update
1238 the word count. */
1239
1240 static void
1241 xstormy16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1242 const_tree type, bool named ATTRIBUTE_UNUSED)
1243 {
1244 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1245
1246 /* If an argument would otherwise be passed partially in registers,
1247 and partially on the stack, the whole of it is passed on the
1248 stack. */
1249 if (*cum < NUM_ARGUMENT_REGISTERS
1250 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1251 *cum = NUM_ARGUMENT_REGISTERS;
1252
1253 *cum += XSTORMY16_WORD_SIZE (type, mode);
1254 }
1255
1256 static rtx
1257 xstormy16_function_arg (cumulative_args_t cum_v, machine_mode mode,
1258 const_tree type, bool named ATTRIBUTE_UNUSED)
1259 {
1260 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1261
1262 if (mode == VOIDmode)
1263 return const0_rtx;
1264 if (targetm.calls.must_pass_in_stack (mode, type)
1265 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1266 return NULL_RTX;
1267 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1268 }
1269
1270 /* Build the va_list type.
1271
1272 For this chip, va_list is a record containing a counter and a pointer.
1273 The counter is of type 'int' and indicates how many bytes
1274 have been used to date. The pointer indicates the stack position
1275 for arguments that have not been passed in registers.
1276 To keep the layout nice, the pointer is first in the structure. */
1277
1278 static tree
1279 xstormy16_build_builtin_va_list (void)
1280 {
1281 tree f_1, f_2, record, type_decl;
1282
1283 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1284 type_decl = build_decl (BUILTINS_LOCATION,
1285 TYPE_DECL, get_identifier ("__va_list_tag"), record);
1286
1287 f_1 = build_decl (BUILTINS_LOCATION,
1288 FIELD_DECL, get_identifier ("base"),
1289 ptr_type_node);
1290 f_2 = build_decl (BUILTINS_LOCATION,
1291 FIELD_DECL, get_identifier ("count"),
1292 unsigned_type_node);
1293
1294 DECL_FIELD_CONTEXT (f_1) = record;
1295 DECL_FIELD_CONTEXT (f_2) = record;
1296
1297 TYPE_STUB_DECL (record) = type_decl;
1298 TYPE_NAME (record) = type_decl;
1299 TYPE_FIELDS (record) = f_1;
1300 DECL_CHAIN (f_1) = f_2;
1301
1302 layout_type (record);
1303
1304 return record;
1305 }
1306
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t,u;

  /* Varargs cannot be supported in interrupt handlers.  */
  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* The 'base' pointer field and the 'count' field created by
     xstormy16_build_builtin_va_list, in that order.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* base = __builtin_incoming_args - INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = <bytes already consumed by the named arguments>.  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
1343
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* The 'base' and 'count' fields of the va_list record.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Argument size rounded up to a whole number of words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, NULL);

  if (!must_stack)
    {
      tree r;

      /* If count + size would overrun the register save area, the
	 argument was passed on the stack: branch to lab_fromstack.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* Register case: addr = base + count.  */
      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* count_tmp = max (count_tmp, size of the register area).  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack case: the expression below computes
     addr = base + (reg area size + INCOMING_FRAME_SP_OFFSET
		    - count_tmp - size);
     see the stormy-abi document for the stack layout this reflects.  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  /* Common exit: record that this argument has been consumed.  */
  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
1438
/* Worker function for TARGET_TRAMPOLINE_INIT.

   M_TRAMP is the trampoline memory block, FNDECL the nested function
   and STATIC_CHAIN the value to load into the static chain register.
   Four 16-bit words are stored: an immediate-move opcode, the static
   chain value, and two words forming a far jump to FNDECL.
   NOTE(review): the constants 0x3130 and 0x0200 encode xstormy16
   instructions -- confirm against the processor manual before
   changing them.  */

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  /* Write through a register pointer so it can be stepped by 2 after
     each word.  */
  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  /* Word 0: opcode loading the static chain register.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 1: the static chain value itself.  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 2: low byte of the function address merged with 0x0200.  */
  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 3: the remaining high bits of the function address.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1472
1473 /* Worker function for TARGET_FUNCTION_VALUE. */
1474
1475 static rtx
1476 xstormy16_function_value (const_tree valtype,
1477 const_tree func ATTRIBUTE_UNUSED,
1478 bool outgoing ATTRIBUTE_UNUSED)
1479 {
1480 machine_mode mode;
1481 mode = TYPE_MODE (valtype);
1482 PROMOTE_MODE (mode, 0, valtype);
1483 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1484 }
1485
/* Worker function for TARGET_LIBCALL_VALUE.

   Library call results also come back in the return-value register,
   in MODE; FUN is unused.  */

static rtx
xstormy16_libcall_value (machine_mode mode,
			 const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
1494
1495 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1496
1497 static bool
1498 xstormy16_function_value_regno_p (const unsigned int regno)
1499 {
1500 return (regno == RETURN_VALUE_REGNUM);
1501 }
1502
/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  /* The register carrying the `this' pointer.  */
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Add DELTA to `this' (masked to 16 bits) and tail-jump to the
     real function with a far jump.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
1547
/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */

void
xstormy16_asm_output_aligned_common (FILE *stream,
				     tree decl,
				     const char *name,
				     int size,
				     int align,
				     int global)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* Objects whose symbol carries the below100 flag are laid out
     explicitly in the .bss_below100 section.  */
  if (mem != NULL_RTX
      && MEM_P (mem)
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      /* Convert ALIGN (in bits) to the log2 byte alignment that
	 .p2align expects.  */
      while (align > 8)
	{
	  align /= 2;
	  p2align ++;
	}

      name2 = default_strip_name_encoding (name);
      if (global)
	fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
	fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  /* Everything else goes through the usual .local/.comm directives.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
1603
/* Implement TARGET_ASM_INIT_SECTIONS.

   Create the special .bss_below100 section used by
   xstormy16_asm_output_aligned_common for "below100" objects.  */

static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1614
1615 /* Mark symbols with the "below100" attribute so that we can use the
1616 special addressing modes for them. */
1617
1618 static void
1619 xstormy16_encode_section_info (tree decl, rtx r, int first)
1620 {
1621 default_encode_section_info (decl, r, first);
1622
1623 if (TREE_CODE (decl) == VAR_DECL
1624 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1625 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1626 {
1627 rtx symbol = XEXP (r, 0);
1628
1629 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1630 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1631 }
1632 }
1633
1634 #undef TARGET_ASM_CONSTRUCTOR
1635 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1636 #undef TARGET_ASM_DESTRUCTOR
1637 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1638
1639 /* Output constructors and destructors. Just like
1640 default_named_section_asm_out_* but don't set the sections writable. */
1641
1642 static void
1643 xstormy16_asm_out_destructor (rtx symbol, int priority)
1644 {
1645 const char *section = ".dtors";
1646 char buf[16];
1647
1648 /* ??? This only works reliably with the GNU linker. */
1649 if (priority != DEFAULT_INIT_PRIORITY)
1650 {
1651 sprintf (buf, ".dtors.%.5u",
1652 /* Invert the numbering so the linker puts us in the proper
1653 order; constructors are run from right to left, and the
1654 linker sorts in increasing order. */
1655 MAX_INIT_PRIORITY - priority);
1656 section = buf;
1657 }
1658
1659 switch_to_section (get_section (section, 0, NULL));
1660 assemble_align (POINTER_SIZE);
1661 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1662 }
1663
1664 static void
1665 xstormy16_asm_out_constructor (rtx symbol, int priority)
1666 {
1667 const char *section = ".ctors";
1668 char buf[16];
1669
1670 /* ??? This only works reliably with the GNU linker. */
1671 if (priority != DEFAULT_INIT_PRIORITY)
1672 {
1673 sprintf (buf, ".ctors.%.5u",
1674 /* Invert the numbering so the linker puts us in the proper
1675 order; constructors are run from right to left, and the
1676 linker sorts in increasing order. */
1677 MAX_INIT_PRIORITY - priority);
1678 section = buf;
1679 }
1680
1681 switch_to_section (get_section (section, 0, NULL));
1682 assemble_align (POINTER_SIZE);
1683 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1684 }
1685 \f
1686 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1687
1688 Print a memory address as an operand to reference that memory location. */
1689
1690 static void
1691 xstormy16_print_operand_address (FILE *file, rtx address)
1692 {
1693 HOST_WIDE_INT offset;
1694 int pre_dec, post_inc;
1695
1696 /* There are a few easy cases. */
1697 if (CONST_INT_P (address))
1698 {
1699 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1700 return;
1701 }
1702
1703 if (CONSTANT_P (address) || LABEL_P (address))
1704 {
1705 output_addr_const (file, address);
1706 return;
1707 }
1708
1709 /* Otherwise, it's hopefully something of the form
1710 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1711 if (GET_CODE (address) == PLUS)
1712 {
1713 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1714 offset = INTVAL (XEXP (address, 1));
1715 address = XEXP (address, 0);
1716 }
1717 else
1718 offset = 0;
1719
1720 pre_dec = (GET_CODE (address) == PRE_DEC);
1721 post_inc = (GET_CODE (address) == POST_INC);
1722 if (pre_dec || post_inc)
1723 address = XEXP (address, 0);
1724
1725 gcc_assert (REG_P (address));
1726
1727 fputc ('(', file);
1728 if (pre_dec)
1729 fputs ("--", file);
1730 fputs (reg_names [REGNO (address)], file);
1731 if (post_inc)
1732 fputs ("++", file);
1733 if (offset != 0)
1734 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1735 fputc (')', file);
1736 }
1737
1738 /* Worker function for TARGET_PRINT_OPERAND.
1739
1740 Print an operand to an assembler instruction. */
1741
1742 static void
1743 xstormy16_print_operand (FILE *file, rtx x, int code)
1744 {
1745 switch (code)
1746 {
1747 case 'B':
1748 /* There is either one bit set, or one bit clear, in X.
1749 Print it preceded by '#'. */
1750 {
1751 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1752 HOST_WIDE_INT xx = 1;
1753 HOST_WIDE_INT l;
1754
1755 if (CONST_INT_P (x))
1756 xx = INTVAL (x);
1757 else
1758 output_operand_lossage ("'B' operand is not constant");
1759
1760 /* GCC sign-extends masks with the MSB set, so we have to
1761 detect all the cases that differ only in sign extension
1762 beyond the bits we care about. Normally, the predicates
1763 and constraints ensure that we have the right values. This
1764 works correctly for valid masks. */
1765 if (bits_set[xx & 7] <= 1)
1766 {
1767 /* Remove sign extension bits. */
1768 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1769 xx &= 0xff;
1770 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1771 xx &= 0xffff;
1772 l = exact_log2 (xx);
1773 }
1774 else
1775 {
1776 /* Add sign extension bits. */
1777 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1778 xx |= ~(HOST_WIDE_INT)0xff;
1779 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1780 xx |= ~(HOST_WIDE_INT)0xffff;
1781 l = exact_log2 (~xx);
1782 }
1783
1784 if (l == -1)
1785 output_operand_lossage ("'B' operand has multiple bits set");
1786
1787 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1788 return;
1789 }
1790
1791 case 'C':
1792 /* Print the symbol without a surrounding @fptr(). */
1793 if (GET_CODE (x) == SYMBOL_REF)
1794 assemble_name (file, XSTR (x, 0));
1795 else if (LABEL_P (x))
1796 output_asm_label (x);
1797 else
1798 xstormy16_print_operand_address (file, x);
1799 return;
1800
1801 case 'o':
1802 case 'O':
1803 /* Print the immediate operand less one, preceded by '#'.
1804 For 'O', negate it first. */
1805 {
1806 HOST_WIDE_INT xx = 0;
1807
1808 if (CONST_INT_P (x))
1809 xx = INTVAL (x);
1810 else
1811 output_operand_lossage ("'o' operand is not constant");
1812
1813 if (code == 'O')
1814 xx = -xx;
1815
1816 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1817 return;
1818 }
1819
1820 case 'b':
1821 /* Print the shift mask for bp/bn. */
1822 {
1823 HOST_WIDE_INT xx = 1;
1824 HOST_WIDE_INT l;
1825
1826 if (CONST_INT_P (x))
1827 xx = INTVAL (x);
1828 else
1829 output_operand_lossage ("'B' operand is not constant");
1830
1831 l = 7 - xx;
1832
1833 fputs (IMMEDIATE_PREFIX, file);
1834 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1835 return;
1836 }
1837
1838 case 0:
1839 /* Handled below. */
1840 break;
1841
1842 default:
1843 output_operand_lossage ("xstormy16_print_operand: unknown code");
1844 return;
1845 }
1846
1847 switch (GET_CODE (x))
1848 {
1849 case REG:
1850 fputs (reg_names [REGNO (x)], file);
1851 break;
1852
1853 case MEM:
1854 xstormy16_print_operand_address (file, XEXP (x, 0));
1855 break;
1856
1857 default:
1858 /* Some kind of constant or label; an immediate operand,
1859 so prefix it with '#' for the assembler. */
1860 fputs (IMMEDIATE_PREFIX, file);
1861 output_addr_const (file, x);
1862 break;
1863 }
1864
1865 return;
1866 }
1867 \f
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND + RANGE - 1.  */

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* Bias the index so the table is zero-based, then bounds-check it
     against RANGE with an unsigned comparison (which also catches
     values below LOWER_BOUND).  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  int_index = gen_lowpart_common (HImode, index);
  /* Scale the index by 4.  NOTE(review): this assumes each table
     entry emitted by xstormy16_output_addr_vec ('jmpf') is exactly
     four bytes -- confirm against the ISA.  */
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1898
1899 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1900 instructions, without label or alignment or any other special
1901 constructs. We know that the previous instruction will be the
1902 `tablejump_pcrel' output above.
1903
1904 TODO: it might be nice to output 'br' instructions if they could
1905 all reach. */
1906
1907 void
1908 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1909 {
1910 int vlen, idx;
1911
1912 switch_to_section (current_function_section ());
1913
1914 vlen = XVECLEN (table, 0);
1915 for (idx = 0; idx < vlen; idx++)
1916 {
1917 fputs ("\tjmpf ", file);
1918 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1919 fputc ('\n', file);
1920 }
1921 }
1922 \f
/* Expander for the `call' patterns.
   RETVAL is the RTL for the return register or NULL for void functions.
   DEST is the function to call, expressed as a MEM.
   COUNTER is ignored.  */

void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  machine_mode mode;

  gcc_assert (MEM_P (dest));
  dest = XEXP (dest, 0);

  /* A target that is neither constant nor already a register must be
     forced into one.  */
  if (! CONSTANT_P (dest) && ! REG_P (dest))
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* Indirect calls get a scratch register loaded with zero; direct
     calls just use const0_rtx.  NOTE(review): presumably the call
     insn patterns in stormy16.md require this USE -- confirm there
     before changing.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
1962 \f
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   When CODE is COMPARE, a branch template is generated
   (this saves duplicating code in xstormy16_split_cbranch).  */

void
xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* Negation is computed as 0 - SRC1.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Process one word at a time, least significant first, threading
     the carry through CARRY_REGNUM between words.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding zero in the lowest word is a no-op (no carry yet).  */
	  if (firstloop
	      && CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == 0)
	    continue;

	  /* The first add only sets the carry; subsequent adds also
	     consume it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For COMPARE, the most significant word also carries the
	     conditional-branch skeleton consumed by
	     xstormy16_split_cbranch.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && CONST_INT_P (w_src1)
		   && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip identity words: OR/XOR with 0, AND with -1.  */
	  if (CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2068
2069 /* The shift operations are split at output time for constant values;
2070 variable-width shifts get handed off to a library routine.
2071
2072 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2073 SIZE_R will be a CONST_INT, X will be a hard register. */
2074
2075 const char *
2076 xstormy16_output_shift (machine_mode mode, enum rtx_code code,
2077 rtx x, rtx size_r, rtx temp)
2078 {
2079 HOST_WIDE_INT size;
2080 const char *r0, *r1, *rt;
2081 static char r[64];
2082
2083 gcc_assert (CONST_INT_P (size_r)
2084 && REG_P (x)
2085 && mode == SImode);
2086
2087 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2088
2089 if (size == 0)
2090 return "";
2091
2092 r0 = reg_names [REGNO (x)];
2093 r1 = reg_names [REGNO (x) + 1];
2094
2095 /* For shifts of size 1, we can use the rotate instructions. */
2096 if (size == 1)
2097 {
2098 switch (code)
2099 {
2100 case ASHIFT:
2101 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2102 break;
2103 case ASHIFTRT:
2104 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2105 break;
2106 case LSHIFTRT:
2107 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2108 break;
2109 default:
2110 gcc_unreachable ();
2111 }
2112 return r;
2113 }
2114
2115 /* For large shifts, there are easy special cases. */
2116 if (size == 16)
2117 {
2118 switch (code)
2119 {
2120 case ASHIFT:
2121 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2122 break;
2123 case ASHIFTRT:
2124 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2125 break;
2126 case LSHIFTRT:
2127 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2128 break;
2129 default:
2130 gcc_unreachable ();
2131 }
2132 return r;
2133 }
2134 if (size > 16)
2135 {
2136 switch (code)
2137 {
2138 case ASHIFT:
2139 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2140 r1, r0, r0, r1, (int) size - 16);
2141 break;
2142 case ASHIFTRT:
2143 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2144 r0, r1, r1, r0, (int) size - 16);
2145 break;
2146 case LSHIFTRT:
2147 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2148 r0, r1, r1, r0, (int) size - 16);
2149 break;
2150 default:
2151 gcc_unreachable ();
2152 }
2153 return r;
2154 }
2155
2156 /* For the rest, we have to do more work. In particular, we
2157 need a temporary. */
2158 rt = reg_names [REGNO (temp)];
2159 switch (code)
2160 {
2161 case ASHIFT:
2162 sprintf (r,
2163 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2164 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2165 r1, rt);
2166 break;
2167 case ASHIFTRT:
2168 sprintf (r,
2169 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2170 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2171 r0, rt);
2172 break;
2173 case LSHIFTRT:
2174 sprintf (r,
2175 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2176 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2177 r0, rt);
2178 break;
2179 default:
2180 gcc_unreachable ();
2181 }
2182 return r;
2183 }
2184 \f
2185 /* Attribute handling. */
2186
2187 /* Return nonzero if the function is an interrupt function. */
2188
2189 int
2190 xstormy16_interrupt_function_p (void)
2191 {
2192 tree attributes;
2193
2194 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2195 any functions are declared, which is demonstrably wrong, but
2196 it is worked around here. FIXME. */
2197 if (!cfun)
2198 return 0;
2199
2200 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2201 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2202 }
2203
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes: "interrupt" marks a function type as
   an interrupt handler; "BELOW100"/"below100" (two spellings, same
   handler) mark an object for the below-100 address space.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  { "interrupt", 0, 0, false, true, true,
    xstormy16_handle_interrupt_attribute , false },
  { "BELOW100", 0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { "below100", 0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { NULL, 0, 0, false, false, false, NULL, false }
};
2224
2225 /* Handle an "interrupt" attribute;
2226 arguments as in struct attribute_spec.handler. */
2227
2228 static tree
2229 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2230 tree args ATTRIBUTE_UNUSED,
2231 int flags ATTRIBUTE_UNUSED,
2232 bool *no_add_attrs)
2233 {
2234 if (TREE_CODE (*node) != FUNCTION_TYPE)
2235 {
2236 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2237 name);
2238 *no_add_attrs = true;
2239 }
2240
2241 return NULL_TREE;
2242 }
2243
/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  */
2246
2247 static tree
2248 xstormy16_handle_below100_attribute (tree *node,
2249 tree name ATTRIBUTE_UNUSED,
2250 tree args ATTRIBUTE_UNUSED,
2251 int flags ATTRIBUTE_UNUSED,
2252 bool *no_add_attrs)
2253 {
2254 if (TREE_CODE (*node) != VAR_DECL
2255 && TREE_CODE (*node) != POINTER_TYPE
2256 && TREE_CODE (*node) != TYPE_DECL)
2257 {
2258 warning (OPT_Wattributes,
2259 "%<__BELOW100__%> attribute only applies to variables");
2260 *no_add_attrs = true;
2261 }
2262 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2263 {
2264 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2265 {
2266 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2267 "with auto storage class");
2268 *no_add_attrs = true;
2269 }
2270 }
2271
2272 return NULL_TREE;
2273 }
2274 \f
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Table of machine-specific builtins.  Each entry names the builtin,
   gives the insn code that implements it, a string describing how the
   insn's operands are filled in, and a string giving the C types of
   the return value (first character) and arguments.  The sdiv/udiv
   patterns produce both quotient and remainder, so the div and mod
   builtins share an insn and differ only in which operand ('r' vs
   't') becomes the return value.  */
static struct
{
  const char * name;
  int md_code;
  const char * arg_ops; /* 0..9, t for temp register, r for return value.  */
  const char * arg_types; /* s=short,l=long, upper case for unsigned.  */
}
  s16builtins[] =
{
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { NULL, 0, NULL, NULL }
};
2295
2296 static void
2297 xstormy16_init_builtins (void)
2298 {
2299 tree args[2], ret_type, arg = NULL_TREE, ftype;
2300 int i, a, n_args;
2301
2302 ret_type = void_type_node;
2303
2304 for (i = 0; s16builtins[i].name; i++)
2305 {
2306 n_args = strlen (s16builtins[i].arg_types) - 1;
2307
2308 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2309
2310 for (a = n_args - 1; a >= 0; a--)
2311 args[a] = NULL_TREE;
2312
2313 for (a = n_args; a >= 0; a--)
2314 {
2315 switch (s16builtins[i].arg_types[a])
2316 {
2317 case 's': arg = short_integer_type_node; break;
2318 case 'S': arg = short_unsigned_type_node; break;
2319 case 'l': arg = long_integer_type_node; break;
2320 case 'L': arg = long_unsigned_type_node; break;
2321 default: gcc_unreachable ();
2322 }
2323 if (a == 0)
2324 ret_type = arg;
2325 else
2326 args[a-1] = arg;
2327 }
2328 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2329 add_builtin_function (s16builtins[i].name, ftype,
2330 i, BUILT_IN_MD, NULL, NULL_TREE);
2331 }
2332 }
2333
static rtx
xstormy16_expand_builtin (tree exp, rtx target,
			  rtx subtarget ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  /* The builtin's index into s16builtins[] was stored as its function
     code by xstormy16_init_builtins.  */
  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand the actual call arguments into rtx values.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_normal (TREE_VALUE (argtree));
      argtree = TREE_CHAIN (argtree);
    }

  /* Build the insn operand vector from the arg_ops string: 'r' is the
     return value, 't' a scratch register, a digit selects the
     corresponding expanded argument.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      machine_mode omode;

      copyto[o] = 0;

      omode = (machine_mode) insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the operand fails the insn's predicate, route it through a
	 fresh register.  For output operands ('=' or '+') remember the
	 original so the result can be copied back afterwards.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  /* NOTE(review): all ten operand slots are passed even though the
     patterns use fewer; the surplus (uninitialized) slots appear to be
     ignored by the generator — confirm against GEN_FCN's definition.  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy back any outputs that were routed through a temporary,
     keeping RETVAL pointing at the caller-visible location.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
2400 \f
2401 /* Look for combinations of insns that can be converted to BN or BP
2402 opcodes. This is, unfortunately, too complex to do with MD
2403 patterns. */
2404
static void
combine_bnp (rtx_insn *insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, qireg, mem;
  rtx_insn *and_insn, *load;
  machine_mode load_mode = QImode;
  machine_mode and_mode = QImode;
  rtx_insn *shift = NULL;

  /* Only the two cbranchhi forms are candidates.  */
  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      /* EQ/NE test a bit isolated by a preceding AND.  */
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* LT/GE against zero test the sign bit, produced by a
	 preceding sign extension.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* The comparison must be REG <op> 0, and REG must die at the
     branch, otherwise its value is still needed and we cannot
     retarget the test at memory.  */
  reg = XEXP (cond, 0);
  if (! REG_P (reg))
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  int and_code = recog_memoized (and_insn);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and_insn = insn;
	      break;
	    }

	  /* Any other use of REG, or anything that is not a note or a
	     simple insn, blocks the transformation.  */
	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  if (recog_memoized (and_insn) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
	    return;
	}

      if (and_insn)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	     (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			(clobber (reg:BI carry))]

	     (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and_insn); shift;
	       shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      /* SHIFT is optional: give up on it (not on the whole
		 transformation) if something intervenes.  */
	      if (reg_mentioned_p (reg, shift)
		  || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
		{
		  shift = NULL;
		  break;
		}
	    }
	}
    }

  if (and_insn == NULL_RTX)
    return;

  /* Finally, find the load that feeds the tested register.  Only
     below-100 memory references qualify, since those are the
     addresses the bit-test instructions can encode.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* A sign test examines the top bit of the loaded quantity.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND must isolate exactly one bit.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
					 load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));

      /* Undo the preceding right shift by moving the mask up.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* The bit-test instructions address a byte; for an HImode load,
     select the byte that actually holds the tested bit.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (Pmode, addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test the memory byte directly.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  /* Force re-recognition of the modified branch and delete the
     now-redundant feeder insns.  */
  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and_insn != insn)
    delete_insn (and_insn);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2620
2621 static void
2622 xstormy16_reorg (void)
2623 {
2624 rtx_insn *insn;
2625
2626 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2627 {
2628 if (! JUMP_P (insn))
2629 continue;
2630 combine_bnp (insn);
2631 }
2632 }
2633 \f
2634 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2635
2636 static bool
2637 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2638 {
2639 const HOST_WIDE_INT size = int_size_in_bytes (type);
2640 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2641 }
2642 \f
/* Assembler directives for aligned data, and section handling.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

/* Thunk output.  */
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Operand printing.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

/* Cost model.  */
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs support.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Calling conventions.  */
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

/* Machine-dependent reorg (BN/BP combining, see xstormy16_reorg).  */
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Reload and addressing.  */
#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

/* The target vector; must follow all the macro redefinitions above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-stormy16.h"