]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/stormy16/stormy16.c
* config/alpha/alpha.c (alpha_preferred_reload_class,
[thirdparty/gcc.git] / gcc / config / stormy16 / stormy16.c
1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "toplev.h"
38 #include "obstack.h"
39 #include "tree.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "except.h"
43 #include "function.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "tm_p.h"
47 #include "langhooks.h"
48 #include "gimple.h"
49 #include "df.h"
50 #include "ggc.h"
51
/* Forward declarations for static functions defined later in this file.  */
static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
					   HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int *);
static int xstormy16_address_cost (rtx);
static bool xstormy16_return_in_memory (const_tree, const_tree);

/* Define the information needed to generate branch and scc insns.  This is
   stored from the compare operation.  */
struct rtx_def * xstormy16_compare_op0;
struct rtx_def * xstormy16_compare_op1;

/* Lazily created section object, rooted for garbage collection via
   GTY(()).  Presumably backs the below-100 data support used later in
   this file -- confirm against the section-creation code.  */
static GTY(()) section *bss100_section;
70
71 /* Compute a (partial) cost for rtx X. Return true if the complete
72 cost has been computed, and false if subexpressions should be
73 scanned. In either case, *TOTAL contains the cost result. */
74
75 static bool
76 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
77 int *total)
78 {
79 switch (code)
80 {
81 case CONST_INT:
82 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
83 *total = COSTS_N_INSNS (1) / 2;
84 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
85 *total = COSTS_N_INSNS (1);
86 else
87 *total = COSTS_N_INSNS (2);
88 return true;
89
90 case CONST_DOUBLE:
91 case CONST:
92 case SYMBOL_REF:
93 case LABEL_REF:
94 *total = COSTS_N_INSNS(2);
95 return true;
96
97 case MULT:
98 *total = COSTS_N_INSNS (35 + 6);
99 return true;
100 case DIV:
101 *total = COSTS_N_INSNS (51 - 6);
102 return true;
103
104 default:
105 return false;
106 }
107 }
108
109 static int
110 xstormy16_address_cost (rtx x)
111 {
112 return (GET_CODE (x) == CONST_INT ? 2
113 : GET_CODE (x) == PLUS ? 7
114 : 5);
115 }
116
117 /* Branches are handled as follows:
118
119 1. HImode compare-and-branches. The machine supports these
120 natively, so the appropriate pattern is emitted directly.
121
122 2. SImode EQ and NE. These are emitted as pairs of HImode
123 compare-and-branches.
124
125 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
126 of a SImode subtract followed by a branch (not a compare-and-branch),
127 like this:
128 sub
129 sbc
130 blt
131
132 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
133 sub
134 sbc
135 blt
136 or
137 bne
138 */
139
/* Emit a branch of kind CODE to location LOC.  The operands being
   compared are taken from the globals xstormy16_compare_op0/op1,
   which the compare expander stored earlier.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx loc)
{
  rtx op0 = xstormy16_compare_op0;
  rtx op1 = xstormy16_compare_op1;
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  /* Case 4 of the comment block above: SImode GT/LE/GTU/LEU are
     synthesized from an LT(U) branch plus an EQ/NE branch, via
     recursive calls.  */
  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
	lab = gen_label_rtx ();
      /* For GT(U), the LT(U) case must skip around the final branch
	 via LAB; for LE(U), either condition goes straight to LOC.  */
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
	 created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, loc);
      if (gt_p)
	emit_label (lab);
      return;
    }
  /* Case 2: SImode EQ/NE against a nonzero value is done word by
     word.  For EQ, a mismatching word short-circuits to LAB (fall
     through the final branch); for NE it branches straight to LOC.  */
  else if (mode == SImode
	   && (code == NE || code == EQ)
	   && op1 != const0_rtx)
    {
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
	lab = gen_label_rtx ();

      /* All words but the last: branch as soon as a word pair differs.
	 Note the globals are rewritten before each recursive call.  */
      for (i = 0; i < num_words - 1; i++)
	{
	  xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
						       i * UNITS_PER_WORD);
	  xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
						       i * UNITS_PER_WORD);
	  xstormy16_emit_cbranch (NE, code == EQ ? lab : loc);
	}
      /* The last word is compared with the original CODE.  */
      xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode,
						   i * UNITS_PER_WORD);
      xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode,
						   i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, loc);

      if (code == EQ)
	emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
					      loc_ref, pc_rtx));

  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode));

  /* Build the PARALLEL the branch patterns expect: HImode clobbers
     the carry scratch; SImode EQ/NE clobbers the copied operand;
     other SImode codes clobber both.  */
  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
236
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
			 rtx dest, rtx carry)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  /* Build the compare arithmetic in a detached sequence so we can
     patch its last insn before emitting it.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  /* Walk to the final insn of the sequence.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Rewrite, in place, the condition of the last insn's first SET to
     the requested comparison code, and point it at LABEL.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
266
267
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  /* A length of 8 (conditional) or 4 (unconditional) signals that the
     target is out of short-branch range.  */
  int need_longbranch = (op != NULL_RTX
			 ? get_attr_length (insn) == 8
			 : get_attr_length (insn) == 4);
  /* A long branch is emitted as a reversed short branch around a
     jmpf (see the templates below), so the sense flips once more.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  /* Branch-always: plain br, or jmpf when out of range.  */
  if (! op)
    {
      if (need_longbranch)
	ccode = "jmpf";
      else
	ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  /* Put the REG operand first, swapping the comparison to match.  */
  if (GET_CODE (XEXP (op, 0)) != REG)
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  /* Map the rtx comparison code to the machine's condition suffix.  */
  switch (code)
    {
    case EQ: ccode = "z"; break;
    case NE: ccode = "nz"; break;
    case GE: ccode = "ge"; break;
    case LT: ccode = "lt"; break;
    case GT: ccode = "gt"; break;
    case LE: ccode = "le"; break;
    case GEU: ccode = "nc"; break;
    case LTU: ccode = "c"; break;
    case GTU: ccode = "hi"; break;
    case LEU: ccode = "ls"; break;

    default:
      gcc_unreachable ();
    }

  /* Long form: reversed conditional branch over a far jump.  */
  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}
339
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  /* Length >= 8 means the target is out of short-branch range.  */
  int need_longbranch = get_attr_length (insn) >= 8;
  /* A long branch is a reversed short branch around a jmpf, flipping
     the sense once more.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  /* Map the comparison code to the machine's condition suffix.  Only
     the codes produced by the SImode branch splitters appear here.  */
  switch (code)
    {
    case EQ: ccode = "z"; break;
    case NE: ccode = "nz"; break;
    case GE: ccode = "ge"; break;
    case LT: ccode = "lt"; break;
    case GEU: ccode = "nc"; break;
    case LTU: ccode = "c"; break;

    /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  /* Build the insn that completes the SImode comparison: an OR of the
     two result words for EQ/NE, or the high-word subtract-with-carry
     for the ordered codes.  */
  switch (code)
    {
    case EQ: case NE:
      {
	int regnum;

	gcc_assert (GET_CODE (XEXP (op, 0)) == REG);

	regnum = REGNO (XEXP (op, 0));
	sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  /* Long form: completion insn, reversed branch over a far jump.  */
  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}
410 \f
411 /* Many machines have some registers that cannot be copied directly to or from
412 memory or even from other types of registers. An example is the `MQ'
413 register, which on most machines, can only be copied to or from general
414 registers, but not memory. Some machines allow copying all registers to and
415 from memory, but require a scratch register for stores to some memory
416 locations (e.g., those with symbolic address on the RT, and those with
417 certain symbolic address on the SPARC when compiling PIC). In some cases,
418 both an intermediate and a scratch register are required.
419
420 You should define these macros to indicate to the reload phase that it may
421 need to allocate at least one register for a reload in addition to the
422 register to contain the data. Specifically, if copying X to a register
423 RCLASS in MODE requires an intermediate register, you should define
424 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
425 whose registers can be used as intermediate registers or scratch registers.
426
427 If copying a register RCLASS in MODE to X requires an intermediate or scratch
428 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
429 largest register class required. If the requirements for input and output
430 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
431 instead of defining both macros identically.
432
433 The values returned by these macros are often `GENERAL_REGS'. Return
434 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
435 to or from a register of RCLASS in MODE without requiring a scratch register.
436 Do not define this macro if it would always return `NO_REGS'.
437
438 If a scratch register is required (either with or without an intermediate
439 register), you should define patterns for `reload_inM' or `reload_outM', as
440 required.. These patterns, which will normally be implemented with a
441 `define_expand', should be similar to the `movM' patterns, except that
442 operand 2 is the scratch register.
443
444 Define constraints for the reload register and scratch register that contain
445 a single register class. If the original reload register (whose class is
446 RCLASS) can meet the constraint given in the pattern, the value returned by
447 these macros is used for the class of the scratch register. Otherwise, two
448 additional reload registers are required. Their classes are obtained from
449 the constraints in the insn pattern.
450
451 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
452 either be in a hard register or in memory. Use `true_regnum' to find out;
453 it will return -1 if the pseudo is in memory and the hard register number if
454 it is in a register.
455
456 These macros should not be used in the case where a particular class of
457 registers can only be copied to memory and not to another class of
458 registers. In that case, secondary reload registers are not needed and
459 would not be helpful. Instead, a stack location must be used to perform the
460 copy and the `movM' pattern should use memory as an intermediate storage.
461 This case often occurs between floating-point and general registers. */
462
463 enum reg_class
464 xstormy16_secondary_reload_class (enum reg_class rclass,
465 enum machine_mode mode,
466 rtx x)
467 {
468 /* This chip has the interesting property that only the first eight
469 registers can be moved to/from memory. */
470 if ((GET_CODE (x) == MEM
471 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
472 && (true_regnum (x) == -1
473 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
474 && ! reg_class_subset_p (rclass, EIGHT_REGS))
475 return EIGHT_REGS;
476
477 /* When reloading a PLUS, the carry register will be required
478 unless the inc or dec instructions can be used. */
479 if (xstormy16_carry_plus_operand (x, mode))
480 return CARRY_REGS;
481
482 return NO_REGS;
483 }
484
485 enum reg_class
486 xstormy16_preferred_reload_class (rtx x, enum reg_class rclass)
487 {
488 if (rclass == GENERAL_REGS
489 && GET_CODE (x) == MEM)
490 return EIGHT_REGS;
491
492 return rclass;
493 }
494
495 /* Predicate for symbols and addresses that reflect special 8-bit
496 addressing. */
497 int
498 xstormy16_below100_symbol (rtx x,
499 enum machine_mode mode ATTRIBUTE_UNUSED)
500 {
501 if (GET_CODE (x) == CONST)
502 x = XEXP (x, 0);
503 if (GET_CODE (x) == PLUS
504 && GET_CODE (XEXP (x, 1)) == CONST_INT)
505 x = XEXP (x, 0);
506
507 if (GET_CODE (x) == SYMBOL_REF)
508 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
509
510 if (GET_CODE (x) == CONST_INT)
511 {
512 HOST_WIDE_INT i = INTVAL (x);
513 if ((i >= 0x0000 && i <= 0x00ff)
514 || (i >= 0x7f00 && i <= 0x7fff))
515 return 1;
516 }
517 return 0;
518 }
519
520 /* Likewise, but only for non-volatile MEMs, for patterns where the
521 MEM will get split into smaller sized accesses. */
522 int
523 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
524 {
525 if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
526 return 0;
527 return xstormy16_below100_operand (x, mode);
528 }
529
/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  */
void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Fast path: setting a single bit, which iorqi3_internal handles
     directly on a below-100 operand or register.  */
  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force the operands into registers (constants may
     stay), then do the operation in HImode via subregs.  */
  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  /* Copy back if the result was redirected through a fresh register.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
571
/* Likewise, for AND.  */
void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  /* Fast path: clearing a single bit, which andqi3_internal handles
     directly on a below-100 operand or register.  */
  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  /* General case: force the operands into registers (constants may
     stay), then do the operation in HImode via subregs.  */
  if (GET_CODE (in) != REG)
    in = copy_to_mode_reg (QImode, in);
  if (GET_CODE (val) != REG
      && GET_CODE (val) != CONST_INT)
    val = copy_to_mode_reg (QImode, val);
  if (GET_CODE (out) != REG)
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);
  if (GET_CODE (val) != CONST_INT)
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  /* Copy back if the result was redirected through a fresh register.  */
  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
612
/* Nonzero if X is a CONST_INT such that X + OFFSET lies in the signed
   range [-2048, 2047] (checked with an unsigned-compare trick).  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
 (GET_CODE (X) == CONST_INT						\
  && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* Nonzero if X is a CONST_INT such that X + OFFSET is a valid absolute
   address: non-negative, below 0x8000, and either below 0x100 or at or
   above 0x7F00 (the two directly addressable windows).  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
 (GET_CODE (X) == CONST_INT						\
  && INTVAL (X) + (OFFSET) >= 0						\
  && INTVAL (X) + (OFFSET) < 0x8000					\
  && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
622
/* Return nonzero if X is a legitimate memory address for MODE.
   STRICT nonzero means pseudo-registers are not acceptable as bases.  */
int
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx x, int strict)
{
  /* A bare constant in one of the directly addressable windows.  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return 1;

  /* Base plus small constant offset; the base is validated below.  */
  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    {
      x = XEXP (x, 0);
      /* PR 31232: Do not allow INT+INT as an address.  */
      if (GET_CODE (x) == CONST_INT)
	return 0;
    }

  /* Strip side-effect address forms down to their base register.  */
  if ((GET_CODE (x) == PRE_MODIFY
       && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  /* A base register; under STRICT it must be a hard register.  */
  if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return 1;

  /* A symbol placed in the below-100 address range.  */
  if (xstormy16_below100_symbol (x, mode))
    return 1;

  return 0;
}
654
/* Return nonzero if memory address X (an RTX) can have different
   meanings depending on the machine mode of the memory reference it
   is used for or if the address is valid for some modes but not
   others.

   Autoincrement and autodecrement addresses typically have mode-dependent
   effects because the amount of the increment or decrement is the size of the
   operand being addressed.  Some machines have other mode-dependent addresses.
   Many RISC machines have no mode-dependent addresses.

   You may assume that ADDR is a valid address for the machine.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */
int
xstormy16_mode_dependent_address_p (rtx x)
{
  /* A constant address good at offset 0 but not at offset 6 cannot
     reach every word of a multi-word (e.g. DImode) access.  */
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return 1;

  /* Likewise for the offset of a base-plus-offset address.  */
  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return 1;

  /* NOTE(review): this strip is leftover -- X is not examined again
     below, so it has no effect.  */
  if (GET_CODE (x) == PLUS)
    x = XEXP (x, 0);

  /* Auto-increment addresses are now treated generically in recog.c.  */

  return 0;
}
690
691 /* A C expression that defines the optional machine-dependent constraint
692 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific
693 types of operands, usually memory references, for the target machine.
694 Normally this macro will not be defined. If it is required for a particular
695 target machine, it should return 1 if VALUE corresponds to the operand type
696 represented by the constraint letter C. If C is not defined as an extra
697 constraint, the value returned should be 0 regardless of VALUE. */
698 int
699 xstormy16_extra_constraint_p (rtx x, int c)
700 {
701 switch (c)
702 {
703 /* 'Q' is for pushes. */
704 case 'Q':
705 return (GET_CODE (x) == MEM
706 && GET_CODE (XEXP (x, 0)) == POST_INC
707 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
708
709 /* 'R' is for pops. */
710 case 'R':
711 return (GET_CODE (x) == MEM
712 && GET_CODE (XEXP (x, 0)) == PRE_DEC
713 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx);
714
715 /* 'S' is for immediate memory addresses. */
716 case 'S':
717 return (GET_CODE (x) == MEM
718 && GET_CODE (XEXP (x, 0)) == CONST_INT
719 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0));
720
721 /* 'T' is for Rx. */
722 case 'T':
723 /* Not implemented yet. */
724 return 0;
725
726 /* 'U' is for CONST_INT values not between 2 and 15 inclusive,
727 for allocating a scratch register for 32-bit shifts. */
728 case 'U':
729 return (GET_CODE (x) == CONST_INT
730 && (INTVAL (x) < 2 || INTVAL (x) > 15));
731
732 /* 'Z' is for CONST_INT value zero. This is for adding zero to
733 a register in addhi3, which would otherwise require a carry. */
734 case 'Z':
735 return (GET_CODE (x) == CONST_INT
736 && (INTVAL (x) == 0));
737
738 case 'W':
739 return xstormy16_below100_operand (x, GET_MODE (x));
740
741 default:
742 return 0;
743 }
744 }
745
746 int
747 short_memory_operand (rtx x, enum machine_mode mode)
748 {
749 if (! memory_operand (x, mode))
750 return 0;
751 return (GET_CODE (XEXP (x, 0)) != PLUS);
752 }
753
/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.
*/

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
	      && mode != QImode && mode != HImode
	      && nonimmediate_operand (dest, mode)
	      && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (GET_CODE (dest) == MEM)
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
	{
	  /* Clear the flag on a copy so the word-sized pieces can be
	     generated; it is reinstated on each piece below.  */
	  dest = copy_rtx (dest);
	  MEM_VOLATILE_P (dest) = 0;
	}
    }
  else if (GET_CODE (src) == MEM)
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
	{
	  src = copy_rtx (src);
	  MEM_VOLATILE_P (src) = 0;
	}
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register copy with overlap: go high-to-low so no
	 source word is clobbered before it is read.  */
      if (GET_CODE (src) == REG
	  && GET_CODE (dest) == REG
	  && reg_overlap_mentioned_p (dest, src)
	  && REGNO (dest) > REGNO (src))
	direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
	   || (GET_CODE (mem_operand) == PLUS
	       && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    /* Pre-decrement addressing walks downwards.  */
    direction = -1;
  else if (GET_CODE (src) == MEM
	   && reg_overlap_mentioned_p (dest, src))
    {
      /* Loading into a register that appears in the address: choose
	 the direction that writes the address register last.  */
      int regno;

      gcc_assert (GET_CODE (dest) == REG);
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
				     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
	direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
				  mem_operand, 0))
	direction = 1;
      else
	/* This means something like
	   (set (reg:DI r0) (mem:DI (reg:HI r1)))
	   which we'd need to support by doing the set of the second word
	   last.  */
	gcc_unreachable ();
    }

  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      /* For side-effect addresses, reuse the auto-modified address for
	 every word; otherwise take word I as a subreg.  */
      if (src_modifies)
	w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
	MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
	w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_dest = simplify_gen_subreg (word_mode, dest, mode,
				      i * UNITS_PER_WORD);
      if (dest_volatile)
	MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
		  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
	REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
					    auto_inc_reg_rtx,
					    REG_NOTES (insn));
    }
}
890
/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
{
  /* Lower a PRE_MODIFY destination address: emit the address update
     (clobbering hard register 16) as its own insn, then address the
     memory through the plain base register.  */
  if ((GET_CODE (dest) == MEM) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  /* Likewise for a PRE_MODIFY source address.  */
  else if ((GET_CODE (src) == MEM) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && GET_CODE (dest) == MEM
      && (GET_CODE (XEXP (dest, 0)) != CONST_INT
	  || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && GET_CODE (src) != REG
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
941
942 \f
943 /* Stack Layout:
944
945 The stack is laid out as follows:
946
947 SP->
948 FP-> Local variables
949 Register save area (up to 4 words)
950 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
951
952 AP-> Return address (two words)
953 9th procedure parameter word
954 10th procedure parameter word
955 ...
956 last procedure parameter word
957
958 The frame pointer location is tuned to make it most likely that all
959 parameters and local variables can be accessed using a load-indexed
960 instruction. */
961
/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets, as computed by xstormy16_compute_stack_layout:
     first local relative to AP, SP relative to FP, FP relative to AP.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};
976
/* Does REGNO need to be saved?  True for live call-saved registers,
   and -- in an interrupt function (IFUN) -- also for non-fixed
   call-used registers other than the carry class, when they are live
   or the function is not a leaf (a call could clobber them).  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNO_REG_CLASS (REGNUM) != CARRY_REGS)			\
       && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
983
/* Compute the stack layout.  Returns a filled-in layout structure;
   see the stack diagram above for the frame organization.  */
struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  /* One word for each register REG_NEEDS_SAVE selects.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  /* Stdarg functions also dump the argument registers to the stack.  */
  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
		       + layout.register_save_size
		       + layout.stdarg_save_size);

  /* Tune the frame pointer placement so both the incoming arguments
     and the frame stay within a 2048-byte offset where possible.  */
  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size + INCOMING_FRAME_SP_OFFSET
	  + crtl->args.size <= 2048)
	layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET;
      else
	layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
			  + layout.register_save_size
			  + INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET
			- layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
1025
1026 /* Determine how all the special registers get eliminated. */
1027 int
1028 xstormy16_initial_elimination_offset (int from, int to)
1029 {
1030 struct xstormy16_stack_layout layout;
1031 int result;
1032
1033 layout = xstormy16_compute_stack_layout ();
1034
1035 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1036 result = layout.sp_minus_fp - layout.locals_size;
1037 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1038 result = -layout.locals_size;
1039 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1040 result = -layout.fp_minus_ap;
1041 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1042 result = -(layout.sp_minus_fp + layout.fp_minus_ap);
1043 else
1044 gcc_unreachable ();
1045
1046 return result;
1047 }
1048
1049 static rtx
1050 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1051 {
1052 rtx set, clobber, insn;
1053
1054 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1055 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16));
1056 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1057 return insn;
1058 }
1059
1060 /* Called after register allocation to add any instructions needed for
1061 the prologue. Using a prologue insn is favored compared to putting
1062 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1063 since it allows the scheduler to intermix instructions with the
1064 saves of the caller saved registers. In some cases, it might be
1065 necessary to emit a barrier instruction as the last insn to prevent
1066 such scheduling.
1067
1068 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1069 so that the debug info generation code can handle them properly. */
1070 void
1071 xstormy16_expand_prologue (void)
1072 {
1073 struct xstormy16_stack_layout layout;
1074 int regno;
1075 rtx insn;
1076 rtx mem_push_rtx;
1077 const int ifun = xstormy16_interrupt_function_p ();
1078
1079 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1080 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1081
1082 layout = xstormy16_compute_stack_layout ();
1083
1084 if (layout.locals_size >= 32768)
1085 error ("local variable memory requirements exceed capacity");
1086
1087 /* Save the argument registers if necessary. */
1088 if (layout.stdarg_save_size)
1089 for (regno = FIRST_ARGUMENT_REGISTER;
1090 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1091 regno++)
1092 {
1093 rtx dwarf;
1094 rtx reg = gen_rtx_REG (HImode, regno);
1095
1096 insn = emit_move_insn (mem_push_rtx, reg);
1097 RTX_FRAME_RELATED_P (insn) = 1;
1098
1099 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1100
1101 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1102 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1103 reg);
1104 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1105 plus_constant (stack_pointer_rtx,
1106 GET_MODE_SIZE (Pmode)));
1107 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1108 dwarf,
1109 REG_NOTES (insn));
1110 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1111 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1112 }
1113
1114 /* Push each of the registers to save. */
1115 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1116 if (REG_NEEDS_SAVE (regno, ifun))
1117 {
1118 rtx dwarf;
1119 rtx reg = gen_rtx_REG (HImode, regno);
1120
1121 insn = emit_move_insn (mem_push_rtx, reg);
1122 RTX_FRAME_RELATED_P (insn) = 1;
1123
1124 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1125
1126 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1127 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1128 reg);
1129 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1130 plus_constant (stack_pointer_rtx,
1131 GET_MODE_SIZE (Pmode)));
1132 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1133 dwarf,
1134 REG_NOTES (insn));
1135 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1136 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1137 }
1138
1139 /* It's just possible that the SP here might be what we need for
1140 the new FP... */
1141 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1142 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1143
1144 /* Allocate space for local variables. */
1145 if (layout.locals_size)
1146 {
1147 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1148 GEN_INT (layout.locals_size));
1149 RTX_FRAME_RELATED_P (insn) = 1;
1150 }
1151
1152 /* Set up the frame pointer, if required. */
1153 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1154 {
1155 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1156
1157 if (layout.sp_minus_fp)
1158 emit_addhi3_postreload (hard_frame_pointer_rtx,
1159 hard_frame_pointer_rtx,
1160 GEN_INT (-layout.sp_minus_fp));
1161 }
1162 }
1163
1164 /* Do we need an epilogue at all? */
1165 int
1166 direct_return (void)
1167 {
1168 return (reload_completed
1169 && xstormy16_compute_stack_layout ().frame_size == 0);
1170 }
1171
1172 /* Called after register allocation to add any instructions needed for
1173 the epilogue. Using an epilogue insn is favored compared to putting
1174 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1175 since it allows the scheduler to intermix instructions with the
1176 saves of the caller saved registers. In some cases, it might be
1177 necessary to emit a barrier instruction as the last insn to prevent
1178 such scheduling. */
1179
1180 void
1181 xstormy16_expand_epilogue (void)
1182 {
1183 struct xstormy16_stack_layout layout;
1184 rtx mem_pop_rtx, insn;
1185 int regno;
1186 const int ifun = xstormy16_interrupt_function_p ();
1187
1188 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1189 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1190
1191 layout = xstormy16_compute_stack_layout ();
1192
1193 /* Pop the stack for the locals. */
1194 if (layout.locals_size)
1195 {
1196 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1197 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1198 else
1199 {
1200 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1201 GEN_INT (- layout.locals_size));
1202 RTX_FRAME_RELATED_P (insn) = 1;
1203 }
1204 }
1205
1206 /* Restore any call-saved registers. */
1207 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1208 if (REG_NEEDS_SAVE (regno, ifun))
1209 {
1210 rtx dwarf;
1211
1212 insn = emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1213 RTX_FRAME_RELATED_P (insn) = 1;
1214 dwarf = gen_rtx_SET (Pmode, stack_pointer_rtx,
1215 plus_constant (stack_pointer_rtx,
1216 -GET_MODE_SIZE (Pmode)));
1217 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1218 dwarf,
1219 REG_NOTES (insn));
1220 }
1221
1222 /* Pop the stack for the stdarg save area. */
1223 if (layout.stdarg_save_size)
1224 {
1225 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1226 GEN_INT (- layout.stdarg_save_size));
1227 RTX_FRAME_RELATED_P (insn) = 1;
1228 }
1229
1230 /* Return. */
1231 if (ifun)
1232 emit_jump_insn (gen_return_internal_interrupt ());
1233 else
1234 emit_jump_insn (gen_return_internal ());
1235 }
1236
1237 int
1238 xstormy16_epilogue_uses (int regno)
1239 {
1240 if (reload_completed && call_used_regs[regno])
1241 {
1242 const int ifun = xstormy16_interrupt_function_p ();
1243 return REG_NEEDS_SAVE (regno, ifun);
1244 }
1245 return 0;
1246 }
1247
/* Worker for FUNCTION_PROFILER.  Profiling is not implemented for
   this target; issue a "sorry" diagnostic rather than silently
   generating code without profiling hooks.  */
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1253
1254 \f
1255 /* Return an updated summarizer variable CUM to advance past an
1256 argument in the argument list. The values MODE, TYPE and NAMED
1257 describe that argument. Once this is done, the variable CUM is
1258 suitable for analyzing the *following* argument with
1259 `FUNCTION_ARG', etc.
1260
1261 This function need not do anything if the argument in question was
1262 passed on the stack. The compiler knows how to track the amount of
1263 stack space used for arguments without any special help. However,
1264 it makes life easier for xstormy16_build_va_list if it does update
1265 the word count. */
1266 CUMULATIVE_ARGS
1267 xstormy16_function_arg_advance (CUMULATIVE_ARGS cum, enum machine_mode mode,
1268 tree type, int named ATTRIBUTE_UNUSED)
1269 {
1270 /* If an argument would otherwise be passed partially in registers,
1271 and partially on the stack, the whole of it is passed on the
1272 stack. */
1273 if (cum < NUM_ARGUMENT_REGISTERS
1274 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1275 cum = NUM_ARGUMENT_REGISTERS;
1276
1277 cum += XSTORMY16_WORD_SIZE (type, mode);
1278
1279 return cum;
1280 }
1281
1282 rtx
1283 xstormy16_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
1284 tree type, int named ATTRIBUTE_UNUSED)
1285 {
1286 if (mode == VOIDmode)
1287 return const0_rtx;
1288 if (targetm.calls.must_pass_in_stack (mode, type)
1289 || cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1290 return 0;
1291 return gen_rtx_REG (mode, cum + 2);
1292 }
1293
1294 /* Build the va_list type.
1295
1296 For this chip, va_list is a record containing a counter and a pointer.
1297 The counter is of type 'int' and indicates how many bytes
1298 have been used to date. The pointer indicates the stack position
1299 for arguments that have not been passed in registers.
1300 To keep the layout nice, the pointer is first in the structure. */
1301
1302 static tree
1303 xstormy16_build_builtin_va_list (void)
1304 {
1305 tree f_1, f_2, record, type_decl;
1306
1307 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1308 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
1309
1310 f_1 = build_decl (FIELD_DECL, get_identifier ("base"),
1311 ptr_type_node);
1312 f_2 = build_decl (FIELD_DECL, get_identifier ("count"),
1313 unsigned_type_node);
1314
1315 DECL_FIELD_CONTEXT (f_1) = record;
1316 DECL_FIELD_CONTEXT (f_2) = record;
1317
1318 TREE_CHAIN (record) = type_decl;
1319 TYPE_NAME (record) = type_decl;
1320 TYPE_FIELDS (record) = f_1;
1321 TREE_CHAIN (f_1) = f_2;
1322
1323 layout_type (record);
1324
1325 return record;
1326 }
1327
1328 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1329 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1330 variable to initialize. NEXTARG is the machine independent notion of the
1331 'next' argument after the variable arguments. */
1332 static void
1333 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1334 {
1335 tree f_base, f_count;
1336 tree base, count;
1337 tree t,u;
1338
1339 if (xstormy16_interrupt_function_p ())
1340 error ("cannot use va_start in interrupt function");
1341
1342 f_base = TYPE_FIELDS (va_list_type_node);
1343 f_count = TREE_CHAIN (f_base);
1344
1345 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1346 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1347 NULL_TREE);
1348
1349 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1350 u = build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET);
1351 u = fold_convert (TREE_TYPE (count), u);
1352 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
1353 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1354 TREE_SIDE_EFFECTS (t) = 1;
1355 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1356
1357 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1358 build_int_cst (NULL_TREE,
1359 crtl->args.info * UNITS_PER_WORD));
1360 TREE_SIDE_EFFECTS (t) = 1;
1361 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1362 }
1363
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Returns an expression that dereferences the computed argument slot.
   Note: This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* The two va_list fields: BASE points at the stack overflow area,
     COUNT is the number of argument bytes consumed so far.  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = TREE_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Argument size, rounded up to a whole number of words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label ();
  lab_fromstack = create_artificial_label ();
  addr = create_tmp_var (ptr_type_node, NULL);

  if (!must_stack)
    {
      tree r;

      /* if (count_tmp + size > size_of_reg_args) goto lab_fromstack;
	 i.e. the argument does not fit in the register save area.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      /* Register case: addr = base + count_tmp; goto lab_gotaddr.  */
      t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* count_tmp = max (count_tmp, size_of_reg_args): skip whatever
	 is left of the register save area.  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Stack case: the slot lives at
     base - (reg-save-bytes - INCOMING_FRAME_SP_OFFSET - count_tmp - size);
     the offset is built positively and then negated because COUNT's
     type is unsigned.  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		- INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* Both paths converge here: count += size.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
1458
/* Initialize the variable parts of a trampoline.  ADDR is an RTX for
   the address of the trampoline; FNADDR is an RTX for the address of
   the nested function; STATIC_CHAIN is an RTX for the static chain
   value that should be passed to the function when it is called.

   Four 16-bit words are written through REG_ADDR, advancing it by two
   bytes after each store.  NOTE(review): the constants 0x3130 and
   0x0200 are presumably instruction encodings (a move-immediate into
   the static chain register followed by a jump built from FNADDR) --
   confirm against the xstormy16 ISA documentation.  */
void
xstormy16_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
{
  /* Pseudo holding the address currently being written to.  */
  rtx reg_addr = gen_reg_rtx (Pmode);
  /* Scratch used to assemble each 16-bit word before storing it.  */
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr_mem;

  reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);

  emit_move_insn (reg_addr, addr);
  /* Word 1: 0x3130 combined with the static chain register number.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 2: the static chain value itself.  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 3: 0x0200 combined with the low byte of FNADDR.  */
  emit_move_insn (reg_fnaddr, fnaddr);
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  /* Word 4: the remaining (high) bits of FNADDR.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1489
1490 /* Worker function for FUNCTION_VALUE. */
1491
1492 rtx
1493 xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
1494 {
1495 enum machine_mode mode;
1496 mode = TYPE_MODE (valtype);
1497 PROMOTE_MODE (mode, 0, valtype);
1498 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1499 }
1500
/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure,
     in which case `this' is in the following argument register.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Adjust `this' by DELTA (truncated to 16 bits), then tail-jump to
     the real function with `jmpf'.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
1545
1546 /* The purpose of this function is to override the default behavior of
1547 BSS objects. Normally, they go into .bss or .sbss via ".common"
1548 directives, but we need to override that and put them in
1549 .bss_below100. We can't just use a section override (like we do
1550 for .data_below100), because that makes them initialized rather
1551 than uninitialized. */
1552 void
1553 xstormy16_asm_output_aligned_common (FILE *stream,
1554 tree decl,
1555 const char *name,
1556 int size,
1557 int align,
1558 int global)
1559 {
1560 rtx mem = DECL_RTL (decl);
1561 rtx symbol;
1562
1563 if (mem != NULL_RTX
1564 && GET_CODE (mem) == MEM
1565 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1566 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1567 {
1568 const char *name2;
1569 int p2align = 0;
1570
1571 switch_to_section (bss100_section);
1572
1573 while (align > 8)
1574 {
1575 align /= 2;
1576 p2align ++;
1577 }
1578
1579 name2 = default_strip_name_encoding (name);
1580 if (global)
1581 fprintf (stream, "\t.globl\t%s\n", name2);
1582 if (p2align)
1583 fprintf (stream, "\t.p2align %d\n", p2align);
1584 fprintf (stream, "\t.type\t%s, @object\n", name2);
1585 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1586 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1587 return;
1588 }
1589
1590 if (!global)
1591 {
1592 fprintf (stream, "\t.local\t");
1593 assemble_name (stream, name);
1594 fprintf (stream, "\n");
1595 }
1596 fprintf (stream, "\t.comm\t");
1597 assemble_name (stream, name);
1598 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1599 }
1600
/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
xstormy16_asm_init_sections (void)
{
  /* Create the section used for uninitialized below-100 objects: a
     writable BSS (@nobits) section named .bss_below100.  */
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1611
1612 /* Mark symbols with the "below100" attribute so that we can use the
1613 special addressing modes for them. */
1614
1615 static void
1616 xstormy16_encode_section_info (tree decl, rtx r, int first)
1617 {
1618 default_encode_section_info (decl, r, first);
1619
1620 if (TREE_CODE (decl) == VAR_DECL
1621 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1622 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1623 {
1624 rtx symbol = XEXP (r, 0);
1625
1626 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1627 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1628 }
1629 }
1630
1631 /* Output constructors and destructors. Just like
1632 default_named_section_asm_out_* but don't set the sections writable. */
1633 #undef TARGET_ASM_CONSTRUCTOR
1634 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1635 #undef TARGET_ASM_DESTRUCTOR
1636 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1637
1638 static void
1639 xstormy16_asm_out_destructor (rtx symbol, int priority)
1640 {
1641 const char *section = ".dtors";
1642 char buf[16];
1643
1644 /* ??? This only works reliably with the GNU linker. */
1645 if (priority != DEFAULT_INIT_PRIORITY)
1646 {
1647 sprintf (buf, ".dtors.%.5u",
1648 /* Invert the numbering so the linker puts us in the proper
1649 order; constructors are run from right to left, and the
1650 linker sorts in increasing order. */
1651 MAX_INIT_PRIORITY - priority);
1652 section = buf;
1653 }
1654
1655 switch_to_section (get_section (section, 0, NULL));
1656 assemble_align (POINTER_SIZE);
1657 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1658 }
1659
1660 static void
1661 xstormy16_asm_out_constructor (rtx symbol, int priority)
1662 {
1663 const char *section = ".ctors";
1664 char buf[16];
1665
1666 /* ??? This only works reliably with the GNU linker. */
1667 if (priority != DEFAULT_INIT_PRIORITY)
1668 {
1669 sprintf (buf, ".ctors.%.5u",
1670 /* Invert the numbering so the linker puts us in the proper
1671 order; constructors are run from right to left, and the
1672 linker sorts in increasing order. */
1673 MAX_INIT_PRIORITY - priority);
1674 section = buf;
1675 }
1676
1677 switch_to_section (get_section (section, 0, NULL));
1678 assemble_align (POINTER_SIZE);
1679 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1680 }
1681 \f
1682 /* Print a memory address as an operand to reference that memory location. */
1683 void
1684 xstormy16_print_operand_address (FILE *file, rtx address)
1685 {
1686 HOST_WIDE_INT offset;
1687 int pre_dec, post_inc;
1688
1689 /* There are a few easy cases. */
1690 if (GET_CODE (address) == CONST_INT)
1691 {
1692 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1693 return;
1694 }
1695
1696 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL)
1697 {
1698 output_addr_const (file, address);
1699 return;
1700 }
1701
1702 /* Otherwise, it's hopefully something of the form
1703 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...))
1704 */
1705
1706 if (GET_CODE (address) == PLUS)
1707 {
1708 gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
1709 offset = INTVAL (XEXP (address, 1));
1710 address = XEXP (address, 0);
1711 }
1712 else
1713 offset = 0;
1714
1715 pre_dec = (GET_CODE (address) == PRE_DEC);
1716 post_inc = (GET_CODE (address) == POST_INC);
1717 if (pre_dec || post_inc)
1718 address = XEXP (address, 0);
1719
1720 gcc_assert (GET_CODE (address) == REG);
1721
1722 fputc ('(', file);
1723 if (pre_dec)
1724 fputs ("--", file);
1725 fputs (reg_names [REGNO (address)], file);
1726 if (post_inc)
1727 fputs ("++", file);
1728 if (offset != 0)
1729 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1730 fputc (')', file);
1731 }
1732
1733 /* Print an operand to an assembler instruction. */
1734 void
1735 xstormy16_print_operand (FILE *file, rtx x, int code)
1736 {
1737 switch (code)
1738 {
1739 case 'B':
1740 /* There is either one bit set, or one bit clear, in X.
1741 Print it preceded by '#'. */
1742 {
1743 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1744 HOST_WIDE_INT xx = 1;
1745 HOST_WIDE_INT l;
1746
1747 if (GET_CODE (x) == CONST_INT)
1748 xx = INTVAL (x);
1749 else
1750 output_operand_lossage ("'B' operand is not constant");
1751
1752 /* GCC sign-extends masks with the MSB set, so we have to
1753 detect all the cases that differ only in sign extension
1754 beyond the bits we care about. Normally, the predicates
1755 and constraints ensure that we have the right values. This
1756 works correctly for valid masks. */
1757 if (bits_set[xx & 7] <= 1)
1758 {
1759 /* Remove sign extension bits. */
1760 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1761 xx &= 0xff;
1762 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1763 xx &= 0xffff;
1764 l = exact_log2 (xx);
1765 }
1766 else
1767 {
1768 /* Add sign extension bits. */
1769 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1770 xx |= ~(HOST_WIDE_INT)0xff;
1771 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1772 xx |= ~(HOST_WIDE_INT)0xffff;
1773 l = exact_log2 (~xx);
1774 }
1775
1776 if (l == -1)
1777 output_operand_lossage ("'B' operand has multiple bits set");
1778
1779 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1780 return;
1781 }
1782
1783 case 'C':
1784 /* Print the symbol without a surrounding @fptr(). */
1785 if (GET_CODE (x) == SYMBOL_REF)
1786 assemble_name (file, XSTR (x, 0));
1787 else if (GET_CODE (x) == LABEL_REF)
1788 output_asm_label (x);
1789 else
1790 xstormy16_print_operand_address (file, x);
1791 return;
1792
1793 case 'o':
1794 case 'O':
1795 /* Print the immediate operand less one, preceded by '#'.
1796 For 'O', negate it first. */
1797 {
1798 HOST_WIDE_INT xx = 0;
1799
1800 if (GET_CODE (x) == CONST_INT)
1801 xx = INTVAL (x);
1802 else
1803 output_operand_lossage ("'o' operand is not constant");
1804
1805 if (code == 'O')
1806 xx = -xx;
1807
1808 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1809 return;
1810 }
1811
1812 case 'b':
1813 /* Print the shift mask for bp/bn. */
1814 {
1815 HOST_WIDE_INT xx = 1;
1816 HOST_WIDE_INT l;
1817
1818 if (GET_CODE (x) == CONST_INT)
1819 xx = INTVAL (x);
1820 else
1821 output_operand_lossage ("'B' operand is not constant");
1822
1823 l = 7 - xx;
1824
1825 fputs (IMMEDIATE_PREFIX, file);
1826 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1827 return;
1828 }
1829
1830 case 0:
1831 /* Handled below. */
1832 break;
1833
1834 default:
1835 output_operand_lossage ("xstormy16_print_operand: unknown code");
1836 return;
1837 }
1838
1839 switch (GET_CODE (x))
1840 {
1841 case REG:
1842 fputs (reg_names [REGNO (x)], file);
1843 break;
1844
1845 case MEM:
1846 xstormy16_print_operand_address (file, XEXP (x, 0));
1847 break;
1848
1849 default:
1850 /* Some kind of constant or label; an immediate operand,
1851 so prefix it with '#' for the assembler. */
1852 fputs (IMMEDIATE_PREFIX, file);
1853 output_addr_const (file, x);
1854 break;
1855 }
1856
1857 return;
1858 }
1859
1860 \f
1861 /* Expander for the `casesi' pattern.
1862 INDEX is the index of the switch statement.
1863 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1864 to the first table entry.
1865 RANGE is the number of table entries.
1866 TABLE is an ADDR_VEC that is the jump table.
1867 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1868 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
1869 */
1870
1871 void
1872 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1873 rtx table, rtx default_label)
1874 {
1875 HOST_WIDE_INT range_i = INTVAL (range);
1876 rtx int_index;
1877
1878 /* This code uses 'br', so it can deal only with tables of size up to
1879 8192 entries. */
1880 if (range_i >= 8192)
1881 sorry ("switch statement of size %lu entries too large",
1882 (unsigned long) range_i);
1883
1884 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1885 OPTAB_LIB_WIDEN);
1886 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1887 default_label);
1888 int_index = gen_lowpart_common (HImode, index);
1889 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1890 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1891 }
1892
1893 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1894 instructions, without label or alignment or any other special
1895 constructs. We know that the previous instruction will be the
1896 `tablejump_pcrel' output above.
1897
1898 TODO: it might be nice to output 'br' instructions if they could
1899 all reach. */
1900
1901 void
1902 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1903 {
1904 int vlen, idx;
1905
1906 switch_to_section (current_function_section ());
1907
1908 vlen = XVECLEN (table, 0);
1909 for (idx = 0; idx < vlen; idx++)
1910 {
1911 fputs ("\tjmpf ", file);
1912 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1913 fputc ('\n', file);
1914 }
1915 }
1916
1917 \f
1918 /* Expander for the `call' patterns.
1919 INDEX is the index of the switch statement.
1920 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1921 to the first table entry.
1922 RANGE is the number of table entries.
1923 TABLE is an ADDR_VEC that is the jump table.
1924 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1925 range LOWER_BOUND to LOWER_BOUND+RANGE-1.
1926 */
1927
1928 void
1929 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1930 {
1931 rtx call, temp;
1932 enum machine_mode mode;
1933
1934 gcc_assert (GET_CODE (dest) == MEM);
1935 dest = XEXP (dest, 0);
1936
1937 if (! CONSTANT_P (dest)
1938 && GET_CODE (dest) != REG)
1939 dest = force_reg (Pmode, dest);
1940
1941 if (retval == NULL)
1942 mode = VOIDmode;
1943 else
1944 mode = GET_MODE (retval);
1945
1946 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1947 counter);
1948 if (retval)
1949 call = gen_rtx_SET (VOIDmode, retval, call);
1950
1951 if (! CONSTANT_P (dest))
1952 {
1953 temp = gen_reg_rtx (HImode);
1954 emit_move_insn (temp, const0_rtx);
1955 }
1956 else
1957 temp = const0_rtx;
1958
1959 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1960 gen_rtx_USE (VOIDmode, temp)));
1961 emit_call_insn (call);
1962 }
1963 \f
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   using CARRY as a temporary.  When CODE is COMPARE, a branch
   template is generated (this saves duplicating code in
   xstormy16_split_cbranch).  */

void
xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1, rtx carry)
{
  /* Number of word-sized pieces the multiword operation splits into.  */
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* NEG is computed as 0 - SRC1.  NOTE(review): this overwrites the
     SRC0 rtx the caller passed in -- presumably callers supply a
     scratch operand for NEG; confirm at the call sites.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      /* Word I of each operand, lowest-addressed word first.  */
      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding a constant zero low word changes nothing and sets
	     no carry; `continue' also skips the `firstloop = 0' below,
	     so the next word is still emitted as a carry-less add.  */
	  if (firstloop
	      && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;

	  /* First word: add setting carry; later words: add with
	     carry in and carry out.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1, carry);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For the top word of a COMPARE, emit a combined
	     subtract-and-branch template rather than a plain
	     subtraction.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, carry));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, carry);
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1, carry);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* OR/XOR with 0 and AND with -1 are per-word no-ops.  */
	  if (GET_CODE (w_src1) == CONST_INT
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  /* NOTE(review): the operation rtx is built with the full
	     multiword MODE even though the operands are word-sized
	     subregs -- verify this is intentional (word_mode would
	     seem more natural here).  */
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2068
2069 /* The shift operations are split at output time for constant values;
2070 variable-width shifts get handed off to a library routine.
2071
2072 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2073 SIZE_R will be a CONST_INT, X will be a hard register. */
2074
const char *
xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
			rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  /* Assembled output template; static so the pointer we return
     remains valid after we return.  */
  static char r[64];

  gcc_assert (GET_CODE (size_r) == CONST_INT
	      && GET_CODE (x) == REG && mode == SImode);
  /* Reduce the shift count modulo the mode width (32).  */
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  /* A zero-bit shift needs no code at all.  */
  if (size == 0)
    return "";

  /* The SImode value lives in the register pair X, X+1.  R0 is the
     lower-numbered register, R1 the higher; presumably R0 holds the
     low word -- confirm against the port's register layout.  */
  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
	  break;
	case ASHIFTRT:
	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	case LSHIFTRT:
	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  /* Exactly 16: just move one word to the other and fill the
     vacated word with zeros (or sign copies for ASHIFTRT).  */
  if (size == 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }
  /* More than 16: a word move plus a single-word shift by
     SIZE - 16.  */
  if (size > 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
		   r1, r0, r0, r1, (int) size - 16);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  */
  /* General case (2..15): save the word that loses bits in TEMP,
     shift both words, then OR the 16-SIZE saved bits back into the
     other word.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16-size),
	       r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
	       r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16-size),
	       r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
2182 \f
2183 /* Attribute handling. */
2184
2185 /* Return nonzero if the function is an interrupt function. */
2186 int
2187 xstormy16_interrupt_function_p (void)
2188 {
2189 tree attributes;
2190
2191 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2192 any functions are declared, which is demonstrably wrong, but
2193 it is worked around here. FIXME. */
2194 if (!cfun)
2195 return 0;
2196
2197 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2198 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2199 }
2200
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes for this port.  "interrupt" requires a
   function type (type_req and fn_type_req are true); the two spellings
   "BELOW100" and "below100" share one handler.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
  { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
2216
2217 /* Handle an "interrupt" attribute;
2218 arguments as in struct attribute_spec.handler. */
2219 static tree
2220 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2221 tree args ATTRIBUTE_UNUSED,
2222 int flags ATTRIBUTE_UNUSED,
2223 bool *no_add_attrs)
2224 {
2225 if (TREE_CODE (*node) != FUNCTION_TYPE)
2226 {
2227 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2228 IDENTIFIER_POINTER (name));
2229 *no_add_attrs = true;
2230 }
2231
2232 return NULL_TREE;
2233 }
2234
2235 /* Handle an "below" attribute;
2236 arguments as in struct attribute_spec.handler. */
2237 static tree
2238 xstormy16_handle_below100_attribute (tree *node,
2239 tree name ATTRIBUTE_UNUSED,
2240 tree args ATTRIBUTE_UNUSED,
2241 int flags ATTRIBUTE_UNUSED,
2242 bool *no_add_attrs)
2243 {
2244 if (TREE_CODE (*node) != VAR_DECL
2245 && TREE_CODE (*node) != POINTER_TYPE
2246 && TREE_CODE (*node) != TYPE_DECL)
2247 {
2248 warning (OPT_Wattributes,
2249 "%<__BELOW100__%> attribute only applies to variables");
2250 *no_add_attrs = true;
2251 }
2252 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2253 {
2254 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2255 {
2256 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2257 "with auto storage class");
2258 *no_add_attrs = true;
2259 }
2260 }
2261
2262 return NULL_TREE;
2263 }
2264 \f
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Table of machine-specific builtins, indexed by the builtin's
   DECL_FUNCTION_CODE (see xstormy16_init_builtins, which registers
   entry I with function code I).  The first character of arg_types is
   the return type; the rest describe the arguments in order.  */
static struct {
  const char *name;
  int md_code;
  const char *arg_ops; /* 0..9, t for temp register, r for return value */
  const char *arg_types; /* s=short,l=long, upper case for unsigned */
} s16builtins[] = {
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { 0, 0, 0, 0 }
};
2282
2283 static void
2284 xstormy16_init_builtins (void)
2285 {
2286 tree args, ret_type, arg;
2287 int i, a;
2288
2289 ret_type = void_type_node;
2290
2291 for (i=0; s16builtins[i].name; i++)
2292 {
2293 args = void_list_node;
2294 for (a=strlen (s16builtins[i].arg_types)-1; a>=0; a--)
2295 {
2296 switch (s16builtins[i].arg_types[a])
2297 {
2298 case 's': arg = short_integer_type_node; break;
2299 case 'S': arg = short_unsigned_type_node; break;
2300 case 'l': arg = long_integer_type_node; break;
2301 case 'L': arg = long_unsigned_type_node; break;
2302 default: gcc_unreachable ();
2303 }
2304 if (a == 0)
2305 ret_type = arg;
2306 else
2307 args = tree_cons (NULL_TREE, arg, args);
2308 }
2309 add_builtin_function (s16builtins[i].name,
2310 build_function_type (ret_type, args),
2311 i, BUILT_IN_MD, NULL, NULL);
2312 }
2313 }
2314
/* Expand a call to one of the builtins in s16builtins[].  EXP is the
   CALL_EXPR; TARGET is a suggested place for the result or NULL.
   Returns the rtx holding the result, or 0 if the builtin has no 'r'
   operand.  */
static rtx
xstormy16_expand_builtin (tree exp, rtx target,
			  rtx subtarget ATTRIBUTE_UNUSED,
			  enum machine_mode mode ATTRIBUTE_UNUSED,
			  int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  /* The function code indexes s16builtins[] directly (see
     xstormy16_init_builtins).  */
  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Expand up to 10 actual arguments into rtx form.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
      argtree = TREE_CHAIN (argtree);
    }

  /* Build the operand vector for the insn.  arg_ops maps insn operand
     O to 'r' (result), 't' (scratch), or a digit indexing args[].  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      /* First constraint letter; '+' or '=' marks an output operand.  */
      char c = insn_data[code].operand[o].constraint[0];
      int omode;

      copyto[o] = 0;

      omode = insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the operand doesn't satisfy the insn's predicate, fix it
	 up: outputs get a fresh register with a copy-back recorded in
	 copyto[]; inputs are copied into a register of the right
	 mode.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  /* Emit the insn; unused trailing operands are ignored by the
     generator function's actual arity.  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy any substituted output operands back to their real
     destinations, tracking the result rtx if it was one of them.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
2381 \f
2382
2383 /* Look for combinations of insns that can be converted to BN or BP
2384 opcodes. This is, unfortunately, too complex to do with MD
2385 patterns. */
/* Try to rewrite the conditional branch INSN as a single-bit test on
   a below-100 memory location (BN/BP form).  Scans backwards from
   INSN for the AND / sign-extend and the load that feed the compared
   register; on success the condition is rewritten in place and the
   feeding insns are deleted.  Bails out (plain return) whenever the
   expected pattern is not found.  */
static void
combine_bnp (rtx insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, and, load, qireg, mem;
  enum machine_mode load_mode = QImode;
  enum machine_mode and_mode = QImode;
  rtx shift = NULL_RTX;

  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of (set (pc) (if_then_else COND ...)).  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      /* Bit isolated by an AND.  */
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* Sign test: bit brought to the sign position by an extend.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* Only handle (cmp REG 0) where REG dies at the branch, so deleting
     the feeding insns is safe.  */
  reg = XEXP (cond, 0);
  if (GET_CODE (reg) != REG)
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  int and_code = recog_memoized (and);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
	    {
	      /* This is for testing bit 15.  */
	      /* AND is set to INSN itself as a sentinel: there is no
		 separate insn to delete (see "and != insn" below).  */
	      and = insn;
	      break;
	    }

	  /* Any other use of REG, or anything that is not a note or a
	     plain insn, ends the backward scan.  */
	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
	{
	  if (recog_memoized (and) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and))
	    return;

	  if (GET_CODE (and) != NOTE
	      && GET_CODE (and) != INSN)
	    return;
	}

      if (and)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	     (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			(clobber (reg:BI carry))]

	     (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      /* No matching shift found: fall back to the plain AND
		 case (shift cleared, not a failure).  */
	      if (reg_mentioned_p (reg, shift)
		  || (GET_CODE (shift) != NOTE
		      && GET_CODE (shift) != INSN))
		{
		  shift = NULL_RTX;
		  break;
		}
	    }
	}
    }
  if (!and)
    return;

  /* Scan further back for the load that fills REG (or its QImode
     counterpart) from a below-100 memory location.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  /* The AND was done in HImode on the zero-extended value.  */
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (GET_CODE (load) != NOTE
	  && GET_CODE (load) != INSN)
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* Sign test: the interesting bit is the sign bit of the loaded
	 value.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND must isolate exactly one bit.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));

      /* Account for the right-shift that preceded the AND: the tested
	 bit in memory is MASK shifted back up.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* Narrow an HImode access to the single byte containing the tested
     bit; presumably little-endian layout (high byte at address+1) --
     confirm against the port's BYTES_BIG_ENDIAN setting.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test memory directly, then force
     re-recognition and delete the now-dead feeding insns.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  INSN_CODE (insn) = -1;
  delete_insn (load);

  /* AND == INSN is the bit-15 sentinel case; there is no separate
     insn to delete then.  */
  if (and != insn)
    delete_insn (and);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2597
2598 static void
2599 xstormy16_reorg (void)
2600 {
2601 rtx insn;
2602
2603 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2604 {
2605 if (! JUMP_P (insn))
2606 continue;
2607 combine_bnp (insn);
2608 }
2609 }
2610
2611 \f
2612 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2613
2614 static bool
2615 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2616 {
2617 const HOST_WIDE_INT size = int_size_in_bytes (type);
2618 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2619 }
2620 \f
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Initialize the GCC target structure with the hook overrides
   defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
2663
2664 #include "gt-stormy16.h"