]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/avr/avr.c
.
[thirdparty/gcc.git] / gcc / config / avr / avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2017 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "intl.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "stringpool.h"
30 #include "attribs.h"
31 #include "cgraph.h"
32 #include "c-family/c-common.h"
33 #include "cfghooks.h"
34 #include "df.h"
35 #include "memmodel.h"
36 #include "tm_p.h"
37 #include "optabs.h"
38 #include "regs.h"
39 #include "emit-rtl.h"
40 #include "recog.h"
41 #include "conditions.h"
42 #include "insn-attr.h"
43 #include "reload.h"
44 #include "varasm.h"
45 #include "calls.h"
46 #include "stor-layout.h"
47 #include "output.h"
48 #include "explow.h"
49 #include "expr.h"
50 #include "langhooks.h"
51 #include "cfgrtl.h"
52 #include "params.h"
53 #include "builtins.h"
54 #include "context.h"
55 #include "tree-pass.h"
56 #include "print-rtl.h"
57 #include "rtl-iter.h"
58
59 /* This file should be included last. */
60 #include "target-def.h"
61
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise. */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags. */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS. */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)

/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Note: The body uses the macro parameter SYM (not a hard-coded `sym')
   so that the macro works for any argument expression.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)

/* (AVR_TINY only): Symbol has attribute progmem */
#define AVR_SYMBOL_FLAG_TINY_PM                                 \
  (SYMBOL_FLAG_MACH_DEP << 7)

/* (AVR_TINY only): Symbol has attribute absdata */
#define AVR_SYMBOL_FLAG_TINY_ABSDATA                            \
  (SYMBOL_FLAG_MACH_DEP << 8)

/* Assembler snippet that adds constant I to the register pair REG1 (low)
   and REG2 (high) on cores that have no real ADIW instruction.  */
#define TINY_ADIW(REG1, REG2, I)                                \
  "subi " #REG1 ",lo8(-(" #I "))" CR_TAB                        \
  "sbci " #REG2 ",hi8(-(" #I "))"

/* Likewise, subtract constant I from register pair REG1 / REG2.  */
#define TINY_SBIW(REG1, REG2, I)                                \
  "subi " #REG1 ",lo8((" #I "))" CR_TAB                         \
  "sbci " #REG2 ",hi8((" #I "))"

#define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
#define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
109
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).
   All non-generic address spaces live in flash; __memx uses 3-byte
   pointers, the others 2-byte pointers.  */
const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
  { ADDR_SPACE_RAM,    0, 2, "",         0, NULL },
  { ADDR_SPACE_FLASH,  1, 2, "__flash",  0, ".progmem.data" },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
  { ADDR_SPACE_MEMX,   1, 3, "__memx",   0, ".progmemx.data" },
};
123
124
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Filled in by avr_option_override for the core selected by -mmcu=.  */
static avr_addr_t avr_addr;
148
149
150 /* Prototypes for local helper functions. */
151
152 static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
153 static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
154 static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
155 static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
156 static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
157 static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);
158
159 static int get_sequence_length (rtx_insn *insns);
160 static int sequent_regs_live (void);
161 static const char *ptrreg_to_str (int);
162 static const char *cond_string (enum rtx_code);
163 static int avr_num_arg_regs (machine_mode, const_tree);
164 static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
165 int, bool);
166 static void output_reload_in_const (rtx*, rtx, int*, bool);
167 static struct machine_function * avr_init_machine_status (void);
168
169
170 /* Prototypes for hook implementors if needed before their implementation. */
171
172 static bool avr_rtx_costs (rtx, machine_mode, int, int, int*, bool);
173
174
175 /* Allocate registers from r25 to r8 for parameters for function calls. */
176 #define FIRST_CUM_REG 26
177
178 /* Last call saved register */
179 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
180
181 /* Implicit target register of LPM instruction (R0) */
182 extern GTY(()) rtx lpm_reg_rtx;
183 rtx lpm_reg_rtx;
184
185 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
186 extern GTY(()) rtx lpm_addr_reg_rtx;
187 rtx lpm_addr_reg_rtx;
188
189 /* Temporary register RTX (reg:QI TMP_REGNO) */
190 extern GTY(()) rtx tmp_reg_rtx;
191 rtx tmp_reg_rtx;
192
193 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
194 extern GTY(()) rtx zero_reg_rtx;
195 rtx zero_reg_rtx;
196
197 /* RTXs for all general purpose registers as QImode */
198 extern GTY(()) rtx all_regs_rtx[32];
199 rtx all_regs_rtx[32];
200
201 /* SREG, the processor status */
202 extern GTY(()) rtx sreg_rtx;
203 rtx sreg_rtx;
204
205 /* RAMP* special function registers */
206 extern GTY(()) rtx rampd_rtx;
207 extern GTY(()) rtx rampx_rtx;
208 extern GTY(()) rtx rampy_rtx;
209 extern GTY(()) rtx rampz_rtx;
210 rtx rampd_rtx;
211 rtx rampx_rtx;
212 rtx rampy_rtx;
213 rtx rampz_rtx;
214
215 /* RTX containing the strings "" and "e", respectively */
216 static GTY(()) rtx xstring_empty;
217 static GTY(()) rtx xstring_e;
218
219 /* Current architecture. */
220 const avr_arch_t *avr_arch;
221
222 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
223 or to address space __flash* or __memx. Only used as singletons inside
224 avr_asm_select_section, but it must not be local there because of GTY. */
225 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
226
227 /* Condition for insns/expanders from avr-dimode.md. */
228 bool avr_have_dimode = true;
229
230 /* To track if code will use .bss and/or .data. */
231 bool avr_need_clear_bss_p = false;
232 bool avr_need_copy_data_p = false;
233
234 \f
235 /* Transform UP into lowercase and write the result to LO.
236 You must provide enough space for LO. Return LO. */
237
238 static char*
239 avr_tolower (char *lo, const char *up)
240 {
241 char *lo0 = lo;
242
243 for (; *up; up++, lo++)
244 *lo = TOLOWER (*up);
245
246 *lo = '\0';
247
248 return lo0;
249 }
250
251
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  machine_mode mode = GET_MODE (xval);

  /* A CONST_INT carries VOIDmode; treat it as a 32-bit value.  */
  if (VOIDmode == mode)
    mode = SImode;

  for (int i = 0; i < n_bytes; i++)
    {
      /* Extract byte I of XVAL as a QImode CONST_INT ...  */
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* ... and fail if its popcount is not a member of POP_MASK.  */
      if (0 == (pop_mask & (1 << popcount_hwi (val8))))
        return false;
    }

  return true;
}
276
277
/* Access some RTX as INT_MODE.  If X is a CONST_FIXED we can get
   the bit representation of X by "casting" it to CONST_INT.  */

rtx
avr_to_int_mode (rtx x)
{
  machine_mode mode = GET_MODE (x);

  /* VOIDmode means X already is a (modeless) constant: return as-is.  */
  return VOIDmode == mode
    ? x
    : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
}
290
namespace {

/* Pass descriptor for the DF-note recomputation pass below.
   The empty name is patched in the pass constructor.  */

static const pass_data avr_pass_data_recompute_notes =
{
  RTL_PASS,      // type
  "",            // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN,    // tv_id
  0,             // properties_required
  0,             // properties_provided
  0,             // properties_destroyed
  0,             // todo_flags_start
  TODO_df_finish | TODO_df_verify // todo_flags_finish
};


/* An RTL pass that re-runs df analysis with the note problem added,
   so that REG_DEAD / REG_UNUSED notes are up to date again.  */

class avr_pass_recompute_notes : public rtl_opt_pass
{
public:
  avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
  {
    this->name = name;
  }

  virtual unsigned int execute (function*)
  {
    df_note_add_problem ();
    df_analyze ();

    return 0;
  }
}; // avr_pass_recompute_notes

/* Pass descriptor for the casesi optimization pass below.
   The empty name is patched in the pass constructor.  */

static const pass_data avr_pass_data_casesi =
{
  RTL_PASS,      // type
  "",            // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN,    // tv_id
  0,             // properties_required
  0,             // properties_provided
  0,             // properties_destroyed
  0,             // todo_flags_start
  0              // todo_flags_finish
};


/* An RTL pass that tries to perform the index computation of casesi
   sequences in the original (QImode / HImode) mode of the switch value
   instead of in SImode.  Only gated in when optimizing.  */

class avr_pass_casesi : public rtl_opt_pass
{
public:
  avr_pass_casesi (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_casesi, ctxt)
  {
    this->name = name;
  }

  void avr_rest_of_handle_casesi (function*);

  virtual bool gate (function*) { return optimize > 0; }

  virtual unsigned int execute (function *func)
  {
    avr_rest_of_handle_casesi (func);

    return 0;
  }
}; // avr_pass_casesi

} // anon namespace
361
/* Make a new instance of the pass that recomputes DF notes.  */

rtl_opt_pass*
make_avr_pass_recompute_notes (gcc::context *ctxt)
{
  return new avr_pass_recompute_notes (ctxt, "avr-notes-free-cfg");
}

/* Make a new instance of the casesi optimization pass.  */

rtl_opt_pass*
make_avr_pass_casesi (gcc::context *ctxt)
{
  return new avr_pass_casesi (ctxt, "avr-casesi");
}
373
374
/* Make one parallel insn with all the patterns from insns i[0]..i[5].  */

static rtx_insn*
avr_parallel_insn_from_insns (rtx_insn *i[6])
{
  rtvec vec = gen_rtvec (6, PATTERN (i[0]), PATTERN (i[1]), PATTERN (i[2]),
                         PATTERN (i[3]), PATTERN (i[4]), PATTERN (i[5]));
  /* Emit into a scratch sequence so the current insn stream is left
     untouched; the caller only needs the combined insn for recog.  */
  start_sequence();
  emit (gen_rtx_PARALLEL (VOIDmode, vec));
  rtx_insn *insn = get_insns();
  end_sequence();

  return insn;
}
389
390
/* Return true if we see an insn stream generated by casesi expander together
   with an extension to SImode of the switch value.

   If this is the case, fill in the insns from casesi to INSNS[1..5] and
   the SImode extension to INSNS[0].  Moreover, extract the operands of
   pattern casesi_<mode>_sequence forged from the sequence to recog_data.

   BB is the basic block containing INSN; INSN is the candidate for the
   final (tablejump) insn of the sequence.  */

static bool
avr_is_casesi_sequence (basic_block bb, rtx_insn *insn, rtx_insn *insns[6])
{
  rtx set_5, set_0;

  /* A first and quick test for a casesi sequences.  As a side effect of
     the test, harvest respective insns to INSNS[0..5].  */

  if (!(JUMP_P (insns[5] = insn)
        // casesi is the only insn that comes up with UNSPEC_INDEX_JMP,
        // hence the following test ensures that we are actually dealing
        // with code from casesi.
        && (set_5 = single_set (insns[5]))
        && UNSPEC == GET_CODE (SET_SRC (set_5))
        && UNSPEC_INDEX_JMP == XINT (SET_SRC (set_5), 1)

        // Walk backwards to collect the rest of the casesi sequence.
        && (insns[4] = prev_real_insn (insns[5]))
        && (insns[3] = prev_real_insn (insns[4]))
        && (insns[2] = prev_real_insn (insns[3]))
        && (insns[1] = prev_real_insn (insns[2]))

        // Insn prior to casesi.
        && (insns[0] = prev_real_insn (insns[1]))
        && (set_0 = single_set (insns[0]))
        && extend_operator (SET_SRC (set_0), SImode)))
    {
      return false;
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; Sequence from casesi in "
               "[bb %d]:\n\n", bb->index);
      for (int i = 0; i < 6; i++)
        print_rtl_single (dump_file, insns[i]);
    }

  /* We have to deal with quite some operands.  Extracting them by hand
     would be tedious, therefore wrap the insn patterns into a parallel,
     run recog against it and then use insn extract to get the operands. */

  rtx_insn *xinsn = avr_parallel_insn_from_insns (insns);

  INSN_CODE (xinsn) = recog (PATTERN (xinsn), xinsn, NULL /* num_clobbers */);

  /* Failing to recognize means that someone changed the casesi expander or
     that some passes prior to this one performed some unexpected changes.
     Gracefully drop such situations instead of aborting.  */

  if (INSN_CODE (xinsn) < 0)
    {
      if (dump_file)
        fprintf (dump_file, ";; Sequence not recognized, giving up.\n\n");

      return false;
    }

  gcc_assert (CODE_FOR_casesi_qi_sequence == INSN_CODE (xinsn)
              || CODE_FOR_casesi_hi_sequence == INSN_CODE (xinsn));

  /* Fill recog_data.operand[] / recog_data.n_dups for XINSN.  */
  extract_insn (xinsn);

  // Assert on the anatomy of xinsn's operands we are going to work with.

  gcc_assert (11 == recog_data.n_operands);
  gcc_assert (4 == recog_data.n_dups);

  if (dump_file)
    {
      fprintf (dump_file, ";; Operands extracted:\n");
      for (int i = 0; i < recog_data.n_operands; i++)
        avr_fdump (dump_file, ";; $%d = %r\n", i, recog_data.operand[i]);
      fprintf (dump_file, "\n");
    }

  return true;
}
475
476
/* Perform some extra checks on operands of casesi_<mode>_sequence.
   Not all operand dependencies can be described by means of predicates.
   This function performs left over checks and should always return true.
   Returning false means that someone changed the casesi expander but did
   not adjust casesi_<mode>_sequence.  */

bool
avr_casei_sequence_check_operands (rtx *xop)
{
  rtx sub_5 = NULL_RTX;

  /* The shape of operand $6 depends on whether the device uses a
     3-byte PC (EIJMP/EICALL) or a 2-byte PC.  */

  if (AVR_HAVE_EIJMP_EICALL
      // The last clobber op of the tablejump.
      && xop[8] == all_regs_rtx[24])
    {
      // $6 is: (subreg:SI ($5) 0)
      sub_5 = xop[6];
    }

  if (!AVR_HAVE_EIJMP_EICALL
      // $6 is: (plus:HI (subreg:SI ($5) 0)
      //                 (label_ref ($3)))
      && PLUS == GET_CODE (xop[6])
      && LABEL_REF == GET_CODE (XEXP (xop[6], 1))
      && rtx_equal_p (xop[3], XEXP (XEXP (xop[6], 1), 0))
      // The last clobber op of the tablejump.
      && xop[8] == const0_rtx)
    {
      sub_5 = XEXP (xop[6], 0);
    }

  /* In both cases the subreg must be the low part of operand $5.  */

  if (sub_5
      && SUBREG_P (sub_5)
      && 0 == SUBREG_BYTE (sub_5)
      && rtx_equal_p (xop[5], SUBREG_REG (sub_5)))
    return true;

  if (dump_file)
    fprintf (dump_file, "\n;; Failed condition for casesi_<mode>_sequence\n\n");

  return false;
}
519
520
/* INSNS[1..5] is a sequence as generated by casesi and INSNS[0] is an
   extension of an 8-bit or 16-bit integer to SImode.  XOP contains the
   operands of INSNS as extracted by insn_extract from pattern
   casesi_<mode>_sequence:

   $0: SImode reg switch value as result of $9.
   $1: Negative of smallest index in switch.
   $2: Number of entries in switch.
   $3: Label to table.
   $4: Label if out-of-bounds.
   $5: $0 + $1.
   $6: 3-byte PC: subreg:HI ($5) + label_ref ($3)
       2-byte PC: subreg:HI ($5)
   $7: HI reg index into table (Z or pseudo)
   $8: R24 or const0_rtx (to be clobbered)
   $9: Extension to SImode of an 8-bit or 16-bit integer register $10.
   $10: QImode or HImode register input of $9.

   Try to optimize this sequence, i.e. use the original HImode / QImode
   switch value instead of SImode.  */

static void
avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
{
  // Original mode of the switch value; this is QImode or HImode.
  machine_mode mode = GET_MODE (xop[10]);

  // How the original switch value was extended to SImode; this is
  // SIGN_EXTEND or ZERO_EXTEND.
  enum rtx_code code = GET_CODE (xop[9]);

  // Lower index, upper index (plus one) and range of case values.
  HOST_WIDE_INT low_idx = -INTVAL (xop[1]);
  HOST_WIDE_INT num_idx = INTVAL (xop[2]);
  HOST_WIDE_INT hig_idx = low_idx + num_idx;

  // Maximum ranges of (un)signed QImode resp. HImode.
  unsigned umax = QImode == mode ? 0xff : 0xffff;
  int imax = QImode == mode ? 0x7f : 0x7fff;
  int imin = -imax - 1;

  // Testing the case range and whether it fits into the range of the
  // (un)signed mode.  This test should actually always pass because it
  // makes no sense to have case values outside the mode range.  Notice
  // that case labels which are unreachable because they are outside the
  // mode of the switch value (e.g. "case -1" for uint8_t) have already
  // been thrown away by the middle-end.

  if (SIGN_EXTEND == code
      && low_idx >= imin
      && hig_idx <= imax)
    {
      // ok
    }
  else if (ZERO_EXTEND == code
           && low_idx >= 0
           && (unsigned) hig_idx <= umax)
    {
      // ok
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, ";; Case ranges too big, giving up.\n\n");
      return;
    }

  // Do normalization of switch value $10 and out-of-bound check in its
  // original mode instead of in SImode.  Use a newly created pseudo.
  // This will replace insns[1..2].

  start_sequence();

  rtx_insn *seq1, *seq2, *last1, *last2;

  rtx reg = copy_to_mode_reg (mode, xop[10]);

  // Pick the mode-matching add / compare generators.
  rtx (*gen_add)(rtx,rtx,rtx) = QImode == mode ? gen_addqi3 : gen_addhi3;
  rtx (*gen_cmp)(rtx,rtx) = QImode == mode ? gen_cmpqi3 : gen_cmphi3;

  emit_insn (gen_add (reg, reg, gen_int_mode (-low_idx, mode)));
  emit_insn (gen_cmp (reg, gen_int_mode (num_idx, mode)));

  seq1 = get_insns();
  last1 = get_last_insn();
  end_sequence();

  emit_insn_before (seq1, insns[1]);

  // After the out-of-bounds test and corresponding branch, use a
  // 16-bit index.  If QImode is used, extend it to HImode first.
  // This will replace insns[4].

  start_sequence();

  if (QImode == mode)
    reg = force_reg (HImode, gen_rtx_fmt_e (code, HImode, reg));

  rtx pat_4 = AVR_3_BYTE_PC
    ? gen_movhi (xop[7], reg)
    : gen_addhi3 (xop[7], reg, gen_rtx_LABEL_REF (VOIDmode, xop[3]));

  emit_insn (pat_4);

  seq2 = get_insns();
  last2 = get_last_insn();
  end_sequence();

  emit_insn_after (seq2, insns[4]);

  if (dump_file)
    {
      fprintf (dump_file, ";; New insns: ");

      for (rtx_insn *insn = seq1; ; insn = NEXT_INSN (insn))
        {
          fprintf (dump_file, "%d, ", INSN_UID (insn));
          if (insn == last1)
            break;
        }
      for (rtx_insn *insn = seq2; ; insn = NEXT_INSN (insn))
        {
          fprintf (dump_file, "%d%s", INSN_UID (insn),
                   insn == last2 ? ".\n\n" : ", ");
          if (insn == last2)
            break;
        }

      fprintf (dump_file, ";; Deleting insns: %d, %d, %d.\n\n",
               INSN_UID (insns[1]), INSN_UID (insns[2]), INSN_UID (insns[4]));
    }

  // Pseudo-delete the SImode and subreg of SImode insns.  We don't care
  // about the extension insns[0]: Its result is now unused and other
  // passes will clean it up.

  SET_INSN_DELETED (insns[1]);
  SET_INSN_DELETED (insns[2]);
  SET_INSN_DELETED (insns[4]);
}
661
662
663 void
664 avr_pass_casesi::avr_rest_of_handle_casesi (function *func)
665 {
666 basic_block bb;
667
668 FOR_EACH_BB_FN (bb, func)
669 {
670 rtx_insn *insn, *insns[6];
671
672 FOR_BB_INSNS (bb, insn)
673 {
674 if (avr_is_casesi_sequence (bb, insn, insns))
675 {
676 avr_optimize_casesi (insns, recog_data.operand);
677 }
678 }
679 }
680 }
681
682
/* Set `avr_arch' as specified by `-mmcu='.
   Return true on success.  */

static bool
avr_set_core_architecture (void)
{
  /* Search for mcu core architecture.  */

  if (!avr_mmcu)
    avr_mmcu = AVR_MMCU_DEFAULT;

  /* Fallback in the error case; overwritten on a match below.  */
  avr_arch = &avr_arch_types[0];

  for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
    {
      if (NULL == mcu->name)
        {
          /* Reached the end of `avr_mcu_types'.  This should actually never
             happen as options are provided by device-specs.  It could be a
             typo in a device-specs or calling the compiler proper directly
             with -mmcu=<device>.  */

          error ("unknown core architecture %qs specified with %qs",
                 avr_mmcu, "-mmcu=");
          avr_inform_core_architectures ();
          break;
        }
      else if (0 == strcmp (mcu->name, avr_mmcu)
               // Is this a proper architecture ?
               && NULL == mcu->macro)
        {
          avr_arch = &avr_arch_types[mcu->arch_id];
          /* -mn-flash was not given on the command line: derive the
             number of 64 KiB flash segments from the device.  */
          if (avr_n_flash < 0)
            avr_n_flash = 1 + (mcu->flash_size - 1) / 0x10000;

          return true;
        }
    }

  return false;
}
724
725
/* Implement `TARGET_OPTION_OVERRIDE'.  */

static void
avr_option_override (void)
{
  /* Disable -fdelete-null-pointer-checks option for AVR target.
     With that option, the compiler assumes that dereferencing a null
     pointer would halt the program.  For AVR this assumption is not
     true and programs can safely dereference null pointers.  Changes
     made by this option may not work properly for AVR.  So disable
     this option.  */

  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries so save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Allow optimizer to introduce store data races.  This used to be the
     default - it was changed because bigger targets did not see any
     performance decrease.  For the AVR though, disallowing data races
     introduces additional code in LIM and increases reg pressure.  */

  maybe_set_param_value (PARAM_ALLOW_STORE_DATA_RACES, 1,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  /* PIC / PIE are not supported on AVR; warn but continue.  */

  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (flag_pie == 1)
    warning (OPT_fpie, "-fpie is not supported");
  if (flag_pie == 2)
    warning (OPT_fPIE, "-fPIE is not supported");

#if !defined (HAVE_AS_AVR_MGCCISR_OPTION)
  /* Without assembler support for __gcc_isr pseudo instructions,
     gas ISR prologues cannot be used.  */
  TARGET_GASISR_PROLOGUES = 0;
#endif

  if (!avr_set_core_architecture())
    return;

  /* RAM addresses of some SFRs common to all devices in respective arch. */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
  avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
807
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  /* Zero-initialized GC storage; the fields are filled in later,
     e.g. by avr_set_current_function.  */
  return ggc_cleared_alloc<machine_function> ();
}
815
816
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton.  */

void
avr_init_expanders (void)
{
  /* One QImode REG rtx per hard register.  */
  for (int regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx = all_regs_rtx[AVR_TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* MEM rtxes for the SFRs at the addresses set up by
     avr_option_override.  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");

  /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
     to be present */
  if (AVR_TINY)
    avr_have_dimode = false;
}
846
847
/* Implement `REGNO_REG_CLASS'.  */
/* Return register class for register R.  R may also be 32 / 33 for
   the stack pointer bytes, see the table below.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, 27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  /* Anything beyond the table (e.g. pseudos) gets the most general class. */
  return ALL_REGS;
}
882
883
884 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
885
886 static bool
887 avr_scalar_mode_supported_p (machine_mode mode)
888 {
889 if (ALL_FIXED_POINT_MODE_P (mode))
890 return true;
891
892 if (PSImode == mode)
893 return true;
894
895 return default_scalar_mode_supported_p (mode);
896 }
897
898
899 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
900
901 static bool
902 avr_decl_flash_p (tree decl)
903 {
904 if (TREE_CODE (decl) != VAR_DECL
905 || TREE_TYPE (decl) == error_mark_node)
906 {
907 return false;
908 }
909
910 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
911 }
912
913
914 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
915 address space and FALSE, otherwise. */
916
917 static bool
918 avr_decl_memx_p (tree decl)
919 {
920 if (TREE_CODE (decl) != VAR_DECL
921 || TREE_TYPE (decl) == error_mark_node)
922 {
923 return false;
924 }
925
926 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
927 }
928
929
930 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
931
932 bool
933 avr_mem_flash_p (rtx x)
934 {
935 return (MEM_P (x)
936 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
937 }
938
939
940 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
941 address space and FALSE, otherwise. */
942
943 bool
944 avr_mem_memx_p (rtx x)
945 {
946 return (MEM_P (x)
947 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
948 }
949
950
/* A helper for the subsequent function attribute used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      /* Look at the decl's own attributes first ...  */
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      /* ... then fall back to the attributes of its function type.  */
      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}
972
/* Return nonzero if FUNC is a naked function, i.e. carries
   attribute "naked".  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}


/* Return nonzero if FUNC is a no_gccisr function as specified
   by the "no_gccisr" attribute.  */

static int
avr_no_gccisr_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "no_gccisr");
}
1024
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes.  */
1027
1028 static void
1029 avr_set_current_function (tree decl)
1030 {
1031 location_t loc;
1032 const char *isr;
1033
1034 if (decl == NULL_TREE
1035 || current_function_decl == NULL_TREE
1036 || current_function_decl == error_mark_node
1037 || ! cfun->machine
1038 || cfun->machine->attributes_checked_p)
1039 return;
1040
1041 loc = DECL_SOURCE_LOCATION (decl);
1042
1043 cfun->machine->is_naked = avr_naked_function_p (decl);
1044 cfun->machine->is_signal = avr_signal_function_p (decl);
1045 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
1046 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
1047 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
1048 cfun->machine->is_no_gccisr = avr_no_gccisr_function_p (decl);
1049
1050 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
1051
1052 /* Too much attributes make no sense as they request conflicting features. */
1053
1054 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
1055 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
1056 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
1057 " exclusive", "OS_task", "OS_main", isr);
1058
1059 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
1060
1061 if (cfun->machine->is_naked
1062 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1063 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
1064 " no effect on %qs function", "OS_task", "OS_main", "naked");
1065
1066 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1067 {
1068 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
1069 tree ret = TREE_TYPE (TREE_TYPE (decl));
1070 const char *name;
1071
1072 name = DECL_ASSEMBLER_NAME_SET_P (decl)
1073 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
1074 : IDENTIFIER_POINTER (DECL_NAME (decl));
1075
1076 /* Skip a leading '*' that might still prefix the assembler name,
1077 e.g. in non-LTO runs. */
1078
1079 name = default_strip_name_encoding (name);
1080
1081 /* Interrupt handlers must be void __vector (void) functions. */
1082
1083 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
1084 error_at (loc, "%qs function cannot have arguments", isr);
1085
1086 if (TREE_CODE (ret) != VOID_TYPE)
1087 error_at (loc, "%qs function cannot return a value", isr);
1088
1089 #if defined WITH_AVRLIBC
1090 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
1091 using this when it switched from SIGNAL and INTERRUPT to ISR. */
1092
1093 if (cfun->machine->is_interrupt)
1094 cfun->machine->is_signal = 0;
1095
1096 /* If the function has the 'signal' or 'interrupt' attribute, ensure
1097 that the name of the function is "__vector_NN" so as to catch
1098 when the user misspells the vector name. */
1099
1100 if (!STR_PREFIX_P (name, "__vector"))
1101 warning_at (loc, OPT_Wmisspelled_isr, "%qs appears to be a misspelled "
1102 "%qs handler, missing %<__vector%> prefix", name, isr);
1103 #endif // AVR-LibC naming conventions
1104 }
1105
1106 #if defined WITH_AVRLIBC
1107 // Common problem is using "ISR" without first including avr/interrupt.h.
1108 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
1109 name = default_strip_name_encoding (name);
1110 if (0 == strcmp ("ISR", name)
1111 || 0 == strcmp ("INTERRUPT", name)
1112 || 0 == strcmp ("SIGNAL", name))
1113 {
1114 warning_at (loc, OPT_Wmisspelled_isr, "%qs is a reserved indentifier"
1115 " in AVR-LibC. Consider %<#include <avr/interrupt.h>%>"
1116 " before using the %qs macro", name, name);
1117 }
1118 #endif // AVR-LibC naming conventions
1119
1120 /* Don't print the above diagnostics more than once. */
1121
1122 cfun->machine->attributes_checked_p = 1;
1123 }
1124
1125
1126 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
1127
1128 int
1129 avr_accumulate_outgoing_args (void)
1130 {
1131 if (!cfun)
1132 return TARGET_ACCUMULATE_OUTGOING_ARGS;
1133
1134 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
1135 what offset is correct. In some cases it is relative to
1136 virtual_outgoing_args_rtx and in others it is relative to
1137 virtual_stack_vars_rtx. For example code see
1138 gcc.c-torture/execute/built-in-setjmp.c
1139 gcc.c-torture/execute/builtins/sprintf-chk.c */
1140
1141 return (TARGET_ACCUMULATE_OUTGOING_ARGS
1142 && !(cfun->calls_setjmp
1143 || cfun->has_nonlocal_label));
1144 }
1145
1146
1147 /* Report contribution of accumulated outgoing arguments to stack size. */
1148
1149 static inline int
1150 avr_outgoing_args_size (void)
1151 {
1152 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
1153 }
1154
1155
1156 /* Implement `STARTING_FRAME_OFFSET'. */
1157 /* This is the offset from the frame pointer register to the first stack slot
1158 that contains a variable living in the frame. */
1159
1160 int
1161 avr_starting_frame_offset (void)
1162 {
1163 return 1 + avr_outgoing_args_size ();
1164 }
1165
1166
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.
   Returns 0 (and leaves SET cleared) when the function never returns or
   carries the "OS_task" / "OS_main" attribute.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (int reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* A non-leaf ISR must save every call-used register it might
         clobber via a call; otherwise a register is saved if it is
         ever live and is either callee-saved or we are in an ISR.  */

      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == REG_Y + 1))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
1211
1212
1213 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
1214
1215 static bool
1216 avr_allocate_stack_slots_for_args (void)
1217 {
1218 return !cfun->machine->is_naked;
1219 }
1220
1221
1222 /* Return true if register FROM can be eliminated via register TO. */
1223
1224 static bool
1225 avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1226 {
1227 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
1228 || !frame_pointer_needed);
1229 }
1230
1231
1232 /* Implement `TARGET_WARN_FUNC_RETURN'. */
1233
1234 static bool
1235 avr_warn_func_return (tree decl)
1236 {
1237 /* Naked functions are implemented entirely in assembly, including the
1238 return sequence, so suppress warnings about this. */
1239
1240 return !avr_naked_function_p (decl);
1241 }
1242
1243 /* Compute offset between arg_pointer and frame_pointer. */
1244
1245 int
1246 avr_initial_elimination_offset (int from, int to)
1247 {
1248 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1249 return 0;
1250 else
1251 {
1252 int offset = frame_pointer_needed ? 2 : 0;
1253 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
1254
1255 // If FROM is ARG_POINTER_REGNUM, we are not in an ISR as ISRs
1256 // might not have arguments. Hence the following is not affected
1257 // by gasisr prologues.
1258 offset += avr_regs_to_save (NULL);
1259 return (get_frame_size () + avr_outgoing_args_size()
1260 + avr_pc_size + 1 + offset);
1261 }
1262 }
1263
1264
1265 /* Helper for the function below. */
1266
1267 static void
1268 avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
1269 {
1270 *node = make_node (FIXED_POINT_TYPE);
1271 TYPE_SATURATING (*node) = sat_p;
1272 TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
1273 TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
1274 TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
1275 TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
1276 SET_TYPE_ALIGN (*node, 8);
1277 SET_TYPE_MODE (*node, mode);
1278
1279 layout_type (*node);
1280 }
1281
1282
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.
   Also (ab)used as a convenient early hook to fix up the long long accum
   type nodes before any built-in macros are defined; see below.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  /* Rebind the long long accum aliases to the adjusted nodes.  */
  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
1313
1314
1315 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
1316 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
1317 frame pointer by +STARTING_FRAME_OFFSET.
1318 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
1319 avoids creating add/sub of offset in nonlocal goto and setjmp. */
1320
1321 static rtx
1322 avr_builtin_setjmp_frame_value (void)
1323 {
1324 rtx xval = gen_reg_rtx (Pmode);
1325 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
1326 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
1327 return xval;
1328 }
1329
1330
1331 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
1332 This is return address of function. */
1333
1334 rtx
1335 avr_return_addr_rtx (int count, rtx tem)
1336 {
1337 rtx r;
1338
1339 /* Can only return this function's return address. Others not supported. */
1340 if (count)
1341 return NULL;
1342
1343 if (AVR_3_BYTE_PC)
1344 {
1345 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
1346 warning (0, "%<builtin_return_address%> contains only 2 bytes"
1347 " of address");
1348 }
1349 else
1350 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
1351
1352 cfun->machine->use_L__stack_usage = 1;
1353
1354 r = gen_rtx_PLUS (Pmode, tem, r);
1355 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
1356 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
1357 return r;
1358 }
1359
1360 /* Return 1 if the function epilogue is just a single "ret". */
1361
1362 int
1363 avr_simple_epilogue (void)
1364 {
1365 return (! frame_pointer_needed
1366 && get_frame_size () == 0
1367 && avr_outgoing_args_size() == 0
1368 && avr_regs_to_save (NULL) == 0
1369 && ! cfun->machine->is_interrupt
1370 && ! cfun->machine->is_signal
1371 && ! cfun->machine->is_naked
1372 && ! TREE_THIS_VOLATILE (current_function_decl));
1373 }
1374
/* This function checks sequence of live registers.
   Return the length of the run of live callee-saved registers ending at
   r28/r29 (the pattern the -mcall-prologues library sequence can save),
   or 0 if the live registers do not form one contiguous trailing run.  */

static int
sequent_regs_live (void)
{
  int live_seq = 0;   /* Total number of live regs seen.  */
  int cur_seq = 0;    /* Length of the current contiguous run.  */

  for (int reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  /* r28/r29 (Y) participate either as ordinary live regs (no frame
     pointer) or are unconditionally part of the sequence (frame
     pointer needed).  */

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y + 1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }

  /* cur_seq == live_seq means all live regs form one run ending at Y.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
1433
namespace {

/* Pass data for the RTL pass below that runs shortly before
   prologue/epilogue expansion; the name is patched in the ctor.  */

static const pass_data avr_pass_data_pre_proep =
{
  RTL_PASS, // type
  "", // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN, // tv_id
  0, // properties_required
  0, // properties_provided
  0, // properties_destroyed
  0, // todo_flags_start
  0 // todo_flags_finish
};


/* Pass that decides, per ISR, whether the GAS `__gcc_isr' pseudo
   instruction may be used to emit parts of the prologue/epilogue
   (see compute_maybe_gasisr).  */

class avr_pass_pre_proep : public rtl_opt_pass
{
public:
  avr_pass_pre_proep (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_pre_proep, ctxt)
  {
    this->name = name;
  }

  void compute_maybe_gasisr (function*);

  /* NOTE(review): the conditions below mix FUN and cfun; presumably they
     refer to the same function while the pass executes — confirm.  */

  virtual unsigned int execute (function *fun)
  {
    if (TARGET_GASISR_PROLOGUES
        // Whether this function is an ISR worth scanning at all.
        && !fun->machine->is_no_gccisr
        && (fun->machine->is_interrupt
            || fun->machine->is_signal)
        && !cfun->machine->is_naked
        // Paranoia: Non-local gotos and labels that might escape.
        && !cfun->calls_setjmp
        && !cfun->has_nonlocal_label
        && !cfun->has_forced_label_in_static)
      {
        compute_maybe_gasisr (fun);
      }

    return 0;
  }

}; // avr_pass_pre_proep

} // anon namespace
1482
1483 rtl_opt_pass*
1484 make_avr_pass_pre_proep (gcc::context *ctxt)
1485 {
1486 return new avr_pass_pre_proep (ctxt, "avr-pre-proep");
1487 }
1488
1489
1490 /* Set fun->machine->gasisr.maybe provided we don't find anything that
1491 prohibits GAS generating parts of ISR prologues / epilogues for us. */
1492
1493 void
1494 avr_pass_pre_proep::compute_maybe_gasisr (function *fun)
1495 {
1496 // Don't use BB iterators so that we see JUMP_TABLE_DATA.
1497
1498 for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
1499 {
1500 // Transparent calls always use [R]CALL and are filtered out by GAS.
1501 // ISRs don't use -mcall-prologues, hence what remains to be filtered
1502 // out are open coded (tail) calls.
1503
1504 if (CALL_P (insn))
1505 return;
1506
1507 // __tablejump2__ clobbers something and is targeted by JMP so
1508 // that GAS won't see its usage.
1509
1510 if (AVR_HAVE_JMP_CALL
1511 && JUMP_TABLE_DATA_P (insn))
1512 return;
1513
1514 // Non-local gotos not seen in *FUN.
1515
1516 if (JUMP_P (insn)
1517 && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1518 return;
1519 }
1520
1521 fun->machine->gasisr.maybe = 1;
1522 }
1523
1524
1525 /* Obtain the length sequence of insns. */
1526
1527 int
1528 get_sequence_length (rtx_insn *insns)
1529 {
1530 int length = 0;
1531
1532 for (rtx_insn *insn = insns; insn; insn = NEXT_INSN (insn))
1533 length += get_attr_length (insn);
1534
1535 return length;
1536 }
1537
1538
1539 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
1540
1541 rtx
1542 avr_incoming_return_addr_rtx (void)
1543 {
1544 /* The return address is at the top of the stack. Note that the push
1545 was via post-decrement, which means the actual address is off by one. */
1546 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
1547 }
1548
1549
/* Unset a bit in *SET.  If successful, return the respective bit number.
   Otherwise, return -1 and *SET is unaltered.
   Used to pick the one extra register that the GAS `__gcc_isr' pseudo
   prologue may save for us; complete X/Y/Z pairs are kept in *SET when
   their presence signals that a RAMP* register needs treatment.  */

static int
avr_hregs_split_reg (HARD_REG_SET *set)
{
  for (int regno = 0; regno < 32; regno++)
    if (TEST_HARD_REG_BIT (*set, regno))
      {
        // Don't remove a register from *SET which might indicate that
        // some RAMP* register might need ISR prologue / epilogue treatment.

        if (AVR_HAVE_RAMPX
            && (REG_X == regno || REG_X + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_X)
            && TEST_HARD_REG_BIT (*set, REG_X + 1))
          continue;

        // When the frame pointer is needed, RAMPY is handled regardless
        // of SET, hence only a live Y pair without frame pointer matters.

        if (AVR_HAVE_RAMPY
            && !frame_pointer_needed
            && (REG_Y == regno || REG_Y + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_Y)
            && TEST_HARD_REG_BIT (*set, REG_Y + 1))
          continue;

        if (AVR_HAVE_RAMPZ
            && (REG_Z == regno || REG_Z + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_Z)
            && TEST_HARD_REG_BIT (*set, REG_Z + 1))
          continue;

        CLEAR_HARD_REG_BIT (*set, regno);
        return regno;
      }

  return -1;
}
1587
1588
1589 /* Helper for expand_prologue. Emit a push of a byte register. */
1590
1591 static void
1592 emit_push_byte (unsigned regno, bool frame_related_p)
1593 {
1594 rtx mem, reg;
1595 rtx_insn *insn;
1596
1597 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
1598 mem = gen_frame_mem (QImode, mem);
1599 reg = gen_rtx_REG (QImode, regno);
1600
1601 insn = emit_insn (gen_rtx_SET (mem, reg));
1602 if (frame_related_p)
1603 RTX_FRAME_RELATED_P (insn) = 1;
1604
1605 cfun->machine->stack_usage++;
1606 }
1607
1608
1609 /* Helper for expand_prologue. Emit a push of a SFR via register TREG.
1610 SFR is a MEM representing the memory location of the SFR.
1611 If CLR_P then clear the SFR after the push using zero_reg. */
1612
1613 static void
1614 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p, int treg)
1615 {
1616 rtx_insn *insn;
1617
1618 gcc_assert (MEM_P (sfr));
1619
1620 /* IN treg, IO(SFR) */
1621 insn = emit_move_insn (all_regs_rtx[treg], sfr);
1622 if (frame_related_p)
1623 RTX_FRAME_RELATED_P (insn) = 1;
1624
1625 /* PUSH treg */
1626 emit_push_byte (treg, frame_related_p);
1627
1628 if (clr_p)
1629 {
1630 /* OUT IO(SFR), __zero_reg__ */
1631 insn = emit_move_insn (sfr, const0_rtx);
1632 if (frame_related_p)
1633 RTX_FRAME_RELATED_P (insn) = 1;
1634 }
1635 }
1636
/* Helper for avr_expand_prologue:  Push the registers in SET, save the
   frame pointer if needed and allocate SIZE bytes of frame.  Either the
   -mcall-prologues library sequence or open-coded pushes are used,
   whichever applies.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  /* Whether the -mcall-prologues library sequence is usable at all.  */

  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main
                   && !AVR_TINY);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* The library sequence expects the frame size in X.  */

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET ((frame_pointer_needed
                                  ? frame_pointer_rtx
                                  : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      /* Open-coded pushes of the registers to be saved.  */

      for (int reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer.  These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             the optimum method depends on function type, stack and
             frame size.  To avoid a complex logic, both methods are
             tested and shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed.
             We use the X register as scratch.  This is safe because X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, plus_constant (Pmode, fp,
                                                            -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          int n_rcall = size / (AVR_3_BYTE_PC ? 3 : 2);

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode)
              // Don't use more than 3 RCALLs.
              && n_rcall <= 3)
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************  Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1906
1907
/* Output function prologue.
   Emits the ISR preamble (interrupt enable, SREG/ZERO/TMP and RAMP*
   saves — either via the GAS `__gcc_isr' pseudo or open coded), then
   delegates register saves and frame allocation to
   avr_prologue_setup_frame.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      int treg = AVR_TMP_REGNO;
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      if (cfun->machine->gasisr.maybe)
        {
          /* Let GAS PR21472 emit prologue preamble for us which handles SREG,
             ZERO_REG and TMP_REG and one additional, optional register for
             us in an optimal way.  This even scans through inline asm.  */

          cfun->machine->gasisr.yes = 1;

          // The optional reg or TMP_REG if we don't need one.  If we need one,
          // remove that reg from SET so that it's not pushed / popped twice.
          // We also use it below instead of TMP_REG in some places.

          treg = avr_hregs_split_reg (&set);
          if (treg < 0)
            treg = AVR_TMP_REGNO;
          cfun->machine->gasisr.regno = treg;

          // The worst case of pushes.  The exact number can be inferred
          // at assembly time by magic expression __gcc_isr.n_pushed.
          cfun->machine->stack_usage += 3 + (treg != AVR_TMP_REGNO);

          // Emit a Prologue chunk.  Epilogue chunk(s) might follow.
          // The final Done chunk is emitted by final postscan.
          emit_insn (gen_gasisr (GEN_INT (GASISR_Prologue), GEN_INT (treg)));
        }
      else // !TARGET_GASISR_PROLOGUES: Classic, dumb prologue preamble.
        {
          /* Push zero reg.  */
          emit_push_byte (AVR_ZERO_REGNO, true);

          /* Push tmp reg.  */
          emit_push_byte (AVR_TMP_REGNO, true);

          /* Push SREG.  */
          /* ??? There's no dwarf2 column reserved for SREG.  */
          emit_push_sfr (sreg_rtx, false, false /* clr */, AVR_TMP_REGNO);

          /* Clear zero reg.  */
          emit_move_insn (zero_reg_rtx, const0_rtx);

          /* Prevent any attempt to delete the setting of ZERO_REG!  */
          emit_use (zero_reg_rtx);
        }

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame */, true /* clr */, treg);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame */, true /* clr */, treg);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame */, true /* clr */, treg);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame */, AVR_HAVE_RAMPD, treg);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size
      = cfun->machine->stack_usage + INCOMING_FRAME_SP_OFFSET;
}
2014
2015
2016 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
2017 /* Output summary at end of function prologue. */
2018
2019 static void
2020 avr_asm_function_end_prologue (FILE *file)
2021 {
2022 if (cfun->machine->is_naked)
2023 {
2024 fputs ("/* prologue: naked */\n", file);
2025 }
2026 else
2027 {
2028 if (cfun->machine->is_interrupt)
2029 {
2030 fputs ("/* prologue: Interrupt */\n", file);
2031 }
2032 else if (cfun->machine->is_signal)
2033 {
2034 fputs ("/* prologue: Signal */\n", file);
2035 }
2036 else
2037 fputs ("/* prologue: function */\n", file);
2038 }
2039
2040 if (ACCUMULATE_OUTGOING_ARGS)
2041 fprintf (file, "/* outgoing args size = %d */\n",
2042 avr_outgoing_args_size());
2043
2044 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
2045 get_frame_size());
2046
2047 if (!cfun->machine->gasisr.yes)
2048 {
2049 fprintf (file, "/* stack size = %d */\n", cfun->machine->stack_usage);
2050 // Create symbol stack offset so all functions have it. Add 1 to stack
2051 // usage for offset so that SP + .L__stack_offset = return address.
2052 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
2053 }
2054 else
2055 {
2056 int used_by_gasisr = 3 + (cfun->machine->gasisr.regno != AVR_TMP_REGNO);
2057 int to = cfun->machine->stack_usage;
2058 int from = to - used_by_gasisr;
2059 // Number of pushed regs is only known at assembly-time.
2060 fprintf (file, "/* stack size = %d...%d */\n", from , to);
2061 fprintf (file, ".L__stack_usage = %d + __gcc_isr.n_pushed\n", from);
2062 }
2063 }
2064
2065
2066 /* Implement `EPILOGUE_USES'. */
2067
2068 int
2069 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
2070 {
2071 if (reload_completed
2072 && cfun->machine
2073 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
2074 return 1;
2075 return 0;
2076 }
2077
2078 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
2079
2080 static void
2081 emit_pop_byte (unsigned regno)
2082 {
2083 rtx mem, reg;
2084
2085 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
2086 mem = gen_frame_mem (QImode, mem);
2087 reg = gen_rtx_REG (QImode, regno);
2088
2089 emit_insn (gen_rtx_SET (reg, mem));
2090 }
2091
/* Output RTL epilogue.  Emit the insn sequence that tears the frame down
   and returns.  If SIBCALL_P, the final RET is omitted because control
   leaves via a sibling call instead.  */

void
avr_expand_epilogue (bool sibcall_p)
{
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  /* Interrupt / signal handlers must additionally restore SREG and the
     RAMP* special function registers saved by the prologue.  */
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* Whether to use the out-of-line restore helper; never for ISRs,
     OS tasks / OS main, or avrtiny.  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && !AVR_TINY);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  /* Scratch register used to restore the special function registers;
     with gas-generated ISR pro/epilogues it is the gasisr register.  */
  int treg = AVR_TMP_REGNO;

  if (isr_p
      && cfun->machine->gasisr.yes)
    {
      treg = cfun->machine->gasisr.regno;
      CLEAR_HARD_REG_BIT (set, treg);
    }

  /* Pop in reverse order of the prologue's pushes.  */
  for (int reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampz_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampy_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampx_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (treg);
          emit_move_insn (rampd_rtx, all_regs_rtx[treg]);
        }

      if (cfun->machine->gasisr.yes)
        {
          // Emit an Epilogue chunk.
          emit_insn (gen_gasisr (GEN_INT (GASISR_Epilogue),
                                 GEN_INT (cfun->machine->gasisr.regno)));
        }
      else // !TARGET_GASISR_PROLOGUES
        {
          /* Restore SREG using tmp_reg as scratch.  */

          emit_pop_byte (AVR_TMP_REGNO);
          emit_move_insn (sreg_rtx, tmp_reg_rtx);

          /* Restore tmp REG.  */
          emit_pop_byte (AVR_TMP_REGNO);

          /* Restore zero REG.  */
          emit_pop_byte (AVR_ZERO_REGNO);
        }
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
2311
2312
2313 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
2314
2315 static void
2316 avr_asm_function_begin_epilogue (FILE *file)
2317 {
2318 app_disable();
2319 fprintf (file, "/* epilogue start */\n");
2320 }
2321
2322
2323 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
2324
2325 static bool
2326 avr_cannot_modify_jumps_p (void)
2327 {
2328 /* Naked Functions must not have any instructions after
2329 their epilogue, see PR42240 */
2330
2331 if (reload_completed
2332 && cfun->machine
2333 && cfun->machine->is_naked)
2334 {
2335 return true;
2336 }
2337
2338 return false;
2339 }
2340
2341
2342 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
2343
2344 static bool
2345 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
2346 {
2347 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
2348 This hook just serves to hack around PR rtl-optimization/52543 by
2349 claiming that non-generic addresses were mode-dependent so that
2350 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
2351 RTXes to probe SET and MEM costs and assumes that MEM is always in the
2352 generic address space which is not true. */
2353
2354 return !ADDR_SPACE_GENERIC_P (as);
2355 }
2356
2357
2358 /* Return true if rtx X is a CONST_INT, CONST or SYMBOL_REF
2359 address with the `absdata' variable attribute, i.e. respective
2360 data can be read / written by LDS / STS instruction.
2361 This is used only for AVR_TINY. */
2362
2363 static bool
2364 avr_address_tiny_absdata_p (rtx x, machine_mode mode)
2365 {
2366 if (CONST == GET_CODE (x))
2367 x = XEXP (XEXP (x, 0), 0);
2368
2369 if (SYMBOL_REF_P (x))
2370 return SYMBOL_REF_FLAGS (x) & AVR_SYMBOL_FLAG_TINY_ABSDATA;
2371
2372 if (CONST_INT_P (x)
2373 && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)))
2374 return true;
2375
2376 return false;
2377 }
2378
2379
2380 /* Helper function for `avr_legitimate_address_p'. */
2381
2382 static inline bool
2383 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
2384 RTX_CODE outer_code, bool strict)
2385 {
2386 return (REG_P (reg)
2387 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
2388 as, outer_code, UNKNOWN)
2389 || (!strict
2390 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
2391 }
2392
2393
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */

static bool
avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  /* Plain constant addresses are OK by default (may be overridden for
     avrtiny below).  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* Reject wide accesses through X in strict mode; NOTE(review):
         presumably because X has no displacement addressing to reach
         the upper bytes — confirm against the insn output routines.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* Displacements must fit the LD/LDD offset range for MODE.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                /* Frame / arg pointer bases are always acceptable here.  */
                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Out-of-range frame accesses are allowed; they are fixed
                   up later (cf. the reload handling in this file).  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (AVR_TINY
      && CONSTANT_ADDRESS_P (x))
    {
      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf.  */

      ok = avr_address_tiny_absdata_p (x, mode);
    }

  /* Optional debug dump of the decision, enabled by -mlog=.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
2489
2490
/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
   now only a helper for avr_addr_space_legitimize_address.  */
/* Attempts to replace X with a valid
   memory address for an operand of mode MODE  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (AVR_TINY)
    {
      /* Constant addresses that LDS / STS cannot encode must be moved
         into a pointer register.  */
      if (CONSTANT_ADDRESS_P (x)
          && ! avr_address_tiny_absdata_p (x, mode))
        {
          x = force_reg (Pmode, x);
        }
    }

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        /* reg + reg is not a valid AVR address; materialize the sum.  */
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          /* reg + const: force to a register when the displacement is out
             of the LD/LDD offset range (frame-pointer bases are left for
             later fix-up).  */
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  /* Optional debug dump, enabled by -mlog=.  */
  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
2539
2540
2541 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
2542 /* This will allow register R26/27 to be used where it is no worse than normal
2543 base pointers R28/29 or R30/31. For example, if base offset is greater
2544 than 63 bytes or for R++ or --R addressing. */
2545
2546 rtx
2547 avr_legitimize_reload_address (rtx *px, machine_mode mode,
2548 int opnum, int type, int addr_type,
2549 int ind_levels ATTRIBUTE_UNUSED,
2550 rtx (*mk_memloc)(rtx,int))
2551 {
2552 rtx x = *px;
2553
2554 if (avr_log.legitimize_reload_address)
2555 avr_edump ("\n%?:%m %r\n", mode, x);
2556
2557 if (1 && (GET_CODE (x) == POST_INC
2558 || GET_CODE (x) == PRE_DEC))
2559 {
2560 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
2561 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
2562 opnum, RELOAD_OTHER);
2563
2564 if (avr_log.legitimize_reload_address)
2565 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
2566 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
2567
2568 return x;
2569 }
2570
2571 if (GET_CODE (x) == PLUS
2572 && REG_P (XEXP (x, 0))
2573 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
2574 && CONST_INT_P (XEXP (x, 1))
2575 && INTVAL (XEXP (x, 1)) >= 1)
2576 {
2577 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
2578
2579 if (fit)
2580 {
2581 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
2582 {
2583 int regno = REGNO (XEXP (x, 0));
2584 rtx mem = mk_memloc (x, regno);
2585
2586 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
2587 POINTER_REGS, Pmode, VOIDmode, 0, 0,
2588 1, (enum reload_type) addr_type);
2589
2590 if (avr_log.legitimize_reload_address)
2591 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2592 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
2593
2594 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
2595 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2596 opnum, (enum reload_type) type);
2597
2598 if (avr_log.legitimize_reload_address)
2599 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2600 BASE_POINTER_REGS, mem, NULL_RTX);
2601
2602 return x;
2603 }
2604 }
2605 else if (! (frame_pointer_needed
2606 && XEXP (x, 0) == frame_pointer_rtx))
2607 {
2608 push_reload (x, NULL_RTX, px, NULL,
2609 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2610 opnum, (enum reload_type) type);
2611
2612 if (avr_log.legitimize_reload_address)
2613 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2614 POINTER_REGS, x, NULL_RTX);
2615
2616 return x;
2617 }
2618 }
2619
2620 return NULL_RTX;
2621 }
2622
2623
2624 /* Helper function to print assembler resp. track instruction
2625 sequence lengths. Always return "".
2626
2627 If PLEN == NULL:
2628 Output assembler code from template TPL with operands supplied
2629 by OPERANDS. This is just forwarding to output_asm_insn.
2630
2631 If PLEN != NULL:
2632 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2633 If N_WORDS < 0 Set *PLEN to -N_WORDS.
2634 Don't output anything.
2635 */
2636
2637 static const char*
2638 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2639 {
2640 if (NULL == plen)
2641 {
2642 output_asm_insn (tpl, operands);
2643 }
2644 else
2645 {
2646 if (n_words < 0)
2647 *plen = -n_words;
2648 else
2649 *plen += n_words;
2650 }
2651
2652 return "";
2653 }
2654
2655
2656 /* Return a pointer register name as a string. */
2657
2658 static const char*
2659 ptrreg_to_str (int regno)
2660 {
2661 switch (regno)
2662 {
2663 case REG_X: return "X";
2664 case REG_Y: return "Y";
2665 case REG_Z: return "Z";
2666 default:
2667 output_operand_lossage ("address operand requires constraint for"
2668 " X, Y, or Z register");
2669 }
2670 return NULL;
2671 }
2672
2673 /* Return the condition name as a string.
2674 Used in conditional jump constructing */
2675
2676 static const char*
2677 cond_string (enum rtx_code code)
2678 {
2679 switch (code)
2680 {
2681 case NE:
2682 return "ne";
2683 case EQ:
2684 return "eq";
2685 case GE:
2686 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2687 return "pl";
2688 else
2689 return "ge";
2690 case LT:
2691 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2692 return "mi";
2693 else
2694 return "lt";
2695 case GEU:
2696 return "sh";
2697 case LTU:
2698 return "lo";
2699 default:
2700 gcc_unreachable ();
2701 }
2702
2703 return "";
2704 }
2705
2706
2707 /* Return true if rtx X is a CONST or SYMBOL_REF with progmem.
2708 This must be used for AVR_TINY only because on other cores
2709 the flash memory is not visible in the RAM address range and
2710 cannot be read by, say, LD instruction. */
2711
2712 static bool
2713 avr_address_tiny_pm_p (rtx x)
2714 {
2715 if (CONST == GET_CODE (x))
2716 x = XEXP (XEXP (x, 0), 0);
2717
2718 if (SYMBOL_REF_P (x))
2719 return SYMBOL_REF_FLAGS (x) & AVR_SYMBOL_FLAG_TINY_PM;
2720
2721 return false;
2722 }
2723
/* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
/* Output ADDR to FILE as address.  */

static void
avr_print_operand_address (FILE *file, machine_mode /*mode*/, rtx addr)
{
  /* On avrtiny, flash appears in the RAM address space at an offset;
     shift progmem addresses accordingly before printing.  */
  if (AVR_TINY
      && avr_address_tiny_pm_p (addr))
    {
      addr = plus_constant (Pmode, addr, avr_arch->flash_pm_offset);
    }

  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "%s", ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and smaller devices this is ok.
                 For large devices it will create a trampoline to offset
                 from symbol which may not be what the user really wanted.  */

              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x, 0));
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
                       2 * INTVAL (XEXP (x, 1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol maybe incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf (stderr, "\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
2788
2789
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* Only `~' and `!' are valid punctuation %-codes, cf. avr.md.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '~':
    case '!':
      return true;
    default:
      return false;
    }
}
2797
2798
/* Implement `TARGET_PRINT_OPERAND'.  */
/* Output X as assembler operand to file FILE.
   For a description of supported %-codes, see top of avr.md.  */

static void
avr_print_operand (FILE *file, rtx x, int code)
{
  /* Byte offsets selected by the %-code letter: A..D select byte 0..3
     of a multi-byte operand; E/F and I/J select the low/high byte of a
     pointer register pair.  */
  int abcd = 0, ef = 0, ij = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';
  else if (code == 'E' || code == 'F')
    ef = code - 'E';
  else if (code == 'I' || code == 'J')
    ij = code - 'I';

  if (code == '~')
    {
      /* 'r' prefix turns JMP/CALL into RJMP/RCALL on small devices.  */
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      /* 'e' prefix selects EIJMP/EICALL on devices with >128 KiB flash.  */
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      /* %T takes a register; a following %t (or the same %T call with a
         CONST_INT) supplies the bit position.  The pair's state is kept
         across calls in these statics.  */
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (code == 'E' || code == 'F')
    {
      rtx op = XEXP (x, 0);
      fprintf (file, "%s", reg_names[REGNO (op) + ef]);
    }
  else if (code == 'I' || code == 'J')
    {
      rtx op = XEXP (XEXP (x, 0), 0);
      fprintf (file, "%s", reg_names[REGNO (op) + ij]);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else if (code == 'r' && REGNO (x) < 32)
        /* %r prints the raw register number instead of its name.  */
        fprintf (file, "%d", (int) REGNO (x));
      else
        fprintf (file, "%s", reg_names[REGNO (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          /* %i: print an I/O address; well-known SFRs get their
             symbolic assembler names.  */
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              /* Generic I/O address: translate RAM address to I/O space.  */
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          /* %o: print only the displacement of a (reg+disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'b')
        {
          /* %b: print only the base register of a (reg+disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            /* X, Y, Z */
            avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
          else
            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error. Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr, 1), code);
        }
      else
        avr_print_operand_address (file, VOIDmode, addr);
    }
  else if (code == 'i')
    {
      /* %i on a symbol: only symbols tagged as I/O are acceptable.  */
      if (SYMBOL_REF_P (x) && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
        avr_print_operand_address
          (file, VOIDmode, plus_constant (HImode, x, -avr_arch->sfr_offset));
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed */
      output_addr_const (file, x);
    }
  else if (CONST_FIXED_P (x))
    {
      /* Fixed-point constants are printed via their integer bit pattern.  */
      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
      if (code != 0)
        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
                                code);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
    }
  else if (CONST_DOUBLE_P (x))
    {
      long val;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error. Unknown mode:", x);
      REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    /* %k: the reversed condition of %j.  */
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, VOIDmode, x);
}
3005
3006
3007 /* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P. */
3008
3009 /* Prefer sequence of loads/stores for moves of size upto
3010 two - two pairs of load/store instructions are always better
3011 than the 5 instruction sequence for a loop (1 instruction
3012 for loop counter setup, and 4 for the body of the loop). */
3013
3014 static bool
3015 avr_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
3016 unsigned int align ATTRIBUTE_UNUSED,
3017 enum by_pieces_operation op,
3018 bool speed_p)
3019 {
3020 if (op != MOVE_BY_PIECES
3021 || (speed_p && size > MOVE_MAX_PIECES))
3022 return default_use_by_pieces_infrastructure_p (size, align, op, speed_p);
3023
3024 return size <= MOVE_MAX_PIECES;
3025 }
3026
3027
/* Worker function for `NOTICE_UPDATE_CC'.  */
/* Update the condition code in the INSN.  */

void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  /* First pass: resolve "polymorphic" cc attribute values whose actual
     effect on the flags depends on the insn's operands.  */
  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            /* Let the addition output routine report the real cc effect.  */
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  /* Second pass: update the global cc_status according to CC.  */
  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all, but it might set some registers
         that are stored in cc_status.  If such a register is affected by
         the current insn, for example by means of a SET or a CLOBBER,
         then we must reset cc_status; cf. PR77326.

         Unfortunately, set_of cannot be used as reg_overlap_mentioned_p
         will abort on COMPARE (which might be found in cc_status.value1/2).
         Thus work out the registers set by the insn and regs mentioned
         in cc_status.value1/2.  */

      if (cc_status.value1
          || cc_status.value2)
        {
          HARD_REG_SET regs_used;
          HARD_REG_SET regs_set;
          CLEAR_HARD_REG_SET (regs_used);

          if (cc_status.value1
              && !CONSTANT_P (cc_status.value1))
            {
              find_all_hard_regs (cc_status.value1, &regs_used);
            }

          if (cc_status.value2
              && !CONSTANT_P (cc_status.value2))
            {
              find_all_hard_regs (cc_status.value2, &regs_used);
            }

          find_all_hard_reg_sets (insn, &regs_set, false);

          if (hard_reg_set_intersect_p (regs_used, regs_set))
            {
              CC_STATUS_INIT;
            }
        }

      break; // CC_NONE

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_VZN:
      /* Insn like INC, DEC, NEG that set Z,N,V.  We currently don't make use
         of this combination, cf. also PR61055.  */
      CC_STATUS_INIT;
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
3172
3173 /* Choose mode for jump insn:
3174 1 - relative jump in range -63 <= x <= 62 ;
3175 2 - relative jump in range -2046 <= x <= 2045 ;
3176 3 - absolute jump (only for ATmega[16]03). */
3177
3178 int
3179 avr_jump_mode (rtx x, rtx_insn *insn)
3180 {
3181 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
3182 ? XEXP (x, 0) : x));
3183 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
3184 int jump_distance = cur_addr - dest_addr;
3185
3186 if (IN_RANGE (jump_distance, -63, 62))
3187 return 1;
3188 else if (IN_RANGE (jump_distance, -2046, 2045))
3189 return 2;
3190 else if (AVR_HAVE_JMP_CALL)
3191 return 3;
3192
3193 return 2;
3194 }
3195
/* Return an AVR condition jump commands.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function.
   If REVERSE nonzero then condition code in X must be reversed.  */

const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  /* GT/GTU/LE/LEU have no single branch insn on AVR; they are synthesized
     from BREQ plus a signed/unsigned relational branch.  When the V flag
     is unusable the signed forms fall back to the N-flag tests BRPL/BRMI.
     For LEN 2 and 3 the conditional branch jumps around an RJMP/JMP.  */

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* Conditions with a direct branch insn: emitted via the %j1
         (condition) / %k1 (reversed condition) operand codes.  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
3297
3298
3299 /* Worker function for `FINAL_PRESCAN_INSN'. */
3300 /* Output insn cost for next insn. */
3301
3302 void
3303 avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
3304 int num_operands ATTRIBUTE_UNUSED)
3305 {
3306 if (avr_log.rtx_costs)
3307 {
3308 rtx set = single_set (insn);
3309
3310 if (set)
3311 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
3312 set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)),
3313 optimize_insn_for_speed_p ()));
3314 else
3315 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
3316 rtx_cost (PATTERN (insn), VOIDmode, INSN, 0,
3317 optimize_insn_for_speed_p()));
3318 }
3319
3320 if (avr_log.insn_addresses)
3321 fprintf (asm_out_file, ";; ADDR = %d\n",
3322 (int) INSN_ADDRESSES (INSN_UID (insn)));
3323 }
3324
3325
3326 /* Implement `TARGET_ASM_FINAL_POSTSCAN_INSN'. */
3327 /* When GAS generates (parts of) ISR prologue / epilogue for us, we must
3328 hint GAS about the end of the code to scan. There migh be code located
3329 after the last epilogue. */
3330
3331 static void
3332 avr_asm_final_postscan_insn (FILE *stream, rtx_insn *insn, rtx*, int)
3333 {
3334 if (cfun->machine->gasisr.yes
3335 && !next_real_insn (insn))
3336 {
3337 app_disable();
3338 fprintf (stream, "\t__gcc_isr %d,r%d\n", GASISR_Done,
3339 cfun->machine->gasisr.regno);
3340 }
3341 }
3342
3343
/* Return 0 if undefined, 1 if always true or always false.  */

int
avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
{
  /* Maximal unsigned value representable in MODE; 0 for unknown modes.  */
  unsigned int max = (mode == QImode ? 0xff :
                      mode == HImode ? 0xffff :
                      mode == PSImode ? 0xffffff :
                      mode == SImode ? 0xffffffff : 0);
  if (max && op && CONST_INT_P (x))
    {
      /* For signed comparisons only half the range is usable.  */
      if (unsigned_condition (op) != op)
        max >>= 1;

      /* NOTE(review): the `INTVAL (x) != 0xff' exception keeps comparisons
         against literal 0xff "undefined" even when 0xff exceeds MAX —
         confirm intent before touching this condition.  */
      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 1;
    }
  return 0;
}
3364
3365
3366 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
3367 /* Returns nonzero if REGNO is the number of a hard
3368 register in which function arguments are sometimes passed. */
3369
3370 int
3371 avr_function_arg_regno_p (int r)
3372 {
3373 return AVR_TINY ? IN_RANGE (r, 20, 25) : IN_RANGE (r, 8, 25);
3374 }
3375
3376
3377 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
3378 /* Initializing the variable cum for the state at the beginning
3379 of the argument list. */
3380
void
avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                          tree fndecl ATTRIBUTE_UNUSED)
{
  /* Number of GPRs available for argument passing:
     6 (R20...R25) on AVR_TINY, 18 (R8...R25) otherwise.  */
  cum->nregs = AVR_TINY ? 6 : 18;
  cum->regno = FIRST_CUM_REG;

  /* Variadic functions take all their arguments on the stack.  Libcalls
     (LIBNAME != 0) have no FNTYPE to query, hence are never variadic.  */
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called */

  cfun->machine->sibcall_fails = 0;
}
3394
3395 /* Returns the number of registers to allocate for a function argument. */
3396
3397 static int
3398 avr_num_arg_regs (machine_mode mode, const_tree type)
3399 {
3400 int size;
3401
3402 if (mode == BLKmode)
3403 size = int_size_in_bytes (type);
3404 else
3405 size = GET_MODE_SIZE (mode);
3406
3407 /* Align all function arguments to start in even-numbered registers.
3408 Odd-sized arguments leave holes above them. */
3409
3410 return (size + 1) & ~1;
3411 }
3412
3413
3414 /* Implement `TARGET_FUNCTION_ARG'. */
3415 /* Controls whether a function argument is passed
3416 in a register, and which register. */
3417
3418 static rtx
3419 avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
3420 const_tree type, bool named ATTRIBUTE_UNUSED)
3421 {
3422 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3423 int bytes = avr_num_arg_regs (mode, type);
3424
3425 if (cum->nregs && bytes <= cum->nregs)
3426 return gen_rtx_REG (mode, cum->regno - bytes);
3427
3428 return NULL_RTX;
3429 }
3430
3431
3432 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
3433 /* Update the summarizer variable CUM to advance past an argument
3434 in the argument list. */
3435
static void
avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Argument registers are consumed downwards from FIRST_CUM_REG.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      /* CUM->regno now names the first (lowest) register of this argument;
         the argument occupies [regno, regno + bytes).  */
      for (int regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  /* Register file exhausted: all further arguments go on the stack.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
3483
3484 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
3485 /* Decide whether we can make a sibling call to a function. DECL is the
3486 declaration of the function being targeted by the call and EXP is the
3487 CALL_EXPR representing the call. */
3488
static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* Indirect call with no declaration: strip pointer / reference
         layers until we reach the function (or method) type itself.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (cfun->machine->is_interrupt
      || cfun->machine->is_signal
      || cfun->machine->is_naked
      || avr_naked_function_p (decl_callee)
      /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
      || (avr_OS_task_function_p (decl_callee)
          != cfun->machine->is_OS_task)
      || (avr_OS_main_function_p (decl_callee)
          != cfun->machine->is_OS_main))
    {
      return false;
    }

  return true;
}
3540
3541 /***********************************************************************
 Functions for outputting various mov's for various modes
3543 ************************************************************************/
3544
3545 /* Return true if a value of mode MODE is read from flash by
3546 __load_* function from libgcc. */
3547
3548 bool
3549 avr_load_libgcc_p (rtx op)
3550 {
3551 machine_mode mode = GET_MODE (op);
3552 int n_bytes = GET_MODE_SIZE (mode);
3553
3554 return (n_bytes > 2
3555 && !AVR_HAVE_LPMX
3556 && avr_mem_flash_p (op));
3557 }
3558
3559 /* Return true if a value of mode MODE is read by __xload_* function. */
3560
3561 bool
3562 avr_xload_libgcc_p (machine_mode mode)
3563 {
3564 int n_bytes = GET_MODE_SIZE (mode);
3565
3566 return (n_bytes > 1
3567 || avr_n_flash > 1);
3568 }
3569
3570
/* Fixme: This is a hack because secondary reloads don't work as expected.
3572
3573 Find an unused d-register to be used as scratch in INSN.
3574 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
3575 is a register, skip all possible return values that overlap EXCLUDE.
3576 The policy for the returned register is similar to that of
3577 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
3578 of INSN.
3579
3580 Return a QImode d-register or NULL_RTX if nothing found. */
3581
static rtx
avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
{
  /* In an ISR, call-used registers are not automatically free.  */
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  /* Scan the d-registers R16...R31.  */
  for (int regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      /* Skip registers overlapping EXCLUDE and fixed registers.  */
      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  /* No suitable d-register found.  */
  return NULL_RTX;
}
3622
3623
3624 /* Helper function for the next function in the case where only restricted
3625 version of LPM instruction is available. */
3626
static const char*
avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      /* Plain LPM only addresses through Z.  */
      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          /* Move the byte out of R0 unless R0 is the destination.  */
          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* DEST overlaps Z: save the first byte on the stack so the
             second LPM does not clobber it.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm" CR_TAB
                                "push %3" CR_TAB
                                "adiw %2,1" CR_TAB
                                "%4lpm" CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm" CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1" CR_TAB
                       "%4lpm" CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if it is still needed after this insn.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      /* Emit one LPM + ADIW pair per byte; plain LPM has no
         auto-increment form.  */
      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
3719
3720
/* If PLEN == NULL: Output instructions to load a value from a memory location
3722 OP[1] in AS1 to register OP[0].
3723 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3724 Return "". */
3725
const char*
avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
{
  /* Operand layout used by the templates below:
     %0 = destination register, %1 = address, %2 = Z (LPM address reg),
     %3 = scratch d-reg (set below when needed), %4 = "" / segment / "e",
     %5 = tmp register, %6 = RAMPZ address.  */
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Flash is read-only: a store to it cannot be output.  */
  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          /* A free d-reg is available to load the segment number.  */
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* No d-reg free, but segment 1 can be built with CLR + INC.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          /* No d-reg free: borrow ZL, saving / restoring it via tmp.  */
          avr_asm_len ("mov %5,%2" CR_TAB
                       "ldi %2,%4" CR_TAB
                       "out %i6,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* NOTE(review): xstring_e presumably is "e", turning "%4lpm"
         into ELPM below -- confirm against its definition.  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* DEST overlaps Z: go through tmp so the low byte does not
             clobber the address before the high byte is read.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          /* Undo the two post-increments if Z lives on.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* DEST's upper word overlaps Z (DEST is R28..R31 span):
             buffer byte C in tmp.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2)  avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3)  avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4)  avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
3893
3894
3895 /* Worker function for xload_8 insn. */
3896
const char*
avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  /* Without LPMX, LPM implicitly targets lpm_reg_rtx (R0).  */
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);

  /* SBRC skips the LD unless bit 7 of %1 is set; in that case the
     byte is re-read through Z with LD (presumably selecting RAM over
     flash -- confirm against the xload expander).  */
  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2", xop, plen, 2);

  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
3917
3918
/* Output an 8-bit move insn.  DEST/SRC may be REG, MEM or constant;
   flash reads are routed through avr_out_lpm.  */

const char*
output_movqi (rtx_insn *insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);
    }

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          /* Moves to / from SP use OUT / IN on the SP I/O register.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return avr_asm_len ("out %0,%1", operands, plen, -1);
          else if (test_hard_reg_class (STACK_REG, src))
            return avr_asm_len ("in %0,%1", operands, plen, -1);

          return avr_asm_len ("mov %0,%1", operands, plen, -1);
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, plen, false);
          return "";
        }
      else if (MEM_P (src))
        return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      xop[0] = dest;
      /* Store constant 0 from the fixed zero register.  */
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
    }

  return "";
}
3964
3965
/* Output a 16-bit move insn; see output_movqi for the overall scheme.  */

const char *
output_movhi (rtx_insn *insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              /* Devices with an 8-bit SP only have SP_L.  */
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              /* XMEGA updates SP atomically; no interrupt lockout.  */
              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are used.  */

              /* Otherwise protect the non-atomic SP update by saving
                 SREG, disabling interrupts, and restoring SREG.  */
              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)
                : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                               "cli" CR_TAB
                               "out __SP_H__,%B1" CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              /* Read SP; without SP_H the high byte is zero.  */
              return !AVR_HAVE_SPH
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      xop[0] = dest;
      /* Store constant 0 from the fixed zero register.  */
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
4043
4044
4045 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
4046
static const char*
avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  /* Add the displacement to the base register, load, ...  */
  avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
               "ld %0,%b1" , op, plen, -3);

  /* ... and subtract it again if the base register survives this insn
     and was not overwritten by the load itself.  */
  if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
      && !reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);

  return "";
}
4063
/* Output an 8-bit load from memory OP[1] into register OP[0].  */

static const char*
out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* LDS is a 1-word instruction on AVR_TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);

      /* Displacement exceeds the LDD range: adjust Y temporarily.  */
      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          /* X has no displacement addressing: adjust, load, restore.  */
          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
              && !reg_unused_after (insn, XEXP (x, 0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
4128
4129
4130 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4131
static const char*
avr_out_movhi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  /* DEST overlaps the base: buffer the low byte in tmp so the second
     load does not clobber the address.  */
  if (reg_dest == reg_base)         /* R = (R) */
    return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "mov %A0,__tmp_reg__", op, plen, -3);

  avr_asm_len ("ld %A0,%1+" CR_TAB
               "ld %B0,%1", op, plen, -2);

  /* Undo the post-increment if the base register survives this insn.  */
  if (!reg_unused_after (insn, base))
    avr_asm_len (TINY_SBIW (%E1, %F1, 1), op, plen, 2);

  return "";
}
4155
4156
4157 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4158
static const char*
avr_out_movhi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_base == reg_dest)
    {
      /* DEST overlaps the base: buffer the low byte in tmp so the
         second load does not clobber the address.  */
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                          "ld __tmp_reg__,%b1+" CR_TAB
                          "ld %B0,%b1" CR_TAB
                          "mov %A0,__tmp_reg__", op, plen, -5);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                   "ld %A0,%b1+" CR_TAB
                   "ld %B0,%b1", op, plen, -4);

      /* Undo displacement + post-increment if the base survives.  */
      if (!reg_unused_after (insn, XEXP (base, 0)))
        avr_asm_len (TINY_SBIW (%I1, %J1, %o1+1), op, plen, 2);

      return "";
    }
}
4188
4189
4190 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4191
static const char*
avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  int mem_volatile_p = 0;
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  mem_volatile_p = MEM_VOLATILE_P (src);

  /* The pre-decremented register must not be part of DEST.  */
  if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
    fatal_insn ("incorrect insn:", insn);

  /* Non-volatile: read high byte first via two pre-decrements.  */
  if (!mem_volatile_p)
    return avr_asm_len ("ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -2);

  /* Volatile: step the pointer back by 2, read low then high byte,
     and leave the pointer at the pre-decremented position.  */
  return avr_asm_len (TINY_SBIW (%I1, %J1, 2) CR_TAB
                      "ld %A0,%p1+" CR_TAB
                      "ld %B0,%p1" CR_TAB
                      TINY_SBIW (%I1, %J1, 1), op, plen, -6);
}
4216
4217
/* Output a 16-bit load from memory OP[1] into register OP[0].  */

static const char*
out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_no_disp_tiny (insn, op, plen);

      /* DEST overlaps the base register: buffer low byte in tmp.  */
      if (reg_dest == reg_base)         /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1" CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD with displacement: post-increment and undo.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_disp_tiny (insn, op, plen);

      /* Displacement exceeds the LDD range: adjust Y temporarily.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y" CR_TAB
                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        {
          if (reg_base == reg_dest)
            return avr_asm_len ("adiw r26,%o1" CR_TAB
                                "ld __tmp_reg__,X+" CR_TAB
                                "ld %B0,X" CR_TAB
                                "mov %A0,__tmp_reg__", op, plen, -4);

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X", op, plen, -3);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+1", op, plen, 1);

          return "";
        }

      /* Base with displacement: plain LDD; buffer the low byte in tmp
         when DEST overlaps the base register.  */
      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);

      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile: read low byte first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ld %A0,%p1" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is 1 word per byte on AVR_TINY, 2 otherwise.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4353
/* Output a 32-bit load (R)-addressing for AVR_TINY, which lacks
   ADIW / SBIW / LDD.  */

static const char*
avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_dest == reg_base)
    {
      /* "ld r26,-X" is undefined */
      /* DEST overlaps the pointer: read top-down and buffer byte B.  */
      return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
                      "ld %D0,%1" CR_TAB
                      "ld %C0,-%1" CR_TAB
                      "ld __tmp_reg__,-%1" CR_TAB
                      TINY_SBIW (%E1, %F1, 1) CR_TAB
                      "ld %A0,%1" CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* DEST's upper word overlaps the pointer: buffer byte C.  */
      return *l = 5, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld __tmp_reg__,%1+" CR_TAB
                      "ld %D0,%1" CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, base))
    {
      return *l = 4, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld %C0,%1+" CR_TAB
                      "ld %D0,%1");
    }
  else
    {
      /* Base survives: undo the three post-increments.  */
      return *l = 6, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld %C0,%1+" CR_TAB
                      "ld %D0,%1" CR_TAB
                      TINY_SBIW (%E1, %F1, 3));
    }
}
4398
4399
/* Output a 32-bit load (R+d)-addressing for AVR_TINY, which lacks
   ADIW / SBIW / LDD.  */

static const char*
avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_dest == reg_base)
    {
      /* "ld r26,-X" is undefined */
      /* DEST overlaps the pointer: read top-down and buffer byte B.  */
      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
                      "ld %D0,%b1" CR_TAB
                      "ld %C0,-%b1" CR_TAB
                      "ld __tmp_reg__,-%b1" CR_TAB
                      TINY_SBIW (%I1, %J1, 1) CR_TAB
                      "ld %A0,%b1" CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* DEST's upper word overlaps the pointer: buffer byte C.  */
      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld __tmp_reg__,%b1+" CR_TAB
                      "ld %D0,%b1" CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, XEXP (base, 0)))
    {
      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld %C0,%b1+" CR_TAB
                      "ld %D0,%b1");
    }
  else
    {
      /* Base survives: undo displacement plus the three increments.  */
      return *l = 8, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld %C0,%b1+" CR_TAB
                      "ld %D0,%b1" CR_TAB
                      TINY_SBIW (%I1, %J1, %o1+3));
    }
}
4447
/* Output a 32-bit load from memory OP[1] into register OP[0].
   If L != NULL, store the instruction count there.  */

static const char*
out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            /* DEST is X itself: read top-down, buffering r27 in tmp.  */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld r29,X" CR_TAB
                          "ld r28,-X" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1" CR_TAB
                          "ld r26,X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* DEST's upper word overlaps X: buffer byte C.  */
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          /* Y or Z base: LDD with displacement is available.  */
          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);

      /* Displacement exceeds the LDD range: adjust Y temporarily.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld r29,X" CR_TAB
                      "ld r28,-X" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1" CR_TAB
                      "ld r26,X" CR_TAB
                      "mov r27,__tmp_reg__");
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB
                    "ld r24,X+" CR_TAB
                    "ld r25,X+" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+" CR_TAB
                  "ld %B0,X+" CR_TAB
                  "ld %C0,X+" CR_TAB
                  "ld %D0,X" CR_TAB
                  "sbiw r26,%o1+3");
        }
      /* Y or Z base: plain LDD, with tmp buffering on overlap.  */
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          *l = 4;
          return ("in %A0,%i1" CR_TAB
                  "in %B0,%i1+1" CR_TAB
                  "in %C0,%i1+2" CR_TAB
                  "in %D0,%i1+3");
        }
      else
        {
          /* LDS is 1 word per byte on AVR_TINY, 2 otherwise.  */
          *l = AVR_TINY ? 4 : 8;
          return ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");
        }
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4628
/* AVR_TINY: output a 32-bit store from register SRC = op[1] to memory
   DEST = op[0] whose address is a plain pointer register (no
   displacement).  INSN is the insn being output; *L receives the number
   of output instructions.  Returns the asm template string.  */

static const char*
avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source overlaps the pointer register.  */
      /* "ld r26,-X" is undefined */
      if (reg_unused_after (insn, base))
        {
          /* Pointer dies here: store %A1 while the pointer is still
             intact, buffer %B1 in __tmp_reg__ before it is clobbered,
             then advance and store the remaining bytes.  */
          return *l = 7, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1");
        }
      else
        {
          /* Pointer stays live afterwards: same sequence, then step the
             pointer back.  NOTE(review): the three post-increments leave
             the pointer at base+4, so SBIW 3 restores base+1, not base
             -- verify against the insn's expectations.  */
          return *l = 9, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1" CR_TAB
                          TINY_SBIW (%E0, %F0, 3));
        }
    }
  else if (reg_base == reg_src + 2)
    {
      /* %C1/%D1 alias the pointer: park them in __zero_reg__ and
         __tmp_reg__ before the pointer is advanced past them, and clear
         __zero_reg__ again afterwards.  */
      if (reg_unused_after (insn, base))
        return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__");
      else
        return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__" CR_TAB
                        TINY_SBIW (%E0, %F0, 3));
    }

  /* No overlap: straight ascending store, then restore the pointer
     (last store has no post-increment, hence -3).  */
  return *l = 6, ("st %0+,%A1" CR_TAB
                  "st %0+,%B1" CR_TAB
                  "st %0+,%C1" CR_TAB
                  "st %0,%D1" CR_TAB
                  TINY_SBIW (%E0, %F0, 3));
}
4688
/* AVR_TINY: output a 32-bit store to a reg+displacement destination.
   OP[0] is the memory destination (address = pointer reg + %o0),
   OP[1] the source register; *L receives the instruction count.
   Returns the asm template string.  */

static const char*
avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src =true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source overlaps the pointer: buffer the low word in
         __tmp_reg__/__zero_reg__ before adding the displacement.
         NOTE(review): these templates reference operand 2 (%A2 etc.),
         which implies a three-operand insn pattern -- confirm against
         the caller/insn definition.  */
      *l = 11;
      return ("mov __tmp_reg__,%A2" CR_TAB
              "mov __zero_reg__,%B2" CR_TAB
              TINY_ADIW (%I0, %J0, %o0) CR_TAB
              "st %b0+,__tmp_reg__" CR_TAB
              "st %b0+,__zero_reg__" CR_TAB
              "st %b0+,%C2" CR_TAB
              "st %b0,%D2" CR_TAB
              "clr __zero_reg__" CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  else if (reg_src == reg_base - 2)
    {
      /* High word of the source aliases the pointer: save %C2/%D2
         first, restore __zero_reg__ afterwards.
         NOTE(review): "st %b0+,%A0" / "st %b0+,%B0" store bytes of
         operand 0 (the MEM) -- this looks like it should reference the
         source operand instead; verify.  */
      *l = 11;
      return ("mov __tmp_reg__,%C2" CR_TAB
              "mov __zero_reg__,%D2" CR_TAB
              TINY_ADIW (%I0, %J0, %o0) CR_TAB
              "st %b0+,%A0" CR_TAB
              "st %b0+,%B0" CR_TAB
              "st %b0+,__tmp_reg__" CR_TAB
              "st %b0,__zero_reg__" CR_TAB
              "clr __zero_reg__" CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  /* No overlap: add the displacement, store ascending, restore the
     pointer register.  */
  *l = 8;
  return (TINY_ADIW (%I0, %J0, %o0) CR_TAB
          "st %b0+,%A1" CR_TAB
          "st %b0+,%B1" CR_TAB
          "st %b0+,%C1" CR_TAB
          "st %b0,%D1" CR_TAB
          TINY_SBIW (%I0, %J0, %o0+3));
}
4732
/* Output a 32-bit (SImode) store from register OP[1] to memory OP[0]
   for non-XMEGA cores.  INSN is the insn being output; if L is non-NULL
   it receives the number of output instructions.  Returns the asm
   template.  Dispatches on the address form of the destination:
   constant address, plain register, reg+displacement, pre-decrement,
   post-increment.  */

static const char*
out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  /* Caller not interested in the length: count into a scratch.  */
  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          /* Address lies in the I/O range: use OUT.  */
          return *l=4,("out %i0, %A1" CR_TAB
                       "out %i0+1,%B1" CR_TAB
                       "out %i0+2,%C1" CR_TAB
                       "out %i0+3,%D1");
        }
      else
        {
          /* Plain STS: 1 word each on AVR_TINY, 2 words on classic
             cores, hence 4 vs. 8.  */
          *l = AVR_TINY ? 4 : 8;
          return ("sts %m0,%A1" CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB
                  "sts %m0+3,%D1");
        }
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* Source is X itself.  */
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29" CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* %C1/%D1 overlap X: buffer them in __zero_reg__ and
                 __tmp_reg__, and clear __zero_reg__ afterwards.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__" CR_TAB
                              "sbiw r26,3");
            }
          /* No overlap: ascending store, restore X.  */
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1" CR_TAB
                        "sbiw r26,3");
        }
      else
        /* Y or Z base: STD takes the displacement directly.  */
        return *l=4, ("st %0,%A1" CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_disp_tiny (op, l);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's 0..63 range; only Y supports the
             adjust/store/restore trick used below.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            /* Still reachable after a single ADIW of at most 63.  */
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB
                            "sbiw r28,%o0-60");

          /* Large displacement: full 16-bit add/subtract of Y.  */
          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source is X: buffer it in __tmp_reg__/__zero_reg__
                 before adding the displacement; r28/r29 hold the high
                 word (see the reg_src == REG_X - 2 invariant).  */
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "st X+,r28" CR_TAB
                      "st X,r29" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              /* High word of the source is X: buffer it first.  */
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,r24" CR_TAB
                      "st X+,r25" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          /* No overlap: add displacement, store, restore X.  */
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1" CR_TAB
                  "st X+,%B1" CR_TAB
                  "st X+,%C1" CR_TAB
                  "st X,%D1" CR_TAB
                  "sbiw r26,%o0+3");
        }
      /* Y/Z with small displacement: plain STD.  */
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: store bytes high to low.  */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4908
4909 const char *
4910 output_movsisf (rtx_insn *insn, rtx operands[], int *l)
4911 {
4912 int dummy;
4913 rtx dest = operands[0];
4914 rtx src = operands[1];
4915 int *real_l = l;
4916
4917 if (avr_mem_flash_p (src)
4918 || avr_mem_flash_p (dest))
4919 {
4920 return avr_out_lpm (insn, operands, real_l);
4921 }
4922
4923 if (!l)
4924 l = &dummy;
4925
4926 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
4927
4928 if (REG_P (dest))
4929 {
4930 if (REG_P (src)) /* mov r,r */
4931 {
4932 if (true_regnum (dest) > true_regnum (src))
4933 {
4934 if (AVR_HAVE_MOVW)
4935 {
4936 *l = 2;
4937 return ("movw %C0,%C1" CR_TAB
4938 "movw %A0,%A1");
4939 }
4940 *l = 4;
4941 return ("mov %D0,%D1" CR_TAB
4942 "mov %C0,%C1" CR_TAB
4943 "mov %B0,%B1" CR_TAB
4944 "mov %A0,%A1");
4945 }
4946 else
4947 {
4948 if (AVR_HAVE_MOVW)
4949 {
4950 *l = 2;
4951 return ("movw %A0,%A1" CR_TAB
4952 "movw %C0,%C1");
4953 }
4954 *l = 4;
4955 return ("mov %A0,%A1" CR_TAB
4956 "mov %B0,%B1" CR_TAB
4957 "mov %C0,%C1" CR_TAB
4958 "mov %D0,%D1");
4959 }
4960 }
4961 else if (CONSTANT_P (src))
4962 {
4963 return output_reload_insisf (operands, NULL_RTX, real_l);
4964 }
4965 else if (MEM_P (src))
4966 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
4967 }
4968 else if (MEM_P (dest))
4969 {
4970 const char *templ;
4971
4972 if (src == CONST0_RTX (GET_MODE (dest)))
4973 operands[1] = zero_reg_rtx;
4974
4975 templ = out_movsi_mr_r (insn, operands, real_l);
4976
4977 if (!real_l)
4978 output_asm_insn (templ, operands);
4979
4980 operands[1] = src;
4981 return "";
4982 }
4983 fatal_insn ("invalid insn:", insn);
4984 return "";
4985 }
4986
4987
/* Handle loads of 24-bit types from memory to register. */

/* AVR_TINY: load a PSImode value from a plain pointer-register address.
   OP[0] is the destination register, OP[1] the memory source.  Code is
   emitted (or only counted into *PLEN) via avr_asm_len; returns "".  */

static const char*
avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the pointer: load high-to-low so the
         pointer is consumed last, buffering the middle byte in
         __tmp_reg__.  */
      return avr_asm_len (TINY_ADIW (%E1, %F1, 2) CR_TAB
                          "ld %C0,%1" CR_TAB
                          "ld __tmp_reg__,-%1" CR_TAB
                          TINY_SBIW (%E1, %F1, 1) CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__", op, plen, -8);
    }
  else
    {
      avr_asm_len ("ld %A0,%1+" CR_TAB
                   "ld %B0,%1+" CR_TAB
                   "ld %C0,%1", op, plen, -3);

      /* Restore the pointer unless %C0 just overwrote its low byte
         (reg_dest == reg_base - 2) or the pointer is dead here.  */
      if (reg_dest != reg_base - 2
          && !reg_unused_after (insn, base))
        {
          avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
        }
      return "";
    }
}
5022
5023 static const char*
5024 avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
5025 {
5026 rtx dest = op[0];
5027 rtx src = op[1];
5028 rtx base = XEXP (src, 0);
5029 int reg_dest = true_regnum (dest);
5030 int reg_base = true_regnum (base);
5031
5032 reg_base = true_regnum (XEXP (base, 0));
5033 if (reg_base == reg_dest)
5034 {
5035 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
5036 "ld %C0,%b1" CR_TAB
5037 "ld __tmp_reg__,-%b1" CR_TAB
5038 TINY_SBIW (%I1, %J1, 1) CR_TAB
5039 "ld %A0,%b1" CR_TAB
5040 "mov %B0,__tmp_reg__", op, plen, -8);
5041 }
5042 else
5043 {
5044 avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
5045 "ld %A0,%b1+" CR_TAB
5046 "ld %B0,%b1+" CR_TAB
5047 "ld %C0,%b1", op, plen, -5);
5048
5049 if (reg_dest != reg_base - 2
5050 && !reg_unused_after (insn, XEXP (base, 0)))
5051 avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
5052
5053 return "";
5054 }
5055 }
5056
/* Load a 24-bit (PSImode) value from memory OP[1] into register OP[0].
   INSN is the insn being output; code is emitted (or only counted into
   *PLEN) via avr_asm_len.  Dispatches on the source address form.  */

static const char*
avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* Destination is X itself: load high-to-low, buffering the
               middle byte so X survives until the last load.  */
            /* "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2" CR_TAB
                                "ld r28,X" CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1" CR_TAB
                                "ld r26,X" CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Restore X unless %C0 overwrote r26 (reg_dest ==
                 REG_X - 2) or X is dead after this insn.  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Destination overlaps the pointer: load high-to-low,
               buffering the middle byte in __tmp_reg__.  */
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1" CR_TAB
                                "ld %A0,%1" CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld %A0,%1" CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_load_psi_reg_disp_tiny (insn, op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD's 0..63 range; only Y supports the
             adjust/load/restore trick used below.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          /* Large displacement: full 16-bit add/subtract of Y.  */
          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %A0,Y" CR_TAB
                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB
                                  "ld r28,X" CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1" CR_TAB
                                  "ld r26,X" CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X+" CR_TAB
                       "ld %C0,X", op, plen, -4);

          /* Restore X unless the destination overlaps it or X is dead.
             NOTE(review): presumably REG_W == REG_X - 2 (r24), matching
             the overlap check in the sibling routines -- confirm.  */
          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        /* Destination overlaps the pointer: load high-to-low.  */
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1" CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: load bytes high to low.  */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is 1 word on AVR_TINY, 2 words on classic cores.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("lds %A0,%m1" CR_TAB
                          "lds %B0,%m1+1" CR_TAB
                          "lds %C0,%m1+2", op, plen , -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
5194
5195
5196 static const char*
5197 avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
5198 {
5199 rtx dest = op[0];
5200 rtx src = op[1];
5201 rtx base = XEXP (dest, 0);
5202 int reg_base = true_regnum (base);
5203 int reg_src = true_regnum (src);
5204
5205 if (reg_base == reg_src)
5206 {
5207 avr_asm_len ("st %0,%A1" CR_TAB
5208 "mov __tmp_reg__,%B1" CR_TAB
5209 TINY_ADIW (%E0, %F0, 1) CR_TAB /* st X+, r27 is undefined */
5210 "st %0+,__tmp_reg__" CR_TAB
5211 "st %0,%C1", op, plen, -6);
5212
5213 }
5214 else if (reg_src == reg_base - 2)
5215 {
5216 avr_asm_len ("st %0,%A1" CR_TAB
5217 "mov __tmp_reg__,%C1" CR_TAB
5218 TINY_ADIW (%E0, %F0, 1) CR_TAB
5219 "st %0+,%B1" CR_TAB
5220 "st %0,__tmp_reg__", op, plen, 6);
5221 }
5222 else
5223 {
5224 avr_asm_len ("st %0+,%A1" CR_TAB
5225 "st %0+,%B1" CR_TAB
5226 "st %0,%C1", op, plen, -3);
5227 }
5228
5229 if (!reg_unused_after (insn, base))
5230 avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
5231
5232 return "";
5233 }
5234
/* AVR_TINY: store a 24-bit (PSImode) value from register OP[1] to a
   reg+displacement destination address OP[0].  Code is emitted (or only
   counted into *PLEN) via avr_asm_len; returns "".  */

static const char*
avr_out_store_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    /* Source overlaps the pointer: buffer %A1/%B1 before adding the
       displacement, and clear __zero_reg__ again afterwards.  */
    avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                 "mov __zero_reg__,%B1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,__tmp_reg__" CR_TAB
                 "st %b0+,__zero_reg__" CR_TAB
                 "st %b0,%C1" CR_TAB
                 "clr __zero_reg__", op, plen, -8);
  else if (reg_src == reg_base - 2)
    /* %B1/%C1 alias the pointer: save %C1 up front.  */
    avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,%A1" CR_TAB
                 "st %b0+,%B1" CR_TAB
                 "st %b0,__tmp_reg__", op, plen, -6);
  else
    avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,%A1" CR_TAB
                 "st %b0+,%B1" CR_TAB
                 "st %b0,%C1", op, plen, -5);

  /* Restore the pointer register if it is still live.  */
  if (!reg_unused_after (insn, XEXP (base, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0+2), op, plen, 2);

  return "";
}
5269
/* Handle store of 24-bit type from register or zero to memory. */

/* OP[0] is the memory destination, OP[1] the source register (or
   __zero_reg__ for a zero store).  INSN is the insn being output; code
   is emitted (or only counted into *PLEN) via avr_asm_len.  Dispatches
   on the destination address form.  */

static const char*
avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* STS is 1 word on AVR_TINY, 2 words on classic cores.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("sts %m0,%A1" CR_TAB
                          "sts %m0+1,%B1" CR_TAB
                          "sts %m0+2,%C1", op, plen, -n_words);
    }

  if (reg_base > 0) /* (r) */
    {
      if (AVR_TINY)
        return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X) /* (R26) */
        {
          /* On non-TINY the register allocator guarantees that source
             and X do not overlap here.  */
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1" CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          /* Restore X if it is still live.  */
          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        /* Y or Z base: STD takes the displacement directly.  */
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_store_psi_reg_disp_tiny (insn, op, plen);

      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's 0..63 range; only Y supports the
             adjust/store/restore trick used below.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1" CR_TAB
                                "std Y+62,%B1" CR_TAB
                                "std Y+63,%C1" CR_TAB
                                "sbiw r28,%o0-61", op, plen, -5);

          /* Large displacement: full 16-bit add/subtract of Y.  */
          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1" CR_TAB
                              "std Y+1,%B1" CR_TAB
                              "std Y+2,%C1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X+,%B1" CR_TAB
                       "st X,%C1", op, plen, -4);

          /* Restore X if it is still live.  */
          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: store bytes high to low.  */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
5372
5373
5374 /* Move around 24-bit stuff. */
5375
5376 const char *
5377 avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
5378 {
5379 rtx dest = op[0];
5380 rtx src = op[1];
5381
5382 if (avr_mem_flash_p (src)
5383 || avr_mem_flash_p (dest))
5384 {
5385 return avr_out_lpm (insn, op, plen);
5386 }
5387
5388 if (register_operand (dest, VOIDmode))
5389 {
5390 if (register_operand (src, VOIDmode)) /* mov r,r */
5391 {
5392 if (true_regnum (dest) > true_regnum (src))
5393 {
5394 avr_asm_len ("mov %C0,%C1", op, plen, -1);
5395
5396 if (AVR_HAVE_MOVW)
5397 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
5398 else
5399 return avr_asm_len ("mov %B0,%B1" CR_TAB
5400 "mov %A0,%A1", op, plen, 2);
5401 }
5402 else
5403 {
5404 if (AVR_HAVE_MOVW)
5405 avr_asm_len ("movw %A0,%A1", op, plen, -1);
5406 else
5407 avr_asm_len ("mov %A0,%A1" CR_TAB
5408 "mov %B0,%B1", op, plen, -2);
5409
5410 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
5411 }
5412 }
5413 else if (CONSTANT_P (src))
5414 {
5415 return avr_out_reload_inpsi (op, NULL_RTX, plen);
5416 }
5417 else if (MEM_P (src))
5418 return avr_out_load_psi (insn, op, plen); /* mov r,m */
5419 }
5420 else if (MEM_P (dest))
5421 {
5422 rtx xop[2];
5423
5424 xop[0] = dest;
5425 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
5426
5427 return avr_out_store_psi (insn, xop, plen);
5428 }
5429
5430 fatal_insn ("invalid insn:", insn);
5431 return "";
5432 }
5433
5434 static const char*
5435 avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
5436 {
5437 rtx dest = op[0];
5438 rtx src = op[1];
5439 rtx x = XEXP (dest, 0);
5440
5441 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
5442 {
5443 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
5444 TINY_ADIW (%I0, %J0, %o0) CR_TAB
5445 "st %b0,__tmp_reg__", op, plen, -4);
5446 }
5447 else
5448 {
5449 avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
5450 "st %b0,%1", op, plen, -3);
5451 }
5452
5453 if (!reg_unused_after (insn, XEXP (x, 0)))
5454 avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);
5455
5456 return "";
5457 }
5458
/* Store the single byte OP[1] to memory OP[0].  INSN is the insn being
   output; code is emitted (or only counted into *PLEN) via avr_asm_len.
   Dispatches on the destination address form: constant address,
   reg+displacement, or anything ST handles directly.  */

static const char*
out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* STS is 1 word on AVR_TINY, 2 words on classic cores; I/O
         addresses use OUT instead.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);

      /* For QImode this is equivalent to disp > MAX_LD_OFFSET, i.e.
         the displacement does not fit STD's 0..63 range.  */
      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          /* Only Y supports the adjust/store/restore trick.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          /* Large displacement: full 16-bit add/subtract of Y.  */
          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* X has no displacement form: adjust X, store, restore.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              /* Source is part of X: buffer it before the ADIW.  */
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x, 0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
5525
5526
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP. */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* OUT for the I/O range, STS otherwise; low byte first.  */
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0,%A1" CR_TAB
                       "out %i0+1,%B1", op, plen, -2)

        : avr_asm_len ("sts %m0,%A1" CR_TAB
                       "sts %m0+1,%B1", op, plen, -4);
    }

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X unless it dies in this insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's 0..63 range; only Y supports
             the adjust/store/restore trick used below.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      /* When the source is X itself, buffer it in
         __tmp_reg__/__zero_reg__ before adjusting X.  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      /* Non-volatile: the hardware pre-decrement order (high byte
         first) is fine.  Volatile: write the low byte first by
         stepping the pointer back by hand.  */
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      /* Post-increment already writes low byte first.  */
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5639
/* AVR_TINY: store HImode register OP[1] to memory OP[0] addressed by a
   plain pointer register.  Non-XMEGA, so volatile accesses must write
   the high byte first.  Code is emitted (or only counted into *PLEN)
   via avr_asm_len; returns "".  */

static const char*
avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (reg_base == reg_src)
    {
      /* Source overlaps the pointer: buffer %B1 in __tmp_reg__.  The
         cheap low-byte-first form is only allowed when the access is
         not volatile and the pointer dies here.  */
      return !mem_volatile_p && reg_unused_after (insn, src)
        ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
                       "st %0,%A1" CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__", op, plen, -5)
        : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__" CR_TAB
                       TINY_SBIW (%E0, %F0, 1) CR_TAB
                       "st %0, %A1", op, plen, -7);
    }

  /* No overlap: post-increment form when allowed, otherwise high byte
     first via pre-decrement.  */
  return !mem_volatile_p && reg_unused_after (insn, base)
    ? avr_asm_len ("st %0+,%A1" CR_TAB
                   "st %0,%B1", op, plen, -2)
    : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
                   "st %0,%B1" CR_TAB
                   "st -%0,%A1", op, plen, -4);
}
5671
/* AVR_TINY: store HImode register OP[1] to the reg+displacement
   destination address OP[0], high byte first (non-XMEGA order).  Code
   is emitted (or only counted into *PLEN) via avr_asm_len;
   returns "".  */

static const char*
avr_out_movhi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    /* Source overlaps the pointer: buffer both bytes before the
       displacement add, and clear __zero_reg__ again afterwards.  */
    avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                 "mov __zero_reg__,%B1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                 "st %b0,__zero_reg__" CR_TAB
                 "st -%b0,__tmp_reg__" CR_TAB
                 "clr __zero_reg__", op, plen, -7)
  else
    avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                 "st %b0,%B1" CR_TAB
                 "st -%b0,%A1", op, plen, -4);

  /* Restore the pointer register if it is still live.  */
  if (!reg_unused_after (insn, XEXP (base, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
5698
/* AVR_TINY helper for out_movhi_mr_r: volatile HImode store through a
   post-increment pointer.  Writes the high byte (address+1) first via
   ADIW/pre-decrement, then advances the pointer past both bytes.  */

static const char*
avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
{
  return avr_asm_len (TINY_ADIW (%I0, %J0, 1) CR_TAB
                      "st %p0,%B1" CR_TAB
                      "st -%p0,%A1" CR_TAB
                      TINY_ADIW (%I0, %J0, 2), op, plen, -6);
}
5707
/* Store HImode register OP[1] to memory OP[0].  INSN is the insn being
   output; code is emitted (or only counted into *PLEN) via avr_asm_len.
   Non-XMEGA cores write the high byte first; XMEGA is delegated to
   avr_out_movhi_mr_r_xmega which writes low byte first.  */

static const char*
out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP. */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* STS is 1 word on AVR_TINY, 2 words on classic cores; high
         byte first in both forms.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                       "out %i0,%A1", op, plen, -2)

        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                       "sts %m0,%A1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      /* Cheap ascending form when order does not matter and X dies,
         otherwise high byte first via pre-decrement.  */
      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_disp_tiny (insn, op, plen);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's 0..63 range; only Y supports
             the adjust/store/restore trick used below.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      /* When the source is X itself, buffer it in
         __tmp_reg__/__zero_reg__ before adjusting X.  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      /* Pre-decrement naturally writes the high byte first.  */
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      if (AVR_TINY)
        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);

      /* Volatile: write the high byte first, then advance the pointer
         by hand.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5840
5841 /* Return 1 if frame pointer for current function required. */
5842
5843 static bool
5844 avr_frame_pointer_required_p (void)
5845 {
5846 return (cfun->calls_alloca
5847 || cfun->calls_setjmp
5848 || cfun->has_nonlocal_label
5849 || crtl->args.info.nregs == 0
5850 || get_frame_size () > 0);
5851 }
5852
5853 /* Returns the condition of compare insn INSN, or UNKNOWN. */
5854
5855 static RTX_CODE
5856 compare_condition (rtx_insn *insn)
5857 {
5858 rtx_insn *next = next_real_insn (insn);
5859
5860 if (next && JUMP_P (next))
5861 {
5862 rtx pat = PATTERN (next);
5863 rtx src = SET_SRC (pat);
5864
5865 if (IF_THEN_ELSE == GET_CODE (src))
5866 return GET_CODE (XEXP (src, 0));
5867 }
5868
5869 return UNKNOWN;
5870 }
5871
5872
5873 /* Returns true iff INSN is a tst insn that only tests the sign. */
5874
5875 static bool
5876 compare_sign_p (rtx_insn *insn)
5877 {
5878 RTX_CODE cond = compare_condition (insn);
5879 return (cond == GE || cond == LT);
5880 }
5881
5882
5883 /* Returns true iff the next insn is a JUMP_INSN with a condition
5884 that needs to be swapped (GT, GTU, LE, LEU). */
5885
5886 static bool
5887 compare_diff_p (rtx_insn *insn)
5888 {
5889 RTX_CODE cond = compare_condition (insn);
5890 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5891 }
5892
5893 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
5894
5895 static bool
5896 compare_eq_p (rtx_insn *insn)
5897 {
5898 RTX_CODE cond = compare_condition (insn);
5899 return (cond == EQ || cond == NE);
5900 }
5901
5902
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.  */

const char*
avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against.  */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  machine_mode mode;

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.
     Used to avoid reloading the same immediate into the scratch.  */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);
    }

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* DEC the low byte first: R == 1 iff R-1 == 0.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* R == -1 iff all bytes AND to 0xff; COM turns 0xff into 0.  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  /* Comparisons == -1 and != -1 of a d-register that's used after the
     comparison.  (If it's unused after we use CPI / SBCI or ADIW sequence
     from below.)  Instead of CPI Rlo,-1 / LDI Rx,-1 / CPC Rhi,Rx we can
     use CPI Rlo,-1 / CPC Rhi,Rlo which is 1 instruction shorter:
     If CPI is true then Rlo contains -1 and we can use Rlo instead of Rx
     when CPC'ing the high part.  If CPI is false then CPC cannot render
     the result to true.  This also works for the more generic case where
     the constant is of the form 0xabab.  */

  if (n_bytes == 2
      && xval != const0_rtx
      && test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && !reg_unused_after (insn, xreg))
    {
      rtx xlo8 = simplify_gen_subreg (QImode, xval, mode, 0);
      rtx xhi8 = simplify_gen_subreg (QImode, xval, mode, 1);

      /* Both bytes of the constant are equal: the 0xabab trick applies.  */
      if (INTVAL (xlo8) == INTVAL (xhi8))
        {
          xop[0] = xreg;
          xop[1] = xlo8;

          return avr_asm_len ("cpi %A0,%1" CR_TAB
                              "cpc %B0,%A0", xop, plen, 2);
        }
    }

  /* Generic path: compare byte by byte, CP/CPI first, then CPC/SBCI.  */

  for (int i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              /* SBIW handles two bytes at once; skip the next byte.  */
              if (AVR_TINY)
                avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
              else
                avr_asm_len ("sbiw %0,%1", xop, plen, 1);

              i++;
              continue;
            }

          /* Small negative constant and only EQ/NE needed: ADIW of the
             negated value sets Z just the same.  */
          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              return AVR_TINY
                ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
                : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy.  */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* SBCI clobbers the register, hence the liveness check.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
6101
6102
6103 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
6104
6105 const char*
6106 avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
6107 {
6108 rtx xop[3];
6109
6110 xop[0] = gen_rtx_REG (DImode, 18);
6111 xop[1] = op[0];
6112 xop[2] = op[1];
6113
6114 return avr_out_compare (insn, xop, plen);
6115 }
6116
6117 /* Output test instruction for HImode. */
6118
6119 const char*
6120 avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
6121 {
6122 if (compare_sign_p (insn))
6123 {
6124 avr_asm_len ("tst %B0", op, plen, -1);
6125 }
6126 else if (reg_unused_after (insn, op[0])
6127 && compare_eq_p (insn))
6128 {
6129 /* Faster than sbiw if we can clobber the operand. */
6130 avr_asm_len ("or %A0,%B0", op, plen, -1);
6131 }
6132 else
6133 {
6134 avr_out_compare (insn, op, plen);
6135 }
6136
6137 return "";
6138 }
6139
6140
6141 /* Output test instruction for PSImode. */
6142
6143 const char*
6144 avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
6145 {
6146 if (compare_sign_p (insn))
6147 {
6148 avr_asm_len ("tst %C0", op, plen, -1);
6149 }
6150 else if (reg_unused_after (insn, op[0])
6151 && compare_eq_p (insn))
6152 {
6153 /* Faster than sbiw if we can clobber the operand. */
6154 avr_asm_len ("or %A0,%B0" CR_TAB
6155 "or %A0,%C0", op, plen, -2);
6156 }
6157 else
6158 {
6159 avr_out_compare (insn, op, plen);
6160 }
6161
6162 return "";
6163 }
6164
6165
6166 /* Output test instruction for SImode. */
6167
6168 const char*
6169 avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
6170 {
6171 if (compare_sign_p (insn))
6172 {
6173 avr_asm_len ("tst %D0", op, plen, -1);
6174 }
6175 else if (reg_unused_after (insn, op[0])
6176 && compare_eq_p (insn))
6177 {
6178 /* Faster than sbiw if we can clobber the operand. */
6179 avr_asm_len ("or %A0,%B0" CR_TAB
6180 "or %A0,%C0" CR_TAB
6181 "or %A0,%D0", op, plen, -3);
6182 }
6183 else
6184 {
6185 avr_out_compare (insn, op, plen);
6186 }
6187
6188 return "";
6189 }
6190
6191
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.  */

void
out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
                    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A usable scratch register is present iff the insn is a PARALLEL
         (carries a clobber) and operand 3 is a real REG, not SCRATCH.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          /* Use the clobber register as loop counter.  */
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* Constant count > 0: the loop body always executes at least once,
         so no need to jump to the test first.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Shift count lives in memory: fetch it into __tmp_reg__.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          /* Can't clobber the live (or overlapping) count register:
             loop on a copy in __tmp_reg__ instead.  */
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    /* Run-time count may be zero: enter the loop at the decrement/test.  */
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* __zero_reg__ counter holds a single set bit, so shift it right and
     loop while positive; otherwise decrement and loop while non-zero.  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    /* Restore the LD register we borrowed as loop counter.  */
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
6313
6314
/* 8bit shift left ((char)x << i)

   LEN == NULL: output instructions; LEN != NULL: only compute length.
   Constant counts get hand-optimized sequences; anything else falls
   through to the generic loop in out_shift_with_cnt.  */

const char *
ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      /* Always have a place to store the length.  */
      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= 8: result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* d-regs can SWAP nibbles and mask with ANDI.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Bit 0 is rotated into carry, then back into bit 7.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  /* Non-constant count: generic one-bit-per-iteration loop.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
6410
6411
/* 16bit shift left ((short)x << i)

   LEN == NULL: output instructions; LEN != NULL: only compute length.
   Constant counts get hand-optimized sequences, picking shorter code
   when a scratch register or a d-register destination is available.  */

const char *
ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      /* PARALLEL pattern means a scratch clobber operand %3 exists.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      /* Remember caller's LEN (possibly NULL) so the fall-through to
         out_shift_with_cnt sees the original value.  */
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= 16: result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes, then fix up overlap via EOR.  */
              *len = 6;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              /* Shift once, then use the 4-bit swap trick.  */
              *len = 8;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* << 6 implemented as >> 2 of the bytes shifted up by 8.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          /* << 7 == byte move plus >> 1 through carry.  */
          *len = 5;
          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "ror %A0");

        case 8:
          /* Whole-byte move; note the source operand %A1 here.  */
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* MUL by 0x20 == << 5; high result byte lands in r0.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build 0x20 in r1 (__zero_reg__) via SET/BLD, multiply,
                 then restore r1 to zero.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,5" CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* MUL by 0x40 == << 6; %B0 doubles as the multiplier.  */
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small local loop: 6 single-byte shifts of %B0.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0" CR_TAB
                      "dec %A0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* << 14 implemented as >> 2 into the high byte.  */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          /* Only bit 0 survives, rotated into bit 15.  */
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      /* No hand-optimized variant applied: restore caller's LEN so the
         generic path honours the output-vs-length-only contract.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
6667
6668
/* 24-bit shift left

   OP[0] is the destination, OP[1] the source, OP[2] the shift count.
   PLEN == NULL: output instructions; PLEN != NULL: only compute length.  */

const char*
avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shift count >= 24: result is zero.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Order the byte moves so overlapping source bytes are
               read before they are overwritten.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* If %C0 already aliases %A1 the move is redundant.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 survives, rotated into bit 23 via carry.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
6730
6731
/* 32bit shift left ((long)x << i)

   LEN == NULL: output instructions; LEN != NULL: only compute length.  */

const char *
ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      /* Preserve caller's LEN (possibly NULL) for the generic path.  */
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= 32: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Move order chosen so overlapping bytes are read first.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* High word of dest already aliases low word of source.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, rotated into bit 31 via carry.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
6820
/* 8-bit arithmetic shift right ((signed char)x >> i)

   LEN == NULL: output instructions; LEN != NULL: only compute length.  */

const char *
ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Save bit 6, smear the sign via SBC, restore bit 6 as bit 0.  */
          *len = 4;
          return ("bst %0,6" CR_TAB
                  "lsl %0" CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Count >= 8 behaves like >> 7: result is 0 or -1.  */
          /* fall through */

        case 7:
          /* LSL puts the sign into carry; SBC smears it into all bits.  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
6891
6892
/* 16bit arithmetic shift right ((signed short)x >> i)

   LEN == NULL: output instructions; LEN != NULL: only compute length.  */

const char *
ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      /* PARALLEL pattern means a scratch clobber operand %3 exists.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      /* Preserve caller's LEN (possibly NULL) for the generic path.  */
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* >> 6 as << 2 into the byte above, sign via SBC.  */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "rol %B0");

        case 7:
          *len = 4;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* In-place vs. distinct registers use different sequences.  */
            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0" CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "sbrc %A0,7" CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* MULS by 0x20: signed high byte of the product in r1.  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 14:
          *len = 5;
          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Count >= 16 behaves like >> 15: result is 0 or -1.  */
          /* fall through */

        case 15:
          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
7054
7055
/* 24-bit arithmetic shift right

   OP[0] is the destination, OP[1] the source, OP[2] the shift count.
   PLEN == NULL: output instructions; PLEN != NULL: only compute length.  */

const char*
avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Move order chosen so overlapping bytes are read first;
             %C0 becomes the sign extension (0 or -1).  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0" CR_TAB
                                "sbrc %C1,7" CR_TAB
                                "dec %C0" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          /* If %A0 already aliases %C1 the move is redundant.  */
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"  CR_TAB
                              "sbrc %A0,7" CR_TAB
                              "com %B0" CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Count >= 24 behaves like >> 23: result is 0 or -1.  */
          /* fall through */

        case 23:
          /* LSL puts the sign into carry; SBC smears it everywhere.  */
          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
7113
7114
/* 32-bit arithmetic shift right ((signed long)x >> i)

   LEN == NULL: output instructions; LEN != NULL: only compute length.  */

const char *
ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      /* Preserve caller's LEN (possibly NULL) for the generic path.  */
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Move order chosen so overlapping bytes are read first;
               %D0 becomes the sign extension (0 or -1).  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0" CR_TAB
                      "sbrc %C0,7" CR_TAB
                      "dec %D0");
            else
              return ("clr %D0" CR_TAB
                      "sbrc %D1,7" CR_TAB
                      "dec %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Low word of dest already aliases high word of source.  */
            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0" CR_TAB
                            "sbrc %A0,7" CR_TAB
                            "com %D0" CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Count >= 32 behaves like >> 31: result is 0 or -1.  */
          /* fall through */

        case 31:
          /* LSL puts the sign into carry; SBC smears it everywhere.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
7211
/* 8-bit logic shift right ((unsigned char)x >> i)

   LEN == NULL: output instructions; LEN != NULL: only compute length.  */

const char *
lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= 8: result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsr %0";

        case 2:
          *len = 2;
          return ("lsr %0" CR_TAB
                  "lsr %0");
        case 3:
          *len = 3;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 4:
          /* d-regs can SWAP nibbles and mask with ANDI.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len=2;
              return ("swap %0" CR_TAB
                      "andi %0,0x0f");
            }
          *len = 4;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x7");
            }
          *len = 5;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsr %0" CR_TAB
                      "lsr %0" CR_TAB
                      "andi %0,0x3");
            }
          *len = 6;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 7:
          /* Bit 7 is rotated into carry, then back into bit 0.  */
          *len = 3;
          return ("rol %0" CR_TAB
                  "clr %0" CR_TAB
                  "rol %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
7306
/* 16-bit logic shift right ((unsigned short)x >> i)

   Return the assembler template for the shift and, if LEN != NULL, set
   *LEN to its length in instructions.  SCRATCH reflects whether the insn
   pattern provides a "d"-class scratch register as operand 3; LDI_OK
   whether the destination itself accepts immediate operands (R16..R31).
   Cases without a special sequence (or where the special sequence is not
   a size win under -Os) break out to the generic shift loop.  */

const char *
lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      /* A PARALLEL pattern means a scratch register %3 is available.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Count >= 16: result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Swap nibbles of both bytes, then merge with EOR masking.  */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* One plain shift, then the 4-bit nibble-swap trick.  */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsr %B0" CR_TAB
                      "ror %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsr %B0" CR_TAB
                      "ror %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift LEFT by 2 into __tmp_reg__, then move bytes down one:
             net effect is a right shift by 6.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsl %A0" CR_TAB
                  "rol %B0" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "lsl %A0" CR_TAB
                  "rol %B0" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "mov %B0,__tmp_reg__");

        case 7:
          /* Shift left by 1 combined with a byte move: right shift by 7.  */
          *len = 5;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "neg %B0");

        case 8:
          /* Whole-byte shift: move high byte down, clear high byte.  */
          return *len = 2, ("mov %A0,%B1" CR_TAB
                            "clr %B0");

        case 9:
          *len = 3;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0");

        case 10:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 11:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 12:
          /* Byte move, then nibble swap with mask.  */
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "lsr %A0" CR_TAB
                      "andi %A0,0x07");
            }
          /* MUL by 8 shifts left by 3; the wanted bits land in r1.  */
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x08" CR_TAB
                      "mul %B0,%3" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "lsr %A0" CR_TAB
                      "ldi %3,0x07" CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the constant 8 in r1 via T-flag, no scratch needed.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,3" CR_TAB
                      "mul %B0,r1" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x04" CR_TAB
                      "mul %B0,%3" CR_TAB
                      "mov %A0,%r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Compact loop: high byte down, then shift 6 times.  */
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
                      "lsr %A0" CR_TAB
                      "dec %B0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Shift left by 2 through %A0, then drop the high byte.  */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");

        case 15:
          /* Only the MSB survives; rotate it into bit 0.  */
          *len = 4;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
7562
7563
7564 /* 24-bit logic shift right */
7565
7566 const char*
7567 avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
7568 {
7569 int dest = REGNO (op[0]);
7570 int src = REGNO (op[1]);
7571
7572 if (CONST_INT_P (op[2]))
7573 {
7574 if (plen)
7575 *plen = 0;
7576
7577 switch (INTVAL (op[2]))
7578 {
7579 case 8:
7580 if (dest <= src)
7581 return avr_asm_len ("mov %A0,%B1" CR_TAB
7582 "mov %B0,%C1" CR_TAB
7583 "clr %C0", op, plen, 3);
7584 else
7585 return avr_asm_len ("clr %C0" CR_TAB
7586 "mov %B0,%C1" CR_TAB
7587 "mov %A0,%B1", op, plen, 3);
7588
7589 case 16:
7590 if (dest != src + 2)
7591 avr_asm_len ("mov %A0,%C1", op, plen, 1);
7592
7593 return avr_asm_len ("clr %B0" CR_TAB
7594 "clr %C0", op, plen, 2);
7595
7596 default:
7597 if (INTVAL (op[2]) < 24)
7598 break;
7599
7600 /* fall through */
7601
7602 case 23:
7603 return avr_asm_len ("clr %A0" CR_TAB
7604 "sbrc %C0,7" CR_TAB
7605 "inc %A0" CR_TAB
7606 "clr %B0" CR_TAB
7607 "clr %C0", op, plen, 5);
7608 } /* switch */
7609 }
7610
7611 out_shift_with_cnt ("lsr %C0" CR_TAB
7612 "ror %B0" CR_TAB
7613 "ror %A0", insn, op, plen, 3);
7614 return "";
7615 }
7616
7617
/* 32-bit logic shift right ((unsigned int)x >> i)

   Return the assembler template for the shift and, if LEN != NULL, set
   *LEN to its length in instructions.  Whole-byte shift counts and
   count 31 get dedicated sequences; all other constant counts < 32 use
   the generic per-bit loop below.  */

const char *
lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Count >= 32: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            /* One whole byte: the move order depends on the overlap of
               source and destination register ranges.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0");
            else
              return ("clr %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Destination overlapping the source's high word: the data
               is already in place, just clear the high word.  */
            if (reg0 == reg1 + 2)
              return *len = 2, ("clr %C0" CR_TAB
                                "clr %D0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %A0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "clr %D0");
            else
              return *len = 4, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "clr %D0");
          }

        case 24:
          return *len = 4, ("mov %A0,%D1" CR_TAB
                            "clr %B0" CR_TAB
                            "clr %C0" CR_TAB
                            "clr %D0");

        case 31:
          /* Only the sign bit remains: test bit 7 of the high byte and
             materialize 0 or 1 in the low byte.  */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "sbrc %D0,7" CR_TAB
                  "inc %A0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %D0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
7706
7707
/* Output addition of register XOP[0] and compile time constant XOP[2].
   CODE == PLUS: perform addition by using ADD instructions or
   CODE == MINUS: perform addition by using SUB instructions:

      XOP[0] = XOP[0] + XOP[2]

   Or perform addition/subtraction with register XOP[2] depending on CODE:

      XOP[0] = XOP[0] +/- XOP[2]

   If PLEN == NULL, print assembler instructions to perform the operation;
   otherwise, set *PLEN to the length of the instruction sequence (in words)
   printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
   Set *PCC to effect on cc0 according to respective CC_* insn attribute.

   CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
   CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
   If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
   the subtrahend in the original insn, provided it is a compile time constant.
   In all other cases, SIGN is 0.

   If OUT_LABEL is true, print the final 0: label which is needed for
   saturated addition / subtraction.  The only case where OUT_LABEL = false
   is useful is for saturated addition / subtraction performed during
   fixed-point rounding, cf. `avr_out_round'.  */

static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)
{
  /* MODE of the operation.  */
  machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  machine_mode imode = int_mode_for_mode (mode);

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  /* Register operand: plain byte-wise ADD/ADC resp. SUB/SBC chain.  */

  if (REG_P (xop[2]))
    {
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;

      for (int i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

          /* x - x == 0: no saturation handling needed.  */
          if (MINUS == code)
            return;
        }

      goto saturate;
    }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    {
      *pcc = CC_NONE;
      return;
    }

  /* Reduce R -= VAL to R += -VAL so that only the PLUS machinery
     below (with SUB flavored mnemonics) is needed.  */

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (int i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_CZN;
                }

              /* Both bytes of the word were consumed.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Zero byte: just propagate the carry if needed.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          /* Adding +/-1 to the topmost byte only: INC/DEC is shorter.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          *pcc = CC_CLOBBER;
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

 saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where A is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.


     unsigned
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]


     signed
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  V == 1  |  const, reg  |     s+    |  [1s]
     +  as  a - (-b)  |  sub  |  V == 1  |  const       |     s+    |  [2s]
     -  as  a - b     |  sub  |  V == 1  |  const, reg  |     s-    |  [3s]
     -  as  a + (-b)  |  add  |  V == 1  |  const       |     s-    |  [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.
  */

  /* op[0]/op[1]: the two most significant destination bytes.  */

  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB
                         "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  if (out_label)
    avr_asm_len ("0:", op, plen, 0);
}
8180
8181
8182 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
8183 is ont a compile-time constant:
8184
8185 XOP[0] = XOP[0] +/- XOP[2]
8186
8187 This is a helper for the function below. The only insns that need this
8188 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
8189
8190 static const char*
8191 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
8192 {
8193 machine_mode mode = GET_MODE (xop[0]);
8194
8195 /* Only pointer modes want to add symbols. */
8196
8197 gcc_assert (mode == HImode || mode == PSImode);
8198
8199 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
8200
8201 avr_asm_len (PLUS == code
8202 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
8203 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
8204 xop, plen, -2);
8205
8206 if (PSImode == mode)
8207 avr_asm_len (PLUS == code
8208 ? "sbci %C0,hlo8(-(%2))"
8209 : "sbci %C0,hlo8(%2)", xop, plen, 1);
8210 return "";
8211 }
8212
8213
/* Prepare operands of addition/subtraction to be used with avr_out_plus_1.

   INSN is a single_set insn or an insn pattern with a binary operation as
   SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.

   XOP are the operands of INSN.  In the case of 64-bit operations with
   constant XOP[] has just one element:  The summand/subtrahend in XOP[0].
   The non-saturating insns up to 32 bits may or may not supply a "d" class
   scratch as XOP[3].

   If PLEN == NULL output the instructions.
   If PLEN != NULL set *PLEN to the length of the sequence in words.

   PCC is a pointer to store the instructions' effect on cc0.
   PCC may be NULL.

   PLEN and PCC default to NULL.

   OUT_LABEL defaults to TRUE.  For a description, see AVR_OUT_PLUS_1.

   Return ""  */

const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
{
  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  rtx xdest = SET_DEST (xpattern);
  machine_mode mode = GET_MODE (xdest);
  machine_mode imode = int_mode_for_mode (mode);
  int n_bytes = GET_MODE_SIZE (mode);
  /* Saturation flavor is encoded in the SET_SRC's rtx code.  */
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  /* Register operand: no PLUS/MINUS length comparison needed, emit
     directly (SIGN = 0 as the other operand is not a constant).  */

  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
      return "";
    }

  if (8 == n_bytes)
    {
      /* 64-bit operations work on the fixed accumulator registers.  */
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      /* A symbolic constant: handled by subtracting lo8/hi8 parts.  */
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence.  Dry-run both the SUB-based and the
     ADD-based rendering, then emit (or report) the shorter of the two.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

  if (plen)
    {
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);

  return "";
}
8320
8321
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
   time constant XOP[2]:

      XOP[0] = XOP[0] <op> XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform the
   operation; otherwise, set *PLEN to the length of the instruction sequence
   (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
   register or SCRATCH if no clobber register is needed for the operation.
   INSN is an INSN_P or a pattern of an insn.  */

const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register, SCRATCH or NULL_RTX.
     op[3]: 8-bit register containing 0xff or NULL_RTX */
  rtx op[4];

  op[2] = QImode == mode ? NULL_RTX : xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (int i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = popcount_hwi (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          /* OR with 0x00 is a no-op.  */
          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* Single bit: set it via the T-flag; SET is only emitted
                 if T is not already known to be 1.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* OR with 0xff: load 0xff, reusing a register already
                 known to hold it (op[3]) when possible.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          /* AND with 0xff is a no-op.  */
          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Clear the single zero bit via the T-flag.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          /* XOR with 0x00 is a no-op.  */
          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Flipping the sign bit equals subtracting 0x80 mod 256.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
8473
8474
8475 /* Output sign extension from XOP[1] to XOP[0] and return "".
8476 If PLEN == NULL, print assembler instructions to perform the operation;
8477 otherwise, set *PLEN to the length of the instruction sequence (in words)
8478 as printed with PLEN == NULL. */
8479
8480 const char*
8481 avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
8482 {
8483 // Size in bytes of source resp. destination operand.
8484 unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
8485 unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
8486 rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];
8487
8488 if (plen)
8489 *plen = 0;
8490
8491 // Copy destination to source
8492
8493 if (REGNO (xop[0]) != REGNO (xop[1]))
8494 {
8495 gcc_assert (n_src <= 2);
8496
8497 if (n_src == 2)
8498 avr_asm_len (AVR_HAVE_MOVW
8499 ? "movw %0,%1"
8500 : "mov %B0,%B1", xop, plen, 1);
8501 if (n_src == 1 || !AVR_HAVE_MOVW)
8502 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
8503 }
8504
8505 // Set Carry to the sign bit MSB.7...
8506
8507 if (REGNO (xop[0]) == REGNO (xop[1])
8508 || !reg_unused_after (insn, r_msb))
8509 {
8510 avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
8511 r_msb = tmp_reg_rtx;
8512 }
8513
8514 avr_asm_len ("lsl %0", &r_msb, plen, 1);
8515
8516 // ...and propagate it to all the new sign bits
8517
8518 for (unsigned n = n_src; n < n_dest; n++)
8519 avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);
8520
8521 return "";
8522 }
8523
8524
8525 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
8526 PLEN != NULL: Set *PLEN to the length of that sequence.
8527 Return "". */
8528
8529 const char*
8530 avr_out_addto_sp (rtx *op, int *plen)
8531 {
8532 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
8533 int addend = INTVAL (op[0]);
8534
8535 if (plen)
8536 *plen = 0;
8537
8538 if (addend < 0)
8539 {
8540 if (flag_verbose_asm || flag_print_asm_name)
8541 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
8542
8543 while (addend <= -pc_len)
8544 {
8545 addend += pc_len;
8546 avr_asm_len ("rcall .", op, plen, 1);
8547 }
8548
8549 while (addend++ < 0)
8550 avr_asm_len ("push __tmp_reg__", op, plen, 1);
8551 }
8552 else if (addend > 0)
8553 {
8554 if (flag_verbose_asm || flag_print_asm_name)
8555 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
8556
8557 while (addend-- > 0)
8558 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
8559 }
8560
8561 return "";
8562 }
8563
8564
8565 /* Output instructions to insert an inverted bit into OPERANDS[0]:
8566 $0.$1 = ~$2.$3 if XBITNO = NULL
8567 $0.$1 = ~$2.XBITNO if XBITNO != NULL.
8568 If PLEN = NULL then output the respective instruction sequence which
8569 is a combination of BST / BLD and some instruction(s) to invert the bit.
8570 If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
8571 Return "". */
8572
8573 const char*
8574 avr_out_insert_notbit (rtx_insn *insn, rtx operands[], rtx xbitno, int *plen)
8575 {
8576 rtx op[4] = { operands[0], operands[1], operands[2],
8577 xbitno == NULL_RTX ? operands [3] : xbitno };
8578
8579 if (INTVAL (op[1]) == 7
8580 && test_hard_reg_class (LD_REGS, op[0]))
8581 {
8582 /* If the inserted bit number is 7 and we have a d-reg, then invert
8583 the bit after the insertion by means of SUBI *,0x80. */
8584
8585 if (INTVAL (op[3]) == 7
8586 && REGNO (op[0]) == REGNO (op[2]))
8587 {
8588 avr_asm_len ("subi %0,0x80", op, plen, -1);
8589 }
8590 else
8591 {
8592 avr_asm_len ("bst %2,%3" CR_TAB
8593 "bld %0,%1" CR_TAB
8594 "subi %0,0x80", op, plen, -3);
8595 }
8596 }
8597 else if (test_hard_reg_class (LD_REGS, op[0])
8598 && (INTVAL (op[1]) != INTVAL (op[3])
8599 || !reg_overlap_mentioned_p (op[0], op[2])))
8600 {
8601 /* If the destination bit is in a d-reg we can jump depending
8602 on the source bit and use ANDI / ORI. This just applies if we
8603 have not an early-clobber situation with the bit. */
8604
8605 avr_asm_len ("andi %0,~(1<<%1)" CR_TAB
8606 "sbrs %2,%3" CR_TAB
8607 "ori %0,1<<%1", op, plen, -3);
8608 }
8609 else
8610 {
8611 /* Otherwise, invert the bit by means of COM before we store it with
8612 BST and then undo the COM if needed. */
8613
8614 avr_asm_len ("com %2" CR_TAB
8615 "bst %2,%3", op, plen, -2);
8616
8617 if (!reg_unused_after (insn, op[2])
8618 // A simple 'reg_unused_after' is not enough because that function
8619 // assumes that the destination register is overwritten completely
8620 // and hence is in order for our purpose. This is not the case
8621 // with BLD which just changes one bit of the destination.
8622 || reg_overlap_mentioned_p (op[0], op[2]))
8623 {
8624 /* Undo the COM from above. */
8625 avr_asm_len ("com %2", op, plen, 1);
8626 }
8627
8628 avr_asm_len ("bld %0,%1", op, plen, 1);
8629 }
8630
8631 return "";
8632 }
8633
8634
8635 /* Outputs instructions needed for fixed point type conversion.
8636 This includes converting between any fixed point type, as well
8637 as converting to any integer type. Conversion between integer
8638 types is not supported.
8639
8640 Converting signed fractional types requires a bit shift if converting
8641 to or from any unsigned fractional type because the decimal place is
8642 shifted by 1 bit. When the destination is a signed fractional, the sign
8643 is stored in either the carry or T bit. */
8644
8645 const char*
8646 avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
8647 {
8648 rtx xop[6];
8649 RTX_CODE shift = UNKNOWN;
8650 bool sign_in_carry = false;
8651 bool msb_in_carry = false;
8652 bool lsb_in_tmp_reg = false;
8653 bool lsb_in_carry = false;
8654 bool frac_rounded = false;
8655 const char *code_ashift = "lsl %0";
8656
8657
8658 #define MAY_CLOBBER(RR) \
8659 /* Shorthand used below. */ \
8660 ((sign_bytes \
8661 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
8662 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
8663 || (reg_unused_after (insn, all_regs_rtx[RR]) \
8664 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
8665
8666 struct
8667 {
8668 /* bytes : Length of operand in bytes.
8669 ibyte : Length of integral part in bytes.
8670 fbyte, fbit : Length of fractional part in bytes, bits. */
8671
8672 bool sbit;
8673 unsigned fbit, bytes, ibyte, fbyte;
8674 unsigned regno, regno_msb;
8675 } dest, src, *val[2] = { &dest, &src };
8676
8677 if (plen)
8678 *plen = 0;
8679
8680 /* Step 0: Determine information on source and destination operand we
8681 ====== will need in the remainder. */
8682
8683 for (size_t i = 0; i < ARRAY_SIZE (val); i++)
8684 {
8685 machine_mode mode;
8686
8687 xop[i] = operands[i];
8688
8689 mode = GET_MODE (xop[i]);
8690
8691 val[i]->bytes = GET_MODE_SIZE (mode);
8692 val[i]->regno = REGNO (xop[i]);
8693 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
8694
8695 if (SCALAR_INT_MODE_P (mode))
8696 {
8697 val[i]->sbit = intsigned;
8698 val[i]->fbit = 0;
8699 }
8700 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
8701 {
8702 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
8703 val[i]->fbit = GET_MODE_FBIT (mode);
8704 }
8705 else
8706 fatal_insn ("unsupported fixed-point conversion", insn);
8707
8708 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
8709 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
8710 }
8711
8712 // Byte offset of the decimal point taking into account different place
8713 // of the decimal point in input and output and different register numbers
8714 // of input and output.
8715 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
8716
8717 // Number of destination bytes that will come from sign / zero extension.
8718 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
8719
8720 // Number of bytes at the low end to be filled with zeros.
8721 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
8722
8723 // Do we have a 16-Bit register that is cleared?
8724 rtx clrw = NULL_RTX;
8725
8726 bool sign_extend = src.sbit && sign_bytes;
8727
8728 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
8729 shift = ASHIFT;
8730 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
8731 shift = ASHIFTRT;
8732 else if (dest.fbit % 8 == src.fbit % 8)
8733 shift = UNKNOWN;
8734 else
8735 gcc_unreachable();
8736
8737 /* If we need to round the fraction part, we might need to save/round it
8738 before clobbering any of it in Step 1. Also, we might want to do
8739 the rounding now to make use of LD_REGS. */
8740 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8741 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8742 && !TARGET_FRACT_CONV_TRUNC)
8743 {
8744 bool overlap
8745 = (src.regno <=
8746 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
8747 && dest.regno - offset -1 >= dest.regno);
8748 unsigned s0 = dest.regno - offset -1;
8749 bool use_src = true;
8750 unsigned sn;
8751 unsigned copied_msb = src.regno_msb;
8752 bool have_carry = false;
8753
8754 if (src.ibyte > dest.ibyte)
8755 copied_msb -= src.ibyte - dest.ibyte;
8756
8757 for (sn = s0; sn <= copied_msb; sn++)
8758 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
8759 && !reg_unused_after (insn, all_regs_rtx[sn]))
8760 use_src = false;
8761 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
8762 {
8763 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8764 &all_regs_rtx[src.regno_msb], plen, 2);
8765 sn = src.regno;
8766 if (sn < s0)
8767 {
8768 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
8769 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
8770 else
8771 avr_asm_len ("sec" CR_TAB
8772 "cpc %0,__zero_reg__",
8773 &all_regs_rtx[sn], plen, 2);
8774 have_carry = true;
8775 }
8776 while (++sn < s0)
8777 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8778
8779 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
8780 &all_regs_rtx[s0], plen, 1);
8781 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8782 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
8783 avr_asm_len ("\n0:", NULL, plen, 0);
8784 frac_rounded = true;
8785 }
8786 else if (use_src && overlap)
8787 {
8788 avr_asm_len ("clr __tmp_reg__" CR_TAB
8789 "sbrc %1,0" CR_TAB
8790 "dec __tmp_reg__", xop, plen, 1);
8791 sn = src.regno;
8792 if (sn < s0)
8793 {
8794 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8795 have_carry = true;
8796 }
8797
8798 while (++sn < s0)
8799 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8800
8801 if (have_carry)
8802 avr_asm_len ("clt" CR_TAB
8803 "bld __tmp_reg__,7" CR_TAB
8804 "adc %0,__tmp_reg__",
8805 &all_regs_rtx[s0], plen, 1);
8806 else
8807 avr_asm_len ("lsr __tmp_reg" CR_TAB
8808 "add %0,__tmp_reg__",
8809 &all_regs_rtx[s0], plen, 2);
8810 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8811 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8812 frac_rounded = true;
8813 }
8814 else if (overlap)
8815 {
8816 bool use_src
8817 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
8818 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
8819 || reg_unused_after (insn, all_regs_rtx[s0])));
8820 xop[2] = all_regs_rtx[s0];
8821 unsigned sn = src.regno;
8822 if (!use_src || sn == s0)
8823 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8824 /* We need to consider to-be-discarded bits
8825 if the value is negative. */
8826 if (sn < s0)
8827 {
8828 avr_asm_len ("tst %0" CR_TAB
8829 "brpl 0f",
8830 &all_regs_rtx[src.regno_msb], plen, 2);
8831 /* Test to-be-discarded bytes for any nozero bits.
8832 ??? Could use OR or SBIW to test two registers at once. */
8833 if (sn < s0)
8834 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8835
8836 while (++sn < s0)
8837 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8838 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
8839 if (use_src)
8840 avr_asm_len ("breq 0f" CR_TAB
8841 "ori %2,1"
8842 "\n0:\t" "mov __tmp_reg__,%2",
8843 xop, plen, 3);
8844 else
8845 avr_asm_len ("breq 0f" CR_TAB
8846 "set" CR_TAB
8847 "bld __tmp_reg__,0\n0:",
8848 xop, plen, 3);
8849 }
8850 lsb_in_tmp_reg = true;
8851 }
8852 }
8853
8854 /* Step 1: Clear bytes at the low end and copy payload bits from source
8855 ====== to destination. */
8856
8857 int step = offset < 0 ? 1 : -1;
8858 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
8859
8860 // We cleared at least that number of registers.
8861 int clr_n = 0;
8862
8863 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
8864 {
8865 // Next regno of destination is needed for MOVW
8866 unsigned d1 = d0 + step;
8867
8868 // Current and next regno of source
8869 signed s0 = d0 - offset;
8870 signed s1 = s0 + step;
8871
8872 // Must current resp. next regno be CLRed? This applies to the low
8873 // bytes of the destination that have no associated source bytes.
8874 bool clr0 = s0 < (signed) src.regno;
8875 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
8876
8877 // First gather what code to emit (if any) and additional step to
8878 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
8879 // is the source rtx for the current loop iteration.
8880 const char *code = NULL;
8881 int stepw = 0;
8882
8883 if (clr0)
8884 {
8885 if (AVR_HAVE_MOVW && clr1 && clrw)
8886 {
8887 xop[2] = all_regs_rtx[d0 & ~1];
8888 xop[3] = clrw;
8889 code = "movw %2,%3";
8890 stepw = step;
8891 }
8892 else
8893 {
8894 xop[2] = all_regs_rtx[d0];
8895 code = "clr %2";
8896
8897 if (++clr_n >= 2
8898 && !clrw
8899 && d0 % 2 == (step > 0))
8900 {
8901 clrw = all_regs_rtx[d0 & ~1];
8902 }
8903 }
8904 }
8905 else if (offset && s0 <= (signed) src.regno_msb)
8906 {
8907 int movw = AVR_HAVE_MOVW && offset % 2 == 0
8908 && d0 % 2 == (offset > 0)
8909 && d1 <= dest.regno_msb && d1 >= dest.regno
8910 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
8911
8912 xop[2] = all_regs_rtx[d0 & ~movw];
8913 xop[3] = all_regs_rtx[s0 & ~movw];
8914 code = movw ? "movw %2,%3" : "mov %2,%3";
8915 stepw = step * movw;
8916 }
8917
8918 if (code)
8919 {
8920 if (sign_extend && shift != ASHIFT && !sign_in_carry
8921 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
8922 {
8923 /* We are going to override the sign bit. If we sign-extend,
8924 store the sign in the Carry flag. This is not needed if
8925 the destination will be ASHIFT in the remainder because
8926 the ASHIFT will set Carry without extra instruction. */
8927
8928 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
8929 sign_in_carry = true;
8930 }
8931
8932 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
8933
8934 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8935 && src.ibyte > dest.ibyte
8936 && (d0 == src_msb || d0 + stepw == src_msb))
8937 {
8938 /* We are going to override the MSB. If we shift right,
8939 store the MSB in the Carry flag. This is only needed if
8940 we don't sign-extend becaue with sign-extension the MSB
8941 (the sign) will be produced by the sign extension. */
8942
8943 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
8944 msb_in_carry = true;
8945 }
8946
8947 unsigned src_lsb = dest.regno - offset -1;
8948
8949 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
8950 && !lsb_in_tmp_reg
8951 && (d0 == src_lsb || d0 + stepw == src_lsb))
8952 {
8953 /* We are going to override the new LSB; store it into carry. */
8954
8955 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
8956 code_ashift = "rol %0";
8957 lsb_in_carry = true;
8958 }
8959
8960 avr_asm_len (code, xop, plen, 1);
8961 d0 += stepw;
8962 }
8963 }
8964
8965 /* Step 2: Shift destination left by 1 bit position. This might be needed
8966 ====== for signed input and unsigned output. */
8967
8968 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
8969 {
8970 unsigned s0 = dest.regno - offset -1;
8971
8972 /* n1169 4.1.4 says:
8973 "Conversions from a fixed-point to an integer type round toward zero."
8974 Hence, converting a fract type to integer only gives a non-zero result
8975 for -1. */
8976 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8977 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
8978 && !TARGET_FRACT_CONV_TRUNC)
8979 {
8980 gcc_assert (s0 == src.regno_msb);
8981 /* Check if the input is -1. We do that by checking if negating
8982 the input causes an integer overflow. */
8983 unsigned sn = src.regno;
8984 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8985 while (sn <= s0)
8986 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8987
8988 /* Overflow goes with set carry. Clear carry otherwise. */
8989 avr_asm_len ("brvs 0f" CR_TAB
8990 "clc\n0:", NULL, plen, 2);
8991 }
8992 /* Likewise, when converting from accumulator types to integer, we
8993 need to round up negative values. */
8994 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8995 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8996 && !TARGET_FRACT_CONV_TRUNC
8997 && !frac_rounded)
8998 {
8999 bool have_carry = false;
9000
9001 xop[2] = all_regs_rtx[s0];
9002 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
9003 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
9004 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
9005 &all_regs_rtx[src.regno_msb], plen, 2);
9006 if (!lsb_in_tmp_reg)
9007 {
9008 unsigned sn = src.regno;
9009 if (sn < s0)
9010 {
9011 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
9012 plen, 1);
9013 have_carry = true;
9014 }
9015 while (++sn < s0)
9016 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
9017 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
9018 }
9019 /* Add in C and the rounding value 127. */
9020 /* If the destination msb is a sign byte, and in LD_REGS,
9021 grab it as a temporary. */
9022 if (sign_bytes
9023 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
9024 dest.regno_msb))
9025 {
9026 xop[3] = all_regs_rtx[dest.regno_msb];
9027 avr_asm_len ("ldi %3,127", xop, plen, 1);
9028 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
9029 : have_carry ? "adc %2,%3"
9030 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
9031 : "add %2,%3"),
9032 xop, plen, 1);
9033 }
9034 else
9035 {
9036 /* Fall back to use __zero_reg__ as a temporary. */
9037 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
9038 if (have_carry)
9039 avr_asm_len ("clt" CR_TAB
9040 "bld __zero_reg__,7", NULL, plen, 2);
9041 else
9042 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
9043 avr_asm_len (have_carry && lsb_in_tmp_reg
9044 ? "adc __tmp_reg__,__zero_reg__"
9045 : have_carry ? "adc %2,__zero_reg__"
9046 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
9047 : "add %2,__zero_reg__",
9048 xop, plen, 1);
9049 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
9050 }
9051
9052 for (d0 = dest.regno + zero_bytes;
9053 d0 <= dest.regno_msb - sign_bytes; d0++)
9054 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
9055
9056 avr_asm_len (lsb_in_tmp_reg
9057 ? "\n0:\t" "lsl __tmp_reg__"
9058 : "\n0:\t" "lsl %2",
9059 xop, plen, 1);
9060 }
9061 else if (MAY_CLOBBER (s0))
9062 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
9063 else
9064 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9065 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
9066
9067 code_ashift = "rol %0";
9068 lsb_in_carry = true;
9069 }
9070
9071 if (shift == ASHIFT)
9072 {
9073 for (d0 = dest.regno + zero_bytes;
9074 d0 <= dest.regno_msb - sign_bytes; d0++)
9075 {
9076 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
9077 code_ashift = "rol %0";
9078 }
9079
9080 lsb_in_carry = false;
9081 sign_in_carry = true;
9082 }
9083
9084 /* Step 4a: Store MSB in carry if we don't already have it or will produce
9085 ======= it in sign-extension below. */
9086
9087 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
9088 && src.ibyte > dest.ibyte)
9089 {
9090 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
9091
9092 if (MAY_CLOBBER (s0))
9093 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
9094 else
9095 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9096 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
9097
9098 msb_in_carry = true;
9099 }
9100
9101 /* Step 3: Sign-extend or zero-extend the destination as needed.
9102 ====== */
9103
9104 if (sign_extend && !sign_in_carry)
9105 {
9106 unsigned s0 = src.regno_msb;
9107
9108 if (MAY_CLOBBER (s0))
9109 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
9110 else
9111 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9112 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
9113
9114 sign_in_carry = true;
9115 }
9116
9117 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
9118
9119 unsigned copies = 0;
9120 rtx movw = sign_extend ? NULL_RTX : clrw;
9121
9122 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
9123 {
9124 if (AVR_HAVE_MOVW && movw
9125 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
9126 {
9127 xop[2] = all_regs_rtx[d0];
9128 xop[3] = movw;
9129 avr_asm_len ("movw %2,%3", xop, plen, 1);
9130 d0++;
9131 }
9132 else
9133 {
9134 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
9135 &all_regs_rtx[d0], plen, 1);
9136
9137 if (++copies >= 2 && !movw && d0 % 2 == 1)
9138 movw = all_regs_rtx[d0-1];
9139 }
9140 } /* for */
9141
9142
9143 /* Step 4: Right shift the destination. This might be needed for
9144 ====== conversions from unsigned to signed. */
9145
9146 if (shift == ASHIFTRT)
9147 {
9148 const char *code_ashiftrt = "lsr %0";
9149
9150 if (sign_extend || msb_in_carry)
9151 code_ashiftrt = "ror %0";
9152
9153 if (src.sbit && src.ibyte == dest.ibyte)
9154 code_ashiftrt = "asr %0";
9155
9156 for (d0 = dest.regno_msb - sign_bytes;
9157 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
9158 {
9159 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
9160 code_ashiftrt = "ror %0";
9161 }
9162 }
9163
9164 #undef MAY_CLOBBER
9165
9166 return "";
9167 }
9168
9169
9170 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
9171 XOP[2] is the rounding point, a CONST_INT. The function prints the
9172 instruction sequence if PLEN = NULL and computes the length in words
9173 of the sequence if PLEN != NULL. Most of this function deals with
9174 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
9175
9176 const char*
9177 avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9178 {
9179 machine_mode mode = GET_MODE (xop[0]);
9180 machine_mode imode = int_mode_for_mode (mode);
9181 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
9182 int fbit = (int) GET_MODE_FBIT (mode);
9183 double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
9184 wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
9185 GET_MODE_PRECISION (imode));
9186 // Lengths of PLUS and AND parts.
9187 int len_add = 0, *plen_add = plen ? &len_add : NULL;
9188 int len_and = 0, *plen_and = plen ? &len_and : NULL;
9189
9190 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
9191 // the saturated addition so that we can emit the "rjmp 1f" before the
9192 // "0:" below.
9193
9194 rtx xadd = const_fixed_from_double_int (i_add, mode);
9195 rtx xpattern, xsrc, op[4];
9196
9197 xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
9198 ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
9199 : gen_rtx_US_PLUS (mode, xop[1], xadd);
9200 xpattern = gen_rtx_SET (xop[0], xsrc);
9201
9202 op[0] = xop[0];
9203 op[1] = xop[1];
9204 op[2] = xadd;
9205 avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);
9206
9207 avr_asm_len ("rjmp 1f" CR_TAB
9208 "0:", NULL, plen_add, 1);
9209
9210 // Keep all bits from RP and higher: ... 2^(-RP)
9211 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
9212 // Rounding point ^^^^^^^
9213 // Added above ^^^^^^^^^
9214 rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
9215 rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);
9216
9217 xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));
9218
9219 op[0] = xreg;
9220 op[1] = xreg;
9221 op[2] = xmask;
9222 op[3] = gen_rtx_SCRATCH (QImode);
9223 avr_out_bitop (xpattern, op, plen_and);
9224 avr_asm_len ("1:", NULL, plen, 0);
9225
9226 if (plen)
9227 *plen = len_add + len_and;
9228
9229 return "";
9230 }
9231
9232
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.

   OPERANDS[0] is the destination, OPERANDS[1] the source, OPERANDS[2] the
   constant rotate amount in bits, OPERANDS[3] an optional scratch register.
   Always returns true.  */

bool
avr_rotate_bytes (rtx operands[])
{
  machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* Himode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
	{
	  /* Classic 3-XOR swap of the two bytes; no temporary needed.  */
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	  emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	}
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  */
      /* The extra slots beyond MAX_SIZE hold scratch moves that may be
	 appended below when breaking deadlocks.  */
      struct {
	rtx src, dst;
	int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (int i = 0; i < size; i++)
	{
	  int from = i;
	  int to = (from + offset) % size;
	  move[i].src = simplify_gen_subreg (move_mode, operands[1],
					     mode, from * move_size);
	  move[i].dst = simplify_gen_subreg (move_mode, operands[0],
					     mode, to * move_size);
	  /* links == -1 means this move does not wait on any other.  */
	  move[i].links = -1;
	}
      /* Mark dependence where a dst of one move is the src of another move.
	 The first move is a conflict as it must wait until second is
	 performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
	for (int i = 0; i < size; i++)
	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
	    for (int j = 0; j < size; j++)
	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
		{
		  /* The dst of move i is the src of move j.  */
		  move[i].links = j;
		  break;
		}

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
	 non-overlapping move is made, it may remove other conflicts
	 so the process is repeated until no conflicts remain.  */
      do
	{
	  blocked = -1;
	  moves = 0;
	  /* Emit move where dst is not also a src or we have used that
	     src already.  */
	  for (int i = 0; i < size; i++)
	    if (move[i].src != NULL_RTX)
	      {
		if (move[i].links == -1
		    || move[move[i].links].src == NULL_RTX)
		  {
		    moves++;
		    /* Ignore NOP moves to self.  */
		    if (!rtx_equal_p (move[i].dst, move[i].src))
		      emit_move_insn (move[i].dst, move[i].src);

		    /* Remove conflict from list.  */
		    move[i].src = NULL_RTX;
		  }
		else
		  blocked = i;
	      }

	  /* Check for deadlock.  This is when no moves occurred and we have
	     at least one blocked move.  */
	  if (moves == 0 && blocked != -1)
	    {
	      /* Need to use scratch register to break deadlock.
		 Add move to put dst of blocked move into scratch.
		 When this move occurs, it will break chain deadlock.
		 The scratch register is substituted for real move.  */

	      gcc_assert (SCRATCH != GET_CODE (scratch));

	      move[size].src = move[blocked].dst;
	      move[size].dst = scratch;
	      /* Scratch move is never blocked.  */
	      move[size].links = -1;
	      /* Make sure we have valid link.  */
	      gcc_assert (move[blocked].links != -1);
	      /* Replace src of blocking move with scratch reg.  */
	      move[move[blocked].links].src = scratch;
	      /* Make dependent on scratch move occurring.  */
	      move[blocked].links = size;
	      size=size+1;
	    }
	}
      while (blocked != -1);
    }
  return true;
}
9379
9380
/* Worker function for `ADJUST_INSN_LENGTH'.  */
/* Modifies the length assigned to instruction INSN
   LEN is the initially computed length of the insn.
   Returns the adjusted length in words.  */

int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  /* Operand array filled by extract_constrain_insn_cached below.  */
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* As we pretend jump tables in .text, fix branch offsets crossing jump
     tables now.  */

  if (JUMP_TABLE_DATA_P (insn))
    return 0;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn)
      || -1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
	 This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  */
  /* Each worker is called with a non-NULL length pointer, so it emits
     no assembler text here and only stores the exact sequence length
     (in words) into LEN.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    case ADJUST_LEN_INSV_NOTBIT:
      avr_out_insert_notbit (insn, op, NULL_RTX, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_0:
      avr_out_insert_notbit (insn, op, const0_rtx, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_7:
      avr_out_insert_notbit (insn, op, GEN_INT (7), &len);
      break;

    default:
      gcc_unreachable();
    }

  return len;
}
9491
9492 /* Return nonzero if register REG dead after INSN. */
9493
9494 int
9495 reg_unused_after (rtx_insn *insn, rtx reg)
9496 {
9497 return (dead_or_set_p (insn, reg)
9498 || (REG_P (reg) && _reg_unused_after (insn, reg)));
9499 }
9500
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && !MEM_P (SET_DEST (set))
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Scan forward: REG is unused if we reach the end of the insn stream
     without seeing a use, or once a full (non-memory) redefinition of
     REG is found.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      /* Skip notes, barriers and other non-insns.  */
      if (!INSN_P (insn))
	continue;

      /* Be conservative at jumps: REG might be live at the target.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
	  int retval = 0;

	  for (int i = 0; i < seq->len (); i++)
	    {
	      rtx_insn *this_insn = seq->insn (i);
	      rtx set = single_set (this_insn);

	      if (CALL_P (this_insn))
		code = CALL_INSN;
	      else if (JUMP_P (this_insn))
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      /* Any use of REG inside the sequence keeps it live.  */
	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  /* A plain register set kills REG; a store may use REG
		     in its address and thus counts as a use.  */
		  if (!MEM_P (SET_DEST (set)))
		    retval = 1;
		  else
		    return 0;
		}
	      /* No single set: any mention of REG is a possible use.  */
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  rtx tem;
	  /* REG is live if the call explicitly uses it ...  */
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  /* ... otherwise a call-clobbered REG is dead across the call.  */
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return !MEM_P (SET_DEST (set));
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  return 1;
}
9605
9606
/* Implement `TARGET_ASM_INTEGER'.  */
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels.  */

static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  /* Pointer-sized references into the text section are emitted as gs()
     expressions so the linker can insert stubs where needed.  */
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* This needs binutils 2.23+, see PR binutils/13503 */
      /* 24-bit entity: gas has no 24-bit directive, so emit the three
         bytes individually, low byte first.  */

      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      return true;
    }
  else if (CONST_FIXED_P (x))
    {
      /* varasm fails to handle big fixed modes that don't fit in hwi.  */
      /* Hence emit fixed-point constants byte-wise via QImode subregs.  */

      for (unsigned n = 0; n < size; n++)
        {
          rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
          default_assemble_integer (xn, 1, aligned_p);
        }

      return true;
    }

  if (AVR_TINY
      && avr_address_tiny_pm_p (x))
    {
      /* Reduced Tiny: progmem data is accessed through the flash window
         mapped into the RAM address space; add the mapping offset.  */
      x = plus_constant (Pmode, x, avr_arch->flash_pm_offset);
    }

  return default_assemble_integer (x, size, aligned_p);
}
9662
9663
9664 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
9665 /* Return value is nonzero if pseudos that have been
9666 assigned to registers of class CLASS would likely be spilled
9667 because registers of CLASS are needed for spill registers. */
9668
9669 static bool
9670 avr_class_likely_spilled_p (reg_class_t c)
9671 {
9672 return (c != ALL_REGS &&
9673 (AVR_TINY ? 1 : c != ADDW_REGS));
9674 }
9675
9676
9677 /* Valid attributes:
9678 progmem - Put data to program memory.
9679 signal - Make a function to be hardware interrupt.
9680 After function prologue interrupts remain disabled.
9681 interrupt - Make a function to be hardware interrupt. Before function
9682 prologue interrupts are enabled by means of SEI.
9683 naked - Don't generate function prologue/epilogue and RET
9684 instruction. */
9685
9686 /* Handle a "progmem" attribute; arguments as in
9687 struct attribute_spec.handler. */
9688
9689 static tree
9690 avr_handle_progmem_attribute (tree *node, tree name,
9691 tree args ATTRIBUTE_UNUSED,
9692 int flags ATTRIBUTE_UNUSED,
9693 bool *no_add_attrs)
9694 {
9695 if (DECL_P (*node))
9696 {
9697 if (TREE_CODE (*node) == TYPE_DECL)
9698 {
9699 /* This is really a decl attribute, not a type attribute,
9700 but try to handle it for GCC 3.0 backwards compatibility. */
9701
9702 tree type = TREE_TYPE (*node);
9703 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
9704 tree newtype = build_type_attribute_variant (type, attr);
9705
9706 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
9707 TREE_TYPE (*node) = newtype;
9708 *no_add_attrs = true;
9709 }
9710 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
9711 {
9712 *no_add_attrs = false;
9713 }
9714 else
9715 {
9716 warning (OPT_Wattributes, "%qE attribute ignored",
9717 name);
9718 *no_add_attrs = true;
9719 }
9720 }
9721
9722 return NULL_TREE;
9723 }
9724
9725 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
9726 struct attribute_spec.handler. */
9727
9728 static tree
9729 avr_handle_fndecl_attribute (tree *node, tree name,
9730 tree args ATTRIBUTE_UNUSED,
9731 int flags ATTRIBUTE_UNUSED,
9732 bool *no_add_attrs)
9733 {
9734 if (TREE_CODE (*node) != FUNCTION_DECL)
9735 {
9736 warning (OPT_Wattributes, "%qE attribute only applies to functions",
9737 name);
9738 *no_add_attrs = true;
9739 }
9740
9741 return NULL_TREE;
9742 }
9743
9744 static tree
9745 avr_handle_fntype_attribute (tree *node, tree name,
9746 tree args ATTRIBUTE_UNUSED,
9747 int flags ATTRIBUTE_UNUSED,
9748 bool *no_add_attrs)
9749 {
9750 if (TREE_CODE (*node) != FUNCTION_TYPE)
9751 {
9752 warning (OPT_Wattributes, "%qE attribute only applies to functions",
9753 name);
9754 *no_add_attrs = true;
9755 }
9756
9757 return NULL_TREE;
9758 }
9759
9760 static tree
9761 avr_handle_absdata_attribute (tree *node, tree name, tree /* args */,
9762 int /* flags */, bool *no_add)
9763 {
9764 location_t loc = DECL_SOURCE_LOCATION (*node);
9765
9766 if (AVR_TINY)
9767 {
9768 if (TREE_CODE (*node) != VAR_DECL
9769 || (!TREE_STATIC (*node) && !DECL_EXTERNAL (*node)))
9770 {
9771 warning_at (loc, OPT_Wattributes, "%qE attribute only applies to"
9772 " variables in static storage", name);
9773 *no_add = true;
9774 }
9775 }
9776 else
9777 {
9778 warning_at (loc, OPT_Wattributes, "%qE attribute only supported"
9779 " for reduced Tiny cores", name);
9780 *no_add = true;
9781 }
9782
9783 return NULL_TREE;
9784 }
9785
/* Handle the "io", "io_low" and "address" attributes; arguments as in
   struct attribute_spec.handler.  Validates that the attribute sits on
   a variable, that its (optional) argument is an integer constant in
   range, and that no competing address attribute is already present.  */

static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
                           int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  /* "io" and "io_low" both start with "io"; "address" does not.  */
  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
  location_t loc = DECL_SOURCE_LOCATION (*node);

  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning_at (loc, 0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so we see the bare constant.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
        TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
        {
          warning (0, "%qE attribute allows only an integer constant argument",
                   name);
          *no_add = true;
        }
      else if (io_p
               && (!tree_fits_shwi_p (arg)
                   /* io_low demands the low I/O range (SBI/CBI-capable);
                      plain io accepts the whole I/O range.  */
                   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
                        ? low_io_address_operand : io_address_operand)
                   (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
        {
          warning_at (loc, 0, "%qE attribute address out of range", name);
          *no_add = true;
        }
      else
        {
          /* Refuse two attributes that both carry an address.  */
          tree attribs = DECL_ATTRIBUTES (*node);
          const char *names[] = { "io", "io_low", "address", NULL } ;
          for (const char **p = names; *p; p++)
            {
              tree other = lookup_attribute (*p, attribs);
              if (other && TREE_VALUE (other))
                {
                  warning_at (loc, 0,
                              "both %s and %qE attribute provide address",
                              *p, name);
                  *no_add = true;
                  break;
                }
            }
        }
    }

  /* I/O registers are usually volatile; warn about a likely mistake.  */
  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
    warning_at (loc, 0, "%qE attribute on non-volatile variable", name);

  return NULL_TREE;
}
9843
/* If X is a SYMBOL_REF carrying SYMBOL_FLAG_ADDRESS, return the address
   given by its io / io_low / address attribute as a CONST_INT.
   Otherwise return X unchanged.  */

rtx
avr_eval_addr_attrib (rtx x)
{
  if (SYMBOL_REF_P (x)
      && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
    {
      tree decl = SYMBOL_REF_DECL (x);
      tree attr = NULL_TREE;

      if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
        {
          /* Prefer "io"; fall back to "io_low" if "io" has no argument.  */
          attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
          if (!attr || !TREE_VALUE (attr))
            attr = lookup_attribute ("io_low", DECL_ATTRIBUTES (decl));
          gcc_assert (attr);
        }
      if (!attr || !TREE_VALUE (attr))
        attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
      /* SYMBOL_FLAG_ADDRESS implies one of the attributes has a value.  */
      gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
      return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
    }
  return x;
}
9867
9868
/* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Place data in program memory (flash).  */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  /* ISRs: "signal" keeps interrupts disabled, "interrupt" issues SEI.  */
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "no_gccisr", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  /* Variables at fixed (I/O) addresses; "io"/"io_low" take an optional
     address argument, "address" requires one.  */
  { "io",        0, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { "io_low",    0, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { "address",   1, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  /* AVR_TINY only: data accessible by LDS / STS.  */
  { "absdata",   0, 0, true, false, false,  avr_handle_absdata_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
9899
9900
9901 /* Return true if we support address space AS for the architecture in effect
9902 and false, otherwise. If LOC is not UNKNOWN_LOCATION then also issue
9903 a respective error. */
9904
9905 bool
9906 avr_addr_space_supported_p (addr_space_t as, location_t loc)
9907 {
9908 if (AVR_TINY)
9909 {
9910 if (loc != UNKNOWN_LOCATION)
9911 error_at (loc, "address spaces are not supported for reduced "
9912 "Tiny devices");
9913 return false;
9914 }
9915 else if (avr_addrspace[as].segment >= avr_n_flash)
9916 {
9917 if (loc != UNKNOWN_LOCATION)
9918 error_at (loc, "address space %qs not supported for devices with "
9919 "flash size up to %d KiB", avr_addrspace[as].name,
9920 64 * avr_n_flash);
9921 return false;
9922 }
9923
9924 return true;
9925 }
9926
9927
/* Implement `TARGET_ADDR_SPACE_DIAGNOSE_USAGE'.  */

static void
avr_addr_space_diagnose_usage (addr_space_t as, location_t loc)
{
  /* Called only for its diagnostic side effect: with a real LOC the
     helper emits an error if AS is unsupported.  The result is
     deliberately discarded.  */
  (void) avr_addr_space_supported_p (as, loc);
}
9935
9936
9937 /* Look if DECL shall be placed in program memory space by
9938 means of attribute `progmem' or some address-space qualifier.
9939 Return non-zero if DECL is data that must end up in Flash and
9940 zero if the data lives in RAM (.bss, .data, .rodata, ...).
9941
9942 Return 2 if DECL is located in 24-bit flash address-space
9943 Return 1 if DECL is located in 16-bit flash address-space
9944 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
9945 Return 0 otherwise */
9946
9947 int
9948 avr_progmem_p (tree decl, tree attributes)
9949 {
9950 tree a;
9951
9952 if (TREE_CODE (decl) != VAR_DECL)
9953 return 0;
9954
9955 if (avr_decl_memx_p (decl))
9956 return 2;
9957
9958 if (avr_decl_flash_p (decl))
9959 return 1;
9960
9961 if (NULL_TREE
9962 != lookup_attribute ("progmem", attributes))
9963 return -1;
9964
9965 a = decl;
9966
9967 do
9968 a = TREE_TYPE(a);
9969 while (TREE_CODE (a) == ARRAY_TYPE);
9970
9971 if (a == error_mark_node)
9972 return 0;
9973
9974 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
9975 return -1;
9976
9977 return 0;
9978 }
9979
9980
9981 /* Return true if DECL has attribute `absdata' set. This function should
9982 only be used for AVR_TINY. */
9983
9984 static bool
9985 avr_decl_absdata_p (tree decl, tree attributes)
9986 {
9987 return (TREE_CODE (decl) == VAR_DECL
9988 && NULL_TREE != lookup_attribute ("absdata", attributes));
9989 }
9990
9991
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  /* Arrays are transparent: look at the element type.  */
  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
        target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.  */

      as = TYPE_ADDR_SPACE (target);

      /* Only complain about address spaces the device supports; the
         unsupported ones are diagnosed elsewhere.  */
      if (!ADDR_SPACE_GENERIC_P (as)
          && !TYPE_READONLY (target)
          && avr_addr_space_supported_p (as))
        {
          return as;
        }

      /* Scan pointer's target type (recurses through multi-level
         pointers like const __flash char **).  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
10036
10037
/* Sanity check NODE so that all pointers targeting non-generic address spaces
   go along with CONST qualifier.  Writing to these address spaces should
   be detected and complained about as early as possible.
   Return true if NODE is fine, false if an error was emitted.  */

static bool
avr_pgm_check_var_decl (tree node)
{
  const char *reason = NULL;

  addr_space_t as = ADDR_SPACE_GENERIC;

  /* Documents the assumption that the generic address space is 0,
     which the `if (as = ..., as)' tests below rely on.  */
  gcc_assert (as == 0);

  if (avr_log.progmem)
    avr_edump ("%?: %t\n", node);

  /* Each case uses a comma expression: assign AS, then test it for a
     non-generic (non-zero) address space.  */
  switch (TREE_CODE (node))
    {
    default:
      break;

    case VAR_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = _("variable");
      break;

    case PARM_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = _("function parameter");
      break;

    case FIELD_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = _("structure field");
      break;

    case FUNCTION_DECL:
      /* For a function, check the return type.  */
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
          as)
        reason = _("return type of function");
      break;

    case POINTER_TYPE:
      if (as = avr_nonconst_pointer_addrspace (node), as)
        reason = _("pointer");
      break;
    }

  if (reason)
    {
      if (TYPE_P (node))
        error ("pointer targeting address space %qs must be const in %qT",
               avr_addrspace[as].name, node);
      else
        error ("pointer targeting address space %qs must be const"
               " in %s %q+D",
               avr_addrspace[as].name, reason, node);
    }

  return reason == NULL;
}
10099
10100
/* Add the section attribute if the variable is in progmem.
   Implements `TARGET_INSERT_ATTRIBUTES'; also verifies that progmem
   data is declared const.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  /* Only statically allocated variables can go to flash.  */
  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      /* Flash data must be readonly, either by type or by decl.  */
      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          /* Name the culprit in the diagnostic: either the attribute
             or the address-space qualifier.  */
          const char *reason = "__attribute__((progmem))";

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
10143
10144
/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
/* Track need of __do_clear_bss.  */

void
avr_asm_output_aligned_decl_common (FILE * stream,
                                    tree decl,
                                    const char *name,
                                    unsigned HOST_WIDE_INT size,
                                    unsigned int align, bool local_p)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* Variables with an io / io_low / address attribute get no storage;
     they are emitted as a symbol assignment instead.  */
  if (mem != NULL_RTX && MEM_P (mem)
      && SYMBOL_REF_P ((symbol = XEXP (mem, 0)))
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      if (!local_p)
        {
          fprintf (stream, "\t.globl\t");
          assemble_name (stream, name);
          fprintf (stream, "\n");
        }
      if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
        {
          /* Emit "name = <address>" with the attribute's value.  */
          assemble_name (stream, name);
          fprintf (stream, " = %ld\n",
                   (long) INTVAL (avr_eval_addr_attrib (symbol)));
        }
      else if (local_p)
        /* A static IO variable without an address has no way to be
           resolved; diagnose it.  */
        error_at (DECL_SOURCE_LOCATION (decl),
                  "static IO declaration for %q+D needs an address", decl);
      return;
    }

  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
10192
/* Implement `ASM_OUTPUT_ALIGNED_BSS'.  Like the common/local case above,
   divert variables carrying an io / address attribute; everything else
   is handled by DEFAULT_FUNC (the generic bss output routine).  */

void
avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
                                unsigned HOST_WIDE_INT size, int align,
                                void (*default_func)
                                  (FILE *, tree, const char *,
                                   unsigned HOST_WIDE_INT, int))
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX && MEM_P (mem)
      && SYMBOL_REF_P ((symbol = XEXP (mem, 0)))
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      /* A defined IO variable must have an address to assign.  */
      if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
        error_at (DECL_SOURCE_LOCATION (decl),
                  "IO definition for %q+D needs an address", decl);
      avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
    }
  else
    default_func (file, decl, name, size, align);
}
10215
10216
10217 /* Unnamed section callback for data_section
10218 to track need of __do_copy_data. */
10219
10220 static void
10221 avr_output_data_section_asm_op (const void *data)
10222 {
10223 avr_need_copy_data_p = true;
10224
10225 /* Dispatch to default. */
10226 output_section_asm_op (data);
10227 }
10228
10229
10230 /* Unnamed section callback for bss_section
10231 to track need of __do_clear_bss. */
10232
10233 static void
10234 avr_output_bss_section_asm_op (const void *data)
10235 {
10236 avr_need_clear_bss_p = true;
10237
10238 /* Dispatch to default. */
10239 output_section_asm_op (data);
10240 }
10241
10242
10243 /* Unnamed section callback for progmem*.data sections. */
10244
10245 static void
10246 avr_output_progmem_section_asm_op (const void *data)
10247 {
10248 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
10249 (const char*) data);
10250 }
10251
10252
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  If flash is not mapped to RAM then
     we have also to track .rodata because it is located in RAM then.  */

  /* NOTE: the #if'd condition guards ONLY the readonly_data_section
     statement right below it; data_section and bss_section are always
     patched.  Keep the statement unbraced or the guard breaks.  */
#if defined HAVE_LD_AVR_AVRXMEGA3_RODATA_IN_FLASH
  if (0 == avr_arch->flash_pm_offset)
#endif
    readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
10269
10270
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      /* The address space is encoded as a multiple of SECTION_MACH_DEP
         in the section flags (see avr_section_type_flags).  */
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      const char *old_prefix = ".rodata";
      const char *new_prefix = avr_addrspace[as].section_name;

      /* Map .rodata* names onto the progmem section prefix, keeping
         any suffix (e.g. .rodata.str1.1).  */
      if (STR_PREFIX_P (name, old_prefix))
        {
          const char *sname = ACONCAT ((new_prefix,
                                        name + strlen (old_prefix), NULL));
          default_elf_asm_named_section (sname, flags, decl);
          return;
        }

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  /* .rodata only needs __do_copy_data when it is located in RAM,
     i.e. when flash is not memory-mapped.  */
  if (!avr_need_copy_data_p
#if defined HAVE_LD_AVR_AVRXMEGA3_RODATA_IN_FLASH
      && 0 == avr_arch->flash_pm_offset
#endif
      )
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".rodata")
                            || STR_PREFIX_P (name, ".gnu.linkonce.r"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
10312
10313
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  /* .noinit data is neither copied nor cleared at startup; it must be
     uninitialized so it can be flagged @nobits.  */
  if (STR_PREFIX_P (name, ".noinit"))
    {
      if (decl && TREE_CODE (decl) == VAR_DECL
          && DECL_INITIAL (decl) == NULL_TREE)
        flags |= SECTION_BSS;  /* @nobits */
      else
        warning (0, "only uninitialized variables can be placed in the "
                 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
         Set section flags as if it was in __flash to get the right
         section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      /* Encode the address space in the machine-dependent flag bits;
         avr_asm_named_section decodes it again.  */
      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}
10350
10351
10352 /* A helper for the next function. NODE is a decl that is associated with
10353 a symbol. Return TRUE if the respective object may be accessed by LDS.
10354 There might still be other reasons for why LDS is not appropriate.
10355 This function is only appropriate for AVR_TINY. */
10356
10357 static bool
10358 avr_decl_maybe_lds_p (tree node)
10359 {
10360 if (!node
10361 || TREE_CODE (node) != VAR_DECL
10362 || DECL_SECTION_NAME (node) != NULL)
10363 return false;
10364
10365 /* Don't use LDS for objects that go to .rodata. The current default
10366 linker description file still locates .rodata in RAM, but this is not
10367 a must. A better linker script would just keep .rodata in flash and
10368 add an offset of 0x4000 to the VMA. Hence avoid LDS for such data. */
10369
10370 if (TREE_READONLY (node))
10371 return false;
10372
10373 // C++ requires peeling arrays.
10374
10375 do
10376 node = TREE_TYPE (node);
10377 while (ARRAY_TYPE == TREE_CODE (node));
10378
10379 return (node != error_mark_node
10380 && !TYPE_READONLY (node));
10381 }
10382
10383
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* Set in the first decl block below; deliberately still live in the
     AVR_TINY block at the end, which consults it.  */
  tree addr_attr = NULL_TREE;

  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      if (!TREE_READONLY (decl))
        {
          // This might happen with C++ if stuff needs constructing.
          error ("variable %q+D with dynamic initialization put "
                 "into program memory area", decl);
        }
      else if (NULL_TREE == DECL_INITIAL (decl))
        {
          // Don't warn for (implicit) aliases like in PR80462.
          tree asmname = DECL_ASSEMBLER_NAME (decl);
          varpool_node *node = varpool_node::get_for_asmname (asmname);
          bool alias_p = node && node->alias;

          if (!alias_p)
            warning (OPT_Wuninitialized, "uninitialized variable %q+D put "
                     "into program memory area", decl);
        }
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* Encode address space and io / address attributes into the
     SYMBOL_REF flags of data symbols.  */
  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF_P (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      tree type = TREE_TYPE (decl);
      tree attr = DECL_ATTRIBUTES (decl);
      if (type == error_mark_node)
        return;

      addr_space_t as = TYPE_ADDR_SPACE (type);

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (!AVR_TINY
          && -1 == avr_progmem_p (decl, attr))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);

      tree io_low_attr = lookup_attribute ("io_low", attr);
      tree io_attr = lookup_attribute ("io", attr);

      /* Prefer an attribute that actually carries an address value.  */
      if (io_low_attr
          && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
        addr_attr = io_attr;
      else if (io_attr
               && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
        addr_attr = io_attr;
      else
        addr_attr = lookup_attribute ("address", attr);
      /* io_low, or an io address in the low range, enables SBI/CBI.  */
      if (io_low_attr
          || (io_attr && addr_attr
              && low_io_address_operand
                  (GEN_INT (TREE_INT_CST_LOW
                            (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
      if (io_attr || io_low_attr)
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
      /* If we have an (io) address attribute specification, but the variable
         is external, treat the address as only a tentative definition
         to be used to determine if an io port is in the lower range, but
         don't use the exact value for constant propagation.  */
      if (addr_attr && !DECL_EXTERNAL (decl))
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
    }

  /* Reduced Tiny: tag progmem and absdata symbols.  */
  if (AVR_TINY
      && decl
      && VAR_DECL == TREE_CODE (decl)
      && MEM_P (rtl)
      && SYMBOL_REF_P (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      bool progmem_p = -1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl));

      if (progmem_p)
        {
          // Tag symbols for addition of 0x4000 (avr_arch->flash_pm_offset).
          SYMBOL_REF_FLAGS (sym) |= AVR_SYMBOL_FLAG_TINY_PM;
        }

      if (avr_decl_absdata_p (decl, DECL_ATTRIBUTES (decl))
          || (TARGET_ABSDATA
              && !progmem_p
              && !addr_attr
              && avr_decl_maybe_lds_p (decl))
          || (addr_attr
              // If addr_attr is non-null, it has an argument.  Peek into it.
              && TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (addr_attr))) < 0xc0))
        {
          // May be accessed by LDS / STS.
          SYMBOL_REF_FLAGS (sym) |= AVR_SYMBOL_FLAG_TINY_ABSDATA;
        }

      if (progmem_p
          && avr_decl_absdata_p (decl, DECL_ATTRIBUTES (decl)))
        {
          error ("%q+D has incompatible attributes %qs and %qs",
                 decl, "progmem", "absdata");
        }
    }
}
10506
10507
/* Implement `TARGET_ASM_SELECT_SECTION' */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
         .progmem.data  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      /* Named section chosen by the default hook: rewrite a .rodata*
         name to the progmem prefix, keeping any suffix.  */
      if (sect->common.flags & SECTION_NAMED)
        {
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = avr_addrspace[as].section_name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              /* Clear SECTION_DECLARED so the new section gets emitted.  */
              return get_section (sname,
                                  sect->common.flags & ~SECTION_DECLARED,
                                  sect->named.decl);
            }
        }

      /* Otherwise use the (lazily created) unnamed progmem section for
         this address space.  */
      if (!progmem_section[as])
        {
          progmem_section[as]
            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                                   avr_addrspace[as].section_name);
        }

      return progmem_section[as];
    }

  return sect;
}
10554
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

/* The symbols defined here (__SP_H__, __SREG__, ...) are referenced by
   the assembler output templates elsewhere in this backend; the values
   are I/O addresses, i.e. RAM addresses with the SFR offset of the
   current architecture subtracted so they are usable with IN / OUT.  */

static void
avr_file_start (void)
{
  int sfr_offset = avr_arch->sfr_offset;

  /* Cores that gcc cannot generate code for can only be assembled.  */
  if (avr_arch->asm_only)
    error ("architecture %qs supported for assembler only", avr_mmcu);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.
     Registers that do not exist on the selected device (no SPH, no
     RAMPx) are simply not defined.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA || AVR_TINY)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  /* Fixed registers used implicitly by the code generator.  */
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
}
10588
10589
10590 /* Implement `TARGET_ASM_FILE_END'. */
10591 /* Outputs to the stdio stream FILE some
10592 appropriate text to go at the end of an assembler file. */
10593
10594 static void
10595 avr_file_end (void)
10596 {
10597 /* Output these only if there is anything in the
10598 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
10599 input section(s) - some code size can be saved by not
10600 linking in the initialization code from libgcc if resp.
10601 sections are empty, see PR18145. */
10602
10603 if (avr_need_copy_data_p)
10604 fputs (".global __do_copy_data\n", asm_out_file);
10605
10606 if (avr_need_clear_bss_p)
10607 fputs (".global __do_clear_bss\n", asm_out_file);
10608 }
10609
10610
10611 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
10612 /* Choose the order in which to allocate hard registers for
10613 pseudo-registers local to a basic block.
10614
10615 Store the desired register order in the array `reg_alloc_order'.
10616 Element 0 should be the register to allocate first; element 1, the
10617 next register; and so on. */
10618
10619 void
10620 avr_adjust_reg_alloc_order (void)
10621 {
10622 static const int order_0[] =
10623 {
10624 24, 25,
10625 18, 19, 20, 21, 22, 23,
10626 30, 31,
10627 26, 27, 28, 29,
10628 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10629 0, 1,
10630 32, 33, 34, 35
10631 };
10632 static const int tiny_order_0[] = {
10633 20, 21,
10634 22, 23,
10635 24, 25,
10636 30, 31,
10637 26, 27,
10638 28, 29,
10639 19, 18,
10640 16, 17,
10641 32, 33, 34, 35,
10642 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
10643 };
10644 static const int order_1[] =
10645 {
10646 18, 19, 20, 21, 22, 23, 24, 25,
10647 30, 31,
10648 26, 27, 28, 29,
10649 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10650 0, 1,
10651 32, 33, 34, 35
10652 };
10653 static const int tiny_order_1[] = {
10654 22, 23,
10655 24, 25,
10656 30, 31,
10657 26, 27,
10658 28, 29,
10659 21, 20, 19, 18,
10660 16, 17,
10661 32, 33, 34, 35,
10662 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
10663 };
10664 static const int order_2[] =
10665 {
10666 25, 24, 23, 22, 21, 20, 19, 18,
10667 30, 31,
10668 26, 27, 28, 29,
10669 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10670 1, 0,
10671 32, 33, 34, 35
10672 };
10673
10674 /* Select specific register allocation order.
10675 Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
10676 so different allocation order should be used. */
10677
10678 const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
10679 : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
10680 : (AVR_TINY ? tiny_order_0 : order_0));
10681
10682 for (size_t i = 0; i < ARRAY_SIZE (order_0); ++i)
10683 reg_alloc_order[i] = order[i];
10684 }
10685
10686
10687 /* Implement `TARGET_REGISTER_MOVE_COST' */
10688
10689 static int
10690 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
10691 reg_class_t from, reg_class_t to)
10692 {
10693 return (from == STACK_REG ? 6
10694 : to == STACK_REG ? 12
10695 : 2);
10696 }
10697
10698
10699 /* Implement `TARGET_MEMORY_MOVE_COST' */
10700
10701 static int
10702 avr_memory_move_cost (machine_mode mode,
10703 reg_class_t rclass ATTRIBUTE_UNUSED,
10704 bool in ATTRIBUTE_UNUSED)
10705 {
10706 return (mode == QImode ? 2
10707 : mode == HImode ? 4
10708 : mode == SImode ? 8
10709 : mode == SFmode ? 8
10710 : 16);
10711 }
10712
10713
10714 /* Cost for mul highpart. X is a LSHIFTRT, i.e. the outer TRUNCATE is
10715 already stripped off. */
10716
10717 static int
10718 avr_mul_highpart_cost (rtx x, int)
10719 {
10720 if (AVR_HAVE_MUL
10721 && LSHIFTRT == GET_CODE (x)
10722 && MULT == GET_CODE (XEXP (x, 0))
10723 && CONST_INT_P (XEXP (x, 1)))
10724 {
10725 // This is the wider mode.
10726 machine_mode mode = GET_MODE (x);
10727
10728 // The middle-end might still have PR81444, i.e. it is calling the cost
10729 // functions with strange modes. Fix this now by also considering
10730 // PSImode (should actually be SImode instead).
10731 if (HImode == mode || PSImode == mode || SImode == mode)
10732 {
10733 return COSTS_N_INSNS (2);
10734 }
10735 }
10736
10737 return 10000;
10738 }
10739
10740
10741 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
10742 cost of an RTX operand given its context. X is the rtx of the
10743 operand, MODE is its mode, and OUTER is the rtx_code of this
10744 operand's parent operator. */
10745
10746 static int
10747 avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
10748 int opno, bool speed)
10749 {
10750 enum rtx_code code = GET_CODE (x);
10751 int total;
10752
10753 switch (code)
10754 {
10755 case REG:
10756 case SUBREG:
10757 return 0;
10758
10759 case CONST_INT:
10760 case CONST_FIXED:
10761 case CONST_DOUBLE:
10762 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
10763
10764 default:
10765 break;
10766 }
10767
10768 total = 0;
10769 avr_rtx_costs (x, mode, outer, opno, &total, speed);
10770 return total;
10771 }
10772
/* Worker function for AVR backend's rtx_cost function.
   X is rtx expression whose cost is to be calculated.
   Return true if the complete cost has been computed.
   Return false if subexpressions should be scanned.
   In either case, *TOTAL contains the cost result.

   Costs are expressed in COSTS_N_INSNS units and generally scale with
   GET_MODE_SIZE, reflecting that wide operations need one 8-bit insn per
   byte.  Many constants below are hand-tuned per shift amount / operand
   pattern; the !speed values favor small code, the speed values model
   cycle counts of the expanded sequences.  */

static bool
avr_rtx_costs_1 (rtx x, machine_mode mode, int outer_code,
                 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
      /* One insn per byte accessed.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        case HImode:
        case PSImode:
        case SImode:
          /* Multi-byte negation: cost grows with operand width.  */
          *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ABS:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case NOT:
      /* One insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ZERO_EXTEND:
      /* Cost is the number of bytes added by the extension.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                                      code, 0, speed);
      return true;

    case SIGN_EXTEND:
      /* Like ZERO_EXTEND plus 2 for producing the sign bytes.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                                      code, 0, speed);
      return true;

    case PLUS:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL
              && MULT == GET_CODE (XEXP (x, 0))
              && register_operand (XEXP (x, 1), QImode))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 4 : 3);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          *total = COSTS_N_INSNS (1);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          break;

        case HImode:
          if (AVR_HAVE_MUL
              && (MULT == GET_CODE (XEXP (x, 0))
                  || ASHIFT == GET_CODE (XEXP (x, 0)))
              && register_operand (XEXP (x, 1), HImode)
              && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
                  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 5 : 4);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (2);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          /* Small const addends fit ADIW/SBIW-style adjustments and are
             cheaper.  */
          else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (2);
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (3);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
            *total = COSTS_N_INSNS (2);
          else
            *total = COSTS_N_INSNS (3);
          break;

        case SImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (4);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (4);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case MINUS:
      if (AVR_HAVE_MUL
          && QImode == mode
          && register_operand (XEXP (x, 0), QImode)
          && MULT == GET_CODE (XEXP (x, 1)))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 4 : 3);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      if (AVR_HAVE_MUL
          && HImode == mode
          && register_operand (XEXP (x, 0), HImode)
          && (MULT == GET_CODE (XEXP (x, 1))
              || ASHIFT == GET_CODE (XEXP (x, 1)))
          && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
              || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 5 : 4);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      /* Plain MINUS is costed like AND / IOR below.  */
      /* FALLTHRU */
    case AND:
    case IOR:
      if (IOR == code
          && HImode == mode
          && ASHIFT == GET_CODE (XEXP (x, 0)))
        {
          *total = COSTS_N_INSNS (2);
          // Just a rough estimate.  If we see no sign- or zero-extend,
          // then increase the cost a little bit.
          if (REG_P (XEXP (XEXP (x, 0), 0)))
            *total += COSTS_N_INSNS (1);
          if (REG_P (XEXP (x, 1)))
            *total += COSTS_N_INSNS (1);
          return true;
        }
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      if (!CONST_INT_P (XEXP (x, 1)))
        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case XOR:
      /* One insn per byte; no immediate form, so both operands are
         costed.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case MULT:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL)
            *total = COSTS_N_INSNS (!speed ? 3 : 4);
          else if (!speed)
            /* Without hardware MUL this becomes a libgcc call.  */
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            {
              rtx op0 = XEXP (x, 0);
              rtx op1 = XEXP (x, 1);
              enum rtx_code code0 = GET_CODE (op0);
              enum rtx_code code1 = GET_CODE (op1);
              /* exN: operand N is a widening (sign/zero) extension, which
                 maps to a cheaper widening-multiply sequence.  */
              bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
              bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;

              if (ex0
                  && (u8_operand (op1, HImode)
                      || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
              if (ex0
                  && register_operand (op1, HImode))
                {
                  *total = COSTS_N_INSNS (!speed ? 5 : 8);
                  return true;
                }
              else if (ex0 || ex1)
                {
                  *total = COSTS_N_INSNS (!speed ? 3 : 5);
                  return true;
                }
              else if (register_operand (op0, HImode)
                       && (u8_operand (op1, HImode)
                           || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 6 : 9);
                  return true;
                }
              else
                /* Full 16 x 16 multiply.  */
                *total = COSTS_N_INSNS (!speed ? 7 : 10);
            }
          else if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case PSImode:
          if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            *total = 10;
          break;

        case SImode:
        case DImode:
          if (AVR_HAVE_MUL)
            {
              if (!speed)
                {
                  /* Add some additional costs besides CALL like moves etc.  */

                  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
                }
              else
                {
                  /* Just a rough estimate.  Even with -O2 we don't want bulky
                     code expanded inline.  */

                  *total = COSTS_N_INSNS (25);
                }
            }
          else
            {
              if (speed)
                *total = COSTS_N_INSNS (300);
              else
                /* Add some additional costs besides CALL like moves etc.  */
                *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
            }

          if (mode == DImode)
            *total *= 2;

          return true;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* Always a library call on AVR.  */
      if (!speed)
        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
        *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* For div/mod with const-int divisor we have at least the cost of
         loading the divisor. */
      if (CONST_INT_P (XEXP (x, 1)))
        *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penalty for clobbering and moving around registers */
      *total += COSTS_N_INSNS (2);
      return true;

    case ROTATE:
      /* Only the byte/nibble-swap rotate amounts are costed; for other
         amounts *total is deliberately left at whatever the caller
         initialized it to.  */
      switch (mode)
        {
        case QImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
            *total = COSTS_N_INSNS (1);

          break;

        case HImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
            *total = COSTS_N_INSNS (3);

          break;

        case SImode:
          if (CONST_INT_P (XEXP (x, 1)))
            switch (INTVAL (XEXP (x, 1)))
              {
              case 8:
              case 24:
                *total = COSTS_N_INSNS (5);
                break;
              case 16:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
                break;
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFT:
      /* Shift costs are per-amount lookup tables; variable shift counts
         expand into loops and are expensive (especially for speed).  */
      switch (mode)
        {
        case QImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            {
              if (const_2_to_7_operand (XEXP (x, 1), HImode)
                  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
                      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
                {
                  /* Shift of an extended value: can use MUL-based shift.  */
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
            }

          if (const1_rtx == (XEXP (x, 1))
              && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }

          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 3:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
              case 12:
                *total = COSTS_N_INSNS (5);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              case 5:
                *total = COSTS_N_INSNS (!speed ? 5 : 10);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 24:
                *total = COSTS_N_INSNS (3);
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFTRT:
      switch (mode)
        {
        case QImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 6)
                *total = COSTS_N_INSNS (4);
              else if (val == 7)
                *total = COSTS_N_INSNS (2);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (2);
                break;
              case 15:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 7:
              case 8:
              case 9:
                *total = COSTS_N_INSNS (4);
                break;
              case 10:
              case 14:
                *total = COSTS_N_INSNS (5);
                break;
              case 11:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 12:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 6:
              case 13:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (3);
                break;
              case 16:
              case 8:
                *total = COSTS_N_INSNS (5);
                break;
              case 23:
                *total = COSTS_N_INSNS (4);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 31:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case LSHIFTRT:
      /* A LSHIFTRT inside a TRUNCATE is the highpart of a widening
         multiply; see avr_mul_highpart_cost.  */
      if (outer_code == TRUNCATE)
        {
          *total = avr_mul_highpart_cost (x, speed);
          return true;
        }

      switch (mode)
        {
        case QImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
                *total = COSTS_N_INSNS (5);
                break;
              case 3:
              case 12:
              case 13:
              case 14:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 5:
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case COMPARE:
      /* Comparison cost scales with the operand mode; a nonzero constant
         second operand must additionally be loaded for wider modes.  */
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          *total = COSTS_N_INSNS (1);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), QImode, code,
                                            1, speed);
          break;

        case HImode:
          *total = COSTS_N_INSNS (2);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), HImode, code,
                                            1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (1);
          break;

        case PSImode:
          *total = COSTS_N_INSNS (3);
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (2);
          break;

        case SImode:
          *total = COSTS_N_INSNS (4);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), SImode, code,
                                            1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (3);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                                      code, 0, speed);
      return true;

    case TRUNCATE:
      /* TRUNCATE of LSHIFTRT: highpart of a widening multiply.  */
      if (LSHIFTRT == GET_CODE (XEXP (x, 0)))
        {
          *total = avr_mul_highpart_cost (XEXP (x, 0), speed);
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
11629
11630
11631 /* Implement `TARGET_RTX_COSTS'. */
11632
11633 static bool
11634 avr_rtx_costs (rtx x, machine_mode mode, int outer_code,
11635 int opno, int *total, bool speed)
11636 {
11637 bool done = avr_rtx_costs_1 (x, mode, outer_code, opno, total, speed);
11638
11639 if (avr_log.rtx_costs)
11640 {
11641 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
11642 done, speed ? "speed" : "size", *total, outer_code, x);
11643 }
11644
11645 return done;
11646 }
11647
11648
11649 /* Implement `TARGET_ADDRESS_COST'. */
11650
11651 static int
11652 avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
11653 addr_space_t as ATTRIBUTE_UNUSED,
11654 bool speed ATTRIBUTE_UNUSED)
11655 {
11656 int cost = 4;
11657
11658 if (GET_CODE (x) == PLUS
11659 && CONST_INT_P (XEXP (x, 1))
11660 && (REG_P (XEXP (x, 0))
11661 || SUBREG_P (XEXP (x, 0))))
11662 {
11663 if (INTVAL (XEXP (x, 1)) > MAX_LD_OFFSET(mode))
11664 cost = 18;
11665 }
11666 else if (CONSTANT_ADDRESS_P (x))
11667 {
11668 if (io_address_operand (x, QImode))
11669 cost = 2;
11670
11671 if (AVR_TINY
11672 && avr_address_tiny_absdata_p (x, QImode))
11673 cost = 2;
11674 }
11675
11676 if (avr_log.address_cost)
11677 avr_edump ("\n%?: %d = %r\n", cost, x);
11678
11679 return cost;
11680 }
11681
11682 /* Test for extra memory constraint 'Q'.
11683 It's a memory address based on Y or Z pointer with valid displacement. */
11684
11685 int
11686 extra_constraint_Q (rtx x)
11687 {
11688 int ok = 0;
11689 rtx plus = XEXP (x, 0);
11690
11691 if (GET_CODE (plus) == PLUS
11692 && REG_P (XEXP (plus, 0))
11693 && CONST_INT_P (XEXP (plus, 1))
11694 && (INTVAL (XEXP (plus, 1))
11695 <= MAX_LD_OFFSET (GET_MODE (x))))
11696 {
11697 rtx xx = XEXP (plus, 0);
11698 int regno = REGNO (xx);
11699
11700 ok = (/* allocate pseudos */
11701 regno >= FIRST_PSEUDO_REGISTER
11702 /* strictly check */
11703 || regno == REG_Z || regno == REG_Y
11704 /* XXX frame & arg pointer checks */
11705 || xx == frame_pointer_rtx
11706 || xx == arg_pointer_rtx);
11707
11708 if (avr_log.constraints)
11709 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
11710 ok, reload_completed, reload_in_progress, x);
11711 }
11712
11713 return ok;
11714 }
11715
11716 /* Convert condition code CONDITION to the valid AVR condition code. */
11717
11718 RTX_CODE
11719 avr_normalize_condition (RTX_CODE condition)
11720 {
11721 switch (condition)
11722 {
11723 case GT:
11724 return GE;
11725 case GTU:
11726 return GEU;
11727 case LE:
11728 return LT;
11729 case LEU:
11730 return LTU;
11731 default:
11732 gcc_unreachable ();
11733 }
11734 }
11735
11736 /* Helper function for `avr_reorg'. */
11737
11738 static rtx
11739 avr_compare_pattern (rtx_insn *insn)
11740 {
11741 rtx pattern = single_set (insn);
11742
11743 if (pattern
11744 && NONJUMP_INSN_P (insn)
11745 && SET_DEST (pattern) == cc0_rtx
11746 && GET_CODE (SET_SRC (pattern)) == COMPARE)
11747 {
11748 machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
11749 machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
11750
11751 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
11752 They must not be swapped, thus skip them. */
11753
11754 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
11755 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
11756 return pattern;
11757 }
11758
11759 return NULL_RTX;
11760 }
11761
11762 /* Helper function for `avr_reorg'. */
11763
11764 /* Expansion of switch/case decision trees leads to code like
11765
11766 cc0 = compare (Reg, Num)
11767 if (cc0 == 0)
11768 goto L1
11769
11770 cc0 = compare (Reg, Num)
11771 if (cc0 > 0)
11772 goto L2
11773
11774 The second comparison is superfluous and can be deleted.
11775 The second jump condition can be transformed from a
11776 "difficult" one to a "simple" one because "cc0 > 0" and
11777 "cc0 >= 0" will have the same effect here.
11778
   This function relies on the way switch/case is being expanded
11780 as binary decision tree. For example code see PR 49903.
11781
11782 Return TRUE if optimization performed.
11783 Return FALSE if nothing changed.
11784
11785 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
11786
11787 We don't want to do this in text peephole because it is
11788 tedious to work out jump offsets there and the second comparison
11789 might have been transormed by `avr_reorg'.
11790
11791 RTL peephole won't do because peephole2 does not scan across
11792 basic blocks. */
11793
11794 static bool
11795 avr_reorg_remove_redundant_compare (rtx_insn *insn1)
11796 {
11797 rtx comp1, ifelse1, xcond1;
11798 rtx_insn *branch1;
11799 rtx comp2, ifelse2, xcond2;
11800 rtx_insn *branch2, *insn2;
11801 enum rtx_code code;
11802 rtx_insn *jump;
11803 rtx target, cond;
11804
11805 /* Look out for: compare1 - branch1 - compare2 - branch2 */
11806
11807 branch1 = next_nonnote_nondebug_insn (insn1);
11808 if (!branch1 || !JUMP_P (branch1))
11809 return false;
11810
11811 insn2 = next_nonnote_nondebug_insn (branch1);
11812 if (!insn2 || !avr_compare_pattern (insn2))
11813 return false;
11814
11815 branch2 = next_nonnote_nondebug_insn (insn2);
11816 if (!branch2 || !JUMP_P (branch2))
11817 return false;
11818
11819 comp1 = avr_compare_pattern (insn1);
11820 comp2 = avr_compare_pattern (insn2);
11821 xcond1 = single_set (branch1);
11822 xcond2 = single_set (branch2);
11823
11824 if (!comp1 || !comp2
11825 || !rtx_equal_p (comp1, comp2)
11826 || !xcond1 || SET_DEST (xcond1) != pc_rtx
11827 || !xcond2 || SET_DEST (xcond2) != pc_rtx
11828 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
11829 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
11830 {
11831 return false;
11832 }
11833
11834 comp1 = SET_SRC (comp1);
11835 ifelse1 = SET_SRC (xcond1);
11836 ifelse2 = SET_SRC (xcond2);
11837
11838 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
11839
11840 if (EQ != GET_CODE (XEXP (ifelse1, 0))
11841 || !REG_P (XEXP (comp1, 0))
11842 || !CONST_INT_P (XEXP (comp1, 1))
11843 || XEXP (ifelse1, 2) != pc_rtx
11844 || XEXP (ifelse2, 2) != pc_rtx
11845 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
11846 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
11847 || !COMPARISON_P (XEXP (ifelse2, 0))
11848 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
11849 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
11850 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
11851 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
11852 {
11853 return false;
11854 }
11855
11856 /* We filtered the insn sequence to look like
11857
11858 (set (cc0)
11859 (compare (reg:M N)
11860 (const_int VAL)))
11861 (set (pc)
11862 (if_then_else (eq (cc0)
11863 (const_int 0))
11864 (label_ref L1)
11865 (pc)))
11866
11867 (set (cc0)
11868 (compare (reg:M N)
11869 (const_int VAL)))
11870 (set (pc)
11871 (if_then_else (CODE (cc0)
11872 (const_int 0))
11873 (label_ref L2)
11874 (pc)))
11875 */
11876
11877 code = GET_CODE (XEXP (ifelse2, 0));
11878
11879 /* Map GT/GTU to GE/GEU which is easier for AVR.
11880 The first two instructions compare/branch on EQ
11881 so we may replace the difficult
11882
11883 if (x == VAL) goto L1;
11884 if (x > VAL) goto L2;
11885
11886 with easy
11887
11888 if (x == VAL) goto L1;
11889 if (x >= VAL) goto L2;
11890
11891 Similarly, replace LE/LEU by LT/LTU. */
11892
11893 switch (code)
11894 {
11895 case EQ:
11896 case LT: case LTU:
11897 case GE: case GEU:
11898 break;
11899
11900 case LE: case LEU:
11901 case GT: case GTU:
11902 code = avr_normalize_condition (code);
11903 break;
11904
11905 default:
11906 return false;
11907 }
11908
11909 /* Wrap the branches into UNSPECs so they won't be changed or
11910 optimized in the remainder. */
11911
11912 target = XEXP (XEXP (ifelse1, 1), 0);
11913 cond = XEXP (ifelse1, 0);
11914 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
11915
11916 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
11917
11918 target = XEXP (XEXP (ifelse2, 1), 0);
11919 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11920 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
11921
11922 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
11923
11924 /* The comparisons in insn1 and insn2 are exactly the same;
11925 insn2 is superfluous so delete it. */
11926
11927 delete_insn (insn2);
11928 delete_insn (branch1);
11929 delete_insn (branch2);
11930
11931 return true;
11932 }
11933
11934
11935 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
11936 /* Optimize conditional jumps. */
11937
11938 static void
11939 avr_reorg (void)
11940 {
11941 rtx_insn *insn = get_insns();
11942
11943 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
11944 {
11945 rtx pattern = avr_compare_pattern (insn);
11946
11947 if (!pattern)
11948 continue;
11949
11950 if (optimize
11951 && avr_reorg_remove_redundant_compare (insn))
11952 {
11953 continue;
11954 }
11955
11956 if (compare_diff_p (insn))
11957 {
11958 /* Now we work under compare insn with difficult branch. */
11959
11960 rtx_insn *next = next_real_insn (insn);
11961 rtx pat = PATTERN (next);
11962
11963 pattern = SET_SRC (pattern);
11964
11965 if (true_regnum (XEXP (pattern, 0)) >= 0
11966 && true_regnum (XEXP (pattern, 1)) >= 0)
11967 {
11968 rtx x = XEXP (pattern, 0);
11969 rtx src = SET_SRC (pat);
11970 rtx t = XEXP (src, 0);
11971 PUT_CODE (t, swap_condition (GET_CODE (t)));
11972 XEXP (pattern, 0) = XEXP (pattern, 1);
11973 XEXP (pattern, 1) = x;
11974 INSN_CODE (next) = -1;
11975 }
11976 else if (true_regnum (XEXP (pattern, 0)) >= 0
11977 && XEXP (pattern, 1) == const0_rtx)
11978 {
11979 /* This is a tst insn, we can reverse it. */
11980 rtx src = SET_SRC (pat);
11981 rtx t = XEXP (src, 0);
11982
11983 PUT_CODE (t, swap_condition (GET_CODE (t)));
11984 XEXP (pattern, 1) = XEXP (pattern, 0);
11985 XEXP (pattern, 0) = const0_rtx;
11986 INSN_CODE (next) = -1;
11987 INSN_CODE (insn) = -1;
11988 }
11989 else if (true_regnum (XEXP (pattern, 0)) >= 0
11990 && CONST_INT_P (XEXP (pattern, 1)))
11991 {
11992 rtx x = XEXP (pattern, 1);
11993 rtx src = SET_SRC (pat);
11994 rtx t = XEXP (src, 0);
11995 machine_mode mode = GET_MODE (XEXP (pattern, 0));
11996
11997 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
11998 {
11999 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
12000 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
12001 INSN_CODE (next) = -1;
12002 INSN_CODE (insn) = -1;
12003 }
12004 }
12005 }
12006 }
12007 }
12008
12009 /* Returns register number for function return value.*/
12010
12011 static inline unsigned int
12012 avr_ret_register (void)
12013 {
12014 return 24;
12015 }
12016
12017
12018 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
12019
12020 static bool
12021 avr_function_value_regno_p (const unsigned int regno)
12022 {
12023 return (regno == avr_ret_register ());
12024 }
12025
12026
12027 /* Implement `TARGET_LIBCALL_VALUE'. */
12028 /* Create an RTX representing the place where a
12029 library function returns a value of mode MODE. */
12030
12031 static rtx
12032 avr_libcall_value (machine_mode mode,
12033 const_rtx func ATTRIBUTE_UNUSED)
12034 {
12035 int offs = GET_MODE_SIZE (mode);
12036
12037 if (offs <= 4)
12038 offs = (offs + 1) & ~1;
12039
12040 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
12041 }
12042
12043
12044 /* Implement `TARGET_FUNCTION_VALUE'. */
12045 /* Create an RTX representing the place where a
12046 function returns a value of data type VALTYPE. */
12047
12048 static rtx
12049 avr_function_value (const_tree type,
12050 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
12051 bool outgoing ATTRIBUTE_UNUSED)
12052 {
12053 unsigned int offs;
12054
12055 if (TYPE_MODE (type) != BLKmode)
12056 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
12057
12058 offs = int_size_in_bytes (type);
12059 if (offs < 2)
12060 offs = 2;
12061 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
12062 offs = GET_MODE_SIZE (SImode);
12063 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
12064 offs = GET_MODE_SIZE (DImode);
12065
12066 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
12067 }
12068
12069 int
12070 test_hard_reg_class (enum reg_class rclass, rtx x)
12071 {
12072 int regno = true_regnum (x);
12073 if (regno < 0)
12074 return 0;
12075
12076 if (TEST_HARD_REG_CLASS (rclass, regno))
12077 return 1;
12078
12079 return 0;
12080 }
12081
12082
12083 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
12084 and thus is suitable to be skipped by CPSE, SBRC, etc. */
12085
12086 static bool
12087 avr_2word_insn_p (rtx_insn *insn)
12088 {
12089 if (TARGET_SKIP_BUG
12090 || !insn
12091 || 2 != get_attr_length (insn))
12092 {
12093 return false;
12094 }
12095
12096 switch (INSN_CODE (insn))
12097 {
12098 default:
12099 return false;
12100
12101 case CODE_FOR_movqi_insn:
12102 case CODE_FOR_movuqq_insn:
12103 case CODE_FOR_movqq_insn:
12104 {
12105 rtx set = single_set (insn);
12106 rtx src = SET_SRC (set);
12107 rtx dest = SET_DEST (set);
12108
12109 /* Factor out LDS and STS from movqi_insn. */
12110
12111 if (MEM_P (dest)
12112 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
12113 {
12114 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
12115 }
12116 else if (REG_P (dest)
12117 && MEM_P (src))
12118 {
12119 return CONSTANT_ADDRESS_P (XEXP (src, 0));
12120 }
12121
12122 return false;
12123 }
12124
12125 case CODE_FOR_call_insn:
12126 case CODE_FOR_call_value_insn:
12127 return true;
12128 }
12129 }
12130
12131
12132 int
12133 jump_over_one_insn_p (rtx_insn *insn, rtx dest)
12134 {
12135 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
12136 ? XEXP (dest, 0)
12137 : dest);
12138 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
12139 int dest_addr = INSN_ADDRESSES (uid);
12140 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
12141
12142 return (jump_offset == 1
12143 || (jump_offset == 2
12144 && avr_2word_insn_p (next_active_insn (insn))));
12145 }
12146
12147
12148 /* Worker function for `HARD_REGNO_MODE_OK'. */
12149 /* Returns 1 if a value of mode MODE can be stored starting with hard
12150 register number REGNO. On the enhanced core, anything larger than
12151 1 byte must start in even numbered register for "movw" to work
12152 (this way we don't have to check for odd registers everywhere). */
12153
12154 int
12155 avr_hard_regno_mode_ok (int regno, machine_mode mode)
12156 {
12157 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
12158 Disallowing QI et al. in these regs might lead to code like
12159 (set (subreg:QI (reg:HI 28) n) ...)
12160 which will result in wrong code because reload does not
12161 handle SUBREGs of hard regsisters like this.
12162 This could be fixed in reload. However, it appears
12163 that fixing reload is not wanted by reload people. */
12164
12165 /* Any GENERAL_REGS register can hold 8-bit values. */
12166
12167 if (GET_MODE_SIZE (mode) == 1)
12168 return 1;
12169
12170 /* FIXME: Ideally, the following test is not needed.
12171 However, it turned out that it can reduce the number
12172 of spill fails. AVR and it's poor endowment with
12173 address registers is extreme stress test for reload. */
12174
12175 if (GET_MODE_SIZE (mode) >= 4
12176 && regno >= REG_X)
12177 return 0;
12178
12179 /* All modes larger than 8 bits should start in an even register. */
12180
12181 return !(regno & 1);
12182 }
12183
12184
12185 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
12186
12187 int
12188 avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
12189 {
12190 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
12191 represent valid hard registers like, e.g. HI:29. Returning TRUE
12192 for such registers can lead to performance degradation as mentioned
12193 in PR53595. Thus, report invalid hard registers as FALSE. */
12194
12195 if (!avr_hard_regno_mode_ok (regno, mode))
12196 return 0;
12197
12198 /* Return true if any of the following boundaries is crossed:
12199 17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30. */
12200
12201 return ((regno <= LAST_CALLEE_SAVED_REG
12202 && regno + GET_MODE_SIZE (mode) > 1 + LAST_CALLEE_SAVED_REG)
12203 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
12204 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
12205 }
12206
12207
12208 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
12209
12210 enum reg_class
12211 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
12212 addr_space_t as, RTX_CODE outer_code,
12213 RTX_CODE index_code ATTRIBUTE_UNUSED)
12214 {
12215 if (!ADDR_SPACE_GENERIC_P (as))
12216 {
12217 return POINTER_Z_REGS;
12218 }
12219
12220 if (!avr_strict_X)
12221 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
12222
12223 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
12224 }
12225
12226
12227 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
12228
12229 bool
12230 avr_regno_mode_code_ok_for_base_p (int regno,
12231 machine_mode mode ATTRIBUTE_UNUSED,
12232 addr_space_t as ATTRIBUTE_UNUSED,
12233 RTX_CODE outer_code,
12234 RTX_CODE index_code ATTRIBUTE_UNUSED)
12235 {
12236 bool ok = false;
12237
12238 if (!ADDR_SPACE_GENERIC_P (as))
12239 {
12240 if (regno < FIRST_PSEUDO_REGISTER
12241 && regno == REG_Z)
12242 {
12243 return true;
12244 }
12245
12246 if (reg_renumber)
12247 {
12248 regno = reg_renumber[regno];
12249
12250 if (regno == REG_Z)
12251 {
12252 return true;
12253 }
12254 }
12255
12256 return false;
12257 }
12258
12259 if (regno < FIRST_PSEUDO_REGISTER
12260 && (regno == REG_X
12261 || regno == REG_Y
12262 || regno == REG_Z
12263 || regno == ARG_POINTER_REGNUM))
12264 {
12265 ok = true;
12266 }
12267 else if (reg_renumber)
12268 {
12269 regno = reg_renumber[regno];
12270
12271 if (regno == REG_X
12272 || regno == REG_Y
12273 || regno == REG_Z
12274 || regno == ARG_POINTER_REGNUM)
12275 {
12276 ok = true;
12277 }
12278 }
12279
12280 if (avr_strict_X
12281 && PLUS == outer_code
12282 && regno == REG_X)
12283 {
12284 ok = false;
12285 }
12286
12287 return ok;
12288 }
12289
12290
12291 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
12292 /* Set 32-bit register OP[0] to compile-time constant OP[1].
12293 CLOBBER_REG is a QI clobber register or NULL_RTX.
12294 LEN == NULL: output instructions.
12295 LEN != NULL: set *LEN to the length of the instruction sequence
12296 (in words) printed with LEN = NULL.
12297 If CLEAR_P is true, OP[0] had been cleard to Zero already.
12298 If CLEAR_P is false, nothing is known about OP[0].
12299
12300 The effect on cc0 is as follows:
12301
12302 Load 0 to any register except ZERO_REG : NONE
12303 Load ld register with any value : NONE
12304 Anything else: : CLOBBER */
12305
12306 static void
12307 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
12308 {
12309 rtx src = op[1];
12310 rtx dest = op[0];
12311 rtx xval, xdest[4];
12312 int ival[4];
12313 int clobber_val = 1234;
12314 bool cooked_clobber_p = false;
12315 bool set_p = false;
12316 machine_mode mode = GET_MODE (dest);
12317 int n_bytes = GET_MODE_SIZE (mode);
12318
12319 gcc_assert (REG_P (dest)
12320 && CONSTANT_P (src));
12321
12322 if (len)
12323 *len = 0;
12324
12325 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
12326 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
12327
12328 if (REGNO (dest) < 16
12329 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
12330 {
12331 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
12332 }
12333
12334 /* We might need a clobber reg but don't have one. Look at the value to
12335 be loaded more closely. A clobber is only needed if it is a symbol
12336 or contains a byte that is neither 0, -1 or a power of 2. */
12337
12338 if (NULL_RTX == clobber_reg
12339 && !test_hard_reg_class (LD_REGS, dest)
12340 && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
12341 || !avr_popcount_each_byte (src, n_bytes,
12342 (1 << 0) | (1 << 1) | (1 << 8))))
12343 {
12344 /* We have no clobber register but need one. Cook one up.
12345 That's cheaper than loading from constant pool. */
12346
12347 cooked_clobber_p = true;
12348 clobber_reg = all_regs_rtx[REG_Z + 1];
12349 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
12350 }
12351
12352 /* Now start filling DEST from LSB to MSB. */
12353
12354 for (int n = 0; n < n_bytes; n++)
12355 {
12356 int ldreg_p;
12357 bool done_byte = false;
12358 rtx xop[3];
12359
12360 /* Crop the n-th destination byte. */
12361
12362 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
12363 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
12364
12365 if (!CONST_INT_P (src)
12366 && !CONST_FIXED_P (src)
12367 && !CONST_DOUBLE_P (src))
12368 {
12369 static const char* const asm_code[][2] =
12370 {
12371 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
12372 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
12373 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
12374 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
12375 };
12376
12377 xop[0] = xdest[n];
12378 xop[1] = src;
12379 xop[2] = clobber_reg;
12380
12381 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
12382
12383 continue;
12384 }
12385
12386 /* Crop the n-th source byte. */
12387
12388 xval = simplify_gen_subreg (QImode, src, mode, n);
12389 ival[n] = INTVAL (xval);
12390
12391 /* Look if we can reuse the low word by means of MOVW. */
12392
12393 if (n == 2
12394 && n_bytes >= 4
12395 && AVR_HAVE_MOVW)
12396 {
12397 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
12398 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
12399
12400 if (INTVAL (lo16) == INTVAL (hi16))
12401 {
12402 if (0 != INTVAL (lo16)
12403 || !clear_p)
12404 {
12405 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
12406 }
12407
12408 break;
12409 }
12410 }
12411
12412 /* Don't use CLR so that cc0 is set as expected. */
12413
12414 if (ival[n] == 0)
12415 {
12416 if (!clear_p)
12417 avr_asm_len (ldreg_p ? "ldi %0,0"
12418 : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
12419 : "mov %0,__zero_reg__",
12420 &xdest[n], len, 1);
12421 continue;
12422 }
12423
12424 if (clobber_val == ival[n]
12425 && REGNO (clobber_reg) == REGNO (xdest[n]))
12426 {
12427 continue;
12428 }
12429
12430 /* LD_REGS can use LDI to move a constant value */
12431
12432 if (ldreg_p)
12433 {
12434 xop[0] = xdest[n];
12435 xop[1] = xval;
12436 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
12437 continue;
12438 }
12439
12440 /* Try to reuse value already loaded in some lower byte. */
12441
12442 for (int j = 0; j < n; j++)
12443 if (ival[j] == ival[n])
12444 {
12445 xop[0] = xdest[n];
12446 xop[1] = xdest[j];
12447
12448 avr_asm_len ("mov %0,%1", xop, len, 1);
12449 done_byte = true;
12450 break;
12451 }
12452
12453 if (done_byte)
12454 continue;
12455
12456 /* Need no clobber reg for -1: Use CLR/DEC */
12457
12458 if (-1 == ival[n])
12459 {
12460 if (!clear_p)
12461 avr_asm_len ("clr %0", &xdest[n], len, 1);
12462
12463 avr_asm_len ("dec %0", &xdest[n], len, 1);
12464 continue;
12465 }
12466 else if (1 == ival[n])
12467 {
12468 if (!clear_p)
12469 avr_asm_len ("clr %0", &xdest[n], len, 1);
12470
12471 avr_asm_len ("inc %0", &xdest[n], len, 1);
12472 continue;
12473 }
12474
12475 /* Use T flag or INC to manage powers of 2 if we have
12476 no clobber reg. */
12477
12478 if (NULL_RTX == clobber_reg
12479 && single_one_operand (xval, QImode))
12480 {
12481 xop[0] = xdest[n];
12482 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
12483
12484 gcc_assert (constm1_rtx != xop[1]);
12485
12486 if (!set_p)
12487 {
12488 set_p = true;
12489 avr_asm_len ("set", xop, len, 1);
12490 }
12491
12492 if (!clear_p)
12493 avr_asm_len ("clr %0", xop, len, 1);
12494
12495 avr_asm_len ("bld %0,%1", xop, len, 1);
12496 continue;
12497 }
12498
12499 /* We actually need the LD_REGS clobber reg. */
12500
12501 gcc_assert (NULL_RTX != clobber_reg);
12502
12503 xop[0] = xdest[n];
12504 xop[1] = xval;
12505 xop[2] = clobber_reg;
12506 clobber_val = ival[n];
12507
12508 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
12509 "mov %0,%2", xop, len, 2);
12510 }
12511
12512 /* If we cooked up a clobber reg above, restore it. */
12513
12514 if (cooked_clobber_p)
12515 {
12516 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
12517 }
12518 }
12519
12520
12521 /* Reload the constant OP[1] into the HI register OP[0].
12522 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12523 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
12524 need a clobber reg or have to cook one up.
12525
12526 PLEN == NULL: Output instructions.
12527 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
12528 by the insns printed.
12529
12530 Return "". */
12531
12532 const char*
12533 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
12534 {
12535 output_reload_in_const (op, clobber_reg, plen, false);
12536 return "";
12537 }
12538
12539
12540 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
12541 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12542 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
12543 need a clobber reg or have to cook one up.
12544
12545 LEN == NULL: Output instructions.
12546
12547 LEN != NULL: Output nothing. Set *LEN to number of words occupied
12548 by the insns printed.
12549
12550 Return "". */
12551
12552 const char *
12553 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
12554 {
12555 if (AVR_HAVE_MOVW
12556 && !test_hard_reg_class (LD_REGS, op[0])
12557 && (CONST_INT_P (op[1])
12558 || CONST_FIXED_P (op[1])
12559 || CONST_DOUBLE_P (op[1])))
12560 {
12561 int len_clr, len_noclr;
12562
12563 /* In some cases it is better to clear the destination beforehand, e.g.
12564
12565 CLR R2 CLR R3 MOVW R4,R2 INC R2
12566
12567 is shorther than
12568
12569 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
12570
12571 We find it too tedious to work that out in the print function.
12572 Instead, we call the print function twice to get the lengths of
12573 both methods and use the shortest one. */
12574
12575 output_reload_in_const (op, clobber_reg, &len_clr, true);
12576 output_reload_in_const (op, clobber_reg, &len_noclr, false);
12577
12578 if (len_noclr - len_clr == 4)
12579 {
12580 /* Default needs 4 CLR instructions: clear register beforehand. */
12581
12582 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
12583 "mov %B0,__zero_reg__" CR_TAB
12584 "movw %C0,%A0", &op[0], len, 3);
12585
12586 output_reload_in_const (op, clobber_reg, len, true);
12587
12588 if (len)
12589 *len += 3;
12590
12591 return "";
12592 }
12593 }
12594
12595 /* Default: destination not pre-cleared. */
12596
12597 output_reload_in_const (op, clobber_reg, len, false);
12598 return "";
12599 }
12600
12601 const char*
12602 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
12603 {
12604 output_reload_in_const (op, clobber_reg, len, false);
12605 return "";
12606 }
12607
12608
12609 /* Worker function for `ASM_OUTPUT_ADDR_VEC'. */
12610 /* Emit jump tables out-of-line so that branches crossing the table
12611 get shorter offsets. If we have JUMP + CALL, then put the tables
12612 in a dedicated non-.text section so that CALLs get better chance to
12613 be relaxed to RCALLs.
12614
12615 We emit the tables by hand because `function_rodata_section' does not
12616 work as expected, cf. PR71151, and we do *NOT* want the table to be
12617 in .rodata, hence setting JUMP_TABLES_IN_TEXT_SECTION = 0 is of limited
12618 use; and setting it to 1 attributes table lengths to branch offsets...
12619 Moreover, fincal.c keeps switching section before each table entry
12620 which we find too fragile as to rely on section caching. */
12621
12622 void
12623 avr_output_addr_vec (rtx_insn *labl, rtx table)
12624 {
12625 FILE *stream = asm_out_file;
12626
12627 app_disable();
12628
12629 // Switch to appropriate (sub)section.
12630
12631 if (DECL_SECTION_NAME (current_function_decl)
12632 && symtab_node::get (current_function_decl)
12633 && ! symtab_node::get (current_function_decl)->implicit_section)
12634 {
12635 // .subsection will emit the code after the function and in the
12636 // section as chosen by the user.
12637
12638 switch_to_section (current_function_section ());
12639 fprintf (stream, "\t.subsection\t1\n");
12640 }
12641 else
12642 {
12643 // Since PR63223 there is no restriction where to put the table; it
12644 // may even reside above 128 KiB. We put it in a section as high as
12645 // possible and avoid progmem in order not to waste flash <= 64 KiB.
12646
12647 const char *sec_name = ".jumptables.gcc";
12648
12649 // The table belongs to its host function, therefore use fine
12650 // grained sections so that, if that function is removed by
12651 // --gc-sections, the child table(s) may also be removed. */
12652
12653 tree asm_name = DECL_ASSEMBLER_NAME (current_function_decl);
12654 const char *fname = IDENTIFIER_POINTER (asm_name);
12655 fname = targetm.strip_name_encoding (fname);
12656 sec_name = ACONCAT ((sec_name, ".", fname, NULL));
12657
12658 fprintf (stream, "\t.section\t%s,\"%s\",@progbits\n", sec_name,
12659 AVR_HAVE_JMP_CALL ? "a" : "ax");
12660 }
12661
12662 // Output the label that preceeds the table.
12663
12664 ASM_OUTPUT_ALIGN (stream, 1);
12665 targetm.asm_out.internal_label (stream, "L", CODE_LABEL_NUMBER (labl));
12666
12667 // Output the table's content.
12668
12669 int vlen = XVECLEN (table, 0);
12670
12671 for (int idx = 0; idx < vlen; idx++)
12672 {
12673 int value = CODE_LABEL_NUMBER (XEXP (XVECEXP (table, 0, idx), 0));
12674
12675 if (AVR_HAVE_JMP_CALL)
12676 fprintf (stream, "\t.word gs(.L%d)\n", value);
12677 else
12678 fprintf (stream, "\trjmp .L%d\n", value);
12679 }
12680
12681 // Switch back to original section. As we clobbered the section above,
12682 // forget the current section before switching back.
12683
12684 in_section = NULL;
12685 switch_to_section (current_function_section ());
12686 }
12687
12688
12689 /* Implement `TARGET_CONDITIONAL_REGISTER_USAGE'. */
12690
12691 static void
12692 avr_conditional_register_usage (void)
12693 {
12694 if (AVR_TINY)
12695 {
12696 const int tiny_reg_alloc_order[] = {
12697 24, 25,
12698 22, 23,
12699 30, 31,
12700 26, 27,
12701 28, 29,
12702 21, 20, 19, 18,
12703 16, 17,
12704 32, 33, 34, 35,
12705 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
12706 };
12707
12708 /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
12709 - R0-R15 are not available in Tiny Core devices
12710 - R16 and R17 are fixed registers. */
12711
12712 for (size_t i = 0; i <= 17; i++)
12713 {
12714 fixed_regs[i] = 1;
12715 call_used_regs[i] = 1;
12716 }
12717
12718 /* Set R18 to R21 as callee saved registers
12719 - R18, R19, R20 and R21 are the callee saved registers in
12720 Tiny Core devices */
12721
12722 for (size_t i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
12723 {
12724 call_used_regs[i] = 0;
12725 }
12726
12727 /* Update register allocation order for Tiny Core devices */
12728
12729 for (size_t i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
12730 {
12731 reg_alloc_order[i] = tiny_reg_alloc_order[i];
12732 }
12733
12734 CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
12735 CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
12736 }
12737 }
12738
12739 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
12740 /* Returns true if SCRATCH are safe to be allocated as a scratch
12741 registers (for a define_peephole2) in the current function. */
12742
12743 static bool
12744 avr_hard_regno_scratch_ok (unsigned int regno)
12745 {
12746 /* Interrupt functions can only use registers that have already been saved
12747 by the prologue, even if they would normally be call-clobbered. */
12748
12749 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
12750 && !df_regs_ever_live_p (regno))
12751 return false;
12752
12753 /* Don't allow hard registers that might be part of the frame pointer.
12754 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12755 and don't care for a frame pointer that spans more than one register. */
12756
12757 if ((!reload_completed || frame_pointer_needed)
12758 && (regno == REG_Y || regno == REG_Y + 1))
12759 {
12760 return false;
12761 }
12762
12763 return true;
12764 }
12765
12766
12767 /* Worker function for `HARD_REGNO_RENAME_OK'. */
12768 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
12769
12770 int
12771 avr_hard_regno_rename_ok (unsigned int old_reg,
12772 unsigned int new_reg)
12773 {
12774 /* Interrupt functions can only use registers that have already been
12775 saved by the prologue, even if they would normally be
12776 call-clobbered. */
12777
12778 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
12779 && !df_regs_ever_live_p (new_reg))
12780 return 0;
12781
12782 /* Don't allow hard registers that might be part of the frame pointer.
12783 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12784 and don't care for a frame pointer that spans more than one register. */
12785
12786 if ((!reload_completed || frame_pointer_needed)
12787 && (old_reg == REG_Y || old_reg == REG_Y + 1
12788 || new_reg == REG_Y || new_reg == REG_Y + 1))
12789 {
12790 return 0;
12791 }
12792
12793 return 1;
12794 }
12795
12796 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
12797 or memory location in the I/O space (QImode only).
12798
12799 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
12800 Operand 1: register operand to test, or CONST_INT memory address.
12801 Operand 2: bit number.
12802 Operand 3: label to jump to if the test is true. */
12803
12804 const char*
12805 avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
12806 {
12807 enum rtx_code comp = GET_CODE (operands[0]);
12808 bool long_jump = get_attr_length (insn) >= 4;
12809 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
12810
12811 if (comp == GE)
12812 comp = EQ;
12813 else if (comp == LT)
12814 comp = NE;
12815
12816 if (reverse)
12817 comp = reverse_condition (comp);
12818
12819 switch (GET_CODE (operands[1]))
12820 {
12821 default:
12822 gcc_unreachable();
12823
12824 case CONST_INT:
12825 case CONST:
12826 case SYMBOL_REF:
12827
12828 if (low_io_address_operand (operands[1], QImode))
12829 {
12830 if (comp == EQ)
12831 output_asm_insn ("sbis %i1,%2", operands);
12832 else
12833 output_asm_insn ("sbic %i1,%2", operands);
12834 }
12835 else
12836 {
12837 gcc_assert (io_address_operand (operands[1], QImode));
12838 output_asm_insn ("in __tmp_reg__,%i1", operands);
12839 if (comp == EQ)
12840 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
12841 else
12842 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
12843 }
12844
12845 break; /* CONST_INT */
12846
12847 case REG:
12848
12849 if (comp == EQ)
12850 output_asm_insn ("sbrs %T1%T2", operands);
12851 else
12852 output_asm_insn ("sbrc %T1%T2", operands);
12853
12854 break; /* REG */
12855 } /* switch */
12856
12857 if (long_jump)
12858 return ("rjmp .+4" CR_TAB
12859 "jmp %x3");
12860
12861 if (!reverse)
12862 return "rjmp %x3";
12863
12864 return "";
12865 }
12866
12867 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
12868
12869 static void
12870 avr_asm_out_ctor (rtx symbol, int priority)
12871 {
12872 fputs ("\t.global __do_global_ctors\n", asm_out_file);
12873 default_ctor_section_asm_out_constructor (symbol, priority);
12874 }
12875
12876
12877 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
12878
12879 static void
12880 avr_asm_out_dtor (rtx symbol, int priority)
12881 {
12882 fputs ("\t.global __do_global_dtors\n", asm_out_file);
12883 default_dtor_section_asm_out_destructor (symbol, priority);
12884 }
12885
12886
12887 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
12888
12889 static bool
12890 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
12891 {
12892 HOST_WIDE_INT size = int_size_in_bytes (type);
12893 HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
12894
12895 /* In avr, there are 8 return registers. But, for Tiny Core
12896 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
12897 Return true if size is unknown or greater than the limit. */
12898
12899 if (size == -1 || size > ret_size_limit)
12900 {
12901 return true;
12902 }
12903 else
12904 {
12905 return false;
12906 }
12907 }
12908
12909
/* Implement `CASE_VALUES_THRESHOLD'.
   Supply the default for --param case-values-threshold=0.  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here, e.g. whether 8-bit
     comparisons can be used in the if-else tree, the range of the case
     values, whether a case value can be reused, register allocation,
     and so on.  7 appears to be a good choice.  */

  return 7;
}
12924
12925
12926 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
12927
12928 static machine_mode
12929 avr_addr_space_address_mode (addr_space_t as)
12930 {
12931 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
12932 }
12933
12934
12935 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
12936
12937 static machine_mode
12938 avr_addr_space_pointer_mode (addr_space_t as)
12939 {
12940 return avr_addr_space_address_mode (as);
12941 }
12942
12943
12944 /* Helper for following function. */
12945
12946 static bool
12947 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
12948 {
12949 gcc_assert (REG_P (reg));
12950
12951 if (strict)
12952 {
12953 return REGNO (reg) == REG_Z;
12954 }
12955
12956 /* Avoid combine to propagate hard regs. */
12957
12958 if (can_create_pseudo_p()
12959 && REGNO (reg) < REG_Z)
12960 {
12961 return false;
12962 }
12963
12964 return true;
12965 }
12966
12967
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.
   Return true iff X is a legitimate address for MODE in address space AS.
   STRICT is true after reload, when only hard registers are acceptable
   as base registers.  */

static bool
avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      /* Ordinary RAM address:  defer to the generic checker.  */
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash: only a plain register or a register with post-increment
         is usable; in strict mode that register must be Z (see
         avr_reg_ok_for_pgm_addr).  */
      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      /* 24-bit space:  a bare register is only OK non-strict while
         pseudos can still be made; otherwise expect a LO_SUM of a
         high-byte register and the Z register.  */
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional dump of the decision for -mlog= debugging.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
13048
13049
13050 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
13051
13052 static rtx
13053 avr_addr_space_legitimize_address (rtx x, rtx old_x,
13054 machine_mode mode, addr_space_t as)
13055 {
13056 if (ADDR_SPACE_GENERIC_P (as))
13057 return avr_legitimize_address (x, old_x, mode);
13058
13059 if (avr_log.legitimize_address)
13060 {
13061 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
13062 }
13063
13064 return old_x;
13065 }
13066
13067
/* Implement `TARGET_ADDR_SPACE_CONVERT'.
   Convert pointer SRC from the address space of TYPE_FROM to the
   address space of TYPE_TO and return an rtx for the result.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST / PLUS wrappers to expose a possible SYMBOL_REF.  */
      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF_P (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory:  RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* High byte 0:  plain zero-extension.  Otherwise extend with the
         constant segment byte MSB.  */
      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      /* Keep just the low 16 bits of the 24-bit address.  */
      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides:  nothing to do.  */
  return src;
}
13135
13136
13137 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
13138
13139 static bool
13140 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
13141 addr_space_t superset ATTRIBUTE_UNUSED)
13142 {
13143 /* Allow any kind of pointer mess. */
13144
13145 return true;
13146 }
13147
13148
/* Implement `TARGET_CONVERT_TO_TYPE'.
   Return EXPR converted to TYPE, or NULL_TREE to let the front-end
   perform the conversion.  Only used here to emit the
   -Waddr-space-convert diagnostic.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnose for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
     be located in the right memory, like in

     (const __flash*) PSTR ("text")

     Also try to distinguish between explicit casts requested by
     the user and implicit casts like

     void f (const __flash char*);

     void g (const char *p)
     {
     f ((const __flash*) p);
     }

     under the assumption that an explicit casts means that the user
     knows what he is doing, e.g. interface with PSTR or old style
     code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* Casts to MEMX are never diagnosed (see the as_new condition);
         only conversions to a different, non-MEMX space warn.  */
      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  return NULL_TREE;
}
13207
13208
13209 /* Implement `TARGET_LEGITIMATE_COMBINED_INSN'. */
13210
13211 /* PR78883: Filter out paradoxical SUBREGs of MEM which are not handled
13212 properly by following passes. As INSN_SCHEDULING is off and hence
13213 general_operand accepts such expressions, ditch them now. */
13214
13215 static bool
13216 avr_legitimate_combined_insn (rtx_insn *insn)
13217 {
13218 subrtx_iterator::array_type array;
13219
13220 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
13221 {
13222 const_rtx op = *iter;
13223
13224 if (SUBREG_P (op)
13225 && MEM_P (SUBREG_REG (op))
13226 && (GET_MODE_SIZE (GET_MODE (op))
13227 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op)))))
13228 {
13229 return false;
13230 }
13231 }
13232
13233 return true;
13234 }
13235
13236
13237 /* PR63633: The middle-end might come up with hard regs as input operands.
13238
13239 RMASK is a bit mask representing a subset of hard registers R0...R31:
13240 Rn is an element of that set iff bit n of RMASK is set.
13241 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
13242 OP[n] has to be fixed; otherwise OP[n] is left alone.
13243
13244 For each element of OPMASK which is a hard register overlapping RMASK,
13245 replace OP[n] with a newly created pseudo register
13246
13247 HREG == 0: Also emit a move insn that copies the contents of that
13248 hard register into the new pseudo.
13249
13250 HREG != 0: Also set HREG[n] to the hard register. */
13251
13252 static void
13253 avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
13254 {
13255 for (; opmask; opmask >>= 1, op++)
13256 {
13257 rtx reg = *op;
13258
13259 if (hreg)
13260 *hreg = NULL_RTX;
13261
13262 if ((opmask & 1)
13263 && REG_P (reg)
13264 && REGNO (reg) < FIRST_PSEUDO_REGISTER
13265 // This hard-reg overlaps other prohibited hard regs?
13266 && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
13267 {
13268 *op = gen_reg_rtx (GET_MODE (reg));
13269 if (hreg == NULL)
13270 emit_move_insn (*op, reg);
13271 else
13272 *hreg = reg;
13273 }
13274
13275 if (hreg)
13276 hreg++;
13277 }
13278 }
13279
13280
13281 void
13282 avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
13283 {
13284 avr_fix_operands (op, NULL, opmask, rmask);
13285 }
13286
13287
13288 /* Helper for the function below: If bit n of MASK is set and
13289 HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
13290 Otherwise do nothing for that n. Return TRUE. */
13291
13292 static bool
13293 avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
13294 {
13295 for (; mask; mask >>= 1, op++, hreg++)
13296 if ((mask & 1)
13297 && *hreg)
13298 emit_move_insn (*hreg, *op);
13299
13300 return true;
13301 }
13302
13303
/* PR63633: The middle-end might come up with hard regs as output operands.

   GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   Emit the insn sequence as generated by GEN() with all elements of OPMASK
   which are hard registers overlapping RMASK replaced by newly created
   pseudo registers.  After the sequence has been emitted, emit insns that
   move the contents of respective pseudos to their hard regs.
   Return TRUE on success, FALSE if we bailed out due to recursion
   (see the static LOCK below).  */

bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
                       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[n];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */
  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  /* Replace offending hard-reg outputs by pseudos and remember the
     hard regs in HREG[].  */
  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  /* Copy results from the pseudos back to their hard registers.  */
  return avr_move_fixed_operands (op, hreg, opmask);
}
13344
13345
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      " "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Cannot store into flash.  */
  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only compile-time constant sizes are handled.  */
  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address:  only the MEMX space uses those.  */
      gcc_assert (as == ADDR_SPACE_MEMX);

      /* NOTE(review): `<' here but `<=' in the branch below — presumably
         because the movmemx insns count differently; confirm against the
         movmemx_qi/movmemx_hi patterns before changing either one.  */
      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      /* Split the 24-bit address into 16-bit low part and 8-bit high part.  */
      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_n_flash > 1)
        {
          /* Source lives in an upper flash segment:  set up RAMPZ
             for ELPM.  */
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          /* Devices with just one flash segment:  all flash spaces
             collapse to ADDR_SPACE_FLASH.  */
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
         on its own.  Thus, we allocate the pointer registers by hand:
         Z = source address
         X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* MEMX:  pass the high address byte in R23; presumably the
         movmemx insn selects LD/LPM/ELPM at run time from that byte —
         confirm in avr.md.  */
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  /* Tag the source MEM inside the insn with its address space.  */
  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
13451
13452
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   PLEN == NULL:  output the assembler code.
   PLEN != NULL:  only accumulate the code length (in words) in *PLEN.  */

const char*
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  machine_mode loop_mode = GET_MODE (op[1]);
  /* Loop counter in the upper register pair?  Then SBIW is available.  */
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      /* RAM source:  indirect load through Z.  */
      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      /* Lower flash:  LPM, incrementing Z by hand if the device has
         no LPM Rd,Z+ form.  */
      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Upper flash segments:  ELPM; RAMPZ was set up by the expander
         (see avr_emit_movmemhi).  */
      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
13538
13539
13540 \f
13541 /* Helper for __builtin_avr_delay_cycles */
13542
13543 static rtx
13544 avr_mem_clobber (void)
13545 {
13546 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
13547 MEM_VOLATILE_P (mem) = 1;
13548 return mem;
13549 }
13550
/* Expand __builtin_avr_delay_cycles:  emit delay loops and NOPs that
   consume exactly the number of cycles given by the 32-bit constant
   OPERANDS0.  The widest applicable loop is emitted first; the
   remainder is handled by successively smaller loops and finally by
   single NOP insns.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit loop:  9 cycles overhead, 6 cycles per iteration.  */
  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit loop:  7 cycles overhead, 5 cycles per iteration.  */
  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit loop:  5 cycles overhead, 4 cycles per iteration.  */
  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit loop:  3 cycles per iteration.  */
  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Remainder:  2-cycle nopv insns, then at most one 1-cycle NOP.  */
  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT (2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT (1)));
      cycles--;
    }
}
13612
13613
13614 static void
13615 avr_expand_nops (rtx operands0)
13616 {
13617 unsigned HOST_WIDE_INT n_nops = UINTVAL (operands0) & GET_MODE_MASK (HImode);
13618
13619 while (n_nops--)
13620 {
13621 emit_insn (gen_nopv (const1_rtx));
13622 }
13623 }
13624
13625
/* Compute the image of X under the map F, i.e. perform x --> f(x):
   return nibble no. X of F for X in { 0 ... 7 } and 0 otherwise.  */

static int
avr_map (unsigned int f, int x)
{
  if (x < 8)
    return (f >> (4 * x)) & 0xf;

  return 0;
}
13633
13634
/* Modes for avr_map_metric below:  select which metric of a map A
   is to be computed.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };
13654
13655 static unsigned
13656 avr_map_metric (unsigned int a, int mode)
13657 {
13658 unsigned metric = 0;
13659
13660 for (unsigned i = 0; i < 8; i++)
13661 {
13662 unsigned ai = avr_map (a, i);
13663
13664 if (mode == MAP_FIXED_0_7)
13665 metric += ai == i;
13666 else if (mode == MAP_NONFIXED_0_7)
13667 metric += ai < 8 && ai != i;
13668 else if (mode == MAP_MASK_FIXED_0_7)
13669 metric |= ((unsigned) (ai == i)) << i;
13670 else if (mode == MAP_PREIMAGE_0_7)
13671 metric += ai < 8;
13672 else if (mode == MAP_MASK_PREIMAGE_F)
13673 metric |= ((unsigned) (ai == 0xf)) << i;
13674 else
13675 gcc_unreachable();
13676 }
13677
13678 return metric;
13679 }
13680
13681
13682 /* Return true if IVAL has a 0xf in its hexadecimal representation
13683 and false, otherwise. Only nibbles 0..7 are taken into account.
13684 Used as constraint helper for C0f and Cxf. */
13685
13686 bool
13687 avr_has_nibble_0xf (rtx ival)
13688 {
13689 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
13690 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
13691 }
13692
13693
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example: Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

/* Descriptor of one candidate decomposition function G together with
   the scratch result of composing some F with its inverse.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F;
     filled in by avr_map_decompose.  */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
13729
/* Catalogue of candidate functions G:  the identity, the 8 bit
   rotations, and small left/right shifts, each with its inverse map
   GINV and its cost.  The MAP field is scratch space filled in by
   avr_map_decompose.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
13748
13749
/* Try to decompose F as F = (F o G^-1) o G as described above.
   VAL_CONST_P is true iff the value to be inserted (OP[3] of
   insert_bits) is a compile-time constant.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  /* Does F use any bit of the target value at all?  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  /* Cost -1 means "no decomposition found" until proven otherwise.  */
  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1 */

  for (int i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1) */

          if (x > 7)
            return f_ginv;
        }

      /* Prepend nibble X to the composed map.  */
      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1 */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]:  Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      /* One extra insn to load a used constant value into a register.  */
      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
13822
13823
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
                      is different to its source position.
   PLEN == NULL: output assembler via avr_asm_len;
   PLEN != NULL: only accumulate code length in *PLEN.  */

static void
avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
{
  /* T-flag contains this bit of the source, i.e. of XOP[1] */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.  */

  for (int b = 0; b < 8; b++)
    for (int bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            /* Nibble >= 8 means this destination bit takes no source bit.  */
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
13865
13866
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]:  Result
   OP[1]:  The mapping composed of nibbles.  If nibble no. N is
           0:   Bit N of result is copied from bit OP[2].0
           ...  ...
           7:   Bit N of result is copied from bit OP[2].7
           0xf: Bit N of result is copied from bit OP[3].N
   OP[2]:  Bits to be inserted
   OP[3]:  Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Operand layout expected by avr_move_bits:
     xop[0] = result, xop[1] = source bits, xop[2] = target value.  */
  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      if (fixp_p && n_fix - n_nofix > 3)
        {
          /* Copying the fixed points via EOR/ANDI/EOR is cheaper than
             moving each of them individually with BST/BLD.  */
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1"   CR_TAB
                       "andi %0,%3"  CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          /* Initialize the result from the source; the fixed points are
             then already in place.  */
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
13953
13954
/* IDs for all the AVR builtins.  Generated from builtins.def so the
   enumerators appear in the same order as its DEF_BUILTIN entries;
   AVR_BUILTIN_COUNT is the total number of built-ins.  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };
13966
/* Description of one built-in:  the insn code that implements it, its
   number of arguments, and its function declaration (NULL_TREE until
   the built-in is registered).  */

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;
  int n_args;
  tree fndecl;
};
13973
13974
/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID].  The fndecl fields start out NULL_TREE.  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
13987
13988
13989 /* Implement `TARGET_BUILTIN_DECL'. */
13990
13991 static tree
13992 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13993 {
13994 if (id < AVR_BUILTIN_COUNT)
13995 return avr_bdesc[id].fndecl;
13996
13997 return error_mark_node;
13998 }
13999
14000
14001 static void
14002 avr_init_builtin_int24 (void)
14003 {
14004 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
14005 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
14006
14007 lang_hooks.types.register_builtin_type (int24_type, "__int24");
14008 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
14009 }
14010
14011
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  */

static void
avr_init_builtins (void)
{
  /* Function types for the plain (non fixed-point) builtins, e.g.
     __builtin_avr_nop, __builtin_avr_swap, __builtin_avr_fmul*,
     __builtin_avr_delay_cycles and __builtin_avr_insert_bits.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
				unsigned_char_type_node,
				NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
				unsigned_char_type_node,
				unsigned_char_type_node,
				NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
				char_type_node,
				char_type_node,
				NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
				char_type_node,
				unsigned_char_type_node,
				NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
				long_unsigned_type_node,
				NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
				long_unsigned_type_node,
				unsigned_char_type_node,
				unsigned_char_type_node,
				NULL_TREE);

  /* "const __memx void" and a pointer to it:  the __memx address space
     spans all of program and data memory and uses PSImode (24-bit)
     pointers.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
			    TYPE_QUAL_CONST
			    | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
				const_memx_ptr_type_node,
				NULL);

  /* ITYP(T): the integer type with the same precision and signedness
     as fixed-point type T, used by the bitsfx / fxbits builtins.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

  /* The following FX_* / INT* macros build one function-type tree per
     fixed-point flavor.  `fx' is the length prefix (h, n, l, ll, and
     u-prefixed variants); `r' denotes _Fract, `k' denotes _Accum.
     They rely on the node_<fx>r / node_<fx>k locals defined below.  */

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
				integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
				integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Shorthand locals for the fixed-point type nodes, named so the
     token-pasting macros above can synthesize their identifiers.  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register each builtin from builtins.def under its lower-cased
     "__builtin_avr_" name and record the resulting decl in avr_bdesc[]
     so avr_builtin_decl and avr_expand_builtin can find it.  The TYPE
     argument references the *_ftype_* trees built above.  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
14192
14193
14194 /* Subroutine of avr_expand_builtin to expand vanilla builtins
14195 with non-void result and 1 ... 3 arguments. */
14196
14197 static rtx
14198 avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
14199 {
14200 rtx pat, xop[3];
14201 int n_args = call_expr_nargs (exp);
14202 machine_mode tmode = insn_data[icode].operand[0].mode;
14203
14204 gcc_assert (n_args >= 1 && n_args <= 3);
14205
14206 if (target == NULL_RTX
14207 || GET_MODE (target) != tmode
14208 || !insn_data[icode].operand[0].predicate (target, tmode))
14209 {
14210 target = gen_reg_rtx (tmode);
14211 }
14212
14213 for (int n = 0; n < n_args; n++)
14214 {
14215 tree arg = CALL_EXPR_ARG (exp, n);
14216 rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
14217 machine_mode opmode = GET_MODE (op);
14218 machine_mode mode = insn_data[icode].operand[n + 1].mode;
14219
14220 if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
14221 {
14222 opmode = HImode;
14223 op = gen_lowpart (HImode, op);
14224 }
14225
14226 /* In case the insn wants input operands in modes different from
14227 the result, abort. */
14228
14229 gcc_assert (opmode == mode || opmode == VOIDmode);
14230
14231 if (!insn_data[icode].operand[n + 1].predicate (op, mode))
14232 op = copy_to_mode_reg (mode, op);
14233
14234 xop[n] = op;
14235 }
14236
14237 switch (n_args)
14238 {
14239 case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
14240 case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
14241 case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
14242
14243 default:
14244 gcc_unreachable();
14245 }
14246
14247 if (pat == NULL_RTX)
14248 return NULL_RTX;
14249
14250 emit_insn (pat);
14251
14252 return target;
14253 }
14254
14255
/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Builtins that need special treatment (compile-time constant arguments,
   diagnostics) are handled in the switch below; everything else falls
   through to the generic expander or to a libgcc call.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT (1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
	/* The cycle count must be a compile-time constant; expansion
	   synthesizes a matching delay sequence.  */

	arg0 = CALL_EXPR_ARG (exp, 0);
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

	if (!CONST_INT_P (op0))
	  error ("%s expects a compile time integer constant", bname);
	else
	  avr_expand_delay_cycles (op0);

	return NULL_RTX;
      }

    case AVR_BUILTIN_NOPS:
      {
	/* Same constraint as DELAY_CYCLES: the NOP count must be known
	   at compile time.  */

	arg0 = CALL_EXPR_ARG (exp, 0);
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

	if (!CONST_INT_P (op0))
	  error ("%s expects a compile time integer constant", bname);
	else
	  avr_expand_nops (op0);

	return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
	/* Only the map (1st argument) must be constant; if it is,
	   fall through to the default expander below.  */

	arg0 = CALL_EXPR_ARG (exp, 0);
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

	if (!CONST_INT_P (op0))
	  {
	    error ("%s expects a compile time long integer constant"
		   " as first argument", bname);
	    return target;
	  }

	break;
      }

    case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:  case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:  case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:

      /* Warn about odd rounding.  Rounding points >= FBIT will have
	 no effect.  */

      if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
	break;

      int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

      if (rbit >= (int) GET_MODE_FBIT (mode))
	{
	  /* Rounding point at or beyond the fractional bits: the value
	     is returned unchanged.  */

	  warning (OPT_Wextra, "rounding to %d bits has no effect for "
		   "fixed-point value with %d fractional bits",
		   rbit, GET_MODE_FBIT (mode));

	  return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
			      EXPAND_NORMAL);
	}
      else if (rbit <= - (int) GET_MODE_IBIT (mode))
	{
	  warning (0, "rounding result will always be 0");
	  return CONST0_RTX (mode);
	}

      /* The rounding point RP satisfies now:  -IBIT < RP < FBIT.

	 TR 18037 only specifies results for RP > 0.  However, the
	 remaining cases of -IBIT < RP <= 0 can easily be supported
	 without any additional overhead.  */

      break; /* round */
    }

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
14388
14389
14390 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
14391
14392 static tree
14393 avr_fold_absfx (tree tval)
14394 {
14395 if (FIXED_CST != TREE_CODE (tval))
14396 return NULL_TREE;
14397
14398 /* Our fixed-points have no padding: Use double_int payload directly. */
14399
14400 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
14401 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
14402 double_int ival = fval.data.sext (bits);
14403
14404 if (!ival.is_negative())
14405 return tval;
14406
14407 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
14408
14409 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
14410 ? double_int::max_value (bits, false)
14411 : -ival;
14412
14413 return build_fixed (TREE_TYPE (tval), fval);
14414 }
14415
14416
/* Implement `TARGET_FOLD_BUILTIN'.  */
/* Try to fold a call to one of our builtins at GIMPLE/tree level.
   FNDECL is the builtin's decl, ARG the array of its arguments.
   Return the folded tree, or NULL_TREE if no fold applies.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
		  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  /* Only fold when optimizing; otherwise keep the calls as-is.  */

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
	/* Nibble swap is a rotate left by 4.  */

	return fold_build2 (LROTATE_EXPR, val_type, arg[0],
			    build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR: case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK: case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR: case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK: case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR: case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK: case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR: case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK: case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR: case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK: case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR: case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK: case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR: case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK: case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR: case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK: case AVR_BUILTIN_ULLKBITS:

      /* bitsfx / fxbits are mere reinterpretations between a fixed-point
	 type and the integer type of the same precision, hence fold to
	 VIEW_CONVERT_EXPR.  */

      gcc_assert (TYPE_PRECISION (val_type)
		  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
	tree tbits = arg[1];
	tree tval = arg[2];
	tree tmap;
	tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
	unsigned int map;
	bool changed = false;
	avr_map_op_t best_g;

	if (TREE_CODE (arg[0]) != INTEGER_CST)
	  {
	    /* No constant as first argument: Don't fold this and run into
	       error in avr_expand_builtin.  */

	    break;
	  }

	tmap = wide_int_to_tree (map_type, arg[0]);
	map = TREE_INT_CST_LOW (tmap);

	if (TREE_CODE (tval) != INTEGER_CST
	    && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
	  {
	    /* There are no F in the map, i.e. 3rd operand is unused.
	       Replace that argument with some constant to render
	       respective input unused.  */

	    tval = build_int_cst (val_type, 0);
	    changed = true;
	  }

	if (TREE_CODE (tbits) != INTEGER_CST
	    && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
	  {
	    /* Similar for the bits to be inserted.  If they are unused,
	       we can just as well pass 0.  */

	    tbits = build_int_cst (val_type, 0);
	  }

	if (TREE_CODE (tbits) == INTEGER_CST)
	  {
	    /* Inserting bits known at compile time is easy and can be
	       performed by AND and OR with appropriate masks.  */

	    int bits = TREE_INT_CST_LOW (tbits);
	    int mask_ior = 0, mask_and = 0xff;

	    for (size_t i = 0; i < 8; i++)
	      {
		int mi = avr_map (map, i);

		if (mi < 8)
		  {
		    /* Destination bit I comes from source bit MI:
		       set it by OR if that bit is 1, clear by AND
		       if it is 0.  */

		    if (bits & (1 << mi))   mask_ior |=  (1 << i);
		    else                    mask_and &= ~(1 << i);
		  }
	      }

	    tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
				build_int_cst (val_type, mask_ior));
	    return fold_build2 (BIT_AND_EXPR, val_type, tval,
				build_int_cst (val_type, mask_and));
	  }

	if (changed)
	  return build_call_expr (fndecl, 3, tmap, tbits, tval);

	/* If bits don't change their position we can use vanilla logic
	   to merge the two arguments.  */

	if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
	  {
	    int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
	    tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

	    /* (tbits ^ tval) & mask ^ tval selects tbits where the
	       mask bit is 1 and tval where it is 0.  */

	    tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
	    tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
	    return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
	  }

	/* Try decomposing the map to reduce overall cost.  */

	if (avr_log.builtin)
	  avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

	best_g = avr_map_op[0];
	best_g.cost = 1000;

	/* Pick the decomposition with the lowest cost from the table
	   of candidate operations.  */

	for (size_t i = 0; i < ARRAY_SIZE (avr_map_op); i++)
	  {
	    avr_map_op_t g
	      = avr_map_decompose (map, avr_map_op + i,
				   TREE_CODE (tval) == INTEGER_CST);

	    if (g.cost >= 0 && g.cost < best_g.cost)
	      best_g = g;
	  }

	if (avr_log.builtin)
	  avr_edump ("\n");

	if (best_g.arg == 0)
	  /* No optimization found */
	  break;

	/* Apply operation G to the 2nd argument.  */

	if (avr_log.builtin)
	  avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
		     best_g.str, best_g.arg, best_g.map, best_g.cost);

	/* Do right-shifts arithmetically: They copy the MSB instead of
	   shifting in a non-usable value (0) as with logic right-shift.  */

	tbits = fold_convert (signed_char_type_node, tbits);
	tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
			     build_int_cst (val_type, best_g.arg));
	tbits = fold_convert (val_type, tbits);

	/* Use map o G^-1 instead of original map to undo the effect of G.  */

	tmap = wide_int_to_tree (map_type, best_g.map);

	return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
14609
14610 \f
14611
/* Initialize the GCC target structure.  */

/* Assembler output: data directives, file prologue/epilogue.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function calling conventions and return values.  */

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attributes and sections.  */

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef TARGET_ASM_FINAL_POSTSCAN_INSN
#define TARGET_ASM_FINAL_POSTSCAN_INSN avr_asm_final_postscan_insn

/* Costs and machine-dependent passes.  */

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

/* Register usage, frame layout, elimination.  */

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Builtins (see avr_init_builtins etc. above).  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

/* Named address spaces (__flash, __memx etc.).  */

#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_ADDR_SPACE_DIAGNOSE_USAGE
#define TARGET_ADDR_SPACE_DIAGNOSE_USAGE avr_addr_space_diagnose_usage

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

/* Operand printing for `final'.  */

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

#undef TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
#define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
  avr_use_by_pieces_infrastructure_p

#undef TARGET_LEGITIMATE_COMBINED_INSN
#define TARGET_LEGITIMATE_COMBINED_INSN avr_legitimate_combined_insn

struct gcc_target targetm = TARGET_INITIALIZER;
14785
14786 \f
14787 #include "gt-avr.h"