]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/avr/avr.c
Turn HARD_REGNO_MODE_OK into a target hook
[thirdparty/gcc.git] / gcc / config / avr / avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2017 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "intl.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "stringpool.h"
30 #include "attribs.h"
31 #include "cgraph.h"
32 #include "c-family/c-common.h"
33 #include "cfghooks.h"
34 #include "df.h"
35 #include "memmodel.h"
36 #include "tm_p.h"
37 #include "optabs.h"
38 #include "regs.h"
39 #include "emit-rtl.h"
40 #include "recog.h"
41 #include "conditions.h"
42 #include "insn-attr.h"
43 #include "reload.h"
44 #include "varasm.h"
45 #include "calls.h"
46 #include "stor-layout.h"
47 #include "output.h"
48 #include "explow.h"
49 #include "expr.h"
50 #include "langhooks.h"
51 #include "cfgrtl.h"
52 #include "params.h"
53 #include "builtins.h"
54 #include "context.h"
55 #include "tree-pass.h"
56 #include "print-rtl.h"
57 #include "rtl-iter.h"
58
59 /* This file should be included last. */
60 #include "target-def.h"
61
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.
   NOTE: PREFIX is evaluated twice (strncmp and strlen), so the
   argument must be free of side effects.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
77
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fixed macro hygiene: the bodies used to reference lowercase `sym'
   instead of the parameter SYM, so they only worked when the caller
   happened to pass a variable literally named `sym'.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                  \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)   \
   / SYMBOL_FLAG_MACH_DEP)
90
/* (AVR_TINY only): Symbol has attribute progmem */
#define AVR_SYMBOL_FLAG_TINY_PM \
  (SYMBOL_FLAG_MACH_DEP << 7)

/* (AVR_TINY only): Symbol has attribute absdata */
#define AVR_SYMBOL_FLAG_TINY_ABSDATA \
  (SYMBOL_FLAG_MACH_DEP << 8)

/* Add constant I to the 16-bit value in register pair REG1 (low byte) /
   REG2 (high byte) by subtracting the negated constant -- the SUBI/SBCI
   emulation of ADIW, presumably for cores lacking ADIW (TODO confirm).  */
#define TINY_ADIW(REG1, REG2, I)                                \
  "subi " #REG1 ",lo8(-(" #I "))" CR_TAB                        \
  "sbci " #REG2 ",hi8(-(" #I "))"

/* Likewise, subtract constant I from the 16-bit value in REG1/REG2.  */
#define TINY_SBIW(REG1, REG2, I)                                \
  "subi " #REG1 ",lo8((" #I "))" CR_TAB                         \
  "sbci " #REG2 ",hi8((" #I "))"

/* Temporary and zero register numbers: AVR_TINY cores use different
   hard registers for these than the other cores.  */
#define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
#define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
109
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initialized must be used).
   NOTE(review): field meanings inferred from the data -- address-space
   id, in-flash flag, pointer size in bytes, keyword, flash segment
   number, section name -- confirm against the avr_addrspace_t
   declaration in avr.h.  */
const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
    { ADDR_SPACE_RAM,  0, 2, "", 0, NULL },
    { ADDR_SPACE_FLASH,  1, 2, "__flash",   0, ".progmem.data" },
    { ADDR_SPACE_FLASH1, 1, 2, "__flash1",  1, ".progmem1.data" },
    { ADDR_SPACE_FLASH2, 1, 2, "__flash2",  2, ".progmem2.data" },
    { ADDR_SPACE_FLASH3, 1, 2, "__flash3",  3, ".progmem3.data" },
    { ADDR_SPACE_FLASH4, 1, 2, "__flash4",  4, ".progmem4.data" },
    { ADDR_SPACE_FLASH5, 1, 2, "__flash5",  5, ".progmem5.data" },
    { ADDR_SPACE_MEMX,   1, 3, "__memx",    0, ".progmemx.data" },
};
123
124
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Filled in by avr_option_override from the selected architecture's
   SFR offset; read e.g. by avr_init_expanders.  */
static avr_addr_t avr_addr;
148
149
150 /* Prototypes for local helper functions. */
151
152 static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
153 static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
154 static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
155 static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
156 static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
157 static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);
158
159 static int get_sequence_length (rtx_insn *insns);
160 static int sequent_regs_live (void);
161 static const char *ptrreg_to_str (int);
162 static const char *cond_string (enum rtx_code);
163 static int avr_num_arg_regs (machine_mode, const_tree);
164 static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
165 int, bool);
166 static void output_reload_in_const (rtx*, rtx, int*, bool);
167 static struct machine_function * avr_init_machine_status (void);
168
169
170 /* Prototypes for hook implementors if needed before their implementation. */
171
172 static bool avr_rtx_costs (rtx, machine_mode, int, int, int*, bool);
173
174
175 /* Allocate registers from r25 to r8 for parameters for function calls. */
176 #define FIRST_CUM_REG 26
177
178 /* Last call saved register */
179 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
180
181 /* Implicit target register of LPM instruction (R0) */
182 extern GTY(()) rtx lpm_reg_rtx;
183 rtx lpm_reg_rtx;
184
185 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
186 extern GTY(()) rtx lpm_addr_reg_rtx;
187 rtx lpm_addr_reg_rtx;
188
189 /* Temporary register RTX (reg:QI TMP_REGNO) */
190 extern GTY(()) rtx tmp_reg_rtx;
191 rtx tmp_reg_rtx;
192
193 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
194 extern GTY(()) rtx zero_reg_rtx;
195 rtx zero_reg_rtx;
196
197 /* RTXs for all general purpose registers as QImode */
198 extern GTY(()) rtx all_regs_rtx[32];
199 rtx all_regs_rtx[32];
200
201 /* SREG, the processor status */
202 extern GTY(()) rtx sreg_rtx;
203 rtx sreg_rtx;
204
205 /* RAMP* special function registers */
206 extern GTY(()) rtx rampd_rtx;
207 extern GTY(()) rtx rampx_rtx;
208 extern GTY(()) rtx rampy_rtx;
209 extern GTY(()) rtx rampz_rtx;
210 rtx rampd_rtx;
211 rtx rampx_rtx;
212 rtx rampy_rtx;
213 rtx rampz_rtx;
214
215 /* RTX containing the strings "" and "e", respectively */
216 static GTY(()) rtx xstring_empty;
217 static GTY(()) rtx xstring_e;
218
219 /* Current architecture. */
220 const avr_arch_t *avr_arch;
221
222 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
223 or to address space __flash* or __memx. Only used as singletons inside
224 avr_asm_select_section, but it must not be local there because of GTY. */
225 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
226
227 /* Condition for insns/expanders from avr-dimode.md. */
228 bool avr_have_dimode = true;
229
230 /* To track if code will use .bss and/or .data. */
231 bool avr_need_clear_bss_p = false;
232 bool avr_need_copy_data_p = false;
233
234 \f
235 /* Transform UP into lowercase and write the result to LO.
236 You must provide enough space for LO. Return LO. */
237
238 static char*
239 avr_tolower (char *lo, const char *up)
240 {
241 char *lo0 = lo;
242
243 for (; *up; up++, lo++)
244 *lo = TOLOWER (*up);
245
246 *lo = '\0';
247
248 return lo0;
249 }
250
251
252 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
253 Return true if the least significant N_BYTES bytes of XVAL all have a
254 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
255 of integers which contains an integer N iff bit N of POP_MASK is set. */
256
257 bool
258 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
259 {
260 machine_mode mode = GET_MODE (xval);
261
262 if (VOIDmode == mode)
263 mode = SImode;
264
265 for (int i = 0; i < n_bytes; i++)
266 {
267 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
268 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
269
270 if (0 == (pop_mask & (1 << popcount_hwi (val8))))
271 return false;
272 }
273
274 return true;
275 }
276
277
278 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
279 the bit representation of X by "casting" it to CONST_INT. */
280
281 rtx
282 avr_to_int_mode (rtx x)
283 {
284 machine_mode mode = GET_MODE (x);
285
286 return VOIDmode == mode
287 ? x
288 : simplify_gen_subreg (int_mode_for_mode (mode).require (), x, mode, 0);
289 }
290
291 namespace {
292
/* Pass data for the note-recomputing pass below.  The empty name is
   patched with the real pass name by the class constructor.  */

static const pass_data avr_pass_data_recompute_notes =
{
  RTL_PASS,      // type
  "",            // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN,    // tv_id
  0,             // properties_required
  0,             // properties_provided
  0,             // properties_destroyed
  0,             // todo_flags_start
  TODO_df_finish | TODO_df_verify // todo_flags_finish
};


/* RTL pass that adds the dataflow note problem and re-runs the DF
   analysis, thereby refreshing note information.  */

class avr_pass_recompute_notes : public rtl_opt_pass
{
public:
  avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
  {
    this->name = name;
  }

  virtual unsigned int execute (function*)
  {
    df_note_add_problem ();
    df_analyze ();

    return 0;
  }
}; // avr_pass_recompute_notes
324
/* Pass data for the casesi-optimizing pass below.  The empty name is
   patched with the real pass name by the class constructor.  */

static const pass_data avr_pass_data_casesi =
{
  RTL_PASS,      // type
  "",            // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN,    // tv_id
  0,             // properties_required
  0,             // properties_provided
  0,             // properties_destroyed
  0,             // todo_flags_start
  0              // todo_flags_finish
};


/* RTL pass that looks for insn sequences as emitted by the casesi
   expander and tries to perform the switch in a narrower mode;
   see avr_rest_of_handle_casesi.  Only run when optimizing.  */

class avr_pass_casesi : public rtl_opt_pass
{
public:
  avr_pass_casesi (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_casesi, ctxt)
  {
    this->name = name;
  }

  void avr_rest_of_handle_casesi (function*);

  virtual bool gate (function*) { return optimize > 0; }

  virtual unsigned int execute (function *func)
  {
    avr_rest_of_handle_casesi (func);

    return 0;
  }
}; // avr_pass_casesi
359
360 } // anon namespace
361
362 rtl_opt_pass*
363 make_avr_pass_recompute_notes (gcc::context *ctxt)
364 {
365 return new avr_pass_recompute_notes (ctxt, "avr-notes-free-cfg");
366 }
367
368 rtl_opt_pass*
369 make_avr_pass_casesi (gcc::context *ctxt)
370 {
371 return new avr_pass_casesi (ctxt, "avr-casesi");
372 }
373
374
/* Make one parallel insn with all the patterns from insns i[0]..i[5].  */

static rtx_insn*
avr_parallel_insn_from_insns (rtx_insn *i[6])
{
  rtvec vec = gen_rtvec (6, PATTERN (i[0]), PATTERN (i[1]), PATTERN (i[2]),
                         PATTERN (i[3]), PATTERN (i[4]), PATTERN (i[5]));
  // Emit into a scratch sequence so the parallel insn does not end up
  // in the current insn stream.
  start_sequence();
  emit (gen_rtx_PARALLEL (VOIDmode, vec));
  rtx_insn *insn = get_insns();
  end_sequence();

  return insn;
}
389
390
/* Return true if we see an insn stream generated by casesi expander together
   with an extension to SImode of the switch value.

   If this is the case, fill in the insns from casesi to INSNS[1..5] and
   the SImode extension to INSNS[0].  Moreover, extract the operands of
   pattern casesi_<mode>_sequence forged from the sequence to recog_data.  */

static bool
avr_is_casesi_sequence (basic_block bb, rtx_insn *insn, rtx_insn *insns[6])
{
  rtx set_5, set_0;

  /* A first and quick test for a casesi sequence.  As a side effect of
     the test, harvest respective insns to INSNS[0..5].  */

  if (!(JUMP_P (insns[5] = insn)
        // casesi is the only insn that comes up with UNSPEC_INDEX_JMP,
        // hence the following test ensures that we are actually dealing
        // with code from casesi.
        && (set_5 = single_set (insns[5]))
        && UNSPEC == GET_CODE (SET_SRC (set_5))
        && UNSPEC_INDEX_JMP == XINT (SET_SRC (set_5), 1)

        // Walk backwards over the four insns emitted before the
        // table jump; any NULL aborts the match.
        && (insns[4] = prev_real_insn (insns[5]))
        && (insns[3] = prev_real_insn (insns[4]))
        && (insns[2] = prev_real_insn (insns[3]))
        && (insns[1] = prev_real_insn (insns[2]))

        // Insn prior to casesi.
        && (insns[0] = prev_real_insn (insns[1]))
        && (set_0 = single_set (insns[0]))
        && extend_operator (SET_SRC (set_0), SImode)))
    {
      return false;
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; Sequence from casesi in "
               "[bb %d]:\n\n", bb->index);
      for (int i = 0; i < 6; i++)
        print_rtl_single (dump_file, insns[i]);
    }

  /* We have to deal with quite some operands.  Extracting them by hand
     would be tedious, therefore wrap the insn patterns into a parallel,
     run recog against it and then use insn extract to get the operands. */

  rtx_insn *xinsn = avr_parallel_insn_from_insns (insns);

  INSN_CODE (xinsn) = recog (PATTERN (xinsn), xinsn, NULL /* num_clobbers */);

  /* Failing to recognize means that someone changed the casesi expander or
     that some passes prior to this one performed some unexpected changes.
     Gracefully drop such situations instead of aborting.  */

  if (INSN_CODE (xinsn) < 0)
    {
      if (dump_file)
        fprintf (dump_file, ";; Sequence not recognized, giving up.\n\n");

      return false;
    }

  gcc_assert (CODE_FOR_casesi_qi_sequence == INSN_CODE (xinsn)
              || CODE_FOR_casesi_hi_sequence == INSN_CODE (xinsn));

  extract_insn (xinsn);

  // Assert on the anatomy of xinsn's operands we are going to work with.

  gcc_assert (11 == recog_data.n_operands);
  gcc_assert (4 == recog_data.n_dups);

  if (dump_file)
    {
      fprintf (dump_file, ";; Operands extracted:\n");
      for (int i = 0; i < recog_data.n_operands; i++)
        avr_fdump (dump_file, ";; $%d = %r\n", i, recog_data.operand[i]);
      fprintf (dump_file, "\n");
    }

  return true;
}
475
476
/* Perform some extra checks on operands of casesi_<mode>_sequence.
   Not all operand dependencies can be described by means of predicates.
   This function performs left over checks and should always return true.
   Returning false means that someone changed the casesi expander but did
   not adjust casesi_<mode>_sequence.
   (Note: "casei" in the function name is a historical misspelling.)  */

bool
avr_casei_sequence_check_operands (rtx *xop)
{
  rtx sub_5 = NULL_RTX;

  if (AVR_HAVE_EIJMP_EICALL
      // The last clobber op of the tablejump.
      && xop[8] == all_regs_rtx[24])
    {
      // $6 is: (subreg:SI ($5) 0)
      sub_5 = xop[6];
    }

  if (!AVR_HAVE_EIJMP_EICALL
      // $6 is: (plus:HI (subreg:SI ($5) 0)
      //                 (label_ref ($3)))
      && PLUS == GET_CODE (xop[6])
      && LABEL_REF == GET_CODE (XEXP (xop[6], 1))
      && rtx_equal_p (xop[3], XEXP (XEXP (xop[6], 1), 0))
      // The last clobber op of the tablejump.
      && xop[8] == const0_rtx)
    {
      sub_5 = XEXP (xop[6], 0);
    }

  // In both cases, SUB_5 must be a zero-offset subreg of operand $5.
  if (sub_5
      && SUBREG_P (sub_5)
      && 0 == SUBREG_BYTE (sub_5)
      && rtx_equal_p (xop[5], SUBREG_REG (sub_5)))
    return true;

  if (dump_file)
    fprintf (dump_file, "\n;; Failed condition for casesi_<mode>_sequence\n\n");

  return false;
}
519
520
/* INSNS[1..5] is a sequence as generated by casesi and INSNS[0] is an
   extension of an 8-bit or 16-bit integer to SImode.  XOP contains the
   operands of INSNS as extracted by insn_extract from pattern
   casesi_<mode>_sequence:

      $0: SImode reg switch value as result of $9.
      $1: Negative of smallest index in switch.
      $2: Number of entries in switch.
      $3: Label to table.
      $4: Label if out-of-bounds.
      $5: $0 + $1.
      $6: 3-byte PC: subreg:HI ($5) + label_ref ($3)
          2-byte PC: subreg:HI ($5)
      $7: HI reg index into table (Z or pseudo)
      $8: R24 or const0_rtx (to be clobbered)
      $9: Extension to SImode of an 8-bit or 16-bit integer register $10.
      $10: QImode or HImode register input of $9.

   Try to optimize this sequence, i.e. use the original HImode / QImode
   switch value instead of SImode.  */

static void
avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
{
  // Original mode of the switch value; this is QImode or HImode.
  machine_mode mode = GET_MODE (xop[10]);

  // How the original switch value was extended to SImode; this is
  // SIGN_EXTEND or ZERO_EXTEND.
  enum rtx_code code = GET_CODE (xop[9]);

  // Lower index, upper index (plus one) and range of case values.
  HOST_WIDE_INT low_idx = -INTVAL (xop[1]);
  HOST_WIDE_INT num_idx = INTVAL (xop[2]);
  HOST_WIDE_INT hig_idx = low_idx + num_idx;

  // Maximum ranges of (un)signed QImode resp. HImode.
  unsigned umax = QImode == mode ? 0xff : 0xffff;
  int imax = QImode == mode ? 0x7f : 0x7fff;
  int imin = -imax - 1;

  // Testing the case range and whether it fits into the range of the
  // (un)signed mode.  This test should actually always pass because it
  // makes no sense to have case values outside the mode range.  Notice
  // that case labels which are unreachable because they are outside the
  // mode of the switch value (e.g. "case -1" for uint8_t) have already
  // been thrown away by the middle-end.

  if (SIGN_EXTEND == code
      && low_idx >= imin
      && hig_idx <= imax)
    {
      // ok
    }
  else if (ZERO_EXTEND == code
           && low_idx >= 0
           && (unsigned) hig_idx <= umax)
    {
      // ok
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, ";; Case ranges too big, giving up.\n\n");
      return;
    }

  // Do normalization of switch value $10 and out-of-bound check in its
  // original mode instead of in SImode.  Use a newly created pseudo.
  // This will replace insns[1..2].

  start_sequence();

  rtx_insn *seq1, *seq2, *last1, *last2;

  rtx reg = copy_to_mode_reg (mode, xop[10]);

  // Pick the add / compare expanders matching the narrow mode.
  rtx (*gen_add)(rtx,rtx,rtx) = QImode == mode ? gen_addqi3 : gen_addhi3;
  rtx (*gen_cmp)(rtx,rtx) = QImode == mode ? gen_cmpqi3 : gen_cmphi3;

  emit_insn (gen_add (reg, reg, gen_int_mode (-low_idx, mode)));
  emit_insn (gen_cmp (reg, gen_int_mode (num_idx, mode)));

  seq1 = get_insns();
  last1 = get_last_insn();
  end_sequence();

  emit_insn_before (seq1, insns[1]);

  // After the out-of-bounds test and corresponding branch, use a
  // 16-bit index.  If QImode is used, extend it to HImode first.
  // This will replace insns[4].

  start_sequence();

  if (QImode == mode)
    reg = force_reg (HImode, gen_rtx_fmt_e (code, HImode, reg));

  rtx pat_4 = AVR_3_BYTE_PC
    ? gen_movhi (xop[7], reg)
    : gen_addhi3 (xop[7], reg, gen_rtx_LABEL_REF (VOIDmode, xop[3]));

  emit_insn (pat_4);

  seq2 = get_insns();
  last2 = get_last_insn();
  end_sequence();

  emit_insn_after (seq2, insns[4]);

  if (dump_file)
    {
      fprintf (dump_file, ";; New insns: ");

      for (rtx_insn *insn = seq1; ; insn = NEXT_INSN (insn))
        {
          fprintf (dump_file, "%d, ", INSN_UID (insn));
          if (insn == last1)
            break;
        }
      for (rtx_insn *insn = seq2; ; insn = NEXT_INSN (insn))
        {
          fprintf (dump_file, "%d%s", INSN_UID (insn),
                   insn == last2 ? ".\n\n" : ", ");
          if (insn == last2)
            break;
        }

      fprintf (dump_file, ";; Deleting insns: %d, %d, %d.\n\n",
               INSN_UID (insns[1]), INSN_UID (insns[2]), INSN_UID (insns[4]));
    }

  // Pseudo-delete the SImode and subreg of SImode insns.  We don't care
  // about the extension insns[0]: Its result is now unused and other
  // passes will clean it up.

  SET_INSN_DELETED (insns[1]);
  SET_INSN_DELETED (insns[2]);
  SET_INSN_DELETED (insns[4]);
}
661
662
663 void
664 avr_pass_casesi::avr_rest_of_handle_casesi (function *func)
665 {
666 basic_block bb;
667
668 FOR_EACH_BB_FN (bb, func)
669 {
670 rtx_insn *insn, *insns[6];
671
672 FOR_BB_INSNS (bb, insn)
673 {
674 if (avr_is_casesi_sequence (bb, insn, insns))
675 {
676 avr_optimize_casesi (insns, recog_data.operand);
677 }
678 }
679 }
680 }
681
682
683 /* Set `avr_arch' as specified by `-mmcu='.
684 Return true on success. */
685
686 static bool
687 avr_set_core_architecture (void)
688 {
689 /* Search for mcu core architecture. */
690
691 if (!avr_mmcu)
692 avr_mmcu = AVR_MMCU_DEFAULT;
693
694 avr_arch = &avr_arch_types[0];
695
696 for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
697 {
698 if (NULL == mcu->name)
699 {
700 /* Reached the end of `avr_mcu_types'. This should actually never
701 happen as options are provided by device-specs. It could be a
702 typo in a device-specs or calling the compiler proper directly
703 with -mmcu=<device>. */
704
705 error ("unknown core architecture %qs specified with %qs",
706 avr_mmcu, "-mmcu=");
707 avr_inform_core_architectures ();
708 break;
709 }
710 else if (0 == strcmp (mcu->name, avr_mmcu)
711 // Is this a proper architecture ?
712 && NULL == mcu->macro)
713 {
714 avr_arch = &avr_arch_types[mcu->arch_id];
715 if (avr_n_flash < 0)
716 avr_n_flash = 1 + (mcu->flash_size - 1) / 0x10000;
717
718 return true;
719 }
720 }
721
722 return false;
723 }
724
725
/* Implement `TARGET_OPTION_OVERRIDE'.  */

static void
avr_option_override (void)
{
  /* Disable -fdelete-null-pointer-checks option for AVR target.
     With this option the compiler assumes that dereferencing a null
     pointer would halt the program.  For AVR this assumption is not
     true and programs can safely dereference null pointers.  Changes
     made by this option may not work properly for AVR.  So disable
     this option.  */

  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Allow optimizer to introduce store data races.  This used to be the
     default -- it was changed because bigger targets did not see any
     performance decrease.  For the AVR though, disallowing data races
     introduces additional code in LIM and increases reg pressure.  */

  maybe_set_param_value (PARAM_ALLOW_STORE_DATA_RACES, 1,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  /* Position-independent code / executables are not supported.  */

  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (flag_pie == 1)
    warning (OPT_fpie, "-fpie is not supported");
  if (flag_pie == 2)
    warning (OPT_fPIE, "-fPIE is not supported");

  /* __gcc_isr pseudo instruction needs assembler support.  */
#if !defined (HAVE_AS_AVR_MGCCISR_OPTION)
  avr_gasisr_prologues = 0;
#endif

  if (!avr_set_core_architecture())
    return;

  /* RAM addresses of some SFRs common to all devices in respective arch. */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
  avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
807
/* Function to set up the backend function structure.  Allocation is
   zero-initialized, so all machine_function fields start out as 0.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
815
816
817 /* Implement `INIT_EXPANDERS'. */
818 /* The function works like a singleton. */
819
820 void
821 avr_init_expanders (void)
822 {
823 for (int regno = 0; regno < 32; regno ++)
824 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
825
826 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
827 tmp_reg_rtx = all_regs_rtx[AVR_TMP_REGNO];
828 zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];
829
830 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
831
832 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
833 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
834 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
835 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
836 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
837
838 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
839 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
840
841 /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
842 to be present */
843 if (AVR_TINY)
844 avr_have_dimode = false;
845 }
846
847
848 /* Implement `REGNO_REG_CLASS'. */
849 /* Return register class for register R. */
850
851 enum reg_class
852 avr_regno_reg_class (int r)
853 {
854 static const enum reg_class reg_class_tab[] =
855 {
856 R0_REG,
857 /* r1 - r15 */
858 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
859 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
860 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
861 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
862 /* r16 - r23 */
863 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
864 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
865 /* r24, r25 */
866 ADDW_REGS, ADDW_REGS,
867 /* X: r26, 27 */
868 POINTER_X_REGS, POINTER_X_REGS,
869 /* Y: r28, r29 */
870 POINTER_Y_REGS, POINTER_Y_REGS,
871 /* Z: r30, r31 */
872 POINTER_Z_REGS, POINTER_Z_REGS,
873 /* SP: SPL, SPH */
874 STACK_REG, STACK_REG
875 };
876
877 if (r <= 33)
878 return reg_class_tab[r];
879
880 return ALL_REGS;
881 }
882
883
884 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
885
886 static bool
887 avr_scalar_mode_supported_p (scalar_mode mode)
888 {
889 if (ALL_FIXED_POINT_MODE_P (mode))
890 return true;
891
892 if (PSImode == mode)
893 return true;
894
895 return default_scalar_mode_supported_p (mode);
896 }
897
898
899 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
900
901 static bool
902 avr_decl_flash_p (tree decl)
903 {
904 if (TREE_CODE (decl) != VAR_DECL
905 || TREE_TYPE (decl) == error_mark_node)
906 {
907 return false;
908 }
909
910 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
911 }
912
913
914 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
915 address space and FALSE, otherwise. */
916
917 static bool
918 avr_decl_memx_p (tree decl)
919 {
920 if (TREE_CODE (decl) != VAR_DECL
921 || TREE_TYPE (decl) == error_mark_node)
922 {
923 return false;
924 }
925
926 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
927 }
928
929
930 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
931
932 bool
933 avr_mem_flash_p (rtx x)
934 {
935 return (MEM_P (x)
936 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
937 }
938
939
940 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
941 address space and FALSE, otherwise. */
942
943 bool
944 avr_mem_memx_p (rtx x)
945 {
946 return (MEM_P (x)
947 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
948 }
949
950
951 /* A helper for the subsequent function attribute used to dig for
952 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
953
954 static inline int
955 avr_lookup_function_attribute1 (const_tree func, const char *name)
956 {
957 if (FUNCTION_DECL == TREE_CODE (func))
958 {
959 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
960 {
961 return true;
962 }
963
964 func = TREE_TYPE (func);
965 }
966
967 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
968 || TREE_CODE (func) == METHOD_TYPE);
969
970 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
971 }
972
/* Return nonzero if FUNC is a naked function, i.e. carries the
   "naked" attribute.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function, i.e. carries the
   "OS_task" attribute.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function, i.e. carries the
   "OS_main" attribute.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}


/* Return nonzero if FUNC is a no_gccisr function as specified
   by the "no_gccisr" attribute.  */

static int
avr_no_gccisr_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "no_gccisr");
}
1024
1025 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
/* Sanity checking for above function attributes.  */
1027
1028 static void
1029 avr_set_current_function (tree decl)
1030 {
1031 location_t loc;
1032 const char *isr;
1033
1034 if (decl == NULL_TREE
1035 || current_function_decl == NULL_TREE
1036 || current_function_decl == error_mark_node
1037 || ! cfun->machine
1038 || cfun->machine->attributes_checked_p)
1039 return;
1040
1041 loc = DECL_SOURCE_LOCATION (decl);
1042
1043 cfun->machine->is_naked = avr_naked_function_p (decl);
1044 cfun->machine->is_signal = avr_signal_function_p (decl);
1045 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
1046 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
1047 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
1048 cfun->machine->is_no_gccisr = avr_no_gccisr_function_p (decl);
1049
1050 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
1051
1052 /* Too much attributes make no sense as they request conflicting features. */
1053
1054 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
1055 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
1056 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
1057 " exclusive", "OS_task", "OS_main", isr);
1058
1059 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
1060
1061 if (cfun->machine->is_naked
1062 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1063 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
1064 " no effect on %qs function", "OS_task", "OS_main", "naked");
1065
1066 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1067 {
1068 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
1069 tree ret = TREE_TYPE (TREE_TYPE (decl));
1070 const char *name;
1071
1072 name = DECL_ASSEMBLER_NAME_SET_P (decl)
1073 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
1074 : IDENTIFIER_POINTER (DECL_NAME (decl));
1075
1076 /* Skip a leading '*' that might still prefix the assembler name,
1077 e.g. in non-LTO runs. */
1078
1079 name = default_strip_name_encoding (name);
1080
1081 /* Interrupt handlers must be void __vector (void) functions. */
1082
1083 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
1084 error_at (loc, "%qs function cannot have arguments", isr);
1085
1086 if (TREE_CODE (ret) != VOID_TYPE)
1087 error_at (loc, "%qs function cannot return a value", isr);
1088
1089 #if defined WITH_AVRLIBC
1090 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
1091 using this when it switched from SIGNAL and INTERRUPT to ISR. */
1092
1093 if (cfun->machine->is_interrupt)
1094 cfun->machine->is_signal = 0;
1095
1096 /* If the function has the 'signal' or 'interrupt' attribute, ensure
1097 that the name of the function is "__vector_NN" so as to catch
1098 when the user misspells the vector name. */
1099
1100 if (!STR_PREFIX_P (name, "__vector"))
1101 warning_at (loc, OPT_Wmisspelled_isr, "%qs appears to be a misspelled "
1102 "%qs handler, missing %<__vector%> prefix", name, isr);
1103 #endif // AVR-LibC naming conventions
1104 }
1105
1106 #if defined WITH_AVRLIBC
1107 // Common problem is using "ISR" without first including avr/interrupt.h.
1108 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
1109 name = default_strip_name_encoding (name);
1110 if (0 == strcmp ("ISR", name)
1111 || 0 == strcmp ("INTERRUPT", name)
1112 || 0 == strcmp ("SIGNAL", name))
1113 {
1114 warning_at (loc, OPT_Wmisspelled_isr, "%qs is a reserved identifier"
1115 " in AVR-LibC. Consider %<#include <avr/interrupt.h>%>"
1116 " before using the %qs macro", name, name);
1117 }
1118 #endif // AVR-LibC naming conventions
1119
1120 /* Don't print the above diagnostics more than once. */
1121
1122 cfun->machine->attributes_checked_p = 1;
1123 }
1124
1125
1126 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
1127
1128 int
1129 avr_accumulate_outgoing_args (void)
1130 {
1131 if (!cfun)
1132 return TARGET_ACCUMULATE_OUTGOING_ARGS;
1133
1134 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
1135 what offset is correct. In some cases it is relative to
1136 virtual_outgoing_args_rtx and in others it is relative to
1137 virtual_stack_vars_rtx. For example code see
1138 gcc.c-torture/execute/built-in-setjmp.c
1139 gcc.c-torture/execute/builtins/sprintf-chk.c */
1140
1141 return (TARGET_ACCUMULATE_OUTGOING_ARGS
1142 && !(cfun->calls_setjmp
1143 || cfun->has_nonlocal_label));
1144 }
1145
1146
1147 /* Report contribution of accumulated outgoing arguments to stack size. */
1148
1149 static inline int
1150 avr_outgoing_args_size (void)
1151 {
1152 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
1153 }
1154
1155
/* Implement `STARTING_FRAME_OFFSET'.  */
/* Offset from the frame pointer register to the first stack slot that
   contains a variable living in the frame:  one byte past any
   accumulated outgoing argument area.  */

int
avr_starting_frame_offset (void)
{
  int offset = avr_outgoing_args_size ();
  return offset + 1;
}
1165
1166
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.
   SET may be NULL if only the count is wanted.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (int reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* A non-leaf interrupt/signal handler must also save all call-used
         registers that are ever live, since a callee may clobber them.  */

      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == REG_Y + 1))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
1211
1212
1213 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
1214
1215 static bool
1216 avr_allocate_stack_slots_for_args (void)
1217 {
1218 return !cfun->machine->is_naked;
1219 }
1220
1221
1222 /* Return true if register FROM can be eliminated via register TO. */
1223
1224 static bool
1225 avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1226 {
1227 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
1228 || !frame_pointer_needed);
1229 }
1230
1231
1232 /* Implement `TARGET_WARN_FUNC_RETURN'. */
1233
1234 static bool
1235 avr_warn_func_return (tree decl)
1236 {
1237 /* Naked functions are implemented entirely in assembly, including the
1238 return sequence, so suppress warnings about this. */
1239
1240 return !avr_naked_function_p (decl);
1241 }
1242
1243 /* Compute offset between arg_pointer and frame_pointer. */
1244
1245 int
1246 avr_initial_elimination_offset (int from, int to)
1247 {
1248 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1249 return 0;
1250 else
1251 {
1252 int offset = frame_pointer_needed ? 2 : 0;
1253 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
1254
1255 // If FROM is ARG_POINTER_REGNUM, we are not in an ISR as ISRs
1256 // might not have arguments. Hence the following is not affected
1257 // by gasisr prologues.
1258 offset += avr_regs_to_save (NULL);
1259 return (get_frame_size () + avr_outgoing_args_size()
1260 + avr_pc_size + 1 + offset);
1261 }
1262 }
1263
1264
/* Helper for the function below.  Build in *NODE a fixed-point type with
   machine mode MODE; SAT_P selects the saturating variant.  The type is
   byte-aligned, as usual for 8-bit AVR.  */

static void
avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
{
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  /* AVR has no alignment requirement beyond a byte.  */
  SET_TYPE_ALIGN (*node, 8);
  SET_TYPE_MODE (*node, mode);

  /* Lay out the type only after all its fields are set.  */
  layout_type (*node);
}
1281
1282
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  /* Point the global long long accum type nodes at the adjusted types.  */

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
1313
1314
1315 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
1316 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
1317 frame pointer by +STARTING_FRAME_OFFSET.
1318 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
1319 avoids creating add/sub of offset in nonlocal goto and setjmp. */
1320
1321 static rtx
1322 avr_builtin_setjmp_frame_value (void)
1323 {
1324 rtx xval = gen_reg_rtx (Pmode);
1325 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
1326 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
1327 return xval;
1328 }
1329
1330
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is return address of function.  COUNT is the frame count requested
   by __builtin_return_address; TEM is the frame base supplied by the
   middle-end.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Only the lower 2 bytes of a 3-byte PC are retrievable this way.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  /* Make sure the per-function .L__stack_usage symbol gets emitted;
     see avr_asm_function_end_prologue.  */
  cfun->machine->use_L__stack_usage = 1;

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* Rotate by 8 swaps the two bytes of the HImode value, presumably
     because the return address lies on the stack in reversed byte
     order -- TODO confirm against the call insn patterns.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
1359
1360 /* Return 1 if the function epilogue is just a single "ret". */
1361
1362 int
1363 avr_simple_epilogue (void)
1364 {
1365 return (! frame_pointer_needed
1366 && get_frame_size () == 0
1367 && avr_outgoing_args_size() == 0
1368 && avr_regs_to_save (NULL) == 0
1369 && ! cfun->machine->is_interrupt
1370 && ! cfun->machine->is_signal
1371 && ! cfun->machine->is_naked
1372 && ! TREE_THIS_VOLATILE (current_function_decl));
1373 }
1374
/* This function checks sequence of live registers.  Return the number of
   live callee-saved registers (including the Y pair) provided they form
   one contiguous run ending at the frame pointer; return 0 otherwise.
   The result feeds gen_call_prologue_saves / gen_epilogue_restores in
   avr_prologue_setup_frame and avr_expand_epilogue.  */

static int
sequent_regs_live (void)
{
  int live_seq = 0;  /* Total number of live regs counted.  */
  int cur_seq = 0;   /* Length of the current contiguous run.  */

  for (int reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y + 1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* With a frame pointer, the Y pair is saved unconditionally.  */
      cur_seq += 2;
      live_seq += 2;
    }

  /* Only when the final run covers ALL live regs is the set sequential.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
1433
namespace {

/* Metadata for the pre-prologue/epilogue RTL pass below.  The empty
   name is patched in the pass constructor.  */

static const pass_data avr_pass_data_pre_proep =
{
  RTL_PASS, // type
  "", // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN, // tv_id
  0, // properties_required
  0, // properties_provided
  0, // properties_destroyed
  0, // todo_flags_start
  0 // todo_flags_finish
};


/* RTL pass deciding, per ISR, whether GAS may generate parts of the
   prologue/epilogue (the __gcc_isr mechanism, -mgas-isr-prologues).  */

class avr_pass_pre_proep : public rtl_opt_pass
{
public:
  avr_pass_pre_proep (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_pre_proep, ctxt)
  {
    this->name = name;
  }

  /* Worker: sets fun->machine->gasisr.maybe when nothing prohibits it.  */
  void compute_maybe_gasisr (function*);

  virtual unsigned int execute (function *fun)
  {
    // NOTE(review): FUN and cfun appear to denote the same function here;
    // the mixed use of fun-> and cfun-> below looks equivalent -- confirm.
    if (avr_gasisr_prologues
        // Whether this function is an ISR worth scanning at all.
        && !fun->machine->is_no_gccisr
        && (fun->machine->is_interrupt
            || fun->machine->is_signal)
        && !cfun->machine->is_naked
        // Paranoia: Non-local gotos and labels that might escape.
        && !cfun->calls_setjmp
        && !cfun->has_nonlocal_label
        && !cfun->has_forced_label_in_static)
      {
        compute_maybe_gasisr (fun);
      }

    return 0;
  }

}; // avr_pass_pre_proep

} // anon namespace
1482
1483 rtl_opt_pass*
1484 make_avr_pass_pre_proep (gcc::context *ctxt)
1485 {
1486 return new avr_pass_pre_proep (ctxt, "avr-pre-proep");
1487 }
1488
1489
1490 /* Set fun->machine->gasisr.maybe provided we don't find anything that
1491 prohibits GAS generating parts of ISR prologues / epilogues for us. */
1492
1493 void
1494 avr_pass_pre_proep::compute_maybe_gasisr (function *fun)
1495 {
1496 // Don't use BB iterators so that we see JUMP_TABLE_DATA.
1497
1498 for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
1499 {
1500 // Transparent calls always use [R]CALL and are filtered out by GAS.
1501 // ISRs don't use -mcall-prologues, hence what remains to be filtered
1502 // out are open coded (tail) calls.
1503
1504 if (CALL_P (insn))
1505 return;
1506
1507 // __tablejump2__ clobbers something and is targeted by JMP so
1508 // that GAS won't see its usage.
1509
1510 if (AVR_HAVE_JMP_CALL
1511 && JUMP_TABLE_DATA_P (insn))
1512 return;
1513
1514 // Non-local gotos not seen in *FUN.
1515
1516 if (JUMP_P (insn)
1517 && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1518 return;
1519 }
1520
1521 fun->machine->gasisr.maybe = 1;
1522 }
1523
1524
1525 /* Obtain the length sequence of insns. */
1526
1527 int
1528 get_sequence_length (rtx_insn *insns)
1529 {
1530 int length = 0;
1531
1532 for (rtx_insn *insn = insns; insn; insn = NEXT_INSN (insn))
1533 length += get_attr_length (insn);
1534
1535 return length;
1536 }
1537
1538
1539 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
1540
1541 rtx
1542 avr_incoming_return_addr_rtx (void)
1543 {
1544 /* The return address is at the top of the stack. Note that the push
1545 was via post-decrement, which means the actual address is off by one. */
1546 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
1547 }
1548
1549
/* Unset a bit in *SET.  If successful, return the respective bit number.
   Otherwise, return -1 and *SET is unaltered.  Used by avr_expand_prologue
   to pick the optional extra register handled by the GAS __gcc_isr
   prologue machinery.  */

static int
avr_hregs_split_reg (HARD_REG_SET *set)
{
  for (int regno = 0; regno < 32; regno++)
    if (TEST_HARD_REG_BIT (*set, regno))
      {
        // Don't remove a register from *SET which might indicate that
        // some RAMP* register might need ISR prologue / epilogue treatment.
        // A complete X/Y/Z pair in *SET is such an indication, cf. the
        // RAMP* handling in avr_expand_prologue.

        if (AVR_HAVE_RAMPX
            && (REG_X == regno || REG_X + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_X)
            && TEST_HARD_REG_BIT (*set, REG_X + 1))
          continue;

        if (AVR_HAVE_RAMPY
            && !frame_pointer_needed
            && (REG_Y == regno || REG_Y + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_Y)
            && TEST_HARD_REG_BIT (*set, REG_Y + 1))
          continue;

        if (AVR_HAVE_RAMPZ
            && (REG_Z == regno || REG_Z + 1 == regno)
            && TEST_HARD_REG_BIT (*set, REG_Z)
            && TEST_HARD_REG_BIT (*set, REG_Z + 1))
          continue;

        CLEAR_HARD_REG_BIT (*set, regno);
        return regno;
      }

  return -1;
}
1587
1588
1589 /* Helper for expand_prologue. Emit a push of a byte register. */
1590
1591 static void
1592 emit_push_byte (unsigned regno, bool frame_related_p)
1593 {
1594 rtx mem, reg;
1595 rtx_insn *insn;
1596
1597 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
1598 mem = gen_frame_mem (QImode, mem);
1599 reg = gen_rtx_REG (QImode, regno);
1600
1601 insn = emit_insn (gen_rtx_SET (mem, reg));
1602 if (frame_related_p)
1603 RTX_FRAME_RELATED_P (insn) = 1;
1604
1605 cfun->machine->stack_usage++;
1606 }
1607
1608
1609 /* Helper for expand_prologue. Emit a push of a SFR via register TREG.
1610 SFR is a MEM representing the memory location of the SFR.
1611 If CLR_P then clear the SFR after the push using zero_reg. */
1612
1613 static void
1614 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p, int treg)
1615 {
1616 rtx_insn *insn;
1617
1618 gcc_assert (MEM_P (sfr));
1619
1620 /* IN treg, IO(SFR) */
1621 insn = emit_move_insn (all_regs_rtx[treg], sfr);
1622 if (frame_related_p)
1623 RTX_FRAME_RELATED_P (insn) = 1;
1624
1625 /* PUSH treg */
1626 emit_push_byte (treg, frame_related_p);
1627
1628 if (clr_p)
1629 {
1630 /* OUT IO(SFR), __zero_reg__ */
1631 insn = emit_move_insn (sfr, const0_rtx);
1632 if (frame_related_p)
1633 RTX_FRAME_RELATED_P (insn) = 1;
1634 }
1635 }
1636
/* Helper for avr_expand_prologue.  Push the registers in SET (as computed
   by avr_regs_to_save), set up the frame pointer and allocate SIZE bytes
   of stack frame -- either via the libgcc __prologue_saves__ helper
   (-mcall-prologues) or by open coded pushes plus stack adjustment.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main
                   && !AVR_TINY);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET ((frame_pointer_needed
                                  ? frame_pointer_rtx
                                  : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      for (int reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer. These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             the optimum method depends on function type, stack and
             frame size.  To avoid a complex logic, both methods are
             tested and shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed.
             We use the X register as scratch.  This is safe because X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, plus_constant (Pmode, fp,
                                                            -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          int n_rcall = size / (AVR_3_BYTE_PC ? 3 : 2);

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode)
              // Don't use more than 3 RCALLs.
              && n_rcall <= 3)
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************  Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1906
1907
/* Output function prologue.  Expand the RTL prologue for the current
   function:  handle the ISR preamble (SREG, RAMP* SFRs, zero/tmp regs or
   the GAS __gcc_isr mechanism), then save registers and set up the frame
   via avr_prologue_setup_frame.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      int treg = AVR_TMP_REGNO;
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      if (cfun->machine->gasisr.maybe)
        {
          /* Let GAS PR21472 emit prologue preamble for us which handles SREG,
             ZERO_REG and TMP_REG and one additional, optional register for
             us in an optimal way.  This even scans through inline asm.  */

          cfun->machine->gasisr.yes = 1;

          // The optional reg or TMP_REG if we don't need one.  If we need one,
          // remove that reg from SET so that it's not pushed / popped twice.
          // We also use it below instead of TMP_REG in some places.

          treg = avr_hregs_split_reg (&set);
          if (treg < 0)
            treg = AVR_TMP_REGNO;
          cfun->machine->gasisr.regno = treg;

          // The worst case of pushes.  The exact number can be inferred
          // at assembly time by magic expression __gcc_isr.n_pushed.
          cfun->machine->stack_usage += 3 + (treg != AVR_TMP_REGNO);

          // Emit a Prologue chunk.  Epilogue chunk(s) might follow.
          // The final Done chunk is emitted by final postscan.
          emit_insn (gen_gasisr (GEN_INT (GASISR_Prologue), GEN_INT (treg)));
        }
      else // !TARGET_GASISR_PROLOGUES: Classic, dumb prologue preamble.
        {
          /* Push zero reg.  */
          emit_push_byte (AVR_ZERO_REGNO, true);

          /* Push tmp reg.  */
          emit_push_byte (AVR_TMP_REGNO, true);

          /* Push SREG.  */
          /* ??? There's no dwarf2 column reserved for SREG.  */
          emit_push_sfr (sreg_rtx, false, false /* clr */, AVR_TMP_REGNO);

          /* Clear zero reg.  */
          emit_move_insn (zero_reg_rtx, const0_rtx);

          /* Prevent any attempt to delete the setting of ZERO_REG!  */
          emit_use (zero_reg_rtx);
        }

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame */, true /* clr */, treg);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame */, true /* clr */, treg);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame */, true /* clr */, treg);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame */, AVR_HAVE_RAMPD, treg);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size
      = cfun->machine->stack_usage + INCOMING_FRAME_SP_OFFSET;
}
2014
2015
/* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
/* Output summary at end of function prologue:  prologue kind, frame and
   stack sizes, and the .L__stack_usage symbol that avr_return_addr_rtx
   relies on.  */

static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    fprintf (file, "/* outgoing args size = %d */\n",
             avr_outgoing_args_size());

  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
           get_frame_size());

  if (!cfun->machine->gasisr.yes)
    {
      fprintf (file, "/* stack size = %d */\n", cfun->machine->stack_usage);
      // Create symbol stack offset so all functions have it. Add 1 to stack
      // usage for offset so that SP + .L__stack_offset = return address.
      fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
    }
  else
    {
      int used_by_gasisr = 3 + (cfun->machine->gasisr.regno != AVR_TMP_REGNO);
      int to = cfun->machine->stack_usage;
      int from = to - used_by_gasisr;
      // Number of pushed regs is only known at assembly-time.
      fprintf (file, "/* stack size = %d...%d */\n", from , to);
      fprintf (file, ".L__stack_usage = %d + __gcc_isr.n_pushed\n", from);
    }
}
2064
2065
2066 /* Implement `EPILOGUE_USES'. */
2067
2068 int
2069 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
2070 {
2071 if (reload_completed
2072 && cfun->machine
2073 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
2074 return 1;
2075 return 0;
2076 }
2077
2078 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
2079
2080 static void
2081 emit_pop_byte (unsigned regno)
2082 {
2083 rtx mem, reg;
2084
2085 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
2086 mem = gen_frame_mem (QImode, mem);
2087 reg = gen_rtx_REG (QImode, regno);
2088
2089 emit_insn (gen_rtx_SET (reg, mem));
2090 }
2091
2092 /* Output RTL epilogue. */
2093
void
avr_expand_epilogue (bool sibcall_p)
{
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  /* ISR epilogues additionally restore SREG, RAMPX/Y/Z/D, tmp and zero
     registers, see below.  */
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* With -mcall-prologues, and when permitted, jump into a library
     epilogue that restores the contiguous run of live registers
     (see gen_epilogue_restores below).  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && !AVR_TINY);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame. */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest. */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles). */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation. */

      /* Clamp the adjustment to what is representable in the mode used
         for the frame pointer arithmetic (QImode with 8-bit SP).  */
      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer. */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer. See avr_expand_prologue for
         rationale for not using pophi. */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers. */

  /* TREG is the scratch register used below to reload SFRs; with GAS
     generated ISR pro-/epilogues it is the GAS-chosen register instead
     of the fixed tmp register.  */
  int treg = AVR_TMP_REGNO;

  if (isr_p
      && cfun->machine->gasisr.yes)
    {
      treg = cfun->machine->gasisr.regno;
      CLEAR_HARD_REG_BIT (set, treg);
    }

  for (int reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue. */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampz_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampy_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (treg);
          emit_move_insn (rampx_rtx, all_regs_rtx[treg]);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (treg);
          emit_move_insn (rampd_rtx, all_regs_rtx[treg]);
        }

      if (cfun->machine->gasisr.yes)
        {
          // Emit an Epilogue chunk.
          emit_insn (gen_gasisr (GEN_INT (GASISR_Epilogue),
                                 GEN_INT (cfun->machine->gasisr.regno)));
        }
      else // !TARGET_GASISR_PROLOGUES
        {
          /* Restore SREG using tmp_reg as scratch. */

          emit_pop_byte (AVR_TMP_REGNO);
          emit_move_insn (sreg_rtx, tmp_reg_rtx);

          /* Restore tmp REG. */
          emit_pop_byte (AVR_TMP_REGNO);

          /* Restore zero REG. */
          emit_pop_byte (AVR_ZERO_REGNO);
        }
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
2311
2312
2313 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
2314
2315 static void
2316 avr_asm_function_begin_epilogue (FILE *file)
2317 {
2318 app_disable();
2319 fprintf (file, "/* epilogue start */\n");
2320 }
2321
2322
/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'. */
2324
2325 static bool
2326 avr_cannot_modify_jumps_p (void)
2327 {
2328 /* Naked Functions must not have any instructions after
2329 their epilogue, see PR42240 */
2330
2331 if (reload_completed
2332 && cfun->machine
2333 && cfun->machine->is_naked)
2334 {
2335 return true;
2336 }
2337
2338 return false;
2339 }
2340
2341
2342 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
2343
2344 static bool
2345 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
2346 {
2347 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
2348 This hook just serves to hack around PR rtl-optimization/52543 by
2349 claiming that non-generic addresses were mode-dependent so that
2350 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
2351 RTXes to probe SET and MEM costs and assumes that MEM is always in the
2352 generic address space which is not true. */
2353
2354 return !ADDR_SPACE_GENERIC_P (as);
2355 }
2356
2357
2358 /* Return true if rtx X is a CONST_INT, CONST or SYMBOL_REF
2359 address with the `absdata' variable attribute, i.e. respective
2360 data can be read / written by LDS / STS instruction.
2361 This is used only for AVR_TINY. */
2362
2363 static bool
2364 avr_address_tiny_absdata_p (rtx x, machine_mode mode)
2365 {
2366 if (CONST == GET_CODE (x))
2367 x = XEXP (XEXP (x, 0), 0);
2368
2369 if (SYMBOL_REF_P (x))
2370 return SYMBOL_REF_FLAGS (x) & AVR_SYMBOL_FLAG_TINY_ABSDATA;
2371
2372 if (CONST_INT_P (x)
2373 && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)))
2374 return true;
2375
2376 return false;
2377 }
2378
2379
2380 /* Helper function for `avr_legitimate_address_p'. */
2381
2382 static inline bool
2383 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
2384 RTX_CODE outer_code, bool strict)
2385 {
2386 return (REG_P (reg)
2387 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
2388 as, outer_code, UNKNOWN)
2389 || (!strict
2390 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
2391 }
2392
2393
2394 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
2395 machine for a memory operand of mode MODE. */
2396
static bool
avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  /* Plain constant addresses are legitimate by default; the AVR_TINY
     case below narrows this.  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* NOTE(review): in strict mode, accesses wider than 4 bytes
         through X are rejected — presumably because X lacks
         displacement addressing; confirm against avr.md.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        /* base + non-negative constant displacement.  */
        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* Displacement must fit the LD/ST offset range for MODE.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Out-of-range offsets from the frame pointer are
                   accepted here; reload fixes them up later.  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (AVR_TINY
      && CONSTANT_ADDRESS_P (x))
    {
      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf. */

      ok = avr_address_tiny_absdata_p (x, mode);
    }

  /* Optional debug dump controlled by -mlog=.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
2489
2490
2491 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
2492 now only a helper for avr_addr_space_legitimize_address. */
2493 /* Attempts to replace X with a valid
2494 memory address for an operand of mode MODE */
2495
static rtx
avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (AVR_TINY)
    {
      /* Constant addresses outside the LDS/STS-reachable range must go
         through a pointer register.  */
      if (CONSTANT_ADDRESS_P (x)
          && ! avr_address_tiny_absdata_p (x, mode))
        {
          x = force_reg (Pmode, x);
        }
    }

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      /* reg + reg has no direct addressing mode: compute into a reg.  */
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          /* reg + const with a displacement beyond what LD/ST can encode
             is likewise computed into a register — except off the frame
             pointer, which reload handles.  */
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  /* Optional debug dump controlled by -mlog=.  */
  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
2539
2540
2541 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
2542 /* This will allow register R26/27 to be used where it is no worse than normal
2543 base pointers R28/29 or R30/31. For example, if base offset is greater
2544 than 63 bytes or for R++ or --R addressing. */
2545
rtx
avr_legitimize_reload_address (rtx *px, machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  rtx x = *px;

  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  /* R++ / --R addressing: reload the base into any pointer register.  */
  if (1 && (GET_CODE (x) == POST_INC
            || GET_CODE (x) == PRE_DEC))
    {
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  /* base + positive constant displacement.  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      /* Does the displacement fit the LD/ST offset range for MODE?  */
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              /* Base pseudo lives in memory: reload its address into a
                 pointer register, then the value into a base pointer
                 register (Y or Z) that supports displacements.  */
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           1, (enum reload_type) addr_type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, (enum reload_type) type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
        }
      else if (! (frame_pointer_needed
                  && XEXP (x, 0) == frame_pointer_rtx))
        {
          /* Offset too big: reload the whole address, allowing X as
             base (it is no worse than Y/Z here).  */
          push_reload (x, NULL_RTX, px, NULL,
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                       opnum, (enum reload_type) type);

          if (avr_log.legitimize_reload_address)
            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
                       POINTER_REGS, x, NULL_RTX);

          return x;
        }
    }

  /* NULL means: let the generic reload machinery handle it.  */
  return NULL_RTX;
}
2622
2623
2624 /* Helper function to print assembler resp. track instruction
2625 sequence lengths. Always return "".
2626
2627 If PLEN == NULL:
2628 Output assembler code from template TPL with operands supplied
2629 by OPERANDS. This is just forwarding to output_asm_insn.
2630
2631 If PLEN != NULL:
2632 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2633 If N_WORDS < 0 Set *PLEN to -N_WORDS.
2634 Don't output anything.
2635 */
2636
2637 static const char*
2638 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2639 {
2640 if (NULL == plen)
2641 {
2642 output_asm_insn (tpl, operands);
2643 }
2644 else
2645 {
2646 if (n_words < 0)
2647 *plen = -n_words;
2648 else
2649 *plen += n_words;
2650 }
2651
2652 return "";
2653 }
2654
2655
2656 /* Return a pointer register name as a string. */
2657
2658 static const char*
2659 ptrreg_to_str (int regno)
2660 {
2661 switch (regno)
2662 {
2663 case REG_X: return "X";
2664 case REG_Y: return "Y";
2665 case REG_Z: return "Z";
2666 default:
2667 output_operand_lossage ("address operand requires constraint for"
2668 " X, Y, or Z register");
2669 }
2670 return NULL;
2671 }
2672
2673 /* Return the condition name as a string.
2674 Used in conditional jump constructing */
2675
2676 static const char*
2677 cond_string (enum rtx_code code)
2678 {
2679 switch (code)
2680 {
2681 case NE:
2682 return "ne";
2683 case EQ:
2684 return "eq";
2685 case GE:
2686 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2687 return "pl";
2688 else
2689 return "ge";
2690 case LT:
2691 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2692 return "mi";
2693 else
2694 return "lt";
2695 case GEU:
2696 return "sh";
2697 case LTU:
2698 return "lo";
2699 default:
2700 gcc_unreachable ();
2701 }
2702
2703 return "";
2704 }
2705
2706
2707 /* Return true if rtx X is a CONST or SYMBOL_REF with progmem.
2708 This must be used for AVR_TINY only because on other cores
2709 the flash memory is not visible in the RAM address range and
2710 cannot be read by, say, LD instruction. */
2711
2712 static bool
2713 avr_address_tiny_pm_p (rtx x)
2714 {
2715 if (CONST == GET_CODE (x))
2716 x = XEXP (XEXP (x, 0), 0);
2717
2718 if (SYMBOL_REF_P (x))
2719 return SYMBOL_REF_FLAGS (x) & AVR_SYMBOL_FLAG_TINY_PM;
2720
2721 return false;
2722 }
2723
2724 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2725 /* Output ADDR to FILE as address. */
2726
static void
avr_print_operand_address (FILE *file, machine_mode /*mode*/, rtx addr)
{
  /* On avrtiny, flash is visible in the RAM address range at an offset;
     shift progmem symbols accordingly.  */
  if (AVR_TINY
      && avr_address_tiny_pm_p (addr))
    {
      addr = plus_constant (Pmode, addr, avr_arch->flash_pm_offset);
    }

  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "%s", ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      /* Code addresses are printed inside gs() so the assembler/linker
         can turn them into word addresses (and stubs if needed).  */
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
            {
              /* Assembler gs() will implant word address. Make offset
                 a byte offset inside gs() for assembler. This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas. For 128K and smaller devices this is ok.
                 For large devices it will create a trampoline to offset
                 from symbol which may not be what the user really wanted. */

              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x, 0));
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
                       2 * INTVAL (XEXP (x, 1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol maybe incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf (stderr, "\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
2788
2789
2790 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2791
static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  /* Only '~' (rjmp/jmp selection) and '!' (eicall/eijmp selection)
     are recognized as punctuation codes; see avr_print_operand.  */
  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
2797
2798
2799 /* Implement `TARGET_PRINT_OPERAND'. */
2800 /* Output X as assembler operand to file FILE.
2801 For a description of supported %-codes, see top of avr.md. */
2802
static void
avr_print_operand (FILE *file, rtx x, int code)
{
  /* ABCD / EF / IJ select a byte offset within a multi-byte operand.  */
  int abcd = 0, ef = 0, ij = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';
  else if (code == 'E' || code == 'F')
    ef = code - 'E';
  else if (code == 'I' || code == 'J')
    ij = code - 'I';

  if (code == '~')
    {
      /* Devices without JMP/CALL use the relative forms RJMP/RCALL.  */
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      /* Devices with EIJMP/EICALL get the extended-indirect prefix.  */
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      /* %T stores reg + bit number across two calls (static state):
         first the register operand, then the const_int bit position.  */
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (code == 'E' || code == 'F')
    {
      rtx op = XEXP (x, 0);
      fprintf (file, "%s", reg_names[REGNO (op) + ef]);
    }
  else if (code == 'I' || code == 'J')
    {
      rtx op = XEXP (XEXP (x, 0), 0);
      fprintf (file, "%s", reg_names[REGNO (op) + ij]);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else if (code == 'r' && REGNO (x) < 32)
        fprintf (file, "%d", (int) REGNO (x));   /* Raw register number.  */
      else
        fprintf (file, "%s", reg_names[REGNO (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          /* %i on an I/O address: print the well-known SFR name if the
             address matches one, else the raw I/O address.  */
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
          else
            {
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          /* %o: just the displacement of a (reg+disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'b')
        {
          /* %b: just the base of a (reg+disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            /* X, Y, Z */
            avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
          else
            avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error. Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr, 1), code);
        }
      else
        avr_print_operand_address (file, VOIDmode, addr);
    }
  else if (code == 'i')
    {
      /* %i on a symbol marked as I/O: print it shifted into I/O space.  */
      if (SYMBOL_REF_P (x) && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
        avr_print_operand_address
          (file, VOIDmode, plus_constant (HImode, x, -avr_arch->sfr_offset));
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed */
      output_addr_const (file, x);
    }
  else if (CONST_FIXED_P (x))
    {
      /* Fixed-point constants are printed as their integer bit pattern.  */
      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
      if (code != 0)
        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
                                code);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
    }
  else if (CONST_DOUBLE_P (x))
    {
      /* Only SFmode floats are expected; print the target bit pattern.  */
      long val;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error. Unknown mode:", x);
      REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    /* %k: the reversed condition of %j.  */
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, VOIDmode, x);
}
3005
3006
3007 /* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P. */
3008
3009 /* Prefer sequence of loads/stores for moves of size upto
3010 two - two pairs of load/store instructions are always better
3011 than the 5 instruction sequence for a loop (1 instruction
3012 for loop counter setup, and 4 for the body of the loop). */
3013
3014 static bool
3015 avr_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
3016 unsigned int align ATTRIBUTE_UNUSED,
3017 enum by_pieces_operation op,
3018 bool speed_p)
3019 {
3020 if (op != MOVE_BY_PIECES
3021 || (speed_p && size > MOVE_MAX_PIECES))
3022 return default_use_by_pieces_infrastructure_p (size, align, op, speed_p);
3023
3024 return size <= MOVE_MAX_PIECES;
3025 }
3026
3027
3028 /* Worker function for `NOTICE_UPDATE_CC'. */
3029 /* Update the condition code in the INSN. */
3030
void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  /* First pass: map the "dynamic" cc attributes CC_PLUS / CC_LDI,
     whose effect depends on the actual operands, to a plain CC_*.  */
  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands. */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0. */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  /* Second pass: update cc_status according to the (now plain) CC_*.  */
  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here. */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all, but it might set some registers
         that are stored in cc_status. If such a register is affected by
         the current insn, for example by means of a SET or a CLOBBER,
         then we must reset cc_status; cf. PR77326.

         Unfortunately, set_of cannot be used as reg_overlap_mentioned_p
         will abort on COMPARE (which might be found in cc_status.value1/2).
         Thus work out the registers set by the insn and regs mentioned
         in cc_status.value1/2. */

      if (cc_status.value1
          || cc_status.value2)
        {
          HARD_REG_SET regs_used;
          HARD_REG_SET regs_set;
          CLEAR_HARD_REG_SET (regs_used);

          if (cc_status.value1
              && !CONSTANT_P (cc_status.value1))
            {
              find_all_hard_regs (cc_status.value1, &regs_used);
            }

          if (cc_status.value2
              && !CONSTANT_P (cc_status.value2))
            {
              find_all_hard_regs (cc_status.value2, &regs_used);
            }

          find_all_hard_reg_sets (insn, &regs_set, false);

          if (hard_reg_set_intersect_p (regs_used, regs_set))
            {
              CC_STATUS_INIT;
            }
        }

      break; // CC_NONE

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_VZN:
      /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
         of this combination, cf. also PR61055. */
      CC_STATUS_INIT;
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE. */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state. */
      CC_STATUS_INIT;
      break;
    }
}
3172
3173 /* Choose mode for jump insn:
3174 1 - relative jump in range -63 <= x <= 62 ;
3175 2 - relative jump in range -2046 <= x <= 2045 ;
3176 3 - absolute jump (only for ATmega[16]03). */
3177
3178 int
3179 avr_jump_mode (rtx x, rtx_insn *insn)
3180 {
3181 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
3182 ? XEXP (x, 0) : x));
3183 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
3184 int jump_distance = cur_addr - dest_addr;
3185
3186 if (IN_RANGE (jump_distance, -63, 62))
3187 return 1;
3188 else if (IN_RANGE (jump_distance, -2046, 2045))
3189 return 2;
3190 else if (AVR_HAVE_JMP_CALL)
3191 return 3;
3192
3193 return 2;
3194 }
3195
3196 /* Return an AVR condition jump commands.
3197 X is a comparison RTX.
3198 LEN is a number returned by avr_jump_mode function.
3199 If REVERSE nonzero then condition code in X must be reversed. */
3200
const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  /* GT/GTU/LE/LEU have no single branch insn; they are synthesized from
     breq plus a signed/unsigned branch.  The LEN selects between a short
     branch (1), rjmp (2) and jmp (3) to reach the target.  */
  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        /* V unusable: use sign-bit branches brpl/brmi instead of
           brge/brlt.  */
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* All other conditions map directly onto br%j1 / br%k1.  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
3297
3298
3299 /* Worker function for `FINAL_PRESCAN_INSN'. */
3300 /* Output insn cost for next insn. */
3301
3302 void
3303 avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
3304 int num_operands ATTRIBUTE_UNUSED)
3305 {
3306 if (avr_log.rtx_costs)
3307 {
3308 rtx set = single_set (insn);
3309
3310 if (set)
3311 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
3312 set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)),
3313 optimize_insn_for_speed_p ()));
3314 else
3315 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
3316 rtx_cost (PATTERN (insn), VOIDmode, INSN, 0,
3317 optimize_insn_for_speed_p()));
3318 }
3319
3320 if (avr_log.insn_addresses)
3321 fprintf (asm_out_file, ";; ADDR = %d\n",
3322 (int) INSN_ADDRESSES (INSN_UID (insn)));
3323 }
3324
3325
3326 /* Implement `TARGET_ASM_FINAL_POSTSCAN_INSN'. */
3327 /* When GAS generates (parts of) ISR prologue / epilogue for us, we must
hint GAS about the end of the code to scan. There might be code located
3329 after the last epilogue. */
3330
3331 static void
3332 avr_asm_final_postscan_insn (FILE *stream, rtx_insn *insn, rtx*, int)
3333 {
3334 if (cfun->machine->gasisr.yes
3335 && !next_real_insn (insn))
3336 {
3337 app_disable();
3338 fprintf (stream, "\t__gcc_isr %d,r%d\n", GASISR_Done,
3339 cfun->machine->gasisr.regno);
3340 }
3341 }
3342
3343
3344 /* Return 0 if undefined, 1 if always true or always false. */
3345
3346 int
3347 avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
3348 {
3349 unsigned int max = (mode == QImode ? 0xff :
3350 mode == HImode ? 0xffff :
3351 mode == PSImode ? 0xffffff :
3352 mode == SImode ? 0xffffffff : 0);
3353 if (max && op && CONST_INT_P (x))
3354 {
3355 if (unsigned_condition (op) != op)
3356 max >>= 1;
3357
3358 if (max != (INTVAL (x) & max)
3359 && INTVAL (x) != 0xff)
3360 return 1;
3361 }
3362 return 0;
3363 }
3364
3365
3366 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
3367 /* Returns nonzero if REGNO is the number of a hard
3368 register in which function arguments are sometimes passed. */
3369
3370 int
3371 avr_function_arg_regno_p (int r)
3372 {
3373 return AVR_TINY ? IN_RANGE (r, 20, 25) : IN_RANGE (r, 8, 25);
3374 }
3375
3376
3377 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
3378 /* Initializing the variable cum for the state at the beginning
3379 of the argument list. */
3380
void
avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                          tree fndecl ATTRIBUTE_UNUSED)
{
  /* Number of argument registers still available: 6 (R20..R25) on
     avrtiny, 18 (R8..R25) otherwise.  */
  cum->nregs = AVR_TINY ? 6 : 18;
  cum->regno = FIRST_CUM_REG;
  /* A varargs function gets all its arguments on the stack; library
     calls (LIBNAME set) are never treated as varargs here.  */
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called.  */

  cfun->machine->sibcall_fails = 0;
}
3394
3395 /* Returns the number of registers to allocate for a function argument. */
3396
3397 static int
3398 avr_num_arg_regs (machine_mode mode, const_tree type)
3399 {
3400 int size;
3401
3402 if (mode == BLKmode)
3403 size = int_size_in_bytes (type);
3404 else
3405 size = GET_MODE_SIZE (mode);
3406
3407 /* Align all function arguments to start in even-numbered registers.
3408 Odd-sized arguments leave holes above them. */
3409
3410 return (size + 1) & ~1;
3411 }
3412
3413
3414 /* Implement `TARGET_FUNCTION_ARG'. */
3415 /* Controls whether a function argument is passed
3416 in a register, and which register. */
3417
3418 static rtx
3419 avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
3420 const_tree type, bool named ATTRIBUTE_UNUSED)
3421 {
3422 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3423 int bytes = avr_num_arg_regs (mode, type);
3424
3425 if (cum->nregs && bytes <= cum->nregs)
3426 return gen_rtx_REG (mode, cum->regno - bytes);
3427
3428 return NULL_RTX;
3429 }
3430
3431
3432 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
3433 /* Update the summarizer variable CUM to advance past an argument
3434 in the argument list. */
3435
static void
avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  /* Size of this argument, rounded up to an even number of bytes.  */
  int bytes = avr_num_arg_regs (mode, type);

  /* Argument registers are allocated downwards from FIRST_CUM_REG.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      for (int regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  /* Out of argument registers: all further arguments go on the stack.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
3483
3484 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
3485 /* Decide whether we can make a sibling call to a function. DECL is the
3486 declaration of the function being targeted by the call and EXP is the
3487 CALL_EXPR representing the call. */
3488
3489 static bool
3490 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
3491 {
3492 tree fntype_callee;
3493
3494 /* Tail-calling must fail if callee-saved regs are used to pass
3495 function args. We must not tail-call when `epilogue_restores'
3496 is used. Unfortunately, we cannot tell at this point if that
3497 actually will happen or not, and we cannot step back from
3498 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
3499
3500 if (cfun->machine->sibcall_fails
3501 || TARGET_CALL_PROLOGUES)
3502 {
3503 return false;
3504 }
3505
3506 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
3507
3508 if (decl_callee)
3509 {
3510 decl_callee = TREE_TYPE (decl_callee);
3511 }
3512 else
3513 {
3514 decl_callee = fntype_callee;
3515
3516 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
3517 && METHOD_TYPE != TREE_CODE (decl_callee))
3518 {
3519 decl_callee = TREE_TYPE (decl_callee);
3520 }
3521 }
3522
3523 /* Ensure that caller and callee have compatible epilogues */
3524
3525 if (cfun->machine->is_interrupt
3526 || cfun->machine->is_signal
3527 || cfun->machine->is_naked
3528 || avr_naked_function_p (decl_callee)
3529 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
3530 || (avr_OS_task_function_p (decl_callee)
3531 != cfun->machine->is_OS_task)
3532 || (avr_OS_main_function_p (decl_callee)
3533 != cfun->machine->is_OS_main))
3534 {
3535 return false;
3536 }
3537
3538 return true;
3539 }
3540
3541 /***********************************************************************
3542 Functions for outputting various mov's for a various modes
3543 ************************************************************************/
3544
3545 /* Return true if a value of mode MODE is read from flash by
3546 __load_* function from libgcc. */
3547
3548 bool
3549 avr_load_libgcc_p (rtx op)
3550 {
3551 machine_mode mode = GET_MODE (op);
3552 int n_bytes = GET_MODE_SIZE (mode);
3553
3554 return (n_bytes > 2
3555 && !AVR_HAVE_LPMX
3556 && avr_mem_flash_p (op));
3557 }
3558
3559 /* Return true if a value of mode MODE is read by __xload_* function. */
3560
3561 bool
3562 avr_xload_libgcc_p (machine_mode mode)
3563 {
3564 int n_bytes = GET_MODE_SIZE (mode);
3565
3566 return (n_bytes > 1
3567 || avr_n_flash > 1);
3568 }
3569
3570
/* Fixme: This is a hack because secondary reloads don't work as expected.
3572
3573 Find an unused d-register to be used as scratch in INSN.
3574 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
3575 is a register, skip all possible return values that overlap EXCLUDE.
3576 The policy for the returned register is similar to that of
3577 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
3578 of INSN.
3579
3580 Return a QImode d-register or NULL_RTX if nothing found. */
3581
static rtx
avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
{
  /* In an ISR, a call-used register is not automatically free; the
     `!isr_p' test below excludes that escape hatch.  */
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  /* The d-registers are R16...R31.  */
  for (int regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      /* Skip registers overlapping EXCLUDE and user-fixed registers.  */
      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
3622
3623
3624 /* Helper function for the next function in the case where only restricted
3625 version of LPM instruction is available. */
3626
static const char*
avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
{
  /* Operand layout as prepared by avr_out_lpm:
     xop[0] = destination register, xop[1] = address (REG Z or POST_INC
     of Z), xop[2] = the Z address register, xop[4] = "" for LPM resp.
     "e" for ELPM; xop[3] is set below.  */
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          /* Copy out of R0 unless the destination is R0 itself.  */
          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* Loading into Z itself would clobber the address mid-way;
             park the low byte on the stack instead.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm" CR_TAB
                                "push %3" CR_TAB
                                "adiw %2,1" CR_TAB
                                "%4lpm" CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm" CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1" CR_TAB
                       "%4lpm" CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if it is still needed after this insn.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      /* Byte-wise LPM through R0 with explicit Z increments.  */
      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
3719
3720
/* If PLEN == NULL: Output instructions to load a value from a memory location
3722 OP[1] in AS1 to register OP[0].
3723 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3724 Return "". */
3725
const char*
avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
{
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Stores into flash address spaces are not supported; diagnose and
     emit nothing.  */
  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  /* Operand layout, also relied upon by avr_out_lpm_no_lpmx:
     %0 = dest, %1 = address, %2 = Z register, %4 = segment value,
     later "" / "e" to select LPM vs. ELPM, %5 = tmp reg (R0),
     %6 = RAMPZ address.  */
  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          /* A scratch d-reg is available for LDI.  */
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* No scratch d-reg, but the value 1 can be built in place.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          /* No scratch d-reg: save %2, use it to load the segment
             value, then restore it.  */
          avr_asm_len ("mov %5,%2" CR_TAB
                       "ldi %2,%4" CR_TAB
                       "out %i6,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* From here on, %4 selects the ELPM flavor.  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* Destination is Z itself: buffer the low byte in R0 so the
             address is not clobbered before the high byte is read.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              /* Restore Z if it is still needed afterwards.  */
              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* The two upper destination bytes coincide with Z: buffer
             the third byte in R0.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
3893
3894
3895 /* Worker function for xload_8 insn. */
3896
const char*
avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  /* Without LPMX the load has to go through R0.  */
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  /* Read the byte from flash ...  */
  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);

  /* ... but overwrite it with a RAM read via Z when bit 7 of %1 is set
     (SBRC skips the LD when the bit is clear).  NOTE(review): %1
     presumably selects RAM vs. flash addressing — confirm against the
     xload_8 insn pattern.  */
  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2", xop, plen, 2);

  /* Copy out of R0 if the destination differs.  */
  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
3917
3918
/* Output an 8-bit move DEST := SRC; dispatch to the specialized
   emitters depending on the operand kinds.  If PLEN != NULL, only
   accumulate the instruction count there.  */

const char*
output_movqi (rtx_insn *insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  /* Accesses to flash are handled by the LPM machinery.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);
    }

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          /* Moves to / from the stack pointer use OUT resp. IN.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return avr_asm_len ("out %0,%1", operands, plen, -1);
          else if (test_hard_reg_class (STACK_REG, src))
            return avr_asm_len ("in %0,%1", operands, plen, -1);

          return avr_asm_len ("mov %0,%1", operands, plen, -1);
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, plen, false);
          return "";
        }
      else if (MEM_P (src))
        return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Store constant zero via the fixed zero register.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
    }

  return "";
}
3964
3965
/* Output a 16-bit move DEST := SRC; dispatch to the specialized
   emitters depending on the operand kinds.  If PLEN != NULL, only
   accumulate the instruction count there.  */

const char *
output_movhi (rtx_insn *insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  /* Accesses to flash are handled by the LPM machinery.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              /* XMEGA writes SP low byte first.  */
              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are used.  */

              /* Otherwise disable interrupts while the two halves of SP
                 are written, preserving SREG in R0.  */
              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)
                : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                               "cli" CR_TAB
                               "out __SP_H__,%B1" CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              /* Devices without SPH get a zero high byte.  */
              return !AVR_HAVE_SPH
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      /* Deliberately shadows the parameter XOP.  Store constant zero
         via the fixed zero register.  */
      rtx xop[2];

      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
4043
4044
4045 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
4046
static const char*
avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  /* X is the (PLUS base disp) address of the source MEM.  */
  rtx x = XEXP (src, 0);

  /* Add the displacement to the base, then load.  */
  avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
               "ld %0,%b1" , op, plen, -3);

  /* Undo the addition unless the base register died or was
     overwritten by the load.  */
  if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
      && !reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);

  return "";
}
4063
/* Output an 8-bit load from memory SRC = op[1] into register
   DEST = op[0].  If PLEN != NULL, only accumulate the length.  */

static const char*
out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* LDS takes 1 word on AVR_TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          /* Displacement exceeds LDD's reach; only the Y base register
             may be adjusted temporarily here.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          /* X has no LDD: add the offset, load, then undo the addition
             unless X died or was overwritten by the load.  */
          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
              && !reg_unused_after (insn, XEXP (x, 0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
4128
4129
4130 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4131
static const char*
avr_out_movhi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  /* Destination overlaps the address register: buffer the low byte
     in R0 so the address survives until the high byte is read.  */
  if (reg_dest == reg_base) /* R = (R) */
    return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "mov %A0,__tmp_reg__", op, plen, -3);

  avr_asm_len ("ld %A0,%1+" CR_TAB
               "ld %B0,%1", op, plen, -2);

  /* Undo the post-increment if the base is still needed afterwards.  */
  if (!reg_unused_after (insn, base))
    avr_asm_len (TINY_SBIW (%E1, %F1, 1), op, plen, 2);

  return "";
}
4155
4156
4157 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4158
static const char*
avr_out_movhi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  /* BASE is the (PLUS reg disp) address of the source MEM.  */
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the base: buffer the low byte in R0.  */
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                          "ld __tmp_reg__,%b1+" CR_TAB
                          "ld %B0,%b1" CR_TAB
                          "mov %A0,__tmp_reg__", op, plen, -5);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                   "ld %A0,%b1+" CR_TAB
                   "ld %B0,%b1", op, plen, -4);

      /* Undo displacement + post-increment if the base is still live.  */
      if (!reg_unused_after (insn, XEXP (base, 0)))
        avr_asm_len (TINY_SBIW (%I1, %J1, %o1+1), op, plen, 2);

      return "";
    }
}
4188
4189
4190 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
4191
static const char*
avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  int mem_volatile_p = 0;
  rtx dest = op[0];
  rtx src = op[1];
  /* BASE is the (PRE_DEC reg) address of the source MEM.  */
  rtx base = XEXP (src, 0);

  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
    fatal_insn ("incorrect insn:", insn);

  /* Non-volatile: read high byte first, decrementing as we go.  */
  if (!mem_volatile_p)
    return avr_asm_len ("ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -2);

  /* Volatile: step the pointer back by 2, read low then high byte,
     and leave the pointer at the pre-decremented position.  */
  return avr_asm_len (TINY_SBIW (%I1, %J1, 2) CR_TAB
                      "ld %A0,%p1+" CR_TAB
                      "ld %B0,%p1" CR_TAB
                      TINY_SBIW (%I1, %J1, 1), op, plen, -6);
}
4216
4217
/* Output a 16-bit load from memory SRC = op[1] into register
   DEST = op[0].  If PLEN != NULL, only accumulate the length.  */

static const char*
out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_no_disp_tiny (insn, op, plen);

      /* Destination overlaps the address: buffer low byte in R0.  */
      if (reg_dest == reg_base) /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1" CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD: post-increment, then undo if X is live after.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      /* Deliberately shadows the outer REG_BASE with the PLUS base.  */
      int reg_base = true_regnum (XEXP (base, 0));

      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_disp_tiny (insn, op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD's reach; only Y may be adjusted
             temporarily here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y" CR_TAB
                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        {
          if (reg_base == reg_dest)
            return avr_asm_len ("adiw r26,%o1" CR_TAB
                                "ld __tmp_reg__,X+" CR_TAB
                                "ld %B0,X" CR_TAB
                                "mov %A0,__tmp_reg__", op, plen, -4);

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X", op, plen, -3);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+1", op, plen, 1);

          return "";
        }

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);

      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile: step back by 2 and read in low-to-high order.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ld %A0,%p1" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* Each LDS takes 1 word on AVR_TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4353
/* Same as out_movsi_r_mr, but for AVR_TINY which has no ADIW / SBIW:
   32-bit load from (REG) into DEST = op[0].  *L receives the length.  */

static const char*
avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_dest == reg_base)
    {
      /* Destination is the address register pair itself: read top-down,
         buffering byte B in R0 so the address survives.  */
      /* "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
                      "ld %D0,%1" CR_TAB
                      "ld %C0,-%1" CR_TAB
                      "ld __tmp_reg__,-%1" CR_TAB
                      TINY_SBIW (%E1, %F1, 1) CR_TAB
                      "ld %A0,%1" CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* The upper destination half overlaps the base: buffer byte C.  */
      return *l = 5, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld __tmp_reg__,%1+" CR_TAB
                      "ld %D0,%1" CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, base))
    {
      return *l = 4, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld %C0,%1+" CR_TAB
                      "ld %D0,%1");
    }
  else
    {
      /* Base still live afterwards: undo the three post-increments.  */
      return *l = 6, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld %C0,%1+" CR_TAB
                      "ld %D0,%1" CR_TAB
                      TINY_SBIW (%E1, %F1, 3));
    }
}
4398
4399
/* Same as out_movsi_r_mr, but for AVR_TINY which has no ADIW / SBIW:
   32-bit load from (REG + disp) into DEST = op[0].  *L gets the length.  */

static const char*
avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  /* BASE is the (PLUS reg disp) address of the source MEM.  */
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_dest == reg_base)
    {
      /* Destination overlaps the base pair: read top-down, buffering
         byte B in R0 so the address survives.  */
      /* "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
                      "ld %D0,%b1" CR_TAB
                      "ld %C0,-%b1" CR_TAB
                      "ld __tmp_reg__,-%b1" CR_TAB
                      TINY_SBIW (%I1, %J1, 1) CR_TAB
                      "ld %A0,%b1" CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* The upper destination half overlaps the base: buffer byte C.  */
      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld __tmp_reg__,%b1+" CR_TAB
                      "ld %D0,%b1" CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, XEXP (base, 0)))
    {
      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld %C0,%b1+" CR_TAB
                      "ld %D0,%b1");
    }
  else
    {
      /* Base still live afterwards: undo displacement + increments.  */
      return *l = 8, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld %C0,%b1+" CR_TAB
                      "ld %D0,%b1" CR_TAB
                      TINY_SBIW (%I1, %J1, %o1+3));
    }
}
4447
/* Output a 32-bit load from memory SRC = op[1] into register
   DEST = op[0].  If L != NULL, only store the length there.  */

static const char*
out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  /* Allow callers that only want the insn text.  */
  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* Destination is X..r29: read top-down, buffering r27's
               value in R0 so the address survives.  */
            /* "ld r26,-X" is undefined */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld r29,X" CR_TAB
                          "ld r28,-X" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1" CR_TAB
                          "ld r26,X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Upper destination half overlaps X: buffer byte C.  */
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          if (reg_dest == reg_base)
            /* Destination overlaps the base: read top-down via LDD,
               buffering byte B in R0.  */
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            /* Base overlaps the upper destination half: buffer byte C.  */
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD's reach; only Y may be adjusted
             temporarily here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld r29,X" CR_TAB
                      "ld r28,-X" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1" CR_TAB
                      "ld r26,X" CR_TAB
                      "mov r27,__tmp_reg__");
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB
                    "ld r24,X+" CR_TAB
                    "ld r25,X+" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+" CR_TAB
                  "ld %B0,X+" CR_TAB
                  "ld %C0,X+" CR_TAB
                  "ld %D0,X" CR_TAB
                  "sbiw r26,%o1+3");
        }
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          *l = 4;
          return ("in %A0,%i1" CR_TAB
                  "in %B0,%i1+1" CR_TAB
                  "in %C0,%i1+2" CR_TAB
                  "in %D0,%i1+3");
        }
      else
        {
          /* Each LDS takes 1 word on AVR_TINY, 2 words otherwise.  */
          *l = AVR_TINY ? 4 : 8;
          return ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");
        }
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4628
/* AVR_TINY variant of a 32-bit register -> memory store through a plain
   base register (no displacement).  INSN is the store insn,
   OP[] = { dest (mem), src (reg) }, *L receives the length in words.
   Returns the assembler template.  */

static const char*
avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source fully overlaps the pointer: store %A1 while the pointer
         is still intact and park %B1 in __tmp_reg__ meanwhile.  */
      /* "ld r26,-X" is undefined */
      if (reg_unused_after (insn, base))
        {
          return *l = 7, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1");
        }
      else
        {
          /* Pointer is still live afterwards: also undo the three
             post-increments.  */
          return *l = 9, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1" CR_TAB
                          TINY_SBIW (%E0, %F0, 3));
        }
    }
  else if (reg_base == reg_src + 2)
    {
      /* The two high bytes of the source are the pointer register:
         stash them in __zero_reg__ / __tmp_reg__ before the stores
         clobber them, and clear __zero_reg__ again afterwards.  */
      if (reg_unused_after (insn, base))
        return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__");
      else
        return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__" CR_TAB
                        TINY_SBIW (%E0, %F0, 3));
    }

  /* No overlap: plain post-increment stores, then restore the pointer.  */
  return *l = 6, ("st %0+,%A1" CR_TAB
                  "st %0+,%B1" CR_TAB
                  "st %0+,%C1" CR_TAB
                  "st %0,%D1" CR_TAB
                  TINY_SBIW (%E0, %F0, 3));
}
4688
/* AVR_TINY variant of a 32-bit register -> memory store through a base
   register plus displacement.  OP[] = { dest (mem), src (reg) },
   *L receives the length in words.  Returns the assembler template.  */

static const char*
avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src =true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source overlaps the pointer: save the two low source bytes
         before TINY_ADIW modifies the pointer.
         NOTE(review): the templates below reference operand 2
         (%A2 ... %D2) although only op[0]/op[1] are visible here --
         verify against the insn pattern's operand vector.  */
      *l = 11;
      return ("mov __tmp_reg__,%A2" CR_TAB
              "mov __zero_reg__,%B2" CR_TAB
              TINY_ADIW (%I0, %J0, %o0) CR_TAB
              "st %b0+,__tmp_reg__" CR_TAB
              "st %b0+,__zero_reg__" CR_TAB
              "st %b0+,%C2" CR_TAB
              "st %b0,%D2" CR_TAB
              "clr __zero_reg__" CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  else if (reg_src == reg_base - 2)
    {
      /* The high source bytes alias the pointer: stash them first.
         NOTE(review): "st %b0+,%A0" / "st %b0+,%B0" store bytes of the
         *destination address* operand; the sibling branches store from
         the source (%A1/%A2) -- looks suspicious, confirm intent.  */
      *l = 11;
      return ("mov __tmp_reg__,%C2" CR_TAB
              "mov __zero_reg__,%D2" CR_TAB
              TINY_ADIW (%I0, %J0, %o0) CR_TAB
              "st %b0+,%A0" CR_TAB
              "st %b0+,%B0" CR_TAB
              "st %b0+,__tmp_reg__" CR_TAB
              "st %b0,__zero_reg__" CR_TAB
              "clr __zero_reg__" CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  /* No overlap: advance the pointer by the displacement, store the four
     bytes, then restore the pointer.  */
  *l = 8;
  return (TINY_ADIW (%I0, %J0, %o0) CR_TAB
          "st %b0+,%A1" CR_TAB
          "st %b0+,%B1" CR_TAB
          "st %b0+,%C1" CR_TAB
          "st %b0,%D1" CR_TAB
          TINY_SBIW (%I0, %J0, %o0+3));
}
4732
/* Output a 32-bit store from register SRC to memory DEST.
   OP[] = { dest (mem), src (reg) }; INSN is the store insn; if L is
   non-NULL it receives the sequence length in words.  Dispatches on the
   address form of DEST (constant address, plain register, reg+disp,
   pre-decrement, post-increment) and returns the matching assembler
   template.  */

static const char*
out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          /* Address lies in I/O space: use OUT, one word per byte.  */
          return *l=4,("out %i0, %A1" CR_TAB
                       "out %i0+1,%B1" CR_TAB
                       "out %i0+2,%C1" CR_TAB
                       "out %i0+3,%D1");
        }
      else
        {
          /* Plain SRAM address: STS is 1 word on TINY, 2 words else.  */
          *l = AVR_TINY ? 4 : 8;
          return ("sts %m0,%A1" CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB
                  "sts %m0+3,%D1");
        }
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29" CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* The high word of the source is X itself: park it in
                 __zero_reg__/__tmp_reg__ before it gets incremented.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__" CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1" CR_TAB
                        "sbiw r26,3");
        }
      else
        /* Y or Z base: STD takes a displacement directly.  */
        return *l=4, ("st %0,%A1" CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_disp_tiny (op, l);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's 0..63 range: only Y can be
             temporarily adjusted here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source is X itself: copy it to scratch regs first.  */
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "st X+,r28" CR_TAB
                      "st X,r29" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              /* Source is r24..r27: its high word is X, save it.  */
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,r24" CR_TAB
                      "st X+,r25" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1" CR_TAB
                  "st X+,%B1" CR_TAB
                  "st X+,%C1" CR_TAB
                  "st X,%D1" CR_TAB
                  "sbiw r26,%o0+3");
        }
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: bytes must be written high to low.  */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4908
4909 const char *
4910 output_movsisf (rtx_insn *insn, rtx operands[], int *l)
4911 {
4912 int dummy;
4913 rtx dest = operands[0];
4914 rtx src = operands[1];
4915 int *real_l = l;
4916
4917 if (avr_mem_flash_p (src)
4918 || avr_mem_flash_p (dest))
4919 {
4920 return avr_out_lpm (insn, operands, real_l);
4921 }
4922
4923 if (!l)
4924 l = &dummy;
4925
4926 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
4927
4928 if (REG_P (dest))
4929 {
4930 if (REG_P (src)) /* mov r,r */
4931 {
4932 if (true_regnum (dest) > true_regnum (src))
4933 {
4934 if (AVR_HAVE_MOVW)
4935 {
4936 *l = 2;
4937 return ("movw %C0,%C1" CR_TAB
4938 "movw %A0,%A1");
4939 }
4940 *l = 4;
4941 return ("mov %D0,%D1" CR_TAB
4942 "mov %C0,%C1" CR_TAB
4943 "mov %B0,%B1" CR_TAB
4944 "mov %A0,%A1");
4945 }
4946 else
4947 {
4948 if (AVR_HAVE_MOVW)
4949 {
4950 *l = 2;
4951 return ("movw %A0,%A1" CR_TAB
4952 "movw %C0,%C1");
4953 }
4954 *l = 4;
4955 return ("mov %A0,%A1" CR_TAB
4956 "mov %B0,%B1" CR_TAB
4957 "mov %C0,%C1" CR_TAB
4958 "mov %D0,%D1");
4959 }
4960 }
4961 else if (CONSTANT_P (src))
4962 {
4963 return output_reload_insisf (operands, NULL_RTX, real_l);
4964 }
4965 else if (MEM_P (src))
4966 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
4967 }
4968 else if (MEM_P (dest))
4969 {
4970 const char *templ;
4971
4972 if (src == CONST0_RTX (GET_MODE (dest)))
4973 operands[1] = zero_reg_rtx;
4974
4975 templ = out_movsi_mr_r (insn, operands, real_l);
4976
4977 if (!real_l)
4978 output_asm_insn (templ, operands);
4979
4980 operands[1] = src;
4981 return "";
4982 }
4983 fatal_insn ("invalid insn:", insn);
4984 return "";
4985 }
4986
4987
4988 /* Handle loads of 24-bit types from memory to register. */
4989
static const char*
avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  /* AVR_TINY: load a 24-bit value into register DEST from memory
     addressed by a plain pointer register.  OP[] = { dest, src (mem) };
     PLEN works as for avr_asm_len.  */

  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the pointer: read the high byte first and
         keep the middle byte in __tmp_reg__ until the pointer is no
         longer needed.  */
      return avr_asm_len (TINY_ADIW (%E1, %F1, 2) CR_TAB
                          "ld %C0,%1" CR_TAB
                          "ld __tmp_reg__,-%1" CR_TAB
                          TINY_SBIW (%E1, %F1, 1) CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__", op, plen, -8);
    }
  else
    {
      avr_asm_len ("ld %A0,%1+" CR_TAB
                   "ld %B0,%1+" CR_TAB
                   "ld %C0,%1", op, plen, -3);

      /* Undo the two post-increments unless the pointer is dead or its
         bytes were just overwritten (reg_dest == reg_base - 2).  */
      if (reg_dest != reg_base - 2
          && !reg_unused_after (insn, base))
        {
          avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
        }
      return "";
    }
}
5022
5023 static const char*
5024 avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
5025 {
5026 rtx dest = op[0];
5027 rtx src = op[1];
5028 rtx base = XEXP (src, 0);
5029 int reg_dest = true_regnum (dest);
5030 int reg_base = true_regnum (base);
5031
5032 reg_base = true_regnum (XEXP (base, 0));
5033 if (reg_base == reg_dest)
5034 {
5035 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
5036 "ld %C0,%b1" CR_TAB
5037 "ld __tmp_reg__,-%b1" CR_TAB
5038 TINY_SBIW (%I1, %J1, 1) CR_TAB
5039 "ld %A0,%b1" CR_TAB
5040 "mov %B0,__tmp_reg__", op, plen, -8);
5041 }
5042 else
5043 {
5044 avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
5045 "ld %A0,%b1+" CR_TAB
5046 "ld %B0,%b1+" CR_TAB
5047 "ld %C0,%b1", op, plen, -5);
5048
5049 if (reg_dest != reg_base - 2
5050 && !reg_unused_after (insn, XEXP (base, 0)))
5051 avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
5052
5053 return "";
5054 }
5055 }
5056
/* Output a 24-bit (PSImode) load from memory SRC into register DEST.
   OP[] = { dest (reg), src (mem) }; PLEN works as for avr_asm_len.
   Dispatches on the address form (plain register, reg+disp,
   pre-decrement, post-increment, constant address).  */

static const char*
avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2" CR_TAB
                                "ld r28,X" CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1" CR_TAB
                                "ld r26,X" CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Restore X unless it is dead or was overwritten by the
                 low word of the destination (r24/r25).  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Overlap: load downwards, middle byte via __tmp_reg__.  */
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1" CR_TAB
                                "ld %A0,%1" CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld %A0,%1" CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_load_psi_reg_disp_tiny (insn, op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD's range: only Y can be adjusted.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %A0,Y" CR_TAB
                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB
                                  "ld r28,X" CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1" CR_TAB
                                  "ld r26,X" CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X+" CR_TAB
                       "ld %C0,X", op, plen, -4);

          /* Restore X unless it is dead or was overwritten (REG_W is
             r24, i.e. the register pair just below X).  */
          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1" CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: read bytes high to low.  */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is a 1-word insn on TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("lds %A0,%m1" CR_TAB
                          "lds %B0,%m1+1" CR_TAB
                          "lds %C0,%m1+2", op, plen , -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
5194
5195
5196 static const char*
5197 avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
5198 {
5199 rtx dest = op[0];
5200 rtx src = op[1];
5201 rtx base = XEXP (dest, 0);
5202 int reg_base = true_regnum (base);
5203 int reg_src = true_regnum (src);
5204
5205 if (reg_base == reg_src)
5206 {
5207 avr_asm_len ("st %0,%A1" CR_TAB
5208 "mov __tmp_reg__,%B1" CR_TAB
5209 TINY_ADIW (%E0, %F0, 1) CR_TAB /* st X+, r27 is undefined */
5210 "st %0+,__tmp_reg__" CR_TAB
5211 "st %0,%C1", op, plen, -6);
5212
5213 }
5214 else if (reg_src == reg_base - 2)
5215 {
5216 avr_asm_len ("st %0,%A1" CR_TAB
5217 "mov __tmp_reg__,%C1" CR_TAB
5218 TINY_ADIW (%E0, %F0, 1) CR_TAB
5219 "st %0+,%B1" CR_TAB
5220 "st %0,__tmp_reg__", op, plen, 6);
5221 }
5222 else
5223 {
5224 avr_asm_len ("st %0+,%A1" CR_TAB
5225 "st %0+,%B1" CR_TAB
5226 "st %0,%C1", op, plen, -3);
5227 }
5228
5229 if (!reg_unused_after (insn, base))
5230 avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
5231
5232 return "";
5233 }
5234
/* AVR_TINY helper for avr_out_store_psi: store 24-bit SRC to memory
   addressed by base register + displacement.  OP[] = { dest (mem), src };
   PLEN works as for avr_asm_len.  Returns "".  */

static const char*
avr_out_store_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    /* Source overlaps the pointer: save %A1/%B1 in the scratch
       registers before TINY_ADIW clobbers the pointer.  */
    avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                 "mov __zero_reg__,%B1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,__tmp_reg__" CR_TAB
                 "st %b0+,__zero_reg__" CR_TAB
                 "st %b0,%C1" CR_TAB
                 "clr __zero_reg__", op, plen, -8);
  else if (reg_src == reg_base - 2)
    /* Only %C1 aliases the pointer's low byte: stash just that one.  */
    avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,%A1" CR_TAB
                 "st %b0+,%B1" CR_TAB
                 "st %b0,__tmp_reg__", op, plen, -6);
  else
    avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                 "st %b0+,%A1" CR_TAB
                 "st %b0+,%B1" CR_TAB
                 "st %b0,%C1", op, plen, -5);

  /* Restore the pointer register if it is still live.  */
  if (!reg_unused_after (insn, XEXP (base, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0+2), op, plen, 2);

  return "";
}
5269
/* Handle store of 24-bit type from register or zero to memory.
   OP[] = { dest (mem), src (reg or zero_reg) }; PLEN works as for
   avr_asm_len.  Dispatches on the address form of DEST.  */

static const char*
avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* STS is a 1-word insn on TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("sts %m0,%A1" CR_TAB
                          "sts %m0+1,%B1" CR_TAB
                          "sts %m0+2,%C1", op, plen, -n_words);
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          /* The register allocator never produces an overlap here.  */
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1" CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_store_psi_reg_disp_tiny (insn, op, plen);

      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's range: only Y can be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1" CR_TAB
                                "std Y+62,%B1" CR_TAB
                                "std Y+63,%C1" CR_TAB
                                "sbiw r28,%o0-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1" CR_TAB
                              "std Y+1,%B1" CR_TAB
                              "std Y+2,%C1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X+,%B1" CR_TAB
                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: write bytes high to low.  */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
5372
5373
5374 /* Move around 24-bit stuff. */
5375
5376 const char *
5377 avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
5378 {
5379 rtx dest = op[0];
5380 rtx src = op[1];
5381
5382 if (avr_mem_flash_p (src)
5383 || avr_mem_flash_p (dest))
5384 {
5385 return avr_out_lpm (insn, op, plen);
5386 }
5387
5388 if (register_operand (dest, VOIDmode))
5389 {
5390 if (register_operand (src, VOIDmode)) /* mov r,r */
5391 {
5392 if (true_regnum (dest) > true_regnum (src))
5393 {
5394 avr_asm_len ("mov %C0,%C1", op, plen, -1);
5395
5396 if (AVR_HAVE_MOVW)
5397 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
5398 else
5399 return avr_asm_len ("mov %B0,%B1" CR_TAB
5400 "mov %A0,%A1", op, plen, 2);
5401 }
5402 else
5403 {
5404 if (AVR_HAVE_MOVW)
5405 avr_asm_len ("movw %A0,%A1", op, plen, -1);
5406 else
5407 avr_asm_len ("mov %A0,%A1" CR_TAB
5408 "mov %B0,%B1", op, plen, -2);
5409
5410 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
5411 }
5412 }
5413 else if (CONSTANT_P (src))
5414 {
5415 return avr_out_reload_inpsi (op, NULL_RTX, plen);
5416 }
5417 else if (MEM_P (src))
5418 return avr_out_load_psi (insn, op, plen); /* mov r,m */
5419 }
5420 else if (MEM_P (dest))
5421 {
5422 rtx xop[2];
5423
5424 xop[0] = dest;
5425 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
5426
5427 return avr_out_store_psi (insn, xop, plen);
5428 }
5429
5430 fatal_insn ("invalid insn:", insn);
5431 return "";
5432 }
5433
/* AVR_TINY: store QImode SRC to memory addressed by base register +
   displacement.  INSN is the store insn, OP[] = { dest (mem), src };
   PLEN works as for avr_asm_len.  Returns "".  */

static const char*
avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
    {
      /* SRC aliases the pointer: copy it to __tmp_reg__ before
         TINY_ADIW modifies the pointer.  */
      avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                   TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,__tmp_reg__", op, plen, -4);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,%1", op, plen, -3);
    }

  /* Undo the pointer adjustment if the pointer is still live.  */
  if (!reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
5458
/* Output a QImode store from register SRC to memory DEST.
   OP[] = { dest (mem), src (reg) }; PLEN works as for avr_asm_len.
   Dispatches on the address form of DEST.  */

static const char*
out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* STS is a 1-word insn on TINY, 2 words otherwise; OUT for I/O.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);

      /* For QImode this is equivalent to disp > MAX_LD_OFFSET:
         the displacement no longer fits into STD.  */
      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* X has no displacement mode: adjust X, store, restore.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              /* SRC aliases X: copy it to __tmp_reg__ before ADIW.  */
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x, 0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
5525
5526
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  OP[] = { dest (mem), src (reg) };
   PLEN works as for avr_asm_len.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP. */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0,%A1" CR_TAB
                       "out %i0+1,%B1", op, plen, -2)

        : avr_asm_len ("sts %m0,%A1" CR_TAB
                       "sts %m0+1,%B1", op, plen, -4);
    }

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X unless it is dead after the insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's range: only Y can be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      /* Volatile forces the low byte to be written first, so the
         pre-decrement has to be simulated with explicit adjusts.  */
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5639
/* AVR_TINY: store HImode SRC through a plain pointer register.
   Non-xmega rule: volatile accesses must write the high byte first.
   OP[] = { dest (mem), src (reg) }; PLEN works as for avr_asm_len.  */

static const char*
avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (reg_base == reg_src)
    {
      /* Source overlaps the pointer: store %A1 before the pointer is
         advanced; only permissible low-byte-first when non-volatile
         and the source dies here.  */
      return !mem_volatile_p && reg_unused_after (insn, src)
        ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
                       "st %0,%A1" CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__", op, plen, -5)
        : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__" CR_TAB
                       TINY_SBIW (%E0, %F0, 1) CR_TAB
                       "st %0, %A1", op, plen, -7);
    }

  /* No overlap: low-byte-first only when non-volatile and the pointer
     dies; otherwise write the high byte first via pre-decrement.  */
  return !mem_volatile_p && reg_unused_after (insn, base)
    ? avr_asm_len ("st %0+,%A1" CR_TAB
                   "st %0,%B1", op, plen, -2)
    : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
                   "st %0,%B1" CR_TAB
                   "st -%0,%A1", op, plen, -4);
}
5671
/* AVR_TINY: store HImode SRC to memory addressed by base register +
   displacement, high byte first (non-xmega volatile order).
   OP[] = { dest (mem), src (reg) }; PLEN works as for avr_asm_len.  */

static const char*
avr_out_movhi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    /* Source overlaps the pointer: save both bytes in the scratch
       registers before TINY_ADIW clobbers the pointer.  */
    avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                 "mov __zero_reg__,%B1" CR_TAB
                 TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                 "st %b0,__zero_reg__" CR_TAB
                 "st -%b0,__tmp_reg__" CR_TAB
                 "clr __zero_reg__", op, plen, -7);
  else
    avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                 "st %b0,%B1" CR_TAB
                 "st -%b0,%A1", op, plen, -4);

  /* Restore the pointer register if it is still live.  */
  if (!reg_unused_after (insn, XEXP (base, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
5698
/* AVR_TINY: volatile HImode store through a post-increment address.
   Writes the high byte first (adjust up, store, pre-decrement store),
   then advances the pointer past the halfword.  OP[] = { dest, src };
   PLEN works as for avr_asm_len.  */

static const char*
avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
{
  return avr_asm_len (TINY_ADIW (%I0, %J0, 1) CR_TAB
                      "st %p0,%B1" CR_TAB
                      "st -%p0,%A1" CR_TAB
                      TINY_ADIW (%I0, %J0, 2), op, plen, -6);
}
5707
/* Output a HImode store from register SRC to memory DEST.
   OP[] = { dest (mem), src (reg) }; PLEN works as for avr_asm_len.
   Dispatches on the address form of DEST; XMEGA cores are handled by
   avr_out_movhi_mr_r_xmega (low byte first).  */

static const char*
out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like. */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* STS is 1 word on TINY, 2 words otherwise; high byte first.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                       "out %i0,%A1", op, plen, -2)

        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                       "sts %m0,%A1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      /* Low byte first only when non-volatile and X dies here;
         otherwise write the high byte first via -X.  */
      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_disp_tiny (insn, op, plen);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's range: only Y can be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      /* Pre-decrement writes high-to-low already; matches the
         volatile requirement on non-xmega.  */
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      if (AVR_TINY)
        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);

      /* Volatile: simulate the post-increment with explicit adjusts so
         the high byte is written first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5840
5841 /* Return 1 if frame pointer for current function required. */
5842
5843 static bool
5844 avr_frame_pointer_required_p (void)
5845 {
5846 return (cfun->calls_alloca
5847 || cfun->calls_setjmp
5848 || cfun->has_nonlocal_label
5849 || crtl->args.info.nregs == 0
5850 || get_frame_size () > 0);
5851 }
5852
5853 /* Returns the condition of compare insn INSN, or UNKNOWN. */
5854
5855 static RTX_CODE
5856 compare_condition (rtx_insn *insn)
5857 {
5858 rtx_insn *next = next_real_insn (insn);
5859
5860 if (next && JUMP_P (next))
5861 {
5862 rtx pat = PATTERN (next);
5863 rtx src = SET_SRC (pat);
5864
5865 if (IF_THEN_ELSE == GET_CODE (src))
5866 return GET_CODE (XEXP (src, 0));
5867 }
5868
5869 return UNKNOWN;
5870 }
5871
5872
5873 /* Returns true iff INSN is a tst insn that only tests the sign. */
5874
5875 static bool
5876 compare_sign_p (rtx_insn *insn)
5877 {
5878 RTX_CODE cond = compare_condition (insn);
5879 return (cond == GE || cond == LT);
5880 }
5881
5882
5883 /* Returns true iff the next insn is a JUMP_INSN with a condition
5884 that needs to be swapped (GT, GTU, LE, LEU). */
5885
5886 static bool
5887 compare_diff_p (rtx_insn *insn)
5888 {
5889 RTX_CODE cond = compare_condition (insn);
5890 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5891 }
5892
5893 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
5894
5895 static bool
5896 compare_eq_p (rtx_insn *insn)
5897 {
5898 RTX_CODE cond = compare_condition (insn);
5899 return (cond == EQ || cond == NE);
5900 }
5901
5902
5903 /* Output compare instruction
5904
5905 compare (XOP[0], XOP[1])
5906
5907 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
5908 XOP[2] is an 8-bit scratch register as needed.
5909
5910 PLEN == NULL: Output instructions.
5911 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
5912 Don't output anything. */
5913
5914 const char*
5915 avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
5916 {
5917 /* Register to compare and value to compare against. */
5918 rtx xreg = xop[0];
5919 rtx xval = xop[1];
5920
5921 /* MODE of the comparison. */
5922 machine_mode mode;
5923
5924 /* Number of bytes to operate on. */
5925 int n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
5926
5927 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
5928 int clobber_val = -1;
5929
5930 /* Map fixed mode operands to integer operands with the same binary
5931 representation. They are easier to handle in the remainder. */
5932
5933 if (CONST_FIXED_P (xval))
5934 {
5935 xreg = avr_to_int_mode (xop[0]);
5936 xval = avr_to_int_mode (xop[1]);
5937 }
5938
5939 mode = GET_MODE (xreg);
5940
5941 gcc_assert (REG_P (xreg));
5942 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
5943 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
5944
5945 if (plen)
5946 *plen = 0;
5947
5948 /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
5949 against 0 by ORing the bytes. This is one instruction shorter.
5950 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
5951 and therefore don't use this. */
5952
5953 if (!test_hard_reg_class (LD_REGS, xreg)
5954 && compare_eq_p (insn)
5955 && reg_unused_after (insn, xreg))
5956 {
5957 if (xval == const1_rtx)
5958 {
5959 avr_asm_len ("dec %A0" CR_TAB
5960 "or %A0,%B0", xop, plen, 2);
5961
5962 if (n_bytes >= 3)
5963 avr_asm_len ("or %A0,%C0", xop, plen, 1);
5964
5965 if (n_bytes >= 4)
5966 avr_asm_len ("or %A0,%D0", xop, plen, 1);
5967
5968 return "";
5969 }
5970 else if (xval == constm1_rtx)
5971 {
5972 if (n_bytes >= 4)
5973 avr_asm_len ("and %A0,%D0", xop, plen, 1);
5974
5975 if (n_bytes >= 3)
5976 avr_asm_len ("and %A0,%C0", xop, plen, 1);
5977
5978 return avr_asm_len ("and %A0,%B0" CR_TAB
5979 "com %A0", xop, plen, 2);
5980 }
5981 }
5982
5983 /* Comparisons == -1 and != -1 of a d-register that's used after the
5984 comparison. (If it's unused after we use CPI / SBCI or ADIW sequence
5985 from below.) Instead of CPI Rlo,-1 / LDI Rx,-1 / CPC Rhi,Rx we can
5986 use CPI Rlo,-1 / CPC Rhi,Rlo which is 1 instruction shorter:
5987 If CPI is true then Rlo contains -1 and we can use Rlo instead of Rx
5988 when CPC'ing the high part. If CPI is false then CPC cannot render
5989 the result to true. This also works for the more generic case where
5990 the constant is of the form 0xabab. */
5991
5992 if (n_bytes == 2
5993 && xval != const0_rtx
5994 && test_hard_reg_class (LD_REGS, xreg)
5995 && compare_eq_p (insn)
5996 && !reg_unused_after (insn, xreg))
5997 {
5998 rtx xlo8 = simplify_gen_subreg (QImode, xval, mode, 0);
5999 rtx xhi8 = simplify_gen_subreg (QImode, xval, mode, 1);
6000
6001 if (INTVAL (xlo8) == INTVAL (xhi8))
6002 {
6003 xop[0] = xreg;
6004 xop[1] = xlo8;
6005
6006 return avr_asm_len ("cpi %A0,%1" CR_TAB
6007 "cpc %B0,%A0", xop, plen, 2);
6008 }
6009 }
6010
/* Generic path: byte-wise comparison.  CP/CPI for byte 0, CPC for the
   following bytes so the carry chains through the whole value.  */
6011 for (int i = 0; i < n_bytes; i++)
6012 {
6013 /* We compare byte-wise. */
6014 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
6015 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
6016
6017 /* 8-bit value to compare with this byte. */
6018 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6019
6020 /* Registers R16..R31 can operate with immediate. */
6021 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6022
6023 xop[0] = reg8;
6024 xop[1] = gen_int_mode (val8, QImode);
6025
6026 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
6027
6028 if (i == 0
6029 && test_hard_reg_class (ADDW_REGS, reg8))
6030 {
6031 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
6032
6033 if (IN_RANGE (val16, 0, 63)
6034 && (val8 == 0
6035 || reg_unused_after (insn, xreg)))
6036 {
6037 if (AVR_TINY)
6038 avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
6039 else
6040 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
6041
/* SBIW consumed two bytes, so skip the high byte of the word.  */
6042 i++;
6043 continue;
6044 }
6045
6046 if (n_bytes == 2
6047 && IN_RANGE (val16, -63, -1)
6048 && compare_eq_p (insn)
6049 && reg_unused_after (insn, xreg))
6050 {
6051 return AVR_TINY
6052 ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
6053 : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
6054 }
6055 }
6056
6057 /* Comparing against 0 is easy. */
6058
6059 if (val8 == 0)
6060 {
6061 avr_asm_len (i == 0
6062 ? "cp %0,__zero_reg__"
6063 : "cpc %0,__zero_reg__", xop, plen, 1);
6064 continue;
6065 }
6066
6067 /* Upper registers can compare and subtract-with-carry immediates.
6068 Notice that compare instructions do the same as respective subtract
6069 instruction; the only difference is that comparisons don't write
6070 the result back to the target register. */
6071
6072 if (ld_reg_p)
6073 {
6074 if (i == 0)
6075 {
6076 avr_asm_len ("cpi %0,%1", xop, plen, 1);
6077 continue;
6078 }
6079 else if (reg_unused_after (insn, xreg))
6080 {
/* SBCI clobbers the register, which is fine because XREG is dead.  */
6081 avr_asm_len ("sbci %0,%1", xop, plen, 1);
6082 continue;
6083 }
6084 }
6085
6086 /* Must load the value into the scratch register. */
6087
6088 gcc_assert (REG_P (xop[2]));
6089
/* Avoid reloading the scratch when consecutive bytes are equal.  */
6090 if (clobber_val != (int) val8)
6091 avr_asm_len ("ldi %2,%1", xop, plen, 1);
6092 clobber_val = (int) val8;
6093
6094 avr_asm_len (i == 0
6095 ? "cp %0,%2"
6096 : "cpc %0,%2", xop, plen, 1);
6097 }
6098
6099 return "";
6100 }
6101
6102
6103 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
6104
6105 const char*
6106 avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
6107 {
6108 rtx xop[3];
6109
6110 xop[0] = gen_rtx_REG (DImode, 18);
6111 xop[1] = op[0];
6112 xop[2] = op[1];
6113
6114 return avr_out_compare (insn, xop, plen);
6115 }
6116
6117 /* Output test instruction for HImode. */
6118
6119 const char*
6120 avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
6121 {
6122 if (compare_sign_p (insn))
6123 {
6124 avr_asm_len ("tst %B0", op, plen, -1);
6125 }
6126 else if (reg_unused_after (insn, op[0])
6127 && compare_eq_p (insn))
6128 {
6129 /* Faster than sbiw if we can clobber the operand. */
6130 avr_asm_len ("or %A0,%B0", op, plen, -1);
6131 }
6132 else
6133 {
6134 avr_out_compare (insn, op, plen);
6135 }
6136
6137 return "";
6138 }
6139
6140
6141 /* Output test instruction for PSImode. */
6142
6143 const char*
6144 avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
6145 {
6146 if (compare_sign_p (insn))
6147 {
6148 avr_asm_len ("tst %C0", op, plen, -1);
6149 }
6150 else if (reg_unused_after (insn, op[0])
6151 && compare_eq_p (insn))
6152 {
6153 /* Faster than sbiw if we can clobber the operand. */
6154 avr_asm_len ("or %A0,%B0" CR_TAB
6155 "or %A0,%C0", op, plen, -2);
6156 }
6157 else
6158 {
6159 avr_out_compare (insn, op, plen);
6160 }
6161
6162 return "";
6163 }
6164
6165
6166 /* Output test instruction for SImode. */
6167
6168 const char*
6169 avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
6170 {
6171 if (compare_sign_p (insn))
6172 {
6173 avr_asm_len ("tst %D0", op, plen, -1);
6174 }
6175 else if (reg_unused_after (insn, op[0])
6176 && compare_eq_p (insn))
6177 {
6178 /* Faster than sbiw if we can clobber the operand. */
6179 avr_asm_len ("or %A0,%B0" CR_TAB
6180 "or %A0,%C0" CR_TAB
6181 "or %A0,%D0", op, plen, -3);
6182 }
6183 else
6184 {
6185 avr_out_compare (insn, op, plen);
6186 }
6187
6188 return "";
6189 }
6190
6191
6192 /* Generate asm equivalent for various shifts. This only handles cases
6193 that are not already carefully hand-optimized in ?sh??i3_out.
6194
6195 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
6196 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
6197 OPERANDS[3] is a QImode scratch register from LD regs if
6198 available and SCRATCH, otherwise (no scratch available)
6199
6200 TEMPL is an assembler template that shifts by one position.
6201 T_LEN is the length of this template. */
6202
6203 void
6204 out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
6205 int *plen, int t_len)
6206 {
6207 bool second_label = true;
6208 bool saved_in_tmp = false;
6209 bool use_zero_reg = false;
6210 rtx op[5];
6211
6212 op[0] = operands[0];
6213 op[1] = operands[1];
6214 op[2] = operands[2];
6215 op[3] = operands[3];
6216
6217 if (plen)
6218 *plen = 0;
6219
6220 if (CONST_INT_P (operands[2]))
6221 {
6222 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
6223 && REG_P (operands[3]));
6224 int count = INTVAL (operands[2]);
6225 int max_len = 10; /* If larger than this, always use a loop. */
6226
6227 if (count <= 0)
6228 return;
6229
6230 if (count < 8 && !scratch)
6231 use_zero_reg = true;
6232
6233 if (optimize_size)
6234 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
6235
6236 if (t_len * count <= max_len)
6237 {
6238 /* Output shifts inline with no loop - faster. */
6239
6240 while (count-- > 0)
6241 avr_asm_len (templ, op, plen, t_len);
6242
6243 return;
6244 }
6245
6246 if (scratch)
6247 {
6248 avr_asm_len ("ldi %3,%2", op, plen, 1);
6249 }
6250 else if (use_zero_reg)
6251 {
6252 /* Hack to save one word: use __zero_reg__ as loop counter.
6253 Set one bit, then shift in a loop until it is 0 again. */
6254
6255 op[3] = zero_reg_rtx;
6256
6257 avr_asm_len ("set" CR_TAB
6258 "bld %3,%2-1", op, plen, 2);
6259 }
6260 else
6261 {
6262 /* No scratch register available, use one from LD_REGS (saved in
6263 __tmp_reg__) that doesn't overlap with registers to shift. */
6264
6265 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
6266 op[4] = tmp_reg_rtx;
6267 saved_in_tmp = true;
6268
6269 avr_asm_len ("mov %4,%3" CR_TAB
6270 "ldi %3,%2", op, plen, 2);
6271 }
6272
/* Count is a known-positive constant: no need to test the counter
   before the first iteration, so skip the entry jump to label 2.  */
6273 second_label = false;
6274 }
6275 else if (MEM_P (op[2]))
6276 {
/* Shift count comes from memory: load it into __tmp_reg__ first.  */
6277 rtx op_mov[2];
6278
6279 op_mov[0] = op[3] = tmp_reg_rtx;
6280 op_mov[1] = op[2];
6281
6282 out_movqi_r_mr (insn, op_mov, plen);
6283 }
6284 else if (register_operand (op[2], QImode))
6285 {
6286 op[3] = op[2];
6287
/* Copy the count if the loop would destroy a live register or the
   count overlaps the value being shifted.  */
6288 if (!reg_unused_after (insn, op[2])
6289 || reg_overlap_mentioned_p (op[0], op[2]))
6290 {
6291 op[3] = tmp_reg_rtx;
6292 avr_asm_len ("mov %3,%2", op, plen, 1);
6293 }
6294 }
6295 else
6296 fatal_insn ("bad shift insn:", insn);
6297
/* Emit the loop.  With SECOND_LABEL the counter is tested before the
   first shift (count may be zero); otherwise the loop body runs at
   least once.  */
6298 if (second_label)
6299 avr_asm_len ("rjmp 2f", op, plen, 1);
6300
6301 avr_asm_len ("1:", op, plen, 0);
6302 avr_asm_len (templ, op, plen, t_len);
6303
6304 if (second_label)
6305 avr_asm_len ("2:", op, plen, 0);
6306
6307 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
6308 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
6309
6310 if (saved_in_tmp)
6311 avr_asm_len ("mov %3,%4", op, plen, 1);
6312 }
6313
6314
6315 /* 8bit shift left ((char)x << i) */
6316
6317 const char *
6318 ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
6319 {
6320 if (CONST_INT_P (operands[2]))
6321 {
6322 int k;
6323
/* Callers may pass LEN == NULL when they only want output; use a
   dummy so the cases below can set *len unconditionally.  */
6324 if (!len)
6325 len = &k;
6326
6327 switch (INTVAL (operands[2]))
6328 {
6329 default:
6330 if (INTVAL (operands[2]) < 8)
6331 break;
6332
/* Shifting by 8 or more clears the byte entirely.  */
6333 *len = 1;
6334 return "clr %0";
6335
6336 case 1:
6337 *len = 1;
6338 return "lsl %0";
6339
6340 case 2:
6341 *len = 2;
6342 return ("lsl %0" CR_TAB
6343 "lsl %0");
6344
6345 case 3:
6346 *len = 3;
6347 return ("lsl %0" CR_TAB
6348 "lsl %0" CR_TAB
6349 "lsl %0");
6350
6351 case 4:
/* d-registers can SWAP nibbles and mask, which is shorter.  */
6352 if (test_hard_reg_class (LD_REGS, operands[0]))
6353 {
6354 *len = 2;
6355 return ("swap %0" CR_TAB
6356 "andi %0,0xf0");
6357 }
6358 *len = 4;
6359 return ("lsl %0" CR_TAB
6360 "lsl %0" CR_TAB
6361 "lsl %0" CR_TAB
6362 "lsl %0");
6363
6364 case 5:
6365 if (test_hard_reg_class (LD_REGS, operands[0]))
6366 {
6367 *len = 3;
6368 return ("swap %0" CR_TAB
6369 "lsl %0" CR_TAB
6370 "andi %0,0xe0");
6371 }
6372 *len = 5;
6373 return ("lsl %0" CR_TAB
6374 "lsl %0" CR_TAB
6375 "lsl %0" CR_TAB
6376 "lsl %0" CR_TAB
6377 "lsl %0");
6378
6379 case 6:
6380 if (test_hard_reg_class (LD_REGS, operands[0]))
6381 {
6382 *len = 4;
6383 return ("swap %0" CR_TAB
6384 "lsl %0" CR_TAB
6385 "lsl %0" CR_TAB
6386 "andi %0,0xc0");
6387 }
6388 *len = 6;
6389 return ("lsl %0" CR_TAB
6390 "lsl %0" CR_TAB
6391 "lsl %0" CR_TAB
6392 "lsl %0" CR_TAB
6393 "lsl %0" CR_TAB
6394 "lsl %0");
6395
6396 case 7:
/* x << 7: rotate bit 0 into carry, clear, rotate carry into bit 7.  */
6397 *len = 3;
6398 return ("ror %0" CR_TAB
6399 "clr %0" CR_TAB
6400 "ror %0");
6401 }
6402 }
6403 else if (CONSTANT_P (operands[2]))
6404 fatal_insn ("internal compiler error. Incorrect shift:", insn);
6405
/* Variable or large count: fall back to the generic shift loop.  */
6406 out_shift_with_cnt ("lsl %0",
6407 insn, operands, len, 1);
6408 return "";
6409 }
6410
6411
6412 /* 16bit shift left ((short)x << i) */
6413
6414 const char *
6415 ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
6416 {
6417 if (CONST_INT_P (operands[2]))
6418 {
/* SCRATCH: insn is a PARALLEL and provides a QI scratch in %3.  */
6419 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6420 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
6421 int k;
6422 int *t = len;
6423
6424 if (!len)
6425 len = &k;
6426
6427 switch (INTVAL (operands[2]))
6428 {
6429 default:
6430 if (INTVAL (operands[2]) < 16)
6431 break;
6432
/* Shift count >= 16 clears the whole word.  */
6433 *len = 2;
6434 return ("clr %B0" CR_TAB
6435 "clr %A0");
6436
6437 case 4:
6438 if (optimize_size && scratch)
6439 break; /* 5 */
6440 if (ldi_ok)
6441 {
6442 *len = 6;
6443 return ("swap %A0" CR_TAB
6444 "swap %B0" CR_TAB
6445 "andi %B0,0xf0" CR_TAB
6446 "eor %B0,%A0" CR_TAB
6447 "andi %A0,0xf0" CR_TAB
6448 "eor %B0,%A0");
6449 }
6450 if (scratch)
6451 {
6452 *len = 7;
6453 return ("swap %A0" CR_TAB
6454 "swap %B0" CR_TAB
6455 "ldi %3,0xf0" CR_TAB
6456 "and %B0,%3" CR_TAB
6457 "eor %B0,%A0" CR_TAB
6458 "and %A0,%3" CR_TAB
6459 "eor %B0,%A0");
6460 }
6461 break; /* optimize_size ? 6 : 8 */
6462
6463 case 5:
6464 if (optimize_size)
6465 break; /* scratch ? 5 : 6 */
6466 if (ldi_ok)
6467 {
6468 *len = 8;
6469 return ("lsl %A0" CR_TAB
6470 "rol %B0" CR_TAB
6471 "swap %A0" CR_TAB
6472 "swap %B0" CR_TAB
6473 "andi %B0,0xf0" CR_TAB
6474 "eor %B0,%A0" CR_TAB
6475 "andi %A0,0xf0" CR_TAB
6476 "eor %B0,%A0");
6477 }
6478 if (scratch)
6479 {
6480 *len = 9;
6481 return ("lsl %A0" CR_TAB
6482 "rol %B0" CR_TAB
6483 "swap %A0" CR_TAB
6484 "swap %B0" CR_TAB
6485 "ldi %3,0xf0" CR_TAB
6486 "and %B0,%3" CR_TAB
6487 "eor %B0,%A0" CR_TAB
6488 "and %A0,%3" CR_TAB
6489 "eor %B0,%A0");
6490 }
6491 break; /* 10 */
6492
6493 case 6:
6494 if (optimize_size)
6495 break; /* scratch ? 5 : 6 */
/* x << 6 computed as (x >> 2) with a byte move, via __tmp_reg__.  */
6496 *len = 9;
6497 return ("clr __tmp_reg__" CR_TAB
6498 "lsr %B0" CR_TAB
6499 "ror %A0" CR_TAB
6500 "ror __tmp_reg__" CR_TAB
6501 "lsr %B0" CR_TAB
6502 "ror %A0" CR_TAB
6503 "ror __tmp_reg__" CR_TAB
6504 "mov %B0,%A0" CR_TAB
6505 "mov %A0,__tmp_reg__");
6506
6507 case 7:
6508 *len = 5;
6509 return ("lsr %B0" CR_TAB
6510 "mov %B0,%A0" CR_TAB
6511 "clr %A0" CR_TAB
6512 "ror %B0" CR_TAB
6513 "ror %A0");
6514
6515 case 8:
/* Whole-byte shift: move low byte of the source up, clear low byte.  */
6516 return *len = 2, ("mov %B0,%A1" CR_TAB
6517 "clr %A0");
6518
6519 case 9:
6520 *len = 3;
6521 return ("mov %B0,%A0" CR_TAB
6522 "clr %A0" CR_TAB
6523 "lsl %B0");
6524
6525 case 10:
6526 *len = 4;
6527 return ("mov %B0,%A0" CR_TAB
6528 "clr %A0" CR_TAB
6529 "lsl %B0" CR_TAB
6530 "lsl %B0");
6531
6532 case 11:
6533 *len = 5;
6534 return ("mov %B0,%A0" CR_TAB
6535 "clr %A0" CR_TAB
6536 "lsl %B0" CR_TAB
6537 "lsl %B0" CR_TAB
6538 "lsl %B0");
6539
6540 case 12:
6541 if (ldi_ok)
6542 {
6543 *len = 4;
6544 return ("mov %B0,%A0" CR_TAB
6545 "clr %A0" CR_TAB
6546 "swap %B0" CR_TAB
6547 "andi %B0,0xf0");
6548 }
6549 if (scratch)
6550 {
6551 *len = 5;
6552 return ("mov %B0,%A0" CR_TAB
6553 "clr %A0" CR_TAB
6554 "swap %B0" CR_TAB
6555 "ldi %3,0xf0" CR_TAB
6556 "and %B0,%3");
6557 }
6558 *len = 6;
6559 return ("mov %B0,%A0" CR_TAB
6560 "clr %A0" CR_TAB
6561 "lsl %B0" CR_TAB
6562 "lsl %B0" CR_TAB
6563 "lsl %B0" CR_TAB
6564 "lsl %B0");
6565
6566 case 13:
6567 if (ldi_ok)
6568 {
6569 *len = 5;
6570 return ("mov %B0,%A0" CR_TAB
6571 "clr %A0" CR_TAB
6572 "swap %B0" CR_TAB
6573 "lsl %B0" CR_TAB
6574 "andi %B0,0xe0");
6575 }
6576 if (AVR_HAVE_MUL && scratch)
6577 {
/* Multiply low byte by 0x20 == 1 << 5; r0 holds the low product.  */
6578 *len = 5;
6579 return ("ldi %3,0x20" CR_TAB
6580 "mul %A0,%3" CR_TAB
6581 "mov %B0,r0" CR_TAB
6582 "clr %A0" CR_TAB
6583 "clr __zero_reg__");
6584 }
6585 if (optimize_size && scratch)
6586 break; /* 5 */
6587 if (scratch)
6588 {
6589 *len = 6;
6590 return ("mov %B0,%A0" CR_TAB
6591 "clr %A0" CR_TAB
6592 "swap %B0" CR_TAB
6593 "lsl %B0" CR_TAB
6594 "ldi %3,0xe0" CR_TAB
6595 "and %B0,%3");
6596 }
6597 if (AVR_HAVE_MUL)
6598 {
/* Build the 0x20 multiplier in r1 (zero reg) via SET/BLD.  */
6599 *len = 6;
6600 return ("set" CR_TAB
6601 "bld r1,5" CR_TAB
6602 "mul %A0,r1" CR_TAB
6603 "mov %B0,r0" CR_TAB
6604 "clr %A0" CR_TAB
6605 "clr __zero_reg__");
6606 }
6607 *len = 7;
6608 return ("mov %B0,%A0" CR_TAB
6609 "clr %A0" CR_TAB
6610 "lsl %B0" CR_TAB
6611 "lsl %B0" CR_TAB
6612 "lsl %B0" CR_TAB
6613 "lsl %B0" CR_TAB
6614 "lsl %B0");
6615
6616 case 14:
6617 if (AVR_HAVE_MUL && ldi_ok)
6618 {
6619 *len = 5;
6620 return ("ldi %B0,0x40" CR_TAB
6621 "mul %A0,%B0" CR_TAB
6622 "mov %B0,r0" CR_TAB
6623 "clr %A0" CR_TAB
6624 "clr __zero_reg__");
6625 }
6626 if (AVR_HAVE_MUL && scratch)
6627 {
6628 *len = 5;
6629 return ("ldi %3,0x40" CR_TAB
6630 "mul %A0,%3" CR_TAB
6631 "mov %B0,r0" CR_TAB
6632 "clr %A0" CR_TAB
6633 "clr __zero_reg__");
6634 }
6635 if (optimize_size && ldi_ok)
6636 {
6637 *len = 5;
6638 return ("mov %B0,%A0" CR_TAB
6639 "ldi %A0,6" "\n1:\t"
6640 "lsl %B0" CR_TAB
6641 "dec %A0" CR_TAB
6642 "brne 1b");
6643 }
6644 if (optimize_size && scratch)
6645 break; /* 5 */
6646 *len = 6;
6647 return ("clr %B0" CR_TAB
6648 "lsr %A0" CR_TAB
6649 "ror %B0" CR_TAB
6650 "lsr %A0" CR_TAB
6651 "ror %B0" CR_TAB
6652 "clr %A0");
6653
6654 case 15:
6655 *len = 4;
6656 return ("clr %B0" CR_TAB
6657 "lsr %A0" CR_TAB
6658 "ror %B0" CR_TAB
6659 "clr %A0");
6660 }
/* Restore caller's LEN (may be NULL) for the generic loop below.  */
6661 len = t;
6662 }
6663 out_shift_with_cnt ("lsl %A0" CR_TAB
6664 "rol %B0", insn, operands, len, 2);
6665 return "";
6666 }
6667
6668
6669 /* 24-bit shift left */
6670
6671 const char*
6672 avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
6673 {
6674 if (plen)
6675 *plen = 0;
6676
6677 if (CONST_INT_P (op[2]))
6678 {
6679 switch (INTVAL (op[2]))
6680 {
6681 default:
6682 if (INTVAL (op[2]) < 24)
6683 break;
6684
/* Shift count >= 24 clears all three bytes.  */
6685 return avr_asm_len ("clr %A0" CR_TAB
6686 "clr %B0" CR_TAB
6687 "clr %C0", op, plen, 3);
6688
6689 case 8:
6690 {
6691 int reg0 = REGNO (op[0]);
6692 int reg1 = REGNO (op[1]);
6693
/* Choose move order so the source bytes aren't overwritten before
   they are read when the register ranges overlap.  */
6694 if (reg0 >= reg1)
6695 return avr_asm_len ("mov %C0,%B1" CR_TAB
6696 "mov %B0,%A1" CR_TAB
6697 "clr %A0", op, plen, 3);
6698 else
6699 return avr_asm_len ("clr %A0" CR_TAB
6700 "mov %B0,%A1" CR_TAB
6701 "mov %C0,%B1", op, plen, 3);
6702 }
6703
6704 case 16:
6705 {
6706 int reg0 = REGNO (op[0]);
6707 int reg1 = REGNO (op[1]);
6708
6709 if (reg0 + 2 != reg1)
6710 avr_asm_len ("mov %C0,%A0", op, plen, 1);
6711
6712 return avr_asm_len ("clr %B0" CR_TAB
6713 "clr %A0", op, plen, 2);
6714 }
6715
6716 case 23:
/* x << 23: only bit 0 survives, rotated into the top bit of %C0.  */
6717 return avr_asm_len ("clr %C0" CR_TAB
6718 "lsr %A0" CR_TAB
6719 "ror %C0" CR_TAB
6720 "clr %B0" CR_TAB
6721 "clr %A0", op, plen, 5);
6722 }
6723 }
6724
6725 out_shift_with_cnt ("lsl %A0" CR_TAB
6726 "rol %B0" CR_TAB
6727 "rol %C0", insn, op, plen, 3);
6728 return "";
6729 }
6730
6731
6732 /* 32bit shift left ((long)x << i) */
6733
6734 const char *
6735 ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
6736 {
6737 if (CONST_INT_P (operands[2]))
6738 {
6739 int k;
6740 int *t = len;
6741
6742 if (!len)
6743 len = &k;
6744
6745 switch (INTVAL (operands[2]))
6746 {
6747 default:
6748 if (INTVAL (operands[2]) < 32)
6749 break;
6750
/* Shift count >= 32 clears the whole long.  */
6751 if (AVR_HAVE_MOVW)
6752 return *len = 3, ("clr %D0" CR_TAB
6753 "clr %C0" CR_TAB
6754 "movw %A0,%C0");
6755 *len = 4;
6756 return ("clr %D0" CR_TAB
6757 "clr %C0" CR_TAB
6758 "clr %B0" CR_TAB
6759 "clr %A0");
6760
6761 case 8:
6762 {
6763 int reg0 = true_regnum (operands[0]);
6764 int reg1 = true_regnum (operands[1]);
6765 *len = 4;
/* Move order depends on overlap direction of dest/source regs.  */
6766 if (reg0 >= reg1)
6767 return ("mov %D0,%C1" CR_TAB
6768 "mov %C0,%B1" CR_TAB
6769 "mov %B0,%A1" CR_TAB
6770 "clr %A0");
6771 else
6772 return ("clr %A0" CR_TAB
6773 "mov %B0,%A1" CR_TAB
6774 "mov %C0,%B1" CR_TAB
6775 "mov %D0,%C1");
6776 }
6777
6778 case 16:
6779 {
6780 int reg0 = true_regnum (operands[0]);
6781 int reg1 = true_regnum (operands[1]);
6782 if (reg0 + 2 == reg1)
6783 return *len = 2, ("clr %B0" CR_TAB
6784 "clr %A0");
6785 if (AVR_HAVE_MOVW)
6786 return *len = 3, ("movw %C0,%A1" CR_TAB
6787 "clr %B0" CR_TAB
6788 "clr %A0");
6789 else
6790 return *len = 4, ("mov %C0,%A1" CR_TAB
6791 "mov %D0,%B1" CR_TAB
6792 "clr %B0" CR_TAB
6793 "clr %A0");
6794 }
6795
6796 case 24:
6797 *len = 4;
6798 return ("mov %D0,%A1" CR_TAB
6799 "clr %C0" CR_TAB
6800 "clr %B0" CR_TAB
6801 "clr %A0");
6802
6803 case 31:
/* x << 31: only bit 0 survives, rotated into the sign bit.  */
6804 *len = 6;
6805 return ("clr %D0" CR_TAB
6806 "lsr %A0" CR_TAB
6807 "ror %D0" CR_TAB
6808 "clr %C0" CR_TAB
6809 "clr %B0" CR_TAB
6810 "clr %A0");
6811 }
/* Restore caller's LEN (may be NULL) for the generic loop below.  */
6812 len = t;
6813 }
6814 out_shift_with_cnt ("lsl %A0" CR_TAB
6815 "rol %B0" CR_TAB
6816 "rol %C0" CR_TAB
6817 "rol %D0", insn, operands, len, 4);
6818 return "";
6819 }
6820
6821 /* 8bit arithmetic shift right ((signed char)x >> i) */
6822
6823 const char *
6824 ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
6825 {
6826 if (CONST_INT_P (operands[2]))
6827 {
6828 int k;
6829
6830 if (!len)
6831 len = &k;
6832
6833 switch (INTVAL (operands[2]))
6834 {
6835 case 1:
6836 *len = 1;
6837 return "asr %0";
6838
6839 case 2:
6840 *len = 2;
6841 return ("asr %0" CR_TAB
6842 "asr %0");
6843
6844 case 3:
6845 *len = 3;
6846 return ("asr %0" CR_TAB
6847 "asr %0" CR_TAB
6848 "asr %0");
6849
6850 case 4:
6851 *len = 4;
6852 return ("asr %0" CR_TAB
6853 "asr %0" CR_TAB
6854 "asr %0" CR_TAB
6855 "asr %0");
6856
6857 case 5:
6858 *len = 5;
6859 return ("asr %0" CR_TAB
6860 "asr %0" CR_TAB
6861 "asr %0" CR_TAB
6862 "asr %0" CR_TAB
6863 "asr %0");
6864
6865 case 6:
/* x >> 6: sign-extend via SBC, then patch bit 0 from old bit 6.  */
6866 *len = 4;
6867 return ("bst %0,6" CR_TAB
6868 "lsl %0" CR_TAB
6869 "sbc %0,%0" CR_TAB
6870 "bld %0,0");
6871
6872 default:
6873 if (INTVAL (operands[2]) < 8)
6874 break;
6875
/* Arithmetic shift by >= 7 leaves only the replicated sign bit,
   so counts >= 8 share the case-7 sequence below.  */
6876 /* fall through */
6877
6878 case 7:
6879 *len = 2;
6880 return ("lsl %0" CR_TAB
6881 "sbc %0,%0");
6882 }
6883 }
6884 else if (CONSTANT_P (operands[2]))
6885 fatal_insn ("internal compiler error. Incorrect shift:", insn);
6886
6887 out_shift_with_cnt ("asr %0",
6888 insn, operands, len, 1);
6889 return "";
6890 }
6891
6892
6893 /* 16bit arithmetic shift right ((signed short)x >> i) */
6894
6895 const char *
6896 ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
6897 {
6898 if (CONST_INT_P (operands[2]))
6899 {
6900 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6901 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
6902 int k;
6903 int *t = len;
6904
6905 if (!len)
6906 len = &k;
6907
6908 switch (INTVAL (operands[2]))
6909 {
6910 case 4:
6911 case 5:
6912 /* XXX try to optimize this too? */
6913 break;
6914
6915 case 6:
6916 if (optimize_size)
6917 break; /* scratch ? 5 : 6 */
6918 *len = 8;
6919 return ("mov __tmp_reg__,%A0" CR_TAB
6920 "mov %A0,%B0" CR_TAB
6921 "lsl __tmp_reg__" CR_TAB
6922 "rol %A0" CR_TAB
6923 "sbc %B0,%B0" CR_TAB
6924 "lsl __tmp_reg__" CR_TAB
6925 "rol %A0" CR_TAB
6926 "rol %B0");
6927
6928 case 7:
6929 *len = 4;
6930 return ("lsl %A0" CR_TAB
6931 "mov %A0,%B0" CR_TAB
6932 "rol %A0" CR_TAB
6933 "sbc %B0,%B0");
6934
6935 case 8:
6936 {
6937 int reg0 = true_regnum (operands[0]);
6938 int reg1 = true_regnum (operands[1]);
6939
/* In-place: SBC trick replicates the sign.  Distinct regs: build
   the sign extension with SBRC/DEC on the destination.  */
6940 if (reg0 == reg1)
6941 return *len = 3, ("mov %A0,%B0" CR_TAB
6942 "lsl %B0" CR_TAB
6943 "sbc %B0,%B0");
6944 else
6945 return *len = 4, ("mov %A0,%B1" CR_TAB
6946 "clr %B0" CR_TAB
6947 "sbrc %A0,7" CR_TAB
6948 "dec %B0");
6949 }
6950
6951 case 9:
6952 *len = 4;
6953 return ("mov %A0,%B0" CR_TAB
6954 "lsl %B0" CR_TAB
6955 "sbc %B0,%B0" CR_TAB
6956 "asr %A0");
6957
6958 case 10:
6959 *len = 5;
6960 return ("mov %A0,%B0" CR_TAB
6961 "lsl %B0" CR_TAB
6962 "sbc %B0,%B0" CR_TAB
6963 "asr %A0" CR_TAB
6964 "asr %A0");
6965
6966 case 11:
6967 if (AVR_HAVE_MUL && ldi_ok)
6968 {
/* Signed multiply by 0x20 == 1 << 5; high product r1 is x >> 11.  */
6969 *len = 5;
6970 return ("ldi %A0,0x20" CR_TAB
6971 "muls %B0,%A0" CR_TAB
6972 "mov %A0,r1" CR_TAB
6973 "sbc %B0,%B0" CR_TAB
6974 "clr __zero_reg__");
6975 }
6976 if (optimize_size && scratch)
6977 break; /* 5 */
6978 *len = 6;
6979 return ("mov %A0,%B0" CR_TAB
6980 "lsl %B0" CR_TAB
6981 "sbc %B0,%B0" CR_TAB
6982 "asr %A0" CR_TAB
6983 "asr %A0" CR_TAB
6984 "asr %A0");
6985
6986 case 12:
6987 if (AVR_HAVE_MUL && ldi_ok)
6988 {
6989 *len = 5;
6990 return ("ldi %A0,0x10" CR_TAB
6991 "muls %B0,%A0" CR_TAB
6992 "mov %A0,r1" CR_TAB
6993 "sbc %B0,%B0" CR_TAB
6994 "clr __zero_reg__");
6995 }
6996 if (optimize_size && scratch)
6997 break; /* 5 */
6998 *len = 7;
6999 return ("mov %A0,%B0" CR_TAB
7000 "lsl %B0" CR_TAB
7001 "sbc %B0,%B0" CR_TAB
7002 "asr %A0" CR_TAB
7003 "asr %A0" CR_TAB
7004 "asr %A0" CR_TAB
7005 "asr %A0");
7006
7007 case 13:
7008 if (AVR_HAVE_MUL && ldi_ok)
7009 {
7010 *len = 5;
7011 return ("ldi %A0,0x08" CR_TAB
7012 "muls %B0,%A0" CR_TAB
7013 "mov %A0,r1" CR_TAB
7014 "sbc %B0,%B0" CR_TAB
7015 "clr __zero_reg__");
7016 }
7017 if (optimize_size)
7018 break; /* scratch ? 5 : 7 */
7019 *len = 8;
7020 return ("mov %A0,%B0" CR_TAB
7021 "lsl %B0" CR_TAB
7022 "sbc %B0,%B0" CR_TAB
7023 "asr %A0" CR_TAB
7024 "asr %A0" CR_TAB
7025 "asr %A0" CR_TAB
7026 "asr %A0" CR_TAB
7027 "asr %A0");
7028
7029 case 14:
7030 *len = 5;
7031 return ("lsl %B0" CR_TAB
7032 "sbc %A0,%A0" CR_TAB
7033 "lsl %B0" CR_TAB
7034 "mov %B0,%A0" CR_TAB
7035 "rol %A0");
7036
7037 default:
7038 if (INTVAL (operands[2]) < 16)
7039 break;
7040
/* Counts >= 15 all reduce to replicating the sign bit.  */
7041 /* fall through */
7042
7043 case 15:
7044 return *len = 3, ("lsl %B0" CR_TAB
7045 "sbc %A0,%A0" CR_TAB
7046 "mov %B0,%A0");
7047 }
/* Restore caller's LEN (may be NULL) for the generic loop below.  */
7048 len = t;
7049 }
7050 out_shift_with_cnt ("asr %B0" CR_TAB
7051 "ror %A0", insn, operands, len, 2);
7052 return "";
7053 }
7054
7055
7056 /* 24-bit arithmetic shift right */
7057
7058 const char*
7059 avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
7060 {
7061 int dest = REGNO (op[0]);
7062 int src = REGNO (op[1]);
7063
7064 if (CONST_INT_P (op[2]))
7065 {
7066 if (plen)
7067 *plen = 0;
7068
7069 switch (INTVAL (op[2]))
7070 {
7071 case 8:
/* Move order depends on overlap direction; SBRC/DEC builds the
   sign extension in the new top byte.  */
7072 if (dest <= src)
7073 return avr_asm_len ("mov %A0,%B1" CR_TAB
7074 "mov %B0,%C1" CR_TAB
7075 "clr %C0" CR_TAB
7076 "sbrc %B0,7" CR_TAB
7077 "dec %C0", op, plen, 5);
7078 else
7079 return avr_asm_len ("clr %C0" CR_TAB
7080 "sbrc %C1,7" CR_TAB
7081 "dec %C0" CR_TAB
7082 "mov %B0,%C1" CR_TAB
7083 "mov %A0,%B1", op, plen, 5);
7084
7085 case 16:
7086 if (dest != src + 2)
7087 avr_asm_len ("mov %A0,%C1", op, plen, 1);
7088
7089 return avr_asm_len ("clr %B0" CR_TAB
7090 "sbrc %A0,7" CR_TAB
7091 "com %B0" CR_TAB
7092 "mov %C0,%B0", op, plen, 4);
7093
7094 default:
7095 if (INTVAL (op[2]) < 24)
7096 break;
7097
/* Counts >= 23 all reduce to replicating the sign bit.  */
7098 /* fall through */
7099
7100 case 23:
7101 return avr_asm_len ("lsl %C0" CR_TAB
7102 "sbc %A0,%A0" CR_TAB
7103 "mov %B0,%A0" CR_TAB
7104 "mov %C0,%A0", op, plen, 4);
7105 } /* switch */
7106 }
7107
7108 out_shift_with_cnt ("asr %C0" CR_TAB
7109 "ror %B0" CR_TAB
7110 "ror %A0", insn, op, plen, 3);
7111 return "";
7112 }
7113
7114
7115 /* 32-bit arithmetic shift right ((signed long)x >> i) */
7116
7117 const char *
7118 ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
7119 {
7120 if (CONST_INT_P (operands[2]))
7121 {
7122 int k;
7123 int *t = len;
7124
7125 if (!len)
7126 len = &k;
7127
7128 switch (INTVAL (operands[2]))
7129 {
7130 case 8:
7131 {
7132 int reg0 = true_regnum (operands[0]);
7133 int reg1 = true_regnum (operands[1]);
7134 *len=6;
/* Move order depends on overlap direction; SBRC/DEC builds the
   sign extension in the new top byte.  */
7135 if (reg0 <= reg1)
7136 return ("mov %A0,%B1" CR_TAB
7137 "mov %B0,%C1" CR_TAB
7138 "mov %C0,%D1" CR_TAB
7139 "clr %D0" CR_TAB
7140 "sbrc %C0,7" CR_TAB
7141 "dec %D0");
7142 else
7143 return ("clr %D0" CR_TAB
7144 "sbrc %D1,7" CR_TAB
7145 "dec %D0" CR_TAB
7146 "mov %C0,%D1" CR_TAB
7147 "mov %B0,%C1" CR_TAB
7148 "mov %A0,%B1");
7149 }
7150
7151 case 16:
7152 {
7153 int reg0 = true_regnum (operands[0]);
7154 int reg1 = true_regnum (operands[1]);
7155
7156 if (reg0 == reg1 + 2)
7157 return *len = 4, ("clr %D0" CR_TAB
7158 "sbrc %B0,7" CR_TAB
7159 "com %D0" CR_TAB
7160 "mov %C0,%D0");
7161 if (AVR_HAVE_MOVW)
7162 return *len = 5, ("movw %A0,%C1" CR_TAB
7163 "clr %D0" CR_TAB
7164 "sbrc %B0,7" CR_TAB
7165 "com %D0" CR_TAB
7166 "mov %C0,%D0");
7167 else
7168 return *len = 6, ("mov %B0,%D1" CR_TAB
7169 "mov %A0,%C1" CR_TAB
7170 "clr %D0" CR_TAB
7171 "sbrc %B0,7" CR_TAB
7172 "com %D0" CR_TAB
7173 "mov %C0,%D0");
7174 }
7175
7176 case 24:
7177 return *len = 6, ("mov %A0,%D1" CR_TAB
7178 "clr %D0" CR_TAB
7179 "sbrc %A0,7" CR_TAB
7180 "com %D0" CR_TAB
7181 "mov %B0,%D0" CR_TAB
7182 "mov %C0,%D0");
7183
7184 default:
7185 if (INTVAL (operands[2]) < 32)
7186 break;
7187
/* Counts >= 31 all reduce to replicating the sign bit.  */
7188 /* fall through */
7189
7190 case 31:
7191 if (AVR_HAVE_MOVW)
7192 return *len = 4, ("lsl %D0" CR_TAB
7193 "sbc %A0,%A0" CR_TAB
7194 "mov %B0,%A0" CR_TAB
7195 "movw %C0,%A0");
7196 else
7197 return *len = 5, ("lsl %D0" CR_TAB
7198 "sbc %A0,%A0" CR_TAB
7199 "mov %B0,%A0" CR_TAB
7200 "mov %C0,%A0" CR_TAB
7201 "mov %D0,%A0");
7202 }
/* Restore caller's LEN (may be NULL) for the generic loop below.  */
7203 len = t;
7204 }
7205 out_shift_with_cnt ("asr %D0" CR_TAB
7206 "ror %C0" CR_TAB
7207 "ror %B0" CR_TAB
7208 "ror %A0", insn, operands, len, 4);
7209 return "";
7210 }
7211
7212 /* 8-bit logic shift right ((unsigned char)x >> i) */
7213
7214 const char *
7215 lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
7216 {
7217 if (CONST_INT_P (operands[2]))
7218 {
7219 int k;
7220
7221 if (!len)
7222 len = &k;
7223
7224 switch (INTVAL (operands[2]))
7225 {
7226 default:
7227 if (INTVAL (operands[2]) < 8)
7228 break;
7229
/* Logical shift by 8 or more clears the byte entirely.  */
7230 *len = 1;
7231 return "clr %0";
7232
7233 case 1:
7234 *len = 1;
7235 return "lsr %0";
7236
7237 case 2:
7238 *len = 2;
7239 return ("lsr %0" CR_TAB
7240 "lsr %0");
7241 case 3:
7242 *len = 3;
7243 return ("lsr %0" CR_TAB
7244 "lsr %0" CR_TAB
7245 "lsr %0");
7246
7247 case 4:
/* d-registers can SWAP nibbles and mask, which is shorter.  */
7248 if (test_hard_reg_class (LD_REGS, operands[0]))
7249 {
7250 *len=2;
7251 return ("swap %0" CR_TAB
7252 "andi %0,0x0f");
7253 }
7254 *len = 4;
7255 return ("lsr %0" CR_TAB
7256 "lsr %0" CR_TAB
7257 "lsr %0" CR_TAB
7258 "lsr %0");
7259
7260 case 5:
7261 if (test_hard_reg_class (LD_REGS, operands[0]))
7262 {
7263 *len = 3;
7264 return ("swap %0" CR_TAB
7265 "lsr %0" CR_TAB
7266 "andi %0,0x7");
7267 }
7268 *len = 5;
7269 return ("lsr %0" CR_TAB
7270 "lsr %0" CR_TAB
7271 "lsr %0" CR_TAB
7272 "lsr %0" CR_TAB
7273 "lsr %0");
7274
7275 case 6:
7276 if (test_hard_reg_class (LD_REGS, operands[0]))
7277 {
7278 *len = 4;
7279 return ("swap %0" CR_TAB
7280 "lsr %0" CR_TAB
7281 "lsr %0" CR_TAB
7282 "andi %0,0x3");
7283 }
7284 *len = 6;
7285 return ("lsr %0" CR_TAB
7286 "lsr %0" CR_TAB
7287 "lsr %0" CR_TAB
7288 "lsr %0" CR_TAB
7289 "lsr %0" CR_TAB
7290 "lsr %0");
7291
7292 case 7:
/* x >> 7: rotate bit 7 into carry, clear, rotate carry into bit 0.  */
7293 *len = 3;
7294 return ("rol %0" CR_TAB
7295 "clr %0" CR_TAB
7296 "rol %0");
7297 }
7298 }
7299 else if (CONSTANT_P (operands[2]))
7300 fatal_insn ("internal compiler error. Incorrect shift:", insn);
7301
7302 out_shift_with_cnt ("lsr %0",
7303 insn, operands, len, 1);
7304 return "";
7305 }
7306
7307 /* 16-bit logic shift right ((unsigned short)x >> i) */
7308
const char *
lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  /* Emit asm for a 16-bit logical right shift of operands[0] by
     operands[2].  If LEN != NULL, only store the sequence length
     (in words) in *LEN instead of printing insns.  */

  if (CONST_INT_P (operands[2]))
    {
      /* A PARALLEL insn pattern supplies an 8-bit scratch register %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* Destination is in R16..R31 and thus usable with ANDI / LDI.  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      /* Point LEN at a dummy so every case below may store through it.  */
      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= 16:  the 16-bit result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsr %B0" CR_TAB
                      "ror %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsr %B0" CR_TAB
                      "ror %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift left twice into __tmp_reg__, then take the two
             upper bytes:  (x >> 6) == bytes 1..2 of (x << 2).  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsl %A0" CR_TAB
                  "rol %B0" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "lsl %A0" CR_TAB
                  "rol %B0" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "mov %B0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "neg %B0");

        case 8:
          /* Whole-byte move; note the source operand %B1.  */
          return *len = 2, ("mov %A0,%B1" CR_TAB
                            "clr %B0");

        case 9:
          *len = 3;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0");

        case 10:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 11:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "andi %A0,0x0f");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "lsr %A0" CR_TAB
                      "andi %A0,0x07");
            }
          /* MUL by 8 == shift left by 3; the high byte r1 then holds
             the wanted  B >> 5  bits.  */
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x08" CR_TAB
                      "mul %B0,%3" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "lsr %A0" CR_TAB
                      "ldi %3,0x07" CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* SET + BLD builds the constant 8 in r1 without needing
                 an upper register.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,3" CR_TAB
                      "mul %B0,r1" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0" CR_TAB
                  "lsr %A0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x04" CR_TAB
                      "mul %B0,%3" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: shift the moved byte right 6 times.  */
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
                      "lsr %A0" CR_TAB
                      "dec %B0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");

        case 15:
          /* Only bit 15 survives: rotate it through carry into %A0.  */
          *len = 4;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");
        }
      /* No fixed sequence matched: restore the (possibly NULL) LEN for
         the generic loop emitter below.  */
      len = t;
    }
  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
7562
7563
7564 /* 24-bit logic shift right */
7565
7566 const char*
7567 avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
7568 {
7569 int dest = REGNO (op[0]);
7570 int src = REGNO (op[1]);
7571
7572 if (CONST_INT_P (op[2]))
7573 {
7574 if (plen)
7575 *plen = 0;
7576
7577 switch (INTVAL (op[2]))
7578 {
7579 case 8:
7580 if (dest <= src)
7581 return avr_asm_len ("mov %A0,%B1" CR_TAB
7582 "mov %B0,%C1" CR_TAB
7583 "clr %C0", op, plen, 3);
7584 else
7585 return avr_asm_len ("clr %C0" CR_TAB
7586 "mov %B0,%C1" CR_TAB
7587 "mov %A0,%B1", op, plen, 3);
7588
7589 case 16:
7590 if (dest != src + 2)
7591 avr_asm_len ("mov %A0,%C1", op, plen, 1);
7592
7593 return avr_asm_len ("clr %B0" CR_TAB
7594 "clr %C0", op, plen, 2);
7595
7596 default:
7597 if (INTVAL (op[2]) < 24)
7598 break;
7599
7600 /* fall through */
7601
7602 case 23:
7603 return avr_asm_len ("clr %A0" CR_TAB
7604 "sbrc %C0,7" CR_TAB
7605 "inc %A0" CR_TAB
7606 "clr %B0" CR_TAB
7607 "clr %C0", op, plen, 5);
7608 } /* switch */
7609 }
7610
7611 out_shift_with_cnt ("lsr %C0" CR_TAB
7612 "ror %B0" CR_TAB
7613 "ror %A0", insn, op, plen, 3);
7614 return "";
7615 }
7616
7617
7618 /* 32-bit logic shift right ((unsigned int)x >> i) */
7619
const char *
lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  /* Emit asm for a 32-bit logical right shift of operands[0] by
     operands[2].  If LEN != NULL, only store the sequence length
     (in words) in *LEN instead of printing insns.  */

  if (CONST_INT_P (operands[2]))
    {
      int k;
      int *t = len;

      /* Point LEN at a dummy so every case below may store through it.  */
      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= 32:  the 32-bit result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Move order depends on overlap direction of %0 and %1.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0");
            else
              return ("clr %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* %A0:%B0 already holds %C1:%D1 — just clear the top.  */
            if (reg0 == reg1 + 2)
              return *len = 2, ("clr %C0" CR_TAB
                                "clr %D0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %A0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "clr %D0");
            else
              return *len = 4, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "clr %D0");
          }

        case 24:
          return *len = 4, ("mov %A0,%D1" CR_TAB
                            "clr %B0" CR_TAB
                            "clr %C0" CR_TAB
                            "clr %D0");

        case 31:
          /* Only bit 31 can survive: test it and materialize 0 or 1.  */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "sbrc %D0,7" CR_TAB
                  "inc %A0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %D0");
        }
      /* No fixed sequence matched: restore the (possibly NULL) LEN for
         the generic loop emitter below.  */
      len = t;
    }
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
7706
7707
7708 /* Output addition of register XOP[0] and compile time constant XOP[2].
7709 CODE == PLUS: perform addition by using ADD instructions or
7710 CODE == MINUS: perform addition by using SUB instructions:
7711
7712 XOP[0] = XOP[0] + XOP[2]
7713
7714 Or perform addition/subtraction with register XOP[2] depending on CODE:
7715
7716 XOP[0] = XOP[0] +/- XOP[2]
7717
7718 If PLEN == NULL, print assembler instructions to perform the operation;
7719 otherwise, set *PLEN to the length of the instruction sequence (in words)
7720 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
7721 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7722
7723 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7724 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7725 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
7726 the subtrahend in the original insn, provided it is a compile time constant.
7727 In all other cases, SIGN is 0.
7728
7729 If OUT_LABEL is true, print the final 0: label which is needed for
7730 saturated addition / subtraction. The only case where OUT_LABEL = false
7731 is useful is for saturated addition / subtraction performed during
7732 fixed-point rounding, cf. `avr_out_round'. */
7733
static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)
{
  /* See the big comment above for the contract of this worker.  */

  /* MODE of the operation.  */
  machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  scalar_int_mode imode = int_mode_for_mode (mode).require ();

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  /* Register summand/subtrahend: simple byte-wise ADD/ADC or SUB/SBC.  */
  if (REG_P (xop[2]))
    {
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;

      for (int i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

          /* x - x == 0: no saturation handling needed.  */
          if (MINUS == code)
            return;
        }

      goto saturate;
        }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    {
      *pcc = CC_NONE;
      return;
    }

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with x - (-128) so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes - 1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (int i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_CZN;
                }

              /* ADIW/SBIW consumed two bytes; skip the partner byte.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Once started, a zero byte must still propagate the carry.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          /* Last byte, +/-1, no carry pending: a single INC/DEC does.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          *pcc = CC_CLOBBER;
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

 saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where A is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.


     unsigned
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]


     signed
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  V == 1  |  const, reg  |     s+    |  [1s]
     +  as  a - (-b)  |  sub  |  V == 1  |  const       |     s+    |  [2s]
     -  as  a - b     |  sub  |  V == 1  |  const, reg  |     s-    |  [3s]
     -  as  a + (-b)  |  add  |  V == 1  |  const       |     s-    |  [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.
  */

  /* op[0] is the MSB, op[1] the byte below it (if any).  */
  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB
                         "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  /* Branch target of the saturation branches above.  */
  if (out_label)
    avr_asm_len ("0:", op, plen, 0);
}
8180
8181
8182 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
   is not a compile-time constant:
8184
8185 XOP[0] = XOP[0] +/- XOP[2]
8186
8187 This is a helper for the function below. The only insns that need this
8188 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
8189
8190 static const char*
8191 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
8192 {
8193 machine_mode mode = GET_MODE (xop[0]);
8194
8195 /* Only pointer modes want to add symbols. */
8196
8197 gcc_assert (mode == HImode || mode == PSImode);
8198
8199 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
8200
8201 avr_asm_len (PLUS == code
8202 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
8203 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
8204 xop, plen, -2);
8205
8206 if (PSImode == mode)
8207 avr_asm_len (PLUS == code
8208 ? "sbci %C0,hlo8(-(%2))"
8209 : "sbci %C0,hlo8(%2)", xop, plen, 1);
8210 return "";
8211 }
8212
8213
8214 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
8215
8216 INSN is a single_set insn or an insn pattern with a binary operation as
8217 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
8218
8219 XOP are the operands of INSN. In the case of 64-bit operations with
8220 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
8221 The non-saturating insns up to 32 bits may or may not supply a "d" class
8222 scratch as XOP[3].
8223
8224 If PLEN == NULL output the instructions.
8225 If PLEN != NULL set *PLEN to the length of the sequence in words.
8226
8227 PCC is a pointer to store the instructions' effect on cc0.
8228 PCC may be NULL.
8229
8230 PLEN and PCC default to NULL.
8231
8232 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
8233
8234 Return "" */
8235
const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
{
  /* See the big comment above for the contract.  Dispatches to
     avr_out_plus_1 / avr_out_plus_symbol and picks the shorter of the
     ADD-based and SUB-based sequences for constant operands.  */

  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  rtx xdest = SET_DEST (xpattern);
  machine_mode mode = GET_MODE (xdest);
  scalar_int_mode imode = int_mode_for_mode (mode).require ();
  int n_bytes = GET_MODE_SIZE (mode);
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  /* Callers may pass PCC == NULL; give the worker somewhere to write.  */
  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  /* Register operand: no ADD-vs-SUB choice to make; emit directly.  */
  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
      return "";
    }

  if (8 == n_bytes)
    {
      /* 64-bit case: the value lives in the fixed accumulator ACC_A
         and XOP[] holds just the summand/subtrahend in XOP[0].  */
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      /* A non-REG, non-numeric operand must be a symbolic constant.  */
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

  if (plen)
    {
      /* Length query only: report the better variant's length and cc.  */
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);

  return "";
}
8320
8321
8322 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
8323 time constant XOP[2]:
8324
8325 XOP[0] = XOP[0] <op> XOP[2]
8326
8327 and return "". If PLEN == NULL, print assembler instructions to perform the
8328 operation; otherwise, set *PLEN to the length of the instruction sequence
8329 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
8330 register or SCRATCH if no clobber register is needed for the operation.
8331 INSN is an INSN_P or a pattern of an insn. */
8332
const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register, SCRATCH or NULL_RTX.
     op[3]: 8-bit register containing 0xff or NULL_RTX */
  rtx op[4];

  op[2] = QImode == mode ? NULL_RTX : xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (int i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = popcount_hwi (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          /* OR with 0x00 is a no-op.  */
          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* A single bit: set T (reusing it if already set) and
                 copy it into the target bit with BLD.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* OR with 0xff: load 0xff, remembering a register that
                 already holds it in op[3] for later bytes.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              /* General case: go through the clobber register,
                 skipping the LDI when it already holds VAL8.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          /* AND with 0xff is a no-op.  */
          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* A single zero bit: clear T and BLD it into place.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          /* XOR with 0x00 is a no-op.  */
          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Flipping the sign bit equals subtracting 0x80.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
8473
8474
8475 /* Output sign extension from XOP[1] to XOP[0] and return "".
8476 If PLEN == NULL, print assembler instructions to perform the operation;
8477 otherwise, set *PLEN to the length of the instruction sequence (in words)
8478 as printed with PLEN == NULL. */
8479
const char*
avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
{
  // Size in bytes of source resp. destination operand.
  unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
  unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
  // Highest byte of the source: holds the sign bit to propagate.
  rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];

  if (plen)
    *plen = 0;

  // Copy source to destination (only needed when the registers differ)

  if (REGNO (xop[0]) != REGNO (xop[1]))
    {
      gcc_assert (n_src <= 2);

      if (n_src == 2)
        avr_asm_len (AVR_HAVE_MOVW
                     ? "movw %0,%1"
                     : "mov %B0,%B1", xop, plen, 1);
      if (n_src == 1 || !AVR_HAVE_MOVW)
        avr_asm_len ("mov %A0,%A1", xop, plen, 1);
    }

  // Set Carry to the sign bit MSB.7...
  // LSL below clobbers its operand, so work on a copy of the MSB when
  // the original register must be preserved.

  if (REGNO (xop[0]) == REGNO (xop[1])
      || !reg_unused_after (insn, r_msb))
    {
      avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
      r_msb = tmp_reg_rtx;
    }

  avr_asm_len ("lsl %0", &r_msb, plen, 1);

  // ...and propagate it to all the new sign bits
  // (SBC r,r yields 0x00 or 0xff depending on Carry.)

  for (unsigned n = n_src; n < n_dest; n++)
    avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);

  return "";
}
8523
8524
8525 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
8526 PLEN != NULL: Set *PLEN to the length of that sequence.
8527 Return "". */
8528
8529 const char*
8530 avr_out_addto_sp (rtx *op, int *plen)
8531 {
8532 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
8533 int addend = INTVAL (op[0]);
8534
8535 if (plen)
8536 *plen = 0;
8537
8538 if (addend < 0)
8539 {
8540 if (flag_verbose_asm || flag_print_asm_name)
8541 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
8542
8543 while (addend <= -pc_len)
8544 {
8545 addend += pc_len;
8546 avr_asm_len ("rcall .", op, plen, 1);
8547 }
8548
8549 while (addend++ < 0)
8550 avr_asm_len ("push __tmp_reg__", op, plen, 1);
8551 }
8552 else if (addend > 0)
8553 {
8554 if (flag_verbose_asm || flag_print_asm_name)
8555 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
8556
8557 while (addend-- > 0)
8558 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
8559 }
8560
8561 return "";
8562 }
8563
8564
8565 /* Output instructions to insert an inverted bit into OPERANDS[0]:
8566 $0.$1 = ~$2.$3 if XBITNO = NULL
8567 $0.$1 = ~$2.XBITNO if XBITNO != NULL.
8568 If PLEN = NULL then output the respective instruction sequence which
8569 is a combination of BST / BLD and some instruction(s) to invert the bit.
8570 If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
8571 Return "". */
8572
const char*
avr_out_insert_notbit (rtx_insn *insn, rtx operands[], rtx xbitno, int *plen)
{
  // op[0].op[1] is the destination bit position; op[2].op[3] is the
  // source bit position (op[3] may be overridden by XBITNO).
  rtx op[4] = { operands[0], operands[1], operands[2],
                xbitno == NULL_RTX ? operands [3] : xbitno };

  if (INTVAL (op[1]) == 7
      && test_hard_reg_class (LD_REGS, op[0]))
    {
      /* If the inserted bit number is 7 and we have a d-reg, then invert
         the bit after the insertion by means of SUBI *,0x80.  */

      if (INTVAL (op[3]) == 7
          && REGNO (op[0]) == REGNO (op[2]))
        {
          // Source and destination bit are both bit 7 of the same register:
          // inverting that bit in place is all that is needed.
          avr_asm_len ("subi %0,0x80", op, plen, -1);
        }
      else
        {
          // Copy the source bit via T flag, then flip destination bit 7.
          avr_asm_len ("bst %2,%3" CR_TAB
                       "bld %0,%1" CR_TAB
                       "subi %0,0x80", op, plen, -3);
        }
    }
  else if (test_hard_reg_class (LD_REGS, op[0])
           && (INTVAL (op[1]) != INTVAL (op[3])
               || !reg_overlap_mentioned_p (op[0], op[2])))
    {
      /* If the destination bit is in a d-reg we can jump depending
         on the source bit and use ANDI / ORI.  This just applies if we
         have not an early-clobber situation with the bit.  */

      // Clear the destination bit, then set it again unless the source
      // bit was set (SBRS skips the ORI when src bit is 1 -> inverted).
      avr_asm_len ("andi %0,~(1<<%1)" CR_TAB
                   "sbrs %2,%3" CR_TAB
                   "ori %0,1<<%1", op, plen, -3);
    }
  else
    {
      /* Otherwise, invert the bit by means of COM before we store it with
         BST and then undo the COM if needed.  */

      avr_asm_len ("com %2" CR_TAB
                   "bst %2,%3", op, plen, -2);

      if (!reg_unused_after (insn, op[2])
          // A simple 'reg_unused_after' is not enough because that function
          // assumes that the destination register is overwritten completely
          // and hence is in order for our purpose.  This is not the case
          // with BLD which just changes one bit of the destination.
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          /* Undo the COM from above.  */
          avr_asm_len ("com %2", op, plen, 1);
        }

      avr_asm_len ("bld %0,%1", op, plen, 1);
    }

  return "";
}
8633
8634
8635 /* Outputs instructions needed for fixed point type conversion.
8636 This includes converting between any fixed point type, as well
8637 as converting to any integer type. Conversion between integer
8638 types is not supported.
8639
8640 Converting signed fractional types requires a bit shift if converting
8641 to or from any unsigned fractional type because the decimal place is
8642 shifted by 1 bit. When the destination is a signed fractional, the sign
8643 is stored in either the carry or T bit. */
8644
8645 const char*
8646 avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
8647 {
8648 rtx xop[6];
8649 RTX_CODE shift = UNKNOWN;
8650 bool sign_in_carry = false;
8651 bool msb_in_carry = false;
8652 bool lsb_in_tmp_reg = false;
8653 bool lsb_in_carry = false;
8654 bool frac_rounded = false;
8655 const char *code_ashift = "lsl %0";
8656
8657
8658 #define MAY_CLOBBER(RR) \
8659 /* Shorthand used below. */ \
8660 ((sign_bytes \
8661 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
8662 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
8663 || (reg_unused_after (insn, all_regs_rtx[RR]) \
8664 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
8665
8666 struct
8667 {
8668 /* bytes : Length of operand in bytes.
8669 ibyte : Length of integral part in bytes.
8670 fbyte, fbit : Length of fractional part in bytes, bits. */
8671
8672 bool sbit;
8673 unsigned fbit, bytes, ibyte, fbyte;
8674 unsigned regno, regno_msb;
8675 } dest, src, *val[2] = { &dest, &src };
8676
8677 if (plen)
8678 *plen = 0;
8679
8680 /* Step 0: Determine information on source and destination operand we
8681 ====== will need in the remainder. */
8682
8683 for (size_t i = 0; i < ARRAY_SIZE (val); i++)
8684 {
8685 machine_mode mode;
8686
8687 xop[i] = operands[i];
8688
8689 mode = GET_MODE (xop[i]);
8690
8691 val[i]->bytes = GET_MODE_SIZE (mode);
8692 val[i]->regno = REGNO (xop[i]);
8693 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
8694
8695 if (SCALAR_INT_MODE_P (mode))
8696 {
8697 val[i]->sbit = intsigned;
8698 val[i]->fbit = 0;
8699 }
8700 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
8701 {
8702 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
8703 val[i]->fbit = GET_MODE_FBIT (mode);
8704 }
8705 else
8706 fatal_insn ("unsupported fixed-point conversion", insn);
8707
8708 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
8709 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
8710 }
8711
8712 // Byte offset of the decimal point taking into account different place
8713 // of the decimal point in input and output and different register numbers
8714 // of input and output.
8715 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
8716
8717 // Number of destination bytes that will come from sign / zero extension.
8718 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
8719
8720 // Number of bytes at the low end to be filled with zeros.
8721 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
8722
8723 // Do we have a 16-Bit register that is cleared?
8724 rtx clrw = NULL_RTX;
8725
8726 bool sign_extend = src.sbit && sign_bytes;
8727
8728 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
8729 shift = ASHIFT;
8730 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
8731 shift = ASHIFTRT;
8732 else if (dest.fbit % 8 == src.fbit % 8)
8733 shift = UNKNOWN;
8734 else
8735 gcc_unreachable();
8736
8737 /* If we need to round the fraction part, we might need to save/round it
8738 before clobbering any of it in Step 1. Also, we might want to do
8739 the rounding now to make use of LD_REGS. */
8740 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8741 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8742 && !TARGET_FRACT_CONV_TRUNC)
8743 {
8744 bool overlap
8745 = (src.regno <=
8746 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
8747 && dest.regno - offset -1 >= dest.regno);
8748 unsigned s0 = dest.regno - offset -1;
8749 bool use_src = true;
8750 unsigned sn;
8751 unsigned copied_msb = src.regno_msb;
8752 bool have_carry = false;
8753
8754 if (src.ibyte > dest.ibyte)
8755 copied_msb -= src.ibyte - dest.ibyte;
8756
8757 for (sn = s0; sn <= copied_msb; sn++)
8758 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
8759 && !reg_unused_after (insn, all_regs_rtx[sn]))
8760 use_src = false;
8761 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
8762 {
8763 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8764 &all_regs_rtx[src.regno_msb], plen, 2);
8765 sn = src.regno;
8766 if (sn < s0)
8767 {
8768 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
8769 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
8770 else
8771 avr_asm_len ("sec" CR_TAB
8772 "cpc %0,__zero_reg__",
8773 &all_regs_rtx[sn], plen, 2);
8774 have_carry = true;
8775 }
8776 while (++sn < s0)
8777 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8778
8779 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
8780 &all_regs_rtx[s0], plen, 1);
8781 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8782 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
8783 avr_asm_len ("\n0:", NULL, plen, 0);
8784 frac_rounded = true;
8785 }
8786 else if (use_src && overlap)
8787 {
8788 avr_asm_len ("clr __tmp_reg__" CR_TAB
8789 "sbrc %1,0" CR_TAB
8790 "dec __tmp_reg__", xop, plen, 1);
8791 sn = src.regno;
8792 if (sn < s0)
8793 {
8794 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8795 have_carry = true;
8796 }
8797
8798 while (++sn < s0)
8799 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8800
8801 if (have_carry)
8802 avr_asm_len ("clt" CR_TAB
8803 "bld __tmp_reg__,7" CR_TAB
8804 "adc %0,__tmp_reg__",
8805 &all_regs_rtx[s0], plen, 1);
8806 else
8807 avr_asm_len ("lsr __tmp_reg" CR_TAB
8808 "add %0,__tmp_reg__",
8809 &all_regs_rtx[s0], plen, 2);
8810 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8811 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8812 frac_rounded = true;
8813 }
8814 else if (overlap)
8815 {
8816 bool use_src
8817 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
8818 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
8819 || reg_unused_after (insn, all_regs_rtx[s0])));
8820 xop[2] = all_regs_rtx[s0];
8821 unsigned sn = src.regno;
8822 if (!use_src || sn == s0)
8823 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8824 /* We need to consider to-be-discarded bits
8825 if the value is negative. */
8826 if (sn < s0)
8827 {
8828 avr_asm_len ("tst %0" CR_TAB
8829 "brpl 0f",
8830 &all_regs_rtx[src.regno_msb], plen, 2);
8831 /* Test to-be-discarded bytes for any nozero bits.
8832 ??? Could use OR or SBIW to test two registers at once. */
8833 if (sn < s0)
8834 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8835
8836 while (++sn < s0)
8837 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8838 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
8839 if (use_src)
8840 avr_asm_len ("breq 0f" CR_TAB
8841 "ori %2,1"
8842 "\n0:\t" "mov __tmp_reg__,%2",
8843 xop, plen, 3);
8844 else
8845 avr_asm_len ("breq 0f" CR_TAB
8846 "set" CR_TAB
8847 "bld __tmp_reg__,0\n0:",
8848 xop, plen, 3);
8849 }
8850 lsb_in_tmp_reg = true;
8851 }
8852 }
8853
8854 /* Step 1: Clear bytes at the low end and copy payload bits from source
8855 ====== to destination. */
8856
8857 int step = offset < 0 ? 1 : -1;
8858 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
8859
8860 // We cleared at least that number of registers.
8861 int clr_n = 0;
8862
8863 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
8864 {
8865 // Next regno of destination is needed for MOVW
8866 unsigned d1 = d0 + step;
8867
8868 // Current and next regno of source
8869 signed s0 = d0 - offset;
8870 signed s1 = s0 + step;
8871
8872 // Must current resp. next regno be CLRed? This applies to the low
8873 // bytes of the destination that have no associated source bytes.
8874 bool clr0 = s0 < (signed) src.regno;
8875 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
8876
8877 // First gather what code to emit (if any) and additional step to
8878 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
8879 // is the source rtx for the current loop iteration.
8880 const char *code = NULL;
8881 int stepw = 0;
8882
8883 if (clr0)
8884 {
8885 if (AVR_HAVE_MOVW && clr1 && clrw)
8886 {
8887 xop[2] = all_regs_rtx[d0 & ~1];
8888 xop[3] = clrw;
8889 code = "movw %2,%3";
8890 stepw = step;
8891 }
8892 else
8893 {
8894 xop[2] = all_regs_rtx[d0];
8895 code = "clr %2";
8896
8897 if (++clr_n >= 2
8898 && !clrw
8899 && d0 % 2 == (step > 0))
8900 {
8901 clrw = all_regs_rtx[d0 & ~1];
8902 }
8903 }
8904 }
8905 else if (offset && s0 <= (signed) src.regno_msb)
8906 {
8907 int movw = AVR_HAVE_MOVW && offset % 2 == 0
8908 && d0 % 2 == (offset > 0)
8909 && d1 <= dest.regno_msb && d1 >= dest.regno
8910 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
8911
8912 xop[2] = all_regs_rtx[d0 & ~movw];
8913 xop[3] = all_regs_rtx[s0 & ~movw];
8914 code = movw ? "movw %2,%3" : "mov %2,%3";
8915 stepw = step * movw;
8916 }
8917
8918 if (code)
8919 {
8920 if (sign_extend && shift != ASHIFT && !sign_in_carry
8921 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
8922 {
8923 /* We are going to override the sign bit. If we sign-extend,
8924 store the sign in the Carry flag. This is not needed if
8925 the destination will be ASHIFT in the remainder because
8926 the ASHIFT will set Carry without extra instruction. */
8927
8928 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
8929 sign_in_carry = true;
8930 }
8931
8932 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
8933
8934 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8935 && src.ibyte > dest.ibyte
8936 && (d0 == src_msb || d0 + stepw == src_msb))
8937 {
8938 /* We are going to override the MSB. If we shift right,
8939 store the MSB in the Carry flag. This is only needed if
8940 we don't sign-extend becaue with sign-extension the MSB
8941 (the sign) will be produced by the sign extension. */
8942
8943 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
8944 msb_in_carry = true;
8945 }
8946
8947 unsigned src_lsb = dest.regno - offset -1;
8948
8949 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
8950 && !lsb_in_tmp_reg
8951 && (d0 == src_lsb || d0 + stepw == src_lsb))
8952 {
8953 /* We are going to override the new LSB; store it into carry. */
8954
8955 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
8956 code_ashift = "rol %0";
8957 lsb_in_carry = true;
8958 }
8959
8960 avr_asm_len (code, xop, plen, 1);
8961 d0 += stepw;
8962 }
8963 }
8964
8965 /* Step 2: Shift destination left by 1 bit position. This might be needed
8966 ====== for signed input and unsigned output. */
8967
8968 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
8969 {
8970 unsigned s0 = dest.regno - offset -1;
8971
8972 /* n1169 4.1.4 says:
8973 "Conversions from a fixed-point to an integer type round toward zero."
8974 Hence, converting a fract type to integer only gives a non-zero result
8975 for -1. */
8976 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8977 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
8978 && !TARGET_FRACT_CONV_TRUNC)
8979 {
8980 gcc_assert (s0 == src.regno_msb);
8981 /* Check if the input is -1. We do that by checking if negating
8982 the input causes an integer overflow. */
8983 unsigned sn = src.regno;
8984 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8985 while (sn <= s0)
8986 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8987
8988 /* Overflow goes with set carry. Clear carry otherwise. */
8989 avr_asm_len ("brvs 0f" CR_TAB
8990 "clc\n0:", NULL, plen, 2);
8991 }
8992 /* Likewise, when converting from accumulator types to integer, we
8993 need to round up negative values. */
8994 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8995 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8996 && !TARGET_FRACT_CONV_TRUNC
8997 && !frac_rounded)
8998 {
8999 bool have_carry = false;
9000
9001 xop[2] = all_regs_rtx[s0];
9002 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
9003 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
9004 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
9005 &all_regs_rtx[src.regno_msb], plen, 2);
9006 if (!lsb_in_tmp_reg)
9007 {
9008 unsigned sn = src.regno;
9009 if (sn < s0)
9010 {
9011 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
9012 plen, 1);
9013 have_carry = true;
9014 }
9015 while (++sn < s0)
9016 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
9017 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
9018 }
9019 /* Add in C and the rounding value 127. */
9020 /* If the destination msb is a sign byte, and in LD_REGS,
9021 grab it as a temporary. */
9022 if (sign_bytes
9023 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
9024 dest.regno_msb))
9025 {
9026 xop[3] = all_regs_rtx[dest.regno_msb];
9027 avr_asm_len ("ldi %3,127", xop, plen, 1);
9028 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
9029 : have_carry ? "adc %2,%3"
9030 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
9031 : "add %2,%3"),
9032 xop, plen, 1);
9033 }
9034 else
9035 {
9036 /* Fall back to use __zero_reg__ as a temporary. */
9037 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
9038 if (have_carry)
9039 avr_asm_len ("clt" CR_TAB
9040 "bld __zero_reg__,7", NULL, plen, 2);
9041 else
9042 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
9043 avr_asm_len (have_carry && lsb_in_tmp_reg
9044 ? "adc __tmp_reg__,__zero_reg__"
9045 : have_carry ? "adc %2,__zero_reg__"
9046 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
9047 : "add %2,__zero_reg__",
9048 xop, plen, 1);
9049 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
9050 }
9051
9052 for (d0 = dest.regno + zero_bytes;
9053 d0 <= dest.regno_msb - sign_bytes; d0++)
9054 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
9055
9056 avr_asm_len (lsb_in_tmp_reg
9057 ? "\n0:\t" "lsl __tmp_reg__"
9058 : "\n0:\t" "lsl %2",
9059 xop, plen, 1);
9060 }
9061 else if (MAY_CLOBBER (s0))
9062 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
9063 else
9064 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9065 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
9066
9067 code_ashift = "rol %0";
9068 lsb_in_carry = true;
9069 }
9070
9071 if (shift == ASHIFT)
9072 {
9073 for (d0 = dest.regno + zero_bytes;
9074 d0 <= dest.regno_msb - sign_bytes; d0++)
9075 {
9076 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
9077 code_ashift = "rol %0";
9078 }
9079
9080 lsb_in_carry = false;
9081 sign_in_carry = true;
9082 }
9083
9084 /* Step 4a: Store MSB in carry if we don't already have it or will produce
9085 ======= it in sign-extension below. */
9086
9087 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
9088 && src.ibyte > dest.ibyte)
9089 {
9090 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
9091
9092 if (MAY_CLOBBER (s0))
9093 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
9094 else
9095 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9096 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
9097
9098 msb_in_carry = true;
9099 }
9100
9101 /* Step 3: Sign-extend or zero-extend the destination as needed.
9102 ====== */
9103
9104 if (sign_extend && !sign_in_carry)
9105 {
9106 unsigned s0 = src.regno_msb;
9107
9108 if (MAY_CLOBBER (s0))
9109 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
9110 else
9111 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
9112 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
9113
9114 sign_in_carry = true;
9115 }
9116
9117 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
9118
9119 unsigned copies = 0;
9120 rtx movw = sign_extend ? NULL_RTX : clrw;
9121
9122 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
9123 {
9124 if (AVR_HAVE_MOVW && movw
9125 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
9126 {
9127 xop[2] = all_regs_rtx[d0];
9128 xop[3] = movw;
9129 avr_asm_len ("movw %2,%3", xop, plen, 1);
9130 d0++;
9131 }
9132 else
9133 {
9134 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
9135 &all_regs_rtx[d0], plen, 1);
9136
9137 if (++copies >= 2 && !movw && d0 % 2 == 1)
9138 movw = all_regs_rtx[d0-1];
9139 }
9140 } /* for */
9141
9142
9143 /* Step 4: Right shift the destination. This might be needed for
9144 ====== conversions from unsigned to signed. */
9145
9146 if (shift == ASHIFTRT)
9147 {
9148 const char *code_ashiftrt = "lsr %0";
9149
9150 if (sign_extend || msb_in_carry)
9151 code_ashiftrt = "ror %0";
9152
9153 if (src.sbit && src.ibyte == dest.ibyte)
9154 code_ashiftrt = "asr %0";
9155
9156 for (d0 = dest.regno_msb - sign_bytes;
9157 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
9158 {
9159 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
9160 code_ashiftrt = "ror %0";
9161 }
9162 }
9163
9164 #undef MAY_CLOBBER
9165
9166 return "";
9167 }
9168
9169
9170 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
9171 XOP[2] is the rounding point, a CONST_INT. The function prints the
9172 instruction sequence if PLEN = NULL and computes the length in words
9173 of the sequence if PLEN != NULL. Most of this function deals with
9174 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
9175
const char*
avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  scalar_mode mode = as_a <scalar_mode> (GET_MODE (xop[0]));
  scalar_int_mode imode = int_mode_for_mode (mode).require ();
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  // The rounding addend 2^(-RP-1) as a fixed-point constant.  It is kept
  // both as double_int (for const_fixed_from_double_int below) and as
  // wide_int (for computing the AND mask).
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
                                         GET_MODE_PRECISION (imode));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below.

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  // If the addition saturated, the result is already all-ones in the kept
  // bits, so skip the masking.
  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep  all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower:      2^(-RP-1) ...
  // Rounding point                           ^^^^^^^^^
  // Added above                              ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  // -wi_add - wi_add == -2 * 2^(-RP-1) == mask with all bits >= 2^(-RP) set.
  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);

  xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

  if (plen)
    *plen = len_add + len_and;

  return "";
}
9231
9232
9233 /* Create RTL split patterns for byte sized rotate expressions. This
9234 produces a series of move instructions and considers overlap situations.
9235 Overlapping non-HImode operands need a scratch register. */
9236
bool
avr_rotate_bytes (rtx operands[])
{
  machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num  >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* Himode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      // Classic three-XOR in-place swap of the two bytes.
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
      /* Create linked list of moves to determine move order.  */
      // links == -1 means the move is unconstrained; otherwise it is the
      // index of the move that must happen before this one can.
      struct {
        rtx src, dst;
        int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (int i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
        for (int i = 0; i < size; i++)
          if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
            for (int j = 0; j < size; j++)
              if (j != i && rtx_equal_p (move[j].src, move[i].dst))
                {
                  /* The dst of move i is the src of move j.  */
                  move[i].links = j;
                  break;
                }

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (int i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove  conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst =  scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of  blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;
              size=size+1;
            }
        }
      while (blocked != -1);
    }
  return true;
}
9379
9380
9381 /* Worker function for `ADJUST_INSN_LENGTH'. */
9382 /* Modifies the length assigned to instruction INSN
9383 LEN is the initially computed length of the insn. */
9384
int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* As we pretend jump tables in .text, fix branch offsets crossing jump
     tables now.  */

  if (JUMP_TABLE_DATA_P (insn))
    return 0;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn)
      || -1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each worker stores the computed length
     into LEN through its &len argument.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    case ADJUST_LEN_INSV_NOTBIT:
      avr_out_insert_notbit (insn, op, NULL_RTX, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_0:
      avr_out_insert_notbit (insn, op, const0_rtx, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_7:
      avr_out_insert_notbit (insn, op, GEN_INT (7), &len);
      break;

    default:
      gcc_unreachable();
    }

  return len;
}
9491
9492 /* Return nonzero if register REG dead after INSN. */
9493
9494 int
9495 reg_unused_after (rtx_insn *insn, rtx reg)
9496 {
9497 return (dead_or_set_p (insn, reg)
9498 || (REG_P (reg) && _reg_unused_after (insn, reg)));
9499 }
9500
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && !MEM_P (SET_DEST (set))
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Walk forward over the insn stream until REG is provably dead
     (set without being used) or provably live (used, or control flow
     becomes unclear).  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
         is dead here.  However, if this is a label added by reorg, then
         the register may still be live here.  We can't tell the difference,
         so we just ignore labels completely.  */
      if (code == CODE_LABEL)
        return 1;
      /* else */
#endif

      /* Skip notes, barriers, labels etc.  */
      if (!INSN_P (insn))
        continue;

      /* A jump ends the straight-line region we can reason about:
         assume REG is live at the target.  */
      if (code == JUMP_INSN)
        return 0;

      /* If this is a sequence, we must handle them all at once.
         We could have for instance a call that sets the target register,
         and an insn in a delay slot that uses the register.  In this case,
         we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
          int retval = 0;

          for (int i = 0; i < seq->len (); i++)
            {
              rtx_insn *this_insn = seq->insn (i);
              rtx set = single_set (this_insn);

              if (CALL_P (this_insn))
                code = CALL_INSN;
              else if (JUMP_P (this_insn))
                {
                  /* An annulled branch may or may not execute the delay
                     slot; be conservative.  */
                  if (INSN_ANNULLED_BRANCH_P (this_insn))
                    return 0;
                  code = JUMP_INSN;
                }

              /* REG read inside the sequence: live.  */
              if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
                return 0;
              if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                {
                  /* Set to a register: dead after the sequence.  Set
                     used as a store address: treated as a use.  */
                  if (!MEM_P (SET_DEST (set)))
                    retval = 1;
                  else
                    return 0;
                }
              /* No single set but REG mentioned somewhere: assume use.  */
              if (set == 0
                  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
                return 0;
            }
          if (retval == 1)
            return 1;
          else if (code == JUMP_INSN)
            return 0;
        }

      if (code == CALL_INSN)
        {
          /* REG passed to the callee via a USE in the call fusage:
             it is live across the call.  */
          rtx tem;
          for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
            if (GET_CODE (XEXP (tem, 0)) == USE
                && REG_P (XEXP (XEXP (tem, 0), 0))
                && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
              return 0;
          /* Call-used (caller-saved) registers are clobbered by the
             call, hence dead afterwards.  */
          if (call_used_regs[REGNO (reg)])
            return 1;
        }

      /* Same three checks as in the SEQUENCE case, for a plain insn.  */
      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
        return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
        return !MEM_P (SET_DEST (set));
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
        return 0;
    }
  /* Fell off the end of the insn stream: REG is unused.  */
  return 1;
}
9605
9606
9607 /* Implement `TARGET_ASM_INTEGER'. */
9608 /* Target hook for assembling integer objects. The AVR version needs
9609 special handling for references to certain labels. */
9610
9611 static bool
9612 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
9613 {
9614 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
9615 && text_segment_operand (x, VOIDmode))
9616 {
9617 fputs ("\t.word\tgs(", asm_out_file);
9618 output_addr_const (asm_out_file, x);
9619 fputs (")\n", asm_out_file);
9620
9621 return true;
9622 }
9623 else if (GET_MODE (x) == PSImode)
9624 {
9625 /* This needs binutils 2.23+, see PR binutils/13503 */
9626
9627 fputs ("\t.byte\tlo8(", asm_out_file);
9628 output_addr_const (asm_out_file, x);
9629 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
9630
9631 fputs ("\t.byte\thi8(", asm_out_file);
9632 output_addr_const (asm_out_file, x);
9633 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
9634
9635 fputs ("\t.byte\thh8(", asm_out_file);
9636 output_addr_const (asm_out_file, x);
9637 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
9638
9639 return true;
9640 }
9641 else if (CONST_FIXED_P (x))
9642 {
9643 /* varasm fails to handle big fixed modes that don't fit in hwi. */
9644
9645 for (unsigned n = 0; n < size; n++)
9646 {
9647 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
9648 default_assemble_integer (xn, 1, aligned_p);
9649 }
9650
9651 return true;
9652 }
9653
9654 if (AVR_TINY
9655 && avr_address_tiny_pm_p (x))
9656 {
9657 x = plus_constant (Pmode, x, avr_arch->flash_pm_offset);
9658 }
9659
9660 return default_assemble_integer (x, size, aligned_p);
9661 }
9662
9663
9664 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
9665 /* Return value is nonzero if pseudos that have been
9666 assigned to registers of class CLASS would likely be spilled
9667 because registers of CLASS are needed for spill registers. */
9668
9669 static bool
9670 avr_class_likely_spilled_p (reg_class_t c)
9671 {
9672 return (c != ALL_REGS &&
9673 (AVR_TINY ? 1 : c != ADDW_REGS));
9674 }
9675
9676
/* Valid attributes:
   progmem - Put data into program memory.
   signal - Make a function a hardware interrupt handler.
   After the function prologue, interrupts remain disabled.
   interrupt - Make a function a hardware interrupt handler.  Before the
   function prologue, interrupts are enabled by means of SEI.
   naked - Don't generate a function prologue/epilogue or RET
   instruction.  */
9685
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
avr_handle_progmem_attribute (tree *node, tree name,
                              tree args ATTRIBUTE_UNUSED,
                              int flags ATTRIBUTE_UNUSED,
                              bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
        {
          /* This is really a decl attribute, not a type attribute,
             but try to handle it for GCC 3.0 backwards compatibility.  */

          tree type = TREE_TYPE (*node);
          tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
          tree newtype = build_type_attribute_variant (type, attr);

          /* Keep the attributed type a variant of the original so type
             identity is preserved, then drop the decl attribute.  */
          TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
          TREE_TYPE (*node) = newtype;
          *no_add_attrs = true;
        }
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
        {
          /* Variable in static storage: attach the attribute.  */
          *no_add_attrs = false;
        }
      else
        {
          /* Automatic variables cannot be placed in program memory.  */
          warning (OPT_Wattributes, "%qE attribute ignored",
                   name);
          *no_add_attrs = true;
        }
    }

  return NULL_TREE;
}
9724
9725 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
9726 struct attribute_spec.handler. */
9727
9728 static tree
9729 avr_handle_fndecl_attribute (tree *node, tree name,
9730 tree args ATTRIBUTE_UNUSED,
9731 int flags ATTRIBUTE_UNUSED,
9732 bool *no_add_attrs)
9733 {
9734 if (TREE_CODE (*node) != FUNCTION_DECL)
9735 {
9736 warning (OPT_Wattributes, "%qE attribute only applies to functions",
9737 name);
9738 *no_add_attrs = true;
9739 }
9740
9741 return NULL_TREE;
9742 }
9743
9744 static tree
9745 avr_handle_fntype_attribute (tree *node, tree name,
9746 tree args ATTRIBUTE_UNUSED,
9747 int flags ATTRIBUTE_UNUSED,
9748 bool *no_add_attrs)
9749 {
9750 if (TREE_CODE (*node) != FUNCTION_TYPE)
9751 {
9752 warning (OPT_Wattributes, "%qE attribute only applies to functions",
9753 name);
9754 *no_add_attrs = true;
9755 }
9756
9757 return NULL_TREE;
9758 }
9759
9760 static tree
9761 avr_handle_absdata_attribute (tree *node, tree name, tree /* args */,
9762 int /* flags */, bool *no_add)
9763 {
9764 location_t loc = DECL_SOURCE_LOCATION (*node);
9765
9766 if (AVR_TINY)
9767 {
9768 if (TREE_CODE (*node) != VAR_DECL
9769 || (!TREE_STATIC (*node) && !DECL_EXTERNAL (*node)))
9770 {
9771 warning_at (loc, OPT_Wattributes, "%qE attribute only applies to"
9772 " variables in static storage", name);
9773 *no_add = true;
9774 }
9775 }
9776 else
9777 {
9778 warning_at (loc, OPT_Wattributes, "%qE attribute only supported"
9779 " for reduced Tiny cores", name);
9780 *no_add = true;
9781 }
9782
9783 return NULL_TREE;
9784 }
9785
/* Handle the "io", "io_low" and "address" attributes; arguments as in
   struct attribute_spec.handler.  Validate that the attribute sits on a
   variable and that an optional address argument is an integer constant
   in the range required by the respective attribute.  */

static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
                           int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  /* "io" and "io_low" share this handler with "address"; tell them
     apart by the attribute name's prefix.  */
  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
  location_t loc = DECL_SOURCE_LOCATION (*node);

  if (!VAR_P (*node))
    {
      warning_at (loc, OPT_Wattributes, "%qE attribute only applies to "
                  "variables", name);
      *no_add = true;
      return NULL_TREE;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper around the address argument.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
        TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
        {
          warning_at (loc, OPT_Wattributes, "%qE attribute allows only an "
                      "integer constant argument", name);
          *no_add = true;
        }
      else if (io_p
               && (!tree_fits_shwi_p (arg)
                   /* Pick the range predicate matching the attribute:
                      "io_low" must satisfy low_io_address_operand,
                      plain "io" only io_address_operand.  */
                   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
                        ? low_io_address_operand : io_address_operand)
                   (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
        {
          warning_at (loc, OPT_Wattributes, "%qE attribute address "
                      "out of range", name);
          *no_add = true;
        }
      else
        {
          /* Reject a second address-providing attribute on the same
             decl; only one of io / io_low / address may carry a value.  */
          tree attribs = DECL_ATTRIBUTES (*node);
          const char *names[] = { "io", "io_low", "address", NULL };
          for (const char **p = names; *p; p++)
            {
              tree other = lookup_attribute (*p, attribs);
              if (other && TREE_VALUE (other))
                {
                  warning_at (loc, OPT_Wattributes,
                              "both %s and %qE attribute provide address",
                              *p, name);
                  *no_add = true;
                  break;
                }
            }
        }
    }

  /* Accesses to IO registers usually must not be optimized away.  */
  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
    warning_at (loc, OPT_Wattributes, "%qE attribute on non-volatile variable",
                name);

  return NULL_TREE;
}
9847
/* If X is a SYMBOL_REF whose decl carries an address-providing attribute
   ("io", "io_low" or "address", flagged via SYMBOL_FLAG_ADDRESS), return
   the attribute's address value as a CONST_INT.  Otherwise return X
   unchanged.  */

rtx
avr_eval_addr_attrib (rtx x)
{
  if (SYMBOL_REF_P (x)
      && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
    {
      tree decl = SYMBOL_REF_DECL (x);
      tree attr = NULL_TREE;

      if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
        {
          attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
          /* "io" present but without an argument: try "io_low".  */
          if (!attr || !TREE_VALUE (attr))
            attr = lookup_attribute ("io_low", DECL_ATTRIBUTES (decl));
          gcc_assert (attr);
        }
      /* Fall back to a plain "address" attribute if neither io variant
         supplied a value.  */
      if (!attr || !TREE_VALUE (attr))
        attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
      gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
      return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
    }
  return x;
}
9871
9872
/* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */

  /* Data placement in program memory (flash).  */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },

  /* Interrupt handler variants (require a FUNCTION_DECL).  */
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "no_gccisr", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },

  /* Prologue/epilogue control (attach to the FUNCTION_TYPE).  */
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },

  /* Explicit object addresses; the io variants take an optional
     argument, "address" a mandatory one.  */
  { "io",        0, 1, true,  false, false,  avr_handle_addr_attribute,
    false },
  { "io_low",    0, 1, true,  false, false,  avr_handle_addr_attribute,
    false },
  { "address",   1, 1, true,  false, false,  avr_handle_addr_attribute,
    false },

  /* LDS/STS-reachable data on reduced Tiny cores.  */
  { "absdata",   0, 0, true,  false, false,  avr_handle_absdata_attribute,
    false },

  { NULL,        0, 0, false, false, false, NULL, false }
};
9903
9904
9905 /* Return true if we support address space AS for the architecture in effect
9906 and false, otherwise. If LOC is not UNKNOWN_LOCATION then also issue
9907 a respective error. */
9908
9909 bool
9910 avr_addr_space_supported_p (addr_space_t as, location_t loc)
9911 {
9912 if (AVR_TINY)
9913 {
9914 if (loc != UNKNOWN_LOCATION)
9915 error_at (loc, "address spaces are not supported for reduced "
9916 "Tiny devices");
9917 return false;
9918 }
9919 else if (avr_addrspace[as].segment >= avr_n_flash)
9920 {
9921 if (loc != UNKNOWN_LOCATION)
9922 error_at (loc, "address space %qs not supported for devices with "
9923 "flash size up to %d KiB", avr_addrspace[as].name,
9924 64 * avr_n_flash);
9925 return false;
9926 }
9927
9928 return true;
9929 }
9930
9931
/* Implement `TARGET_ADDR_SPACE_DIAGNOSE_USAGE'.  */

static void
avr_addr_space_diagnose_usage (addr_space_t as, location_t loc)
{
  /* Called only for its diagnostic side effect: passing a real LOC makes
     avr_addr_space_supported_p emit an error when AS is unavailable on
     the current device.  The boolean result is deliberately ignored.  */
  (void) avr_addr_space_supported_p (as, loc);
}
9939
9940
9941 /* Look if DECL shall be placed in program memory space by
9942 means of attribute `progmem' or some address-space qualifier.
9943 Return non-zero if DECL is data that must end up in Flash and
9944 zero if the data lives in RAM (.bss, .data, .rodata, ...).
9945
9946 Return 2 if DECL is located in 24-bit flash address-space
9947 Return 1 if DECL is located in 16-bit flash address-space
9948 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
9949 Return 0 otherwise */
9950
9951 int
9952 avr_progmem_p (tree decl, tree attributes)
9953 {
9954 tree a;
9955
9956 if (TREE_CODE (decl) != VAR_DECL)
9957 return 0;
9958
9959 if (avr_decl_memx_p (decl))
9960 return 2;
9961
9962 if (avr_decl_flash_p (decl))
9963 return 1;
9964
9965 if (NULL_TREE
9966 != lookup_attribute ("progmem", attributes))
9967 return -1;
9968
9969 a = decl;
9970
9971 do
9972 a = TREE_TYPE(a);
9973 while (TREE_CODE (a) == ARRAY_TYPE);
9974
9975 if (a == error_mark_node)
9976 return 0;
9977
9978 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
9979 return -1;
9980
9981 return 0;
9982 }
9983
9984
9985 /* Return true if DECL has attribute `absdata' set. This function should
9986 only be used for AVR_TINY. */
9987
9988 static bool
9989 avr_decl_absdata_p (tree decl, tree attributes)
9990 {
9991 return (TREE_CODE (decl) == VAR_DECL
9992 && NULL_TREE != lookup_attribute ("absdata", attributes));
9993 }
9994
9995
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  /* Peel arrays: an array of pointers is checked like the pointer.  */
  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
        target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.  */

      as = TYPE_ADDR_SPACE (target);

      /* Only diagnose address spaces the device actually supports
         (no diagnostic location: avr_addr_space_supported_p only
         returns the verdict here).  */
      if (!ADDR_SPACE_GENERIC_P (as)
          && !TYPE_READONLY (target)
          && avr_addr_space_supported_p (as))
        {
          return as;
        }

      /* Scan pointer's target type: recurse for pointer-to-pointer.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
10040
10041
10042 /* Sanity check NODE so that all pointers targeting non-generic address spaces
10043 go along with CONST qualifier. Writing to these address spaces should
10044 be detected and complained about as early as possible. */
10045
10046 static bool
10047 avr_pgm_check_var_decl (tree node)
10048 {
10049 const char *reason = NULL;
10050
10051 addr_space_t as = ADDR_SPACE_GENERIC;
10052
10053 gcc_assert (as == 0);
10054
10055 if (avr_log.progmem)
10056 avr_edump ("%?: %t\n", node);
10057
10058 switch (TREE_CODE (node))
10059 {
10060 default:
10061 break;
10062
10063 case VAR_DECL:
10064 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
10065 reason = _("variable");
10066 break;
10067
10068 case PARM_DECL:
10069 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
10070 reason = _("function parameter");
10071 break;
10072
10073 case FIELD_DECL:
10074 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
10075 reason = _("structure field");
10076 break;
10077
10078 case FUNCTION_DECL:
10079 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
10080 as)
10081 reason = _("return type of function");
10082 break;
10083
10084 case POINTER_TYPE:
10085 if (as = avr_nonconst_pointer_addrspace (node), as)
10086 reason = _("pointer");
10087 break;
10088 }
10089
10090 if (reason)
10091 {
10092 if (TYPE_P (node))
10093 error ("pointer targeting address space %qs must be const in %qT",
10094 avr_addrspace[as].name, node);
10095 else
10096 error ("pointer targeting address space %qs must be const"
10097 " in %s %q+D",
10098 avr_addrspace[as].name, reason, node);
10099 }
10100
10101 return reason == NULL;
10102 }
10103
10104
/* Add the section attribute if the variable is in progmem.  */
/* Also diagnoses progmem data that is not const: such data cannot
   legally be placed in a read-only section.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      /* Check constness on both the element type and the decl itself.  */
      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          /* Name the culprit in the diagnostic: either the progmem
             attribute or the named address space.  */
          const char *reason = "__attribute__((progmem))";

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
10147
10148
/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
/* Track need of __do_clear_bss.  */

void
avr_asm_output_aligned_decl_common (FILE * stream,
                                    tree decl,
                                    const char *name,
                                    unsigned HOST_WIDE_INT size,
                                    unsigned int align, bool local_p)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* Symbols with an io / io_low / address attribute are not allocated
     storage: they are emitted as an absolute symbol assignment.  */
  if (mem != NULL_RTX && MEM_P (mem)
      && SYMBOL_REF_P ((symbol = XEXP (mem, 0)))
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      if (!local_p)
        {
          fprintf (stream, "\t.globl\t");
          assemble_name (stream, name);
          fprintf (stream, "\n");
        }
      if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
        {
          /* Emit "name = <address>" using the attribute's value.  */
          assemble_name (stream, name);
          fprintf (stream, " = %ld\n",
                   (long) INTVAL (avr_eval_addr_attrib (symbol)));
        }
      else if (local_p)
        /* io without an address only makes sense for externs, where the
           address is resolved elsewhere.  */
        error_at (DECL_SOURCE_LOCATION (decl),
                  "static IO declaration for %q+D needs an address", decl);
      return;
    }

  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
10196
10197 void
10198 avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
10199 unsigned HOST_WIDE_INT size, int align,
10200 void (*default_func)
10201 (FILE *, tree, const char *,
10202 unsigned HOST_WIDE_INT, int))
10203 {
10204 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
10205 rtx symbol;
10206
10207 if (mem != NULL_RTX && MEM_P (mem)
10208 && SYMBOL_REF_P ((symbol = XEXP (mem, 0)))
10209 && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
10210 {
10211 if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
10212 error_at (DECL_SOURCE_LOCATION (decl),
10213 "IO definition for %q+D needs an address", decl);
10214 avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
10215 }
10216 else
10217 default_func (file, decl, name, size, align);
10218 }
10219
10220
10221 /* Unnamed section callback for data_section
10222 to track need of __do_copy_data. */
10223
10224 static void
10225 avr_output_data_section_asm_op (const void *data)
10226 {
10227 avr_need_copy_data_p = true;
10228
10229 /* Dispatch to default. */
10230 output_section_asm_op (data);
10231 }
10232
10233
10234 /* Unnamed section callback for bss_section
10235 to track need of __do_clear_bss. */
10236
10237 static void
10238 avr_output_bss_section_asm_op (const void *data)
10239 {
10240 avr_need_clear_bss_p = true;
10241
10242 /* Dispatch to default. */
10243 output_section_asm_op (data);
10244 }
10245
10246
10247 /* Unnamed section callback for progmem*.data sections. */
10248
10249 static void
10250 avr_output_progmem_section_asm_op (const void *data)
10251 {
10252 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
10253 (const char*) data);
10254 }
10255
10256
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  If flash is not mapped to RAM then
     we have also to track .rodata because it is located in RAM then.  */

  /* NOTE: the #if below deliberately guards only the `if', so that with
     linker support for rodata-in-flash the .rodata override is applied
     only when flash is visible in the RAM address space (offset 0);
     without linker support it is applied unconditionally.  */
#if defined HAVE_LD_AVR_AVRXMEGA3_RODATA_IN_FLASH
  if (0 == avr_arch->flash_pm_offset)
#endif
    readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
10273
10274
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      /* The address space is encoded in the section flags as a multiple
         of SECTION_MACH_DEP; decode it to pick the section prefix.  */
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      const char *old_prefix = ".rodata";
      const char *new_prefix = avr_addrspace[as].section_name;

      /* Rewrite a ".rodata*" name to the progmem prefix, keeping any
         suffix (e.g. ".rodata.foo" -> ".progmem.data.foo").  */
      if (STR_PREFIX_P (name, old_prefix))
        {
          const char *sname = ACONCAT ((new_prefix,
                                        name + strlen (old_prefix), NULL));
          default_elf_asm_named_section (sname, flags, decl);
          return;
        }

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  /* Data sections need startup copying from flash.  */
  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  /* .rodata lives in RAM as well -- and hence needs copying -- unless
     the linker maps flash into the RAM address space (offset 0).  */
  if (!avr_need_copy_data_p
#if defined HAVE_LD_AVR_AVRXMEGA3_RODATA_IN_FLASH
      && 0 == avr_arch->flash_pm_offset
#endif
      )
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".rodata")
                            || STR_PREFIX_P (name, ".gnu.linkonce.r"));

  /* BSS sections need startup zeroing.  */
  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
10316
10317
10318 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
10319
10320 static unsigned int
10321 avr_section_type_flags (tree decl, const char *name, int reloc)
10322 {
10323 unsigned int flags = default_section_type_flags (decl, name, reloc);
10324
10325 if (STR_PREFIX_P (name, ".noinit"))
10326 {
10327 if (decl && TREE_CODE (decl) == VAR_DECL
10328 && DECL_INITIAL (decl) == NULL_TREE)
10329 flags |= SECTION_BSS; /* @nobits */
10330 else
10331 warning (0, "only uninitialized variables can be placed in the "
10332 ".noinit section");
10333 }
10334
10335 if (decl && DECL_P (decl)
10336 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
10337 {
10338 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
10339
10340 /* Attribute progmem puts data in generic address space.
10341 Set section flags as if it was in __flash to get the right
10342 section prefix in the remainder. */
10343
10344 if (ADDR_SPACE_GENERIC_P (as))
10345 as = ADDR_SPACE_FLASH;
10346
10347 flags |= as * SECTION_MACH_DEP;
10348 flags &= ~SECTION_WRITE;
10349 flags &= ~SECTION_BSS;
10350 }
10351
10352 return flags;
10353 }
10354
10355
10356 /* A helper for the next function. NODE is a decl that is associated with
10357 a symbol. Return TRUE if the respective object may be accessed by LDS.
10358 There might still be other reasons for why LDS is not appropriate.
10359 This function is only appropriate for AVR_TINY. */
10360
10361 static bool
10362 avr_decl_maybe_lds_p (tree node)
10363 {
10364 if (!node
10365 || TREE_CODE (node) != VAR_DECL
10366 || DECL_SECTION_NAME (node) != NULL)
10367 return false;
10368
10369 /* Don't use LDS for objects that go to .rodata. The current default
10370 linker description file still locates .rodata in RAM, but this is not
10371 a must. A better linker script would just keep .rodata in flash and
10372 add an offset of 0x4000 to the VMA. Hence avoid LDS for such data. */
10373
10374 if (TREE_READONLY (node))
10375 return false;
10376
10377 // C++ requires peeling arrays.
10378
10379 do
10380 node = TREE_TYPE (node);
10381 while (ARRAY_TYPE == TREE_CODE (node));
10382
10383 return (node != error_mark_node
10384 && !TYPE_READONLY (node));
10385 }
10386
10387
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */
/* Decorate the SYMBOL_REF of DECL's rtl with AVR-specific flags:
   address space, io / address attributes, and -- on reduced Tiny
   cores -- flash and absdata markers.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  tree addr_attr = NULL_TREE;

  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      if (!TREE_READONLY (decl))
        {
          // This might happen with C++ if stuff needs constructing.
          error ("variable %q+D with dynamic initialization put "
                 "into program memory area", decl);
        }
      else if (NULL_TREE == DECL_INITIAL (decl))
        {
          // Don't warn for (implicit) aliases like in PR80462.
          tree asmname = DECL_ASSEMBLER_NAME (decl);
          varpool_node *node = varpool_node::get_for_asmname (asmname);
          bool alias_p = node && node->alias;

          if (!alias_p)
            warning (OPT_Wuninitialized, "uninitialized variable %q+D put "
                     "into program memory area", decl);
        }
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* Tag data symbols with their address space and io / address
     attribute flags.  */
  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF_P (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      tree type = TREE_TYPE (decl);
      tree attr = DECL_ATTRIBUTES (decl);
      if (type == error_mark_node)
        return;

      addr_space_t as = TYPE_ADDR_SPACE (type);

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (!AVR_TINY
          && -1 == avr_progmem_p (decl, attr))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);

      tree io_low_attr = lookup_attribute ("io_low", attr);
      tree io_attr = lookup_attribute ("io", attr);

      /* Pick whichever attribute supplies an address value; io variants
         take precedence over a plain "address" attribute.  */
      if (io_low_attr
          && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
        addr_attr = io_attr;
      else if (io_attr
               && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
        addr_attr = io_attr;
      else
        addr_attr = lookup_attribute ("address", attr);
      /* SYMBOL_FLAG_IO_LOW: address usable with CBI/SBI/SBIC/SBIS.  */
      if (io_low_attr
          || (io_attr && addr_attr
              && low_io_address_operand
                  (GEN_INT (TREE_INT_CST_LOW
                            (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
      if (io_attr || io_low_attr)
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
      /* If we have an (io) address attribute specification, but the variable
         is external, treat the address as only a tentative definition
         to be used to determine if an io port is in the lower range, but
         don't use the exact value for constant propagation.  */
      if (addr_attr && !DECL_EXTERNAL (decl))
        SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
    }

  /* Reduced Tiny: tag flash data and LDS/STS-reachable data.  */
  if (AVR_TINY
      && decl
      && VAR_DECL == TREE_CODE (decl)
      && MEM_P (rtl)
      && SYMBOL_REF_P (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      bool progmem_p = -1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl));

      if (progmem_p)
        {
          // Tag symbols for addition of 0x4000 (avr_arch->flash_pm_offset).
          SYMBOL_REF_FLAGS (sym) |= AVR_SYMBOL_FLAG_TINY_PM;
        }

      if (avr_decl_absdata_p (decl, DECL_ATTRIBUTES (decl))
          || (TARGET_ABSDATA
              && !progmem_p
              && !addr_attr
              && avr_decl_maybe_lds_p (decl))
          || (addr_attr
              // If addr_attr is non-null, it has an argument.  Peek into it.
              && TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (addr_attr))) < 0xc0))
        {
          // May be accessed by LDS / STS.
          SYMBOL_REF_FLAGS (sym) |= AVR_SYMBOL_FLAG_TINY_ABSDATA;
        }

      if (progmem_p
          && avr_decl_absdata_p (decl, DECL_ATTRIBUTES (decl)))
        {
          error ("%q+D has incompatible attributes %qs and %qs",
                 decl, "progmem", "absdata");
        }
    }
}
10510
10511
/* Implement `TARGET_ASM_SELECT_SECTION' */
/* Map progmem data away from the default (RAM) sections into the
   flash section matching its address space.  */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
         .progmem.data  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      if (sect->common.flags & SECTION_NAMED)
        {
          /* Rewrite a ".rodata*" section name to the progmem prefix,
             keeping any suffix.  */
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = avr_addrspace[as].section_name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname,
                                  sect->common.flags & ~SECTION_DECLARED,
                                  sect->named.decl);
            }
        }

      /* Lazily create the unnamed progmem section for this address
         space on first use.  */
      if (!progmem_section[as])
        {
          progmem_section[as]
            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                                   avr_addrspace[as].section_name);
        }

      return progmem_section[as];
    }

  return sect;
}
10558
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  /* I/O addresses used with IN / OUT are relative to the start of the
     I/O space, i.e. the memory-mapped SFR address minus this offset.  */
  int sfr_offset = avr_arch->sfr_offset;

  if (avr_arch->asm_only)
    error ("architecture %qs supported for assembler only", avr_mmcu);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  Only emit
     symbols for SFRs the selected architecture actually has.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA || AVR_TINY)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  /* Register numbers of the fixed scratch and zero registers.  */
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
}
10592
10593
10594 /* Implement `TARGET_ASM_FILE_END'. */
10595 /* Outputs to the stdio stream FILE some
10596 appropriate text to go at the end of an assembler file. */
10597
10598 static void
10599 avr_file_end (void)
10600 {
10601 /* Output these only if there is anything in the
10602 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
10603 input section(s) - some code size can be saved by not
10604 linking in the initialization code from libgcc if resp.
10605 sections are empty, see PR18145. */
10606
10607 if (avr_need_copy_data_p)
10608 fputs (".global __do_copy_data\n", asm_out_file);
10609
10610 if (avr_need_clear_bss_p)
10611 fputs (".global __do_clear_bss\n", asm_out_file);
10612 }
10613
10614
10615 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
10616 /* Choose the order in which to allocate hard registers for
10617 pseudo-registers local to a basic block.
10618
10619 Store the desired register order in the array `reg_alloc_order'.
10620 Element 0 should be the register to allocate first; element 1, the
10621 next register; and so on. */
10622
10623 void
10624 avr_adjust_reg_alloc_order (void)
10625 {
10626 static const int order_0[] =
10627 {
10628 24, 25,
10629 18, 19, 20, 21, 22, 23,
10630 30, 31,
10631 26, 27, 28, 29,
10632 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10633 0, 1,
10634 32, 33, 34, 35
10635 };
10636 static const int tiny_order_0[] = {
10637 20, 21,
10638 22, 23,
10639 24, 25,
10640 30, 31,
10641 26, 27,
10642 28, 29,
10643 19, 18,
10644 16, 17,
10645 32, 33, 34, 35,
10646 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
10647 };
10648 static const int order_1[] =
10649 {
10650 18, 19, 20, 21, 22, 23, 24, 25,
10651 30, 31,
10652 26, 27, 28, 29,
10653 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10654 0, 1,
10655 32, 33, 34, 35
10656 };
10657 static const int tiny_order_1[] = {
10658 22, 23,
10659 24, 25,
10660 30, 31,
10661 26, 27,
10662 28, 29,
10663 21, 20, 19, 18,
10664 16, 17,
10665 32, 33, 34, 35,
10666 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
10667 };
10668 static const int order_2[] =
10669 {
10670 25, 24, 23, 22, 21, 20, 19, 18,
10671 30, 31,
10672 26, 27, 28, 29,
10673 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10674 1, 0,
10675 32, 33, 34, 35
10676 };
10677
10678 /* Select specific register allocation order.
10679 Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
10680 so different allocation order should be used. */
10681
10682 const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
10683 : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
10684 : (AVR_TINY ? tiny_order_0 : order_0));
10685
10686 for (size_t i = 0; i < ARRAY_SIZE (order_0); ++i)
10687 reg_alloc_order[i] = order[i];
10688 }
10689
10690
10691 /* Implement `TARGET_REGISTER_MOVE_COST' */
10692
10693 static int
10694 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
10695 reg_class_t from, reg_class_t to)
10696 {
10697 return (from == STACK_REG ? 6
10698 : to == STACK_REG ? 12
10699 : 2);
10700 }
10701
10702
10703 /* Implement `TARGET_MEMORY_MOVE_COST' */
10704
10705 static int
10706 avr_memory_move_cost (machine_mode mode,
10707 reg_class_t rclass ATTRIBUTE_UNUSED,
10708 bool in ATTRIBUTE_UNUSED)
10709 {
10710 return (mode == QImode ? 2
10711 : mode == HImode ? 4
10712 : mode == SImode ? 8
10713 : mode == SFmode ? 8
10714 : 16);
10715 }
10716
10717
10718 /* Cost for mul highpart. X is a LSHIFTRT, i.e. the outer TRUNCATE is
10719 already stripped off. */
10720
10721 static int
10722 avr_mul_highpart_cost (rtx x, int)
10723 {
10724 if (AVR_HAVE_MUL
10725 && LSHIFTRT == GET_CODE (x)
10726 && MULT == GET_CODE (XEXP (x, 0))
10727 && CONST_INT_P (XEXP (x, 1)))
10728 {
10729 // This is the wider mode.
10730 machine_mode mode = GET_MODE (x);
10731
10732 // The middle-end might still have PR81444, i.e. it is calling the cost
10733 // functions with strange modes. Fix this now by also considering
10734 // PSImode (should actually be SImode instead).
10735 if (HImode == mode || PSImode == mode || SImode == mode)
10736 {
10737 return COSTS_N_INSNS (2);
10738 }
10739 }
10740
10741 return 10000;
10742 }
10743
10744
10745 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
10746 cost of an RTX operand given its context. X is the rtx of the
10747 operand, MODE is its mode, and OUTER is the rtx_code of this
10748 operand's parent operator. */
10749
10750 static int
10751 avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
10752 int opno, bool speed)
10753 {
10754 enum rtx_code code = GET_CODE (x);
10755 int total;
10756
10757 switch (code)
10758 {
10759 case REG:
10760 case SUBREG:
10761 return 0;
10762
10763 case CONST_INT:
10764 case CONST_FIXED:
10765 case CONST_DOUBLE:
10766 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
10767
10768 default:
10769 break;
10770 }
10771
10772 total = 0;
10773 avr_rtx_costs (x, mode, outer, opno, &total, speed);
10774 return total;
10775 }
10776
/* Worker function for AVR backend's rtx_cost function.
   X is rtx expression whose cost is to be calculated.
   Return true if the complete cost has been computed.
   Return false if subexpressions should be scanned.
   In either case, *TOTAL contains the cost result.

   The cost numbers below are hand-tuned heuristics, roughly the number
   of instructions the respective operation expands to (COSTS_N_INSNS),
   differentiated by SPEED (optimize for speed vs. size).  */

static bool
avr_rtx_costs_1 (rtx x, machine_mode mode, int outer_code,
                 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
      /* Memory access costs about one insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
        {
        case E_QImode:
        case E_SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        case E_HImode:
        case E_PSImode:
        case E_SImode:
          /* Multi-byte negate: complement each byte plus carry fixup.  */
          *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ABS:
      switch (mode)
        {
        case E_QImode:
        case E_SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case NOT:
      /* One COM per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ZERO_EXTEND:
      /* One CLR per added byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                                      code, 0, speed);
      return true;

    case SIGN_EXTEND:
      /* Sign extension needs two extra insns to propagate the sign.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                                      code, 0, speed);
      return true;

    case PLUS:
      switch (mode)
        {
        case E_QImode:
          if (AVR_HAVE_MUL
              && MULT == GET_CODE (XEXP (x, 0))
              && register_operand (XEXP (x, 1), QImode))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 4 : 3);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          *total = COSTS_N_INSNS (1);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          break;

        case E_HImode:
          if (AVR_HAVE_MUL
              && (MULT == GET_CODE (XEXP (x, 0))
                  || ASHIFT == GET_CODE (XEXP (x, 0)))
              && register_operand (XEXP (x, 1), HImode)
              && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
                  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 5 : 4);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (2);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
            /* Small constants fit ADIW / SBIW.  */
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (2);
          break;

        case E_PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (3);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
            *total = COSTS_N_INSNS (2);
          else
            *total = COSTS_N_INSNS (3);
          break;

        case E_SImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (4);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (4);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case MINUS:
      if (AVR_HAVE_MUL
          && QImode == mode
          && register_operand (XEXP (x, 0), QImode)
          && MULT == GET_CODE (XEXP (x, 1)))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 4 : 3);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      if (AVR_HAVE_MUL
          && HImode == mode
          && register_operand (XEXP (x, 0), HImode)
          && (MULT == GET_CODE (XEXP (x, 1))
              || ASHIFT == GET_CODE (XEXP (x, 1)))
          && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
              || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 5 : 4);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      /* FALLTHRU */
    case AND:
    case IOR:
      if (IOR == code
          && HImode == mode
          && ASHIFT == GET_CODE (XEXP (x, 0)))
        {
          *total = COSTS_N_INSNS (2);
          // Just a rough estimate.  If we see no sign- or zero-extend,
          // then increase the cost a little bit.
          if (REG_P (XEXP (XEXP (x, 0), 0)))
            *total += COSTS_N_INSNS (1);
          if (REG_P (XEXP (x, 1)))
            *total += COSTS_N_INSNS (1);
          return true;
        }
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      if (!CONST_INT_P (XEXP (x, 1)))
        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case XOR:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case MULT:
      switch (mode)
        {
        case E_QImode:
          if (AVR_HAVE_MUL)
            *total = COSTS_N_INSNS (!speed ? 3 : 4);
          else if (!speed)
            /* No hardware MUL: a libgcc call (RCALL or CALL).  */
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case E_HImode:
          if (AVR_HAVE_MUL)
            {
              rtx op0 = XEXP (x, 0);
              rtx op1 = XEXP (x, 1);
              enum rtx_code code0 = GET_CODE (op0);
              enum rtx_code code1 = GET_CODE (op1);
              bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
              bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;

              if (ex0
                  && (u8_operand (op1, HImode)
                      || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
              if (ex0
                  && register_operand (op1, HImode))
                {
                  *total = COSTS_N_INSNS (!speed ? 5 : 8);
                  return true;
                }
              else if (ex0 || ex1)
                {
                  *total = COSTS_N_INSNS (!speed ? 3 : 5);
                  return true;
                }
              else if (register_operand (op0, HImode)
                       && (u8_operand (op1, HImode)
                           || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 6 : 9);
                  return true;
                }
              else
                *total = COSTS_N_INSNS (!speed ? 7 : 10);
            }
          else if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case E_PSImode:
          if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            /* NOTE(review): raw cost, not COSTS_N_INSNS — presumably
               intentional to keep PSImode MULT cheapish; confirm.  */
            *total = 10;
          break;

        case E_SImode:
        case E_DImode:
          if (AVR_HAVE_MUL)
            {
              if (!speed)
                {
                  /* Add some additional costs besides CALL like moves etc. */

                  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
                }
              else
                {
                  /* Just a rough estimate.  Even with -O2 we don't want bulky
                     code expanded inline.  */

                  *total = COSTS_N_INSNS (25);
                }
            }
          else
            {
              if (speed)
                *total = COSTS_N_INSNS (300);
              else
                /* Add some additional costs besides CALL like moves etc. */
                *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
            }

          if (mode == DImode)
            *total *= 2;

          return true;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* Division is always a library call on AVR.  */
      if (!speed)
        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
        *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* For div/mod with const-int divisor we have at least the cost of
         loading the divisor. */
      if (CONST_INT_P (XEXP (x, 1)))
        *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penalty for clobbering and moving around registers */
      *total += COSTS_N_INSNS (2);
      return true;

    case ROTATE:
      /* Only rotations by the byte/nibble-swap amounts are cheap.  */
      switch (mode)
        {
        case E_QImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
            *total = COSTS_N_INSNS (1);

          break;

        case E_HImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
            *total = COSTS_N_INSNS (3);

          break;

        case E_SImode:
          if (CONST_INT_P (XEXP (x, 1)))
            switch (INTVAL (XEXP (x, 1)))
              {
              case 8:
              case 24:
                *total = COSTS_N_INSNS (5);
                break;
              case 16:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
                break;
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFT:
      /* Shift costs depend heavily on the (constant) shift amount;
         variable shifts expand to loops.  */
      switch (mode)
        {
        case E_QImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case E_HImode:
          if (AVR_HAVE_MUL)
            {
              if (const_2_to_7_operand (XEXP (x, 1), HImode)
                  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
                      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
                {
                  /* Shift by a small constant of an extended value can be
                     done as a multiplication by a power of 2.  */
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
            }

          if (const1_rtx == (XEXP (x, 1))
              && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }

          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 3:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
              case 12:
                *total = COSTS_N_INSNS (5);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              case 5:
                *total = COSTS_N_INSNS (!speed ? 5 : 10);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case E_PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case E_SImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 24:
                *total = COSTS_N_INSNS (3);
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFTRT:
      switch (mode)
        {
        case E_QImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 6)
                *total = COSTS_N_INSNS (4);
              else if (val == 7)
                *total = COSTS_N_INSNS (2);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case E_HImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (2);
                break;
              case 15:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 7:
              case 8:
              case 9:
                *total = COSTS_N_INSNS (4);
                break;
              case 10:
              case 14:
                *total = COSTS_N_INSNS (5);
                break;
              case 11:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 12:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 6:
              case 13:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case E_PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (3);
                break;
              case 16:
              case 8:
                *total = COSTS_N_INSNS (5);
                break;
              case 23:
                *total = COSTS_N_INSNS (4);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case E_SImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 31:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case LSHIFTRT:
      /* TRUNCATE (LSHIFTRT (MULT ...)) is a mul-highpart pattern; the
         outer TRUNCATE was already seen by the caller.  */
      if (outer_code == TRUNCATE)
        {
          *total = avr_mul_highpart_cost (x, speed);
          return true;
        }

      switch (mode)
        {
        case E_QImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case E_HImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
                *total = COSTS_N_INSNS (5);
                break;
              case 3:
              case 12:
              case 13:
              case 14:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 5:
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case E_PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case E_SImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case COMPARE:
      /* Compare cost grows with operand size; non-zero constants need
         extra insns to load/compare.  */
      switch (GET_MODE (XEXP (x, 0)))
        {
        case E_QImode:
          *total = COSTS_N_INSNS (1);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), QImode, code,
                                            1, speed);
          break;

        case E_HImode:
          *total = COSTS_N_INSNS (2);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), HImode, code,
                                            1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (1);
          break;

        case E_PSImode:
          *total = COSTS_N_INSNS (3);
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (2);
          break;

        case E_SImode:
          *total = COSTS_N_INSNS (4);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), SImode, code,
                                            1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (3);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                                      code, 0, speed);
      return true;

    case TRUNCATE:
      if (LSHIFTRT == GET_CODE (XEXP (x, 0)))
        {
          *total = avr_mul_highpart_cost (XEXP (x, 0), speed);
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
11633
11634
11635 /* Implement `TARGET_RTX_COSTS'. */
11636
11637 static bool
11638 avr_rtx_costs (rtx x, machine_mode mode, int outer_code,
11639 int opno, int *total, bool speed)
11640 {
11641 bool done = avr_rtx_costs_1 (x, mode, outer_code, opno, total, speed);
11642
11643 if (avr_log.rtx_costs)
11644 {
11645 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
11646 done, speed ? "speed" : "size", *total, outer_code, x);
11647 }
11648
11649 return done;
11650 }
11651
11652
11653 /* Implement `TARGET_ADDRESS_COST'. */
11654
11655 static int
11656 avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
11657 addr_space_t as ATTRIBUTE_UNUSED,
11658 bool speed ATTRIBUTE_UNUSED)
11659 {
11660 int cost = 4;
11661
11662 if (GET_CODE (x) == PLUS
11663 && CONST_INT_P (XEXP (x, 1))
11664 && (REG_P (XEXP (x, 0))
11665 || SUBREG_P (XEXP (x, 0))))
11666 {
11667 if (INTVAL (XEXP (x, 1)) > MAX_LD_OFFSET(mode))
11668 cost = 18;
11669 }
11670 else if (CONSTANT_ADDRESS_P (x))
11671 {
11672 if (io_address_operand (x, QImode))
11673 cost = 2;
11674
11675 if (AVR_TINY
11676 && avr_address_tiny_absdata_p (x, QImode))
11677 cost = 2;
11678 }
11679
11680 if (avr_log.address_cost)
11681 avr_edump ("\n%?: %d = %r\n", cost, x);
11682
11683 return cost;
11684 }
11685
11686 /* Test for extra memory constraint 'Q'.
11687 It's a memory address based on Y or Z pointer with valid displacement. */
11688
11689 int
11690 extra_constraint_Q (rtx x)
11691 {
11692 int ok = 0;
11693 rtx plus = XEXP (x, 0);
11694
11695 if (GET_CODE (plus) == PLUS
11696 && REG_P (XEXP (plus, 0))
11697 && CONST_INT_P (XEXP (plus, 1))
11698 && (INTVAL (XEXP (plus, 1))
11699 <= MAX_LD_OFFSET (GET_MODE (x))))
11700 {
11701 rtx xx = XEXP (plus, 0);
11702 int regno = REGNO (xx);
11703
11704 ok = (/* allocate pseudos */
11705 regno >= FIRST_PSEUDO_REGISTER
11706 /* strictly check */
11707 || regno == REG_Z || regno == REG_Y
11708 /* XXX frame & arg pointer checks */
11709 || xx == frame_pointer_rtx
11710 || xx == arg_pointer_rtx);
11711
11712 if (avr_log.constraints)
11713 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
11714 ok, reload_completed, reload_in_progress, x);
11715 }
11716
11717 return ok;
11718 }
11719
11720 /* Convert condition code CONDITION to the valid AVR condition code. */
11721
11722 RTX_CODE
11723 avr_normalize_condition (RTX_CODE condition)
11724 {
11725 switch (condition)
11726 {
11727 case GT:
11728 return GE;
11729 case GTU:
11730 return GEU;
11731 case LE:
11732 return LT;
11733 case LEU:
11734 return LTU;
11735 default:
11736 gcc_unreachable ();
11737 }
11738 }
11739
11740 /* Helper function for `avr_reorg'. */
11741
11742 static rtx
11743 avr_compare_pattern (rtx_insn *insn)
11744 {
11745 rtx pattern = single_set (insn);
11746
11747 if (pattern
11748 && NONJUMP_INSN_P (insn)
11749 && SET_DEST (pattern) == cc0_rtx
11750 && GET_CODE (SET_SRC (pattern)) == COMPARE)
11751 {
11752 machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
11753 machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
11754
11755 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
11756 They must not be swapped, thus skip them. */
11757
11758 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
11759 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
11760 return pattern;
11761 }
11762
11763 return NULL_RTX;
11764 }
11765
/* Helper function for `avr_reorg'.  */

/* Expansion of switch/case decision trees leads to code like

       cc0 = compare (Reg, Num)
       if (cc0 == 0)
         goto L1

       cc0 = compare (Reg, Num)
       if (cc0 > 0)
         goto L2

   The second comparison is superfluous and can be deleted.
   The second jump condition can be transformed from a
   "difficult" one to a "simple" one because "cc0 > 0" and
   "cc0 >= 0" will have the same effect here.

   This function relies on the way switch/case is being expanded
   as binary decision tree.  For example code see PR 49903.

   Return TRUE if optimization performed.
   Return FALSE if nothing changed.

   INSN1 is a comparison, i.e. avr_compare_pattern != 0.

   We don't want to do this in text peephole because it is
   tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.

   RTL peephole won't do because peephole2 does not scan across
   basic blocks.  */

static bool
avr_reorg_remove_redundant_compare (rtx_insn *insn1)
{
  rtx comp1, ifelse1, xcond1;
  rtx_insn *branch1;
  rtx comp2, ifelse2, xcond2;
  rtx_insn *branch2, *insn2;
  enum rtx_code code;
  rtx_insn *jump;
  rtx target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both branches must be single sets to PC of an IF_THEN_ELSE, and
     both compares must be rtx-identical; otherwise give up.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
11937
11938
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
/* Optimize conditional jumps.  Walk all real insns; for each cc0
   comparison try to remove a redundant duplicate compare, and for
   "difficult" branches rewrite the compare and/or branch condition
   into a form AVR can handle cheaply.  */

static void
avr_reorg (void)
{
  rtx_insn *insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
        {
          /* Now we work under compare insn with difficult branch.  */

          /* NOTE(review): next_real_insn is assumed non-NULL here, i.e.
             a branch is presumed to follow every difficult compare —
             no NULL check before PATTERN (next).  */

          rtx_insn *next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* Both operands are registers: swap them and reverse
                 the branch condition accordingly.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* Register compared against a constant: bump the constant
                 by one and normalize GT/GTU/LE/LEU to GE/GEU/LT/LTU if
                 that keeps the comparison equivalent.  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);
              machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
12012
/* Returns register number for function return value: R24 is the anchor
   register of the return value; values of size > 1 byte extend down
   from R25 (see avr_libcall_value / avr_function_value).  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
12020
12021
12022 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
12023
12024 static bool
12025 avr_function_value_regno_p (const unsigned int regno)
12026 {
12027 return (regno == avr_ret_register ());
12028 }
12029
12030
12031 /* Implement `TARGET_LIBCALL_VALUE'. */
12032 /* Create an RTX representing the place where a
12033 library function returns a value of mode MODE. */
12034
12035 static rtx
12036 avr_libcall_value (machine_mode mode,
12037 const_rtx func ATTRIBUTE_UNUSED)
12038 {
12039 int offs = GET_MODE_SIZE (mode);
12040
12041 if (offs <= 4)
12042 offs = (offs + 1) & ~1;
12043
12044 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
12045 }
12046
12047
12048 /* Implement `TARGET_FUNCTION_VALUE'. */
12049 /* Create an RTX representing the place where a
12050 function returns a value of data type VALTYPE. */
12051
12052 static rtx
12053 avr_function_value (const_tree type,
12054 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
12055 bool outgoing ATTRIBUTE_UNUSED)
12056 {
12057 unsigned int offs;
12058
12059 if (TYPE_MODE (type) != BLKmode)
12060 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
12061
12062 offs = int_size_in_bytes (type);
12063 if (offs < 2)
12064 offs = 2;
12065 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
12066 offs = GET_MODE_SIZE (SImode);
12067 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
12068 offs = GET_MODE_SIZE (DImode);
12069
12070 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
12071 }
12072
12073 int
12074 test_hard_reg_class (enum reg_class rclass, rtx x)
12075 {
12076 int regno = true_regnum (x);
12077 if (regno < 0)
12078 return 0;
12079
12080 if (TEST_HARD_REG_CLASS (rclass, regno))
12081 return 1;
12082
12083 return 0;
12084 }
12085
12086
/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx_insn *insn)
{
  /* On devices with the skip bug (TARGET_SKIP_BUG) never report a
     2-word insn as skippable; also bail out for NULL or insns whose
     length attribute is not exactly 2 words.  */

  if (TARGET_SKIP_BUG
      || !insn
      || 2 != get_attr_length (insn))
    {
      return false;
    }

  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
    case CODE_FOR_movuqq_insn:
    case CODE_FOR_movqq_insn:
      {
        rtx set  = single_set (insn);
        rtx src  = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn: only constant-address
           loads/stores are the 2-word variants of these patterns.  */

        if (MEM_P (dest)
            && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
          {
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          }
        else if (REG_P (dest)
                 && MEM_P (src))
          {
            return CONSTANT_ADDRESS_P (XEXP (src, 0));
          }

        return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      return true;
    }
}
12134
12135
12136 int
12137 jump_over_one_insn_p (rtx_insn *insn, rtx dest)
12138 {
12139 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
12140 ? XEXP (dest, 0)
12141 : dest);
12142 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
12143 int dest_addr = INSN_ADDRESSES (uid);
12144 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
12145
12146 return (jump_offset == 1
12147 || (jump_offset == 2
12148 && avr_2word_insn_p (next_active_insn (insn))));
12149 }
12150
12151
12152 /* Implement TARGET_HARD_REGNO_MODE_OK. On the enhanced core, anything
12153 larger than 1 byte must start in even numbered register for "movw" to
12154 work (this way we don't have to check for odd registers everywhere). */
12155
12156 static bool
12157 avr_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
12158 {
12159 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
12160 Disallowing QI et al. in these regs might lead to code like
12161 (set (subreg:QI (reg:HI 28) n) ...)
12162 which will result in wrong code because reload does not
12163 handle SUBREGs of hard regsisters like this.
12164 This could be fixed in reload. However, it appears
12165 that fixing reload is not wanted by reload people. */
12166
12167 /* Any GENERAL_REGS register can hold 8-bit values. */
12168
12169 if (GET_MODE_SIZE (mode) == 1)
12170 return true;
12171
12172 /* FIXME: Ideally, the following test is not needed.
12173 However, it turned out that it can reduce the number
12174 of spill fails. AVR and it's poor endowment with
12175 address registers is extreme stress test for reload. */
12176
12177 if (GET_MODE_SIZE (mode) >= 4
12178 && regno >= REG_X)
12179 return false;
12180
12181 /* All modes larger than 8 bits should start in an even register. */
12182
12183 return !(regno & 1);
12184 }
12185
12186
12187 /* Implement TARGET_HARD_REGNO_CALL_PART_CLOBBERED. */
12188
12189 static bool
12190 avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
12191 {
12192 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
12193 represent valid hard registers like, e.g. HI:29. Returning TRUE
12194 for such registers can lead to performance degradation as mentioned
12195 in PR53595. Thus, report invalid hard registers as FALSE. */
12196
12197 if (!avr_hard_regno_mode_ok (regno, mode))
12198 return 0;
12199
12200 /* Return true if any of the following boundaries is crossed:
12201 17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30. */
12202
12203 return ((regno <= LAST_CALLEE_SAVED_REG
12204 && regno + GET_MODE_SIZE (mode) > 1 + LAST_CALLEE_SAVED_REG)
12205 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
12206 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
12207 }
12208
12209
12210 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
12211
12212 enum reg_class
12213 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
12214 addr_space_t as, RTX_CODE outer_code,
12215 RTX_CODE index_code ATTRIBUTE_UNUSED)
12216 {
12217 if (!ADDR_SPACE_GENERIC_P (as))
12218 {
12219 return POINTER_Z_REGS;
12220 }
12221
12222 if (!avr_strict_X)
12223 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
12224
12225 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
12226 }
12227
12228
12229 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
12230
12231 bool
12232 avr_regno_mode_code_ok_for_base_p (int regno,
12233 machine_mode mode ATTRIBUTE_UNUSED,
12234 addr_space_t as ATTRIBUTE_UNUSED,
12235 RTX_CODE outer_code,
12236 RTX_CODE index_code ATTRIBUTE_UNUSED)
12237 {
12238 bool ok = false;
12239
12240 if (!ADDR_SPACE_GENERIC_P (as))
12241 {
12242 if (regno < FIRST_PSEUDO_REGISTER
12243 && regno == REG_Z)
12244 {
12245 return true;
12246 }
12247
12248 if (reg_renumber)
12249 {
12250 regno = reg_renumber[regno];
12251
12252 if (regno == REG_Z)
12253 {
12254 return true;
12255 }
12256 }
12257
12258 return false;
12259 }
12260
12261 if (regno < FIRST_PSEUDO_REGISTER
12262 && (regno == REG_X
12263 || regno == REG_Y
12264 || regno == REG_Z
12265 || regno == ARG_POINTER_REGNUM))
12266 {
12267 ok = true;
12268 }
12269 else if (reg_renumber)
12270 {
12271 regno = reg_renumber[regno];
12272
12273 if (regno == REG_X
12274 || regno == REG_Y
12275 || regno == REG_Z
12276 || regno == ARG_POINTER_REGNUM)
12277 {
12278 ok = true;
12279 }
12280 }
12281
12282 if (avr_strict_X
12283 && PLUS == outer_code
12284 && regno == REG_X)
12285 {
12286 ok = false;
12287 }
12288
12289 return ok;
12290 }
12291
12292
/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
/* Set 32-bit register OP[0] to compile-time constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.
   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   The effect on cc0 is as follows:

   Load 0 to any register except ZERO_REG : NONE
   Load ld register with any value        : NONE
   Anything else:                         : CLOBBER  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  int clobber_val = 1234;              /* impossible byte value: "no byte cached" */
  bool cooked_clobber_p = false;       /* true if we saved/restore __tmp_reg__ */
  bool set_p = false;                  /* true once the T flag has been SET */
  machine_mode mode = GET_MODE (dest);
  int n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (int n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Symbolic constants: emit lo8/hi8/hlo8/hhi8 relocations,
         via the clobber reg when the destination is no LD_REGS.  */

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2", "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2", "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The clobber reg already holds this very byte value.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value  */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte.  */

      for (int j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC  */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
12521
12522
12523 /* Reload the constant OP[1] into the HI register OP[0].
12524 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12525 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
12526 need a clobber reg or have to cook one up.
12527
12528 PLEN == NULL: Output instructions.
12529 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
12530 by the insns printed.
12531
12532 Return "". */
12533
12534 const char*
12535 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
12536 {
12537 output_reload_in_const (op, clobber_reg, plen, false);
12538 return "";
12539 }
12540
12541
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          /* Account for the three clearing insns emitted above.  */

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
12602
12603 const char*
12604 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
12605 {
12606 output_reload_in_const (op, clobber_reg, len, false);
12607 return "";
12608 }
12609
12610
/* Worker function for `ASM_OUTPUT_ADDR_VEC'.  */
/* Emit jump tables out-of-line so that branches crossing the table
   get shorter offsets.  If we have JUMP + CALL, then put the tables
   in a dedicated non-.text section so that CALLs get better chance to
   be relaxed to RCALLs.

   We emit the tables by hand because `function_rodata_section' does not
   work as expected, cf. PR71151, and we do *NOT* want the table to be
   in .rodata, hence setting JUMP_TABLES_IN_TEXT_SECTION = 0 is of limited
   use; and setting it to 1 attributes table lengths to branch offsets...
   Moreover, final.c keeps switching section before each table entry
   which we find too fragile as to rely on section caching.  */

void
avr_output_addr_vec (rtx_insn *labl, rtx table)
{
  FILE *stream = asm_out_file;

  app_disable();

  // Switch to appropriate (sub)section.

  if (DECL_SECTION_NAME (current_function_decl)
      && symtab_node::get (current_function_decl)
      && ! symtab_node::get (current_function_decl)->implicit_section)
    {
      // .subsection will emit the code after the function and in the
      // section as chosen by the user.

      switch_to_section (current_function_section ());
      fprintf (stream, "\t.subsection\t1\n");
    }
  else
    {
      // Since PR63223 there is no restriction where to put the table; it
      // may even reside above 128 KiB.  We put it in a section as high as
      // possible and avoid progmem in order not to waste flash <= 64 KiB.

      const char *sec_name = ".jumptables.gcc";

      // The table belongs to its host function, therefore use fine
      // grained sections so that, if that function is removed by
      // --gc-sections, the child table(s) may also be removed.

      tree asm_name = DECL_ASSEMBLER_NAME (current_function_decl);
      const char *fname = IDENTIFIER_POINTER (asm_name);
      fname = targetm.strip_name_encoding (fname);
      sec_name = ACONCAT ((sec_name, ".", fname, NULL));

      fprintf (stream, "\t.section\t%s,\"%s\",@progbits\n", sec_name,
               AVR_HAVE_JMP_CALL ? "a" : "ax");
    }

  // Output the label that precedes the table.

  ASM_OUTPUT_ALIGN (stream, 1);
  targetm.asm_out.internal_label (stream, "L", CODE_LABEL_NUMBER (labl));

  // Output the table's content.

  int vlen = XVECLEN (table, 0);

  for (int idx = 0; idx < vlen; idx++)
    {
      int value = CODE_LABEL_NUMBER (XEXP (XVECEXP (table, 0, idx), 0));

      if (AVR_HAVE_JMP_CALL)
        fprintf (stream, "\t.word gs(.L%d)\n", value);
      else
        fprintf (stream, "\trjmp .L%d\n", value);
    }

  // Switch back to original section.  As we clobbered the section above,
  // forget the current section before switching back.

  in_section = NULL;
  switch_to_section (current_function_section ());
}
12689
12690
/* Implement `TARGET_CONDITIONAL_REGISTER_USAGE'.  */
/* For AVR_TINY cores, restrict the register file: R0-R15 do not exist,
   R16/R17 are fixed, R18-R21 are the only callee-saved registers, and
   a dedicated allocation order is installed.  */

static void
avr_conditional_register_usage (void)
{
  if (AVR_TINY)
    {
      const int tiny_reg_alloc_order[] = {
        24, 25,
        22, 23,
        30, 31,
        26, 27,
        28, 29,
        21, 20, 19, 18,
        16, 17,
        32, 33, 34, 35,
        15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
      };

      /* Set R0-R17 as fixed registers.  Reset R0-R17 in call used register list
         - R0-R15 are not available in Tiny Core devices
         - R16 and R17 are fixed registers.  */

      for (size_t i = 0; i <= 17; i++)
        {
          fixed_regs[i] = 1;
          call_used_regs[i] = 1;
        }

      /* Set R18 to R21 as callee saved registers
         - R18, R19, R20 and R21 are the callee saved registers in
           Tiny Core devices  */

      for (size_t i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
        {
          call_used_regs[i] = 0;
        }

      /* Update register allocation order for Tiny Core devices  */

      for (size_t i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
        {
          reg_alloc_order[i] = tiny_reg_alloc_order[i];
        }

      /* ADDW_REGS and NO_LD_REGS are empty on the reduced core.  */

      CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
      CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
    }
}
12740
12741 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
12742 /* Returns true if SCRATCH are safe to be allocated as a scratch
12743 registers (for a define_peephole2) in the current function. */
12744
12745 static bool
12746 avr_hard_regno_scratch_ok (unsigned int regno)
12747 {
12748 /* Interrupt functions can only use registers that have already been saved
12749 by the prologue, even if they would normally be call-clobbered. */
12750
12751 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
12752 && !df_regs_ever_live_p (regno))
12753 return false;
12754
12755 /* Don't allow hard registers that might be part of the frame pointer.
12756 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12757 and don't care for a frame pointer that spans more than one register. */
12758
12759 if ((!reload_completed || frame_pointer_needed)
12760 && (regno == REG_Y || regno == REG_Y + 1))
12761 {
12762 return false;
12763 }
12764
12765 return true;
12766 }
12767
12768
12769 /* Worker function for `HARD_REGNO_RENAME_OK'. */
12770 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
12771
12772 int
12773 avr_hard_regno_rename_ok (unsigned int old_reg,
12774 unsigned int new_reg)
12775 {
12776 /* Interrupt functions can only use registers that have already been
12777 saved by the prologue, even if they would normally be
12778 call-clobbered. */
12779
12780 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
12781 && !df_regs_ever_live_p (new_reg))
12782 return 0;
12783
12784 /* Don't allow hard registers that might be part of the frame pointer.
12785 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12786 and don't care for a frame pointer that spans more than one register. */
12787
12788 if ((!reload_completed || frame_pointer_needed)
12789 && (old_reg == REG_Y || old_reg == REG_Y + 1
12790 || new_reg == REG_Y || new_reg == REG_Y + 1))
12791 {
12792 return 0;
12793 }
12794
12795 return 1;
12796 }
12797
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on a single bit degenerate to a test of that bit.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  /* For a long jump (or when we can skip the branch) emit the skip
     instruction with the inverted condition around an (R)JMP.  */

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:
    case CONST:
    case SYMBOL_REF:

      if (low_io_address_operand (operands[1], QImode))
        {
          /* SBIS/SBIC reach the low I/O space directly.  */
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          /* Higher I/O addresses: read into __tmp_reg__ first and use
             the register-skip forms SBRS/SBRC.  */
          gcc_assert (io_address_operand (operands[1], QImode));
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
12868
12869 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
12870
12871 static void
12872 avr_asm_out_ctor (rtx symbol, int priority)
12873 {
12874 fputs ("\t.global __do_global_ctors\n", asm_out_file);
12875 default_ctor_section_asm_out_constructor (symbol, priority);
12876 }
12877
12878
12879 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
12880
12881 static void
12882 avr_asm_out_dtor (rtx symbol, int priority)
12883 {
12884 fputs ("\t.global __do_global_dtors\n", asm_out_file);
12885 default_dtor_section_asm_out_destructor (symbol, priority);
12886 }
12887
12888
12889 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
12890
12891 static bool
12892 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
12893 {
12894 HOST_WIDE_INT size = int_size_in_bytes (type);
12895 HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
12896
12897 /* In avr, there are 8 return registers. But, for Tiny Core
12898 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
12899 Return true if size is unknown or greater than the limit. */
12900
12901 if (size == -1 || size > ret_size_limit)
12902 {
12903 return true;
12904 }
12905 else
12906 {
12907 return false;
12908 }
12909 }
12910
12911
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0 */

static unsigned int
avr_case_values_threshold (void)
{
  /* The true break-even point between a jump table and an if-else tree
     depends on information not available here (whether 8-bit compares
     suffice, the case value range, register allocation, ...).
     Empirically, '7' is a good compromise.  */
  const unsigned int threshold = 7;

  return threshold;
}
12926
12927
12928 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
12929
12930 static scalar_int_mode
12931 avr_addr_space_address_mode (addr_space_t as)
12932 {
12933 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
12934 }
12935
12936
12937 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
12938
12939 static scalar_int_mode
12940 avr_addr_space_pointer_mode (addr_space_t as)
12941 {
12942 return avr_addr_space_address_mode (as);
12943 }
12944
12945
12946 /* Helper for following function. */
12947
12948 static bool
12949 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
12950 {
12951 gcc_assert (REG_P (reg));
12952
12953 if (strict)
12954 {
12955 return REGNO (reg) == REG_Z;
12956 }
12957
12958 /* Avoid combine to propagate hard regs. */
12959
12960 if (can_create_pseudo_p()
12961 && REGNO (reg) < REG_Z)
12962 {
12963 return false;
12964 }
12965
12966 return true;
12967 }
12968
12969
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.
   Return true iff X is a valid address of mode MODE in address space AS.
   STRICT requests strict checking (hard registers only).  */

static bool
avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      /* Plain RAM addresses: defer to the generic legitimizer.  */
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is read via LPM/ELPM which only allow Z (optionally with
         post-increment) as address register.  */

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      /* 24-bit __memx: before reload any pseudo will do ...  */
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      /* ... afterwards only (hi8-reg, Z) LO_SUM combinations.  */
      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional debug dump, controlled by -mlog=legitimate_address_p.  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
13050
13051
13052 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
13053
13054 static rtx
13055 avr_addr_space_legitimize_address (rtx x, rtx old_x,
13056 machine_mode mode, addr_space_t as)
13057 {
13058 if (ADDR_SPACE_GENERIC_P (as))
13059 return avr_legitimize_address (x, old_x, mode);
13060
13061 if (avr_log.legitimize_address)
13062 {
13063 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
13064 }
13065
13066 return old_x;
13067 }
13068
13069
/* Implement `TARGET_ADDR_SPACE_CONVERT'.
   Convert pointer SRC from the address space of TYPE_FROM to that of
   TYPE_TO, emitting the RTL needed for widening / narrowing casts.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST / PLUS wrappers to find a possible SYMBOL_REF.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF_P (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  Flash spaces use their
         segment number as high byte.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* Zero-extend when the high byte is 0, otherwise extend with the
         constant segment byte MSB.  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Conversions within 16-bit spaces need no code.  */

  return src;
}
13137
13138
13139 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
13140
13141 static bool
13142 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
13143 addr_space_t superset ATTRIBUTE_UNUSED)
13144 {
13145 /* Allow any kind of pointer mess. */
13146
13147 return true;
13148 }
13149
13150
/* Implement `TARGET_CONVERT_TO_TYPE'.
   Return an expression converting EXPR to TYPE after diagnosing dubious
   address-space changes, or NULL_TREE to let the front end do the
   conversion the default way.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnostic for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
     be located in the right memory, like in

     (const __flash*) PSTR ("text")

     Also try to distinguish between explicit casts requested by
     the user and implicit casts like

     void f (const __flash char*);

     void g (const char *p)
     {
     f ((const __flash*) p);
     }

     under the assumption that an explicit cast means that the user
     knows what he is doing, e.g. interface with PSTR or old style
     code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* Only warn when the target space changes and the new space is not
         __memx (which encloses all other spaces).  */

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  return NULL_TREE;
}
13209
13210
/* Implement `TARGET_LEGITIMATE_COMBINED_INSN'.  */

/* PR78883: Filter out paradoxical SUBREGs of MEM which are not handled
   properly by following passes.  As INSN_SCHEDULING is off and hence
   general_operand accepts such expressions, ditch them now.  */

static bool
avr_legitimate_combined_insn (rtx_insn *insn)
{
  subrtx_iterator::array_type array;

  /* Walk every sub-rtx of the insn pattern, rejecting the insn as soon
     as a paradoxical (wider-than-inner) SUBREG of a MEM is found.  */

  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    {
      const_rtx op = *iter;

      if (SUBREG_P (op)
          && MEM_P (SUBREG_REG (op))
          && (GET_MODE_SIZE (GET_MODE (op))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op)))))
        {
          return false;
        }
    }

  return true;
}
13237
13238
/* PR63633: The middle-end might come up with hard regs as input operands.

   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   For each element of OPMASK which is a hard register overlapping RMASK,
   replace OP[n] with a newly created pseudo register

   HREG == 0: Also emit a move insn that copies the contents of that
   hard register into the new pseudo.

   HREG != 0: Also set HREG[n] to the hard register.  */

static void
avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
{
  /* Walk OP[] while shifting OPMASK right, so bit 0 always describes
     the current *OP.  */

  for (; opmask; opmask >>= 1, op++)
    {
      rtx reg = *op;

      if (hreg)
        *hreg = NULL_RTX;

      if ((opmask & 1)
          && REG_P (reg)
          && REGNO (reg) < FIRST_PSEUDO_REGISTER
          // This hard-reg overlaps other prohibited hard regs?
          && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
        {
          *op = gen_reg_rtx (GET_MODE (reg));
          if (hreg == NULL)
            emit_move_insn (*op, reg);
          else
            *hreg = reg;
        }

      if (hreg)
        hreg++;
    }
}
13281
13282
13283 void
13284 avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
13285 {
13286 avr_fix_operands (op, NULL, opmask, rmask);
13287 }
13288
13289
13290 /* Helper for the function below: If bit n of MASK is set and
13291 HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
13292 Otherwise do nothing for that n. Return TRUE. */
13293
13294 static bool
13295 avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
13296 {
13297 for (; mask; mask >>= 1, op++, hreg++)
13298 if ((mask & 1)
13299 && *hreg)
13300 emit_move_insn (*hreg, *op);
13301
13302 return true;
13303 }
13304
13305
/* PR63633: The middle-end might come up with hard regs as output operands.

   GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   Emit the insn sequence as generated by GEN() with all elements of OPMASK
   which are hard registers overlapping RMASK replaced by newly created
   pseudo registers.  After the sequence has been emitted, emit insns that
   move the contents of respective pseudos to their hard regs.
   Return TRUE if the sequence was emitted, FALSE if re-entered.  */

bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
                       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[n];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */
  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  /* Replace prohibited hard-reg operands by pseudos; the original hard
     regs are remembered in HREG[] for the write-back below.  */

  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  /* Copy results from the pseudos back into the hard registers.  */

  return avr_move_fixed_operands (op, hreg, opmask);
}
13346
13347
/* Worker function for movmemhi expander.
   XOP[0] Destination as MEM:BLK
   XOP[1] Source " "
   XOP[2] # Bytes to copy

   Return TRUE if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Cannot write to flash; only constant, positive byte counts are
     handled here.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: must be the __memx space.  The loop
         counter lives in hard register 24 (see movmemx insns).  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      /* Split the 24-bit address into 16-bit low part and hi8 byte.  */

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_n_flash > 1)
        {
          /* Multi-segment flash: select the segment via RAMPZ.  */

          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          /* Single flash segment: plain LPM addressing suffices.  */

          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
     on its own.  Thus, we allocate the pointer registers by hand:
     Z = source address
     X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
     register(s) inside the loop leading to additional move instruction
     to/from stack which could clobber tmp_reg.  Thus, do *not* emit
     load and store as separate insns.  Instead, we perform the copy
     by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* __memx: the load instruction depends on the runtime value of the
         hi8 byte (passed in r23), so call the library-style movmemx
         pattern instead.  */

      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
13453
13454
/* Print assembler for movmem_qi, movmem_hi insns...
       $0 : Address Space
   $1, $2 : Loop register
       Z : Source address
       X : Destination address

   If PLEN is non-NULL, only compute the code length in words into *PLEN
   instead of emitting assembler.  Returns "".  */

const char*
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  machine_mode loop_mode = GET_MODE (op[1]);
  /* SBIW only works on the upper register pairs (ADDW_REGS).  */
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment; the instruction depends on where the
     source lives.  */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        /* No LPM with post-increment: bump Z by hand.  */
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
13540
13541
13542 \f
13543 /* Helper for __builtin_avr_delay_cycles */
13544
13545 static rtx
13546 avr_mem_clobber (void)
13547 {
13548 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
13549 MEM_VOLATILE_P (mem) = 1;
13550 return mem;
13551 }
13552
/* Expand __builtin_avr_delay_cycles: emit code that burns exactly
   OPERANDS0 (a 32-bit cycle count) CPU cycles.  Larger counts are
   covered by nested delay loops (delay_cycles_4 ... delay_cycles_1,
   with 32- down to 8-bit counters), the remainder by NOPs.  The
   per-iteration cycle costs (6, 5, 4, 3) and loop overheads are implied
   by the corresponding delay_cycles_* insn patterns.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit counter loop: 6 cycles/iteration + 9 cycles overhead.  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit counter loop: 5 cycles/iteration + 7 cycles overhead.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit counter loop: 4 cycles/iteration + 5 cycles overhead.  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit counter loop: 3 cycles/iteration.  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Mop up the last few cycles with 2-cycle and 1-cycle NOP insns.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT (2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT (1)));
      cycles--;
    }
}
13614
13615
13616 static void
13617 avr_expand_nops (rtx operands0)
13618 {
13619 unsigned HOST_WIDE_INT n_nops = UINTVAL (operands0) & GET_MODE_MASK (HImode);
13620
13621 while (n_nops--)
13622 {
13623 emit_insn (gen_nopv (const1_rtx));
13624 }
13625 }
13626
13627
/* Compute the image of x under f, i.e. perform x --> f(x).
   F encodes a map on { 0 ... 7 } as eight 4-bit nibbles: nibble X holds
   f(X).  Arguments outside the domain yield 0.  */

static int
avr_map (unsigned int f, int x)
{
  if (x >= 8)
    return 0;

  return (f >> (4 * x)) & 0xf;
}


/* Return some metrics of map A.  */

enum
{
  /* Number of fixed points in { 0 ... 7 } */
  MAP_FIXED_0_7,

  /* Size of preimage of non-fixed points in { 0 ... 7 } */
  MAP_NONFIXED_0_7,

  /* Mask representing the fixed points in { 0 ... 7 } */
  MAP_MASK_FIXED_0_7,

  /* Size of the preimage of { 0 ... 7 } */
  MAP_PREIMAGE_0_7,

  /* Mask that represents the preimage of { f } */
  MAP_MASK_PREIMAGE_F
};

/* Compute the metric selected by MODE (one of the enum values above)
   for map A.  */

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned metric = 0;

  for (unsigned i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      switch (mode)
        {
        case MAP_FIXED_0_7:
          metric += ai == i;
          break;

        case MAP_NONFIXED_0_7:
          metric += ai < 8 && ai != i;
          break;

        case MAP_MASK_FIXED_0_7:
          metric |= ((unsigned) (ai == i)) << i;
          break;

        case MAP_PREIMAGE_0_7:
          metric += ai < 8;
          break;

        case MAP_MASK_PREIMAGE_F:
          metric |= ((unsigned) (ai == 0xf)) << i;
          break;

        default:
          gcc_unreachable();
        }
    }

  return metric;
}
13682
13683
13684 /* Return true if IVAL has a 0xf in its hexadecimal representation
13685 and false, otherwise. Only nibbles 0..7 are taken into account.
13686 Used as constraint helper for C0f and Cxf. */
13687
13688 bool
13689 avr_has_nibble_0xf (rtx ival)
13690 {
13691 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
13692 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
13693 }
13694
13695
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example: Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

/* Descriptor for one candidate decomposition step G and, after
   avr_map_decompose has run, the resulting composition F o G^-1.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
13731
/* The candidate functions G considered by avr_map_decompose: identity,
   the 8 bit-rotations, and small left/right shifts.  Each entry's GINV
   nibble-map encodes the inverse permutation; nibble value 0xc marks a
   bit position with no preimage under G.  */

static const avr_map_op_t avr_map_op[] =
{
  { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
  { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
  { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
  { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
  { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
  { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
  { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
  { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
  { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
  { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
  { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
  { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
  { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
  { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
  { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
};
13750
13751
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.
   VAL_CONST_P says whether the value operand of the insertion is a
   compile-time constant.  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  /* Does F use bits of the target value (nibble 0xf) at all?  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  /* cost < 0 signals "no decomposition" until proven otherwise.  */
  f_ginv.cost = -1;

  /* Step 1: Computing F o G^-1 */

  for (int i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1) */

          if (x > 7)
            return f_ginv;
        }

      /* Build the composed map nibble by nibble, high nibble first.  */

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2: Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a: Compute cost of F o G^-1 */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b: Add cost of G */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
13824
13825
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
   is different to its source position.
   PLEN: see avr_asm_len (NULL emits asm, non-NULL counts length).  */

static void
avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
{
  /* T-flag contains this bit of the source, i.e. of XOP[1] */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.
     Grouping by source bit lets consecutive BLDs reuse one BST.  */

  for (int b = 0; b < 8; b++)
    for (int bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
13867
13868
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]: Result
   OP[1]: The mapping composed of nibbles.  If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...  ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* xop[] layout for avr_move_bits: result, source bits, target value,
     plus a scratch slot (xop[3]) used for bit numbers.  */

  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      if (fixp_p && n_fix - n_nofix > 3)
        {
          /* Preserve the fixed-point bits by the EOR/ANDI/EOR masking
             trick, then only the non-fixed bits need moving.  */

          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
13955
13956
/* IDs for all the AVR builtins.  Generated from builtins.def so the
   IDs enumerate the built-ins in definition order; AVR_BUILTIN_COUNT
   is the total number.  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };
13968
/* Per-builtin bookkeeping: the insn used to expand it, its arity, and
   the FUNCTION_DECL created at init time (GTY: roots for GC).  */

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;
  int n_args;
  tree fndecl;
};
13975
13976
/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID].  The fndecl fields start out NULL_TREE and are filled
   in by avr_init_builtins.  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
13989
13990
13991 /* Implement `TARGET_BUILTIN_DECL'. */
13992
13993 static tree
13994 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13995 {
13996 if (id < AVR_BUILTIN_COUNT)
13997 return avr_bdesc[id].fndecl;
13998
13999 return error_mark_node;
14000 }
14001
14002
14003 static void
14004 avr_init_builtin_int24 (void)
14005 {
14006 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
14007 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
14008
14009 lang_hooks.types.register_builtin_type (int24_type, "__int24");
14010 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
14011 }
14012
14013
14014 /* Implement `TARGET_INIT_BUILTINS' */
14015 /* Set up all builtin functions for this target. */
14016
static void
avr_init_builtins (void)
{
  /* Plain integer function types referenced by builtins.def.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* "const void" qualified with the 24-bit __memx address space, and a
     PSImode pointer to it, for built-ins reading through __memx.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* ITYP (T): integer type with the same precision and signedness
     as fixed-point type T.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

  /* The following macros each declare a pair of local function-type
     trees, one for the _Fract flavor (##r) and one for the _Accum
     flavor (##k) of the fixed-point type named by FX (h, n, l, ll
     and their u-prefixed unsigned variants).  */

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Shorthands for the fixed-point type nodes used by the macros above:
     h = short, n = plain, l = long, ll = long long; leading u = unsigned;
     trailing r = _Fract, trailing k = _Accum.  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register one decl per built-in listed in builtins.def: the name is
     "__builtin_avr_" #NAME run through avr_tolower into a stack buffer,
     and the decl is recorded in avr_bdesc[] so avr_builtin_decl and
     avr_expand_builtin can look it up by ID.  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
14194
14195
14196 /* Subroutine of avr_expand_builtin to expand vanilla builtins
14197 with non-void result and 1 ... 3 arguments. */
14198
/* Expand a vanilla built-in: ICODE is the insn code of the expander,
   EXP the CALL_EXPR with 1...3 arguments and a non-void result, and
   TARGET a hint for where to put the result (may be NULL_RTX or
   unsuitable).  Return the RTX holding the result, or NULL_RTX when
   the expander produced no pattern.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n_args = call_expr_nargs (exp);
  machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  /* TARGET is only a hint: fall back to a fresh pseudo when it is
     missing, has the wrong mode, or fails the output predicate.  */

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  for (int n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      machine_mode opmode = GET_MODE (op);
      machine_mode mode = insn_data[icode].operand[n + 1].mode;

      /* Narrow an SImode value (or a VOIDmode constant) to HImode
         when the insn wants a HImode input.  */

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      /* Copy the operand into a register when the predicate rejects
         it as-is.  */

      if (!insn_data[icode].operand[n + 1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  /* The generator may return NULL; report failure to the caller
     instead of emitting nothing silently.  */

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
14256
14257
14258 /* Implement `TARGET_EXPAND_BUILTIN'. */
14259 /* Expand an expression EXP that calls a built-in function,
14260 with result going to TARGET if that's convenient
14261 (and in mode MODE if that's convenient).
14262 SUBTARGET may be used as the target for computing one of EXP's operands.
14263 IGNORE is nonzero if the value is to be ignored. */
14264
14265 static rtx
14266 avr_expand_builtin (tree exp, rtx target,
14267 rtx subtarget ATTRIBUTE_UNUSED,
14268 machine_mode mode ATTRIBUTE_UNUSED,
14269 int ignore)
14270 {
14271 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
14272 const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
14273 unsigned int id = DECL_FUNCTION_CODE (fndecl);
14274 const struct avr_builtin_description *d = &avr_bdesc[id];
14275 tree arg0;
14276 rtx op0;
14277
14278 gcc_assert (id < AVR_BUILTIN_COUNT);
14279
14280 switch (id)
14281 {
14282 case AVR_BUILTIN_NOP:
14283 emit_insn (gen_nopv (GEN_INT (1)));
14284 return 0;
14285
14286 case AVR_BUILTIN_DELAY_CYCLES:
14287 {
14288 arg0 = CALL_EXPR_ARG (exp, 0);
14289 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
14290
14291 if (!CONST_INT_P (op0))
14292 error ("%s expects a compile time integer constant", bname);
14293 else
14294 avr_expand_delay_cycles (op0);
14295
14296 return NULL_RTX;
14297 }
14298
14299 case AVR_BUILTIN_NOPS:
14300 {
14301 arg0 = CALL_EXPR_ARG (exp, 0);
14302 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
14303
14304 if (!CONST_INT_P (op0))
14305 error ("%s expects a compile time integer constant", bname);
14306 else
14307 avr_expand_nops (op0);
14308
14309 return NULL_RTX;
14310 }
14311
14312 case AVR_BUILTIN_INSERT_BITS:
14313 {
14314 arg0 = CALL_EXPR_ARG (exp, 0);
14315 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
14316
14317 if (!CONST_INT_P (op0))
14318 {
14319 error ("%s expects a compile time long integer constant"
14320 " as first argument", bname);
14321 return target;
14322 }
14323
14324 break;
14325 }
14326
14327 case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
14328 case AVR_BUILTIN_ROUNDR: case AVR_BUILTIN_ROUNDUR:
14329 case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
14330 case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:
14331
14332 case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
14333 case AVR_BUILTIN_ROUNDK: case AVR_BUILTIN_ROUNDUK:
14334 case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
14335 case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:
14336
14337 /* Warn about odd rounding. Rounding points >= FBIT will have
14338 no effect. */
14339
14340 if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
14341 break;
14342
14343 int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));
14344
14345 if (rbit >= (int) GET_MODE_FBIT (mode))
14346 {
14347 warning (OPT_Wextra, "rounding to %d bits has no effect for "
14348 "fixed-point value with %d fractional bits",
14349 rbit, GET_MODE_FBIT (mode));
14350
14351 return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
14352 EXPAND_NORMAL);
14353 }
14354 else if (rbit <= - (int) GET_MODE_IBIT (mode))
14355 {
14356 warning (0, "rounding result will always be 0");
14357 return CONST0_RTX (mode);
14358 }
14359
14360 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
14361
14362 TR 18037 only specifies results for RP > 0. However, the
14363 remaining cases of -IBIT < RP <= 0 can easily be supported
14364 without any additional overhead. */
14365
14366 break; /* round */
14367 }
14368
14369 /* No fold found and no insn: Call support function from libgcc. */
14370
14371 if (d->icode == CODE_FOR_nothing
14372 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
14373 {
14374 return expand_call (exp, target, ignore);
14375 }
14376
14377 /* No special treatment needed: vanilla expand. */
14378
14379 gcc_assert (d->icode != CODE_FOR_nothing);
14380 gcc_assert (d->n_args == call_expr_nargs (exp));
14381
14382 if (d->n_args == 0)
14383 {
14384 emit_insn ((GEN_FCN (d->icode)) (target));
14385 return NULL_RTX;
14386 }
14387
14388 return avr_default_expand_builtin (d->icode, exp, target);
14389 }
14390
14391
14392 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
14393
14394 static tree
14395 avr_fold_absfx (tree tval)
14396 {
14397 if (FIXED_CST != TREE_CODE (tval))
14398 return NULL_TREE;
14399
14400 /* Our fixed-points have no padding: Use double_int payload directly. */
14401
14402 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
14403 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
14404 double_int ival = fval.data.sext (bits);
14405
14406 if (!ival.is_negative())
14407 return tval;
14408
14409 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
14410
14411 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
14412 ? double_int::max_value (bits, false)
14413 : -ival;
14414
14415 return build_fixed (TREE_TYPE (tval), fval);
14416 }
14417
14418
14419 /* Implement `TARGET_FOLD_BUILTIN'. */
14420
/* Try to fold a call to AVR built-in FNDECL with arguments ARG at
   compile time.  Return the folded tree or NULL_TREE when no fold
   applies.  Only runs when optimizing.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is a rotate-left by 4.  */
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      /* bitsfx / fxbits are pure reinterpretations between a fixed-point
         type and the integer type of the same precision.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = wide_int_to_tree (map_type, arg[0]);
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (size_t i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    /* Bit I of the result comes from bit MI of BITS:
                       set it via IOR or clear it via AND.  */
                    if (bits & (1 << mi)) mask_ior |= (1 << i);
                    else mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (BITS ^ VAL) & MASK ^ VAL selects BITS where MASK is set
               and VAL elsewhere.  */
            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decomposing map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Pick the cheapest decomposition over all candidate
           operations; negative cost marks an unusable candidate.  */

        for (size_t i = 0; i < ARRAY_SIZE (avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = wide_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
14611
14612 \f
14613
14614 /* Initialize the GCC target structure. */
14615
/* Assembler output.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function values and calling conventions.  */

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attributes and sections.  */

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef TARGET_ASM_FINAL_POSTSCAN_INSN
#define TARGET_ASM_FINAL_POSTSCAN_INSN avr_asm_final_postscan_insn

/* Costs and RTL passes.  */

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

/* Registers, frame and eliminations.  */

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK avr_hard_regno_mode_ok
#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_HARD_REGNO_CALL_PART_CLOBBERED
#define TARGET_HARD_REGNO_CALL_PART_CLOBBERED \
  avr_hard_regno_call_part_clobbered

#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Built-in functions (see the section of this file above).  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

/* Named address spaces (__flash, __memx, ...).  */

#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
  avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_ADDR_SPACE_DIAGNOSE_USAGE
#define TARGET_ADDR_SPACE_DIAGNOSE_USAGE avr_addr_space_diagnose_usage

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

/* Operand printing.  */

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

#undef TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
#define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
  avr_use_by_pieces_infrastructure_p

#undef TARGET_LEGITIMATE_COMBINED_INSN
#define TARGET_LEGITIMATE_COMBINED_INSN avr_legitimate_combined_insn

/* The one and only target vector for this back end.  */

struct gcc_target targetm = TARGET_INITIALIZER;
14793
14794 \f
14795 #include "gt-avr.h"