1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2016 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "c-family/c-common.h"
36 #include "conditions.h"
37 #include "insn-attr.h"
41 #include "stor-layout.h"
45 #include "langhooks.h"
50 #include "tree-pass.h"
51 #include "print-rtl.h"
53 /* This file should be included last. */
54 #include "target-def.h"
/* Maximal allowed offset for an address in the LD command.
   LDD displacements are limited to 0..63, so the largest usable offset
   for an object of MODE is 64 minus the object's size in bytes.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fixed: the body referenced lowercase `sym' instead of the SYM macro
   parameter, which only worked when the actual argument was a variable
   literally named `sym'.  Wrapped in do/while (0) so the two statements
   behave as one when used in if/else bodies.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)
/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Fixed: the body referenced lowercase `sym' instead of the SYM macro
   parameter, which only worked when the actual argument was a variable
   literally named `sym'.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                  \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)   \
   / SYMBOL_FLAG_MACH_DEP)
/* (AVR_TINY only): Symbol has attribute progmem.
   Lives above the 4-bit address-space field in SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_TINY_PM \
  (SYMBOL_FLAG_MACH_DEP << 7)

/* (AVR_TINY only): Symbol has attribute absdata.  */
#define AVR_SYMBOL_FLAG_TINY_ABSDATA \
  (SYMBOL_FLAG_MACH_DEP << 8)
/* Assembly template adding constant I to the 16-bit register pair
   REG2:REG1 on AVR_TINY (which lacks ADIW): add by subtracting the
   negated constant with SUBI/SBCI.  REG1/REG2/I are stringified.  */
#define TINY_ADIW(REG1, REG2, I) \
  "subi " #REG1 ",lo8(-(" #I "))" CR_TAB \
  "sbci " #REG2 ",hi8(-(" #I "))"

/* Assembly template subtracting constant I from the 16-bit register
   pair REG2:REG1 (SBIW replacement for AVR_TINY).  */
#define TINY_SBIW(REG1, REG2, I) \
  "subi " #REG1 ",lo8((" #I "))" CR_TAB \
  "sbci " #REG2 ",hi8((" #I "))"
/* Register numbers of the temporary and the zeroed register; AVR_TINY
   cores locate them differently from the classic cores.  */
#define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
#define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
104 /* Known address spaces. The order must be the same as in the respective
105 enum from avr.h (or designated initialized must be used). */
106 const avr_addrspace_t avr_addrspace
[ADDR_SPACE_COUNT
] =
108 { ADDR_SPACE_RAM
, 0, 2, "", 0, NULL
},
109 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0, ".progmem.data" },
110 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1, ".progmem1.data" },
111 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2, ".progmem2.data" },
112 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3, ".progmem3.data" },
113 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4, ".progmem4.data" },
114 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5, ".progmem5.data" },
115 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0, ".progmemx.data" },
119 /* Holding RAM addresses of some SFRs used by the compiler and that
120 are unique over all devices in an architecture like 'avr4'. */
124 /* SREG: The processor status */
127 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
133 /* RAMPZ: The high byte of 24-bit address used with ELPM */
136 /* SP: The stack pointer and its low and high byte */
141 static avr_addr_t avr_addr
;
144 /* Prototypes for local helper functions. */
146 static const char* out_movqi_r_mr (rtx_insn
*, rtx
[], int*);
147 static const char* out_movhi_r_mr (rtx_insn
*, rtx
[], int*);
148 static const char* out_movsi_r_mr (rtx_insn
*, rtx
[], int*);
149 static const char* out_movqi_mr_r (rtx_insn
*, rtx
[], int*);
150 static const char* out_movhi_mr_r (rtx_insn
*, rtx
[], int*);
151 static const char* out_movsi_mr_r (rtx_insn
*, rtx
[], int*);
153 static int get_sequence_length (rtx_insn
*insns
);
154 static int sequent_regs_live (void);
155 static const char *ptrreg_to_str (int);
156 static const char *cond_string (enum rtx_code
);
157 static int avr_num_arg_regs (machine_mode
, const_tree
);
158 static int avr_operand_rtx_cost (rtx
, machine_mode
, enum rtx_code
,
160 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
161 static struct machine_function
* avr_init_machine_status (void);
164 /* Prototypes for hook implementors if needed before their implementation. */
166 static bool avr_rtx_costs (rtx
, machine_mode
, int, int, int*, bool);
169 /* Allocate registers from r25 to r8 for parameters for function calls. */
170 #define FIRST_CUM_REG 26
172 /* Last call saved register */
173 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
175 /* Implicit target register of LPM instruction (R0) */
176 extern GTY(()) rtx lpm_reg_rtx
;
179 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
180 extern GTY(()) rtx lpm_addr_reg_rtx
;
181 rtx lpm_addr_reg_rtx
;
183 /* Temporary register RTX (reg:QI TMP_REGNO) */
184 extern GTY(()) rtx tmp_reg_rtx
;
187 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
188 extern GTY(()) rtx zero_reg_rtx
;
191 /* RTXs for all general purpose registers as QImode */
192 extern GTY(()) rtx all_regs_rtx
[32];
193 rtx all_regs_rtx
[32];
195 /* SREG, the processor status */
196 extern GTY(()) rtx sreg_rtx
;
199 /* RAMP* special function registers */
200 extern GTY(()) rtx rampd_rtx
;
201 extern GTY(()) rtx rampx_rtx
;
202 extern GTY(()) rtx rampy_rtx
;
203 extern GTY(()) rtx rampz_rtx
;
209 /* RTX containing the strings "" and "e", respectively */
210 static GTY(()) rtx xstring_empty
;
211 static GTY(()) rtx xstring_e
;
213 /* Current architecture. */
214 const avr_arch_t
*avr_arch
;
216 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
217 or to address space __flash* or __memx. Only used as singletons inside
218 avr_asm_select_section, but it must not be local there because of GTY. */
219 static GTY(()) section
*progmem_section
[ADDR_SPACE_COUNT
];
221 /* Condition for insns/expanders from avr-dimode.md. */
222 bool avr_have_dimode
= true;
224 /* To track if code will use .bss and/or .data. */
225 bool avr_need_clear_bss_p
= false;
226 bool avr_need_copy_data_p
= false;
229 /* Transform UP into lowercase and write the result to LO.
230 You must provide enough space for LO. Return LO. */
233 avr_tolower (char *lo
, const char *up
)
237 for (; *up
; up
++, lo
++)
246 /* Custom function to count number of set bits. */
249 avr_popcount (unsigned int val
)
263 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
264 Return true if the least significant N_BYTES bytes of XVAL all have a
265 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
266 of integers which contains an integer N iff bit N of POP_MASK is set. */
269 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
273 machine_mode mode
= GET_MODE (xval
);
275 if (VOIDmode
== mode
)
278 for (i
= 0; i
< n_bytes
; i
++)
280 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
281 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
283 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
291 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
292 the bit representation of X by "casting" it to CONST_INT. */
295 avr_to_int_mode (rtx x
)
297 machine_mode mode
= GET_MODE (x
);
299 return VOIDmode
== mode
301 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
306 static const pass_data avr_pass_data_recompute_notes
=
309 "", // name (will be patched)
310 OPTGROUP_NONE
, // optinfo_flags
312 0, // properties_required
313 0, // properties_provided
314 0, // properties_destroyed
315 0, // todo_flags_start
316 TODO_df_finish
| TODO_df_verify
// todo_flags_finish
320 class avr_pass_recompute_notes
: public rtl_opt_pass
323 avr_pass_recompute_notes (gcc::context
*ctxt
, const char *name
)
324 : rtl_opt_pass (avr_pass_data_recompute_notes
, ctxt
)
329 virtual unsigned int execute (function
*)
331 df_note_add_problem ();
336 }; // avr_pass_recompute_notes
338 static const pass_data avr_pass_data_casesi
=
341 "", // name (will be patched)
342 OPTGROUP_NONE
, // optinfo_flags
344 0, // properties_required
345 0, // properties_provided
346 0, // properties_destroyed
347 0, // todo_flags_start
348 0 // todo_flags_finish
352 class avr_pass_casesi
: public rtl_opt_pass
355 avr_pass_casesi (gcc::context
*ctxt
, const char *name
)
356 : rtl_opt_pass (avr_pass_data_casesi
, ctxt
)
361 void avr_rest_of_handle_casesi (function
*);
363 virtual bool gate (function
*) { return optimize
> 0; }
365 virtual unsigned int execute (function
*func
)
367 avr_rest_of_handle_casesi (func
);
371 }; // avr_pass_casesi
376 make_avr_pass_recompute_notes (gcc::context
*ctxt
)
378 return new avr_pass_recompute_notes (ctxt
, "avr-notes-free-cfg");
382 make_avr_pass_casesi (gcc::context
*ctxt
)
384 return new avr_pass_casesi (ctxt
, "avr-casesi");
388 /* Make one parallel insn with all the patterns from insns i[0]..i[5]. */
391 avr_parallel_insn_from_insns (rtx_insn
*i
[6])
393 rtvec vec
= gen_rtvec (6, PATTERN (i
[0]), PATTERN (i
[1]), PATTERN (i
[2]),
394 PATTERN (i
[3]), PATTERN (i
[4]), PATTERN (i
[5]));
396 emit (gen_rtx_PARALLEL (VOIDmode
, vec
));
397 rtx_insn
*insn
= get_insns();
404 /* Return true if we see an insn stream generated by casesi expander together
405 with an extension to SImode of the switch value.
407 If this is the case, fill in the insns from casesi to INSNS[1..5] and
408 the SImode extension to INSNS[0]. Moreover, extract the operands of
409 pattern casesi_<mode>_sequence forged from the sequence to recog_data. */
412 avr_is_casesi_sequence (basic_block bb
, rtx_insn
*insn
, rtx_insn
*insns
[6])
416 /* A first and quick test for a casesi sequences. As a side effect of
417 the test, harvest respective insns to INSNS[0..5]. */
419 if (!(JUMP_P (insns
[5] = insn
)
420 // casesi is the only insn that comes up with UNSPEC_INDEX_JMP,
421 // hence the following test ensures that we are actually dealing
422 // with code from casesi.
423 && (set_5
= single_set (insns
[5]))
424 && UNSPEC
== GET_CODE (SET_SRC (set_5
))
425 && UNSPEC_INDEX_JMP
== XINT (SET_SRC (set_5
), 1)
427 && (insns
[4] = prev_real_insn (insns
[5]))
428 && (insns
[3] = prev_real_insn (insns
[4]))
429 && (insns
[2] = prev_real_insn (insns
[3]))
430 && (insns
[1] = prev_real_insn (insns
[2]))
432 // Insn prior to casesi.
433 && (insns
[0] = prev_real_insn (insns
[1]))
434 && (set_0
= single_set (insns
[0]))
435 && extend_operator (SET_SRC (set_0
), SImode
)))
442 fprintf (dump_file
, ";; Sequence from casesi in "
443 "[bb %d]:\n\n", bb
->index
);
444 for (int i
= 0; i
< 6; i
++)
445 print_rtl_single (dump_file
, insns
[i
]);
448 /* We have to deal with quite some operands. Extracting them by hand
449 would be tedious, therefore wrap the insn patterns into a parallel,
450 run recog against it and then use insn extract to get the operands. */
452 rtx_insn
*xinsn
= avr_parallel_insn_from_insns (insns
);
454 INSN_CODE (xinsn
) = recog (PATTERN (xinsn
), xinsn
, NULL
/* num_clobbers */);
456 /* Failing to recognize means that someone changed the casesi expander or
457 that some passes prior to this one performed some unexpected changes.
458 Gracefully drop such situations instead of aborting. */
460 if (INSN_CODE (xinsn
) < 0)
463 fprintf (dump_file
, ";; Sequence not recognized, giving up.\n\n");
468 gcc_assert (CODE_FOR_casesi_qi_sequence
== INSN_CODE (xinsn
)
469 || CODE_FOR_casesi_hi_sequence
== INSN_CODE (xinsn
));
471 extract_insn (xinsn
);
473 // Assert on the anatomy of xinsn's operands we are going to work with.
475 gcc_assert (11 == recog_data
.n_operands
);
476 gcc_assert (4 == recog_data
.n_dups
);
480 fprintf (dump_file
, ";; Operands extracted:\n");
481 for (int i
= 0; i
< recog_data
.n_operands
; i
++)
482 avr_fdump (dump_file
, ";; $%d = %r\n", i
, recog_data
.operand
[i
]);
483 fprintf (dump_file
, "\n");
490 /* Perform some extra checks on operands of casesi_<mode>_sequence.
491 Not all operand dependencies can be described by means of predicates.
492 This function performs left over checks and should always return true.
493 Returning false means that someone changed the casesi expander but did
494 not adjust casesi_<mode>_sequence. */
497 avr_casei_sequence_check_operands (rtx
*xop
)
499 rtx sub_5
= NULL_RTX
;
501 if (AVR_HAVE_EIJMP_EICALL
502 // The last clobber op of the tablejump.
503 && xop
[8] == all_regs_rtx
[24])
505 // $6 is: (subreg:SI ($5) 0)
509 if (!AVR_HAVE_EIJMP_EICALL
510 // $6 is: (plus:HI (subreg:SI ($5) 0)
512 && PLUS
== GET_CODE (xop
[6])
513 && LABEL_REF
== GET_CODE (XEXP (xop
[6], 1))
514 && rtx_equal_p (xop
[3], XEXP (XEXP (xop
[6], 1), 0))
515 // The last clobber op of the tablejump.
516 && xop
[8] == const0_rtx
)
518 sub_5
= XEXP (xop
[6], 0);
523 && 0 == SUBREG_BYTE (sub_5
)
524 && rtx_equal_p (xop
[5], SUBREG_REG (sub_5
)))
528 fprintf (dump_file
, "\n;; Failed condition for casesi_<mode>_sequence\n\n");
534 /* INSNS[1..5] is a sequence as generated by casesi and INSNS[0] is an
535 extension of an 8-bit or 16-bit integer to SImode. XOP contains the
536 operands of INSNS as extracted by insn_extract from pattern
537 casesi_<mode>_sequence:
539 $0: SImode reg switch value as result of $9.
540 $1: Negative of smallest index in switch.
541 $2: Number of entries in switch.
543 $4: Label if out-of-bounds.
545 $6: 3-byte PC: subreg:HI ($5) + label_ref ($3)
546 2-byte PC: subreg:HI ($5)
547 $7: HI reg index into table (Z or pseudo)
548 $8: R24 or const0_rtx (to be clobbered)
549 $9: Extension to SImode of an 8-bit or 16-bit integer register $10.
550 $10: QImode or HImode register input of $9.
552 Try to optimize this sequence, i.e. use the original HImode / QImode
553 switch value instead of SImode. */
556 avr_optimize_casesi (rtx_insn
*insns
[6], rtx
*xop
)
558 // Original mode of the switch value; this is QImode or HImode.
559 machine_mode mode
= GET_MODE (xop
[10]);
561 // How the original switch value was extended to SImode; this is
562 // SIGN_EXTEND or ZERO_EXTEND.
563 enum rtx_code code
= GET_CODE (xop
[9]);
565 // Lower index, upper index (plus one) and range of case calues.
566 HOST_WIDE_INT low_idx
= -INTVAL (xop
[1]);
567 HOST_WIDE_INT num_idx
= INTVAL (xop
[2]);
568 HOST_WIDE_INT hig_idx
= low_idx
+ num_idx
;
570 // Maximum ranges of (un)signed QImode resp. HImode.
571 int imin
= QImode
== mode
? INT8_MIN
: INT16_MIN
;
572 int imax
= QImode
== mode
? INT8_MAX
: INT16_MAX
;
573 unsigned umax
= QImode
== mode
? UINT8_MAX
: UINT16_MAX
;
575 // Testing the case range and whether it fits into the range of the
576 // (un)signed mode. This test should actually always pass because it
577 // makes no sense to have case values outside the mode range. Notice
578 // that case labels which are unreachable because they are outside the
579 // mode of the switch value (e.g. "case -1" for uint8_t) have already
580 // been thrown away by the middle-end.
582 if (SIGN_EXTEND
== code
588 else if (ZERO_EXTEND
== code
590 && (unsigned) hig_idx
<= umax
)
597 fprintf (dump_file
, ";; Case ranges too big, giving up.\n\n");
601 // Do normalization of switch value $10 and out-of-bound check in its
602 // original mode instead of in SImode. Use a newly created pseudo.
603 // This will replace insns[1..2].
607 rtx_insn
*seq1
, *seq2
, *last1
, *last2
;
609 rtx reg
= copy_to_mode_reg (mode
, xop
[10]);
611 rtx (*gen_add
)(rtx
,rtx
,rtx
) = QImode
== mode
? gen_addqi3
: gen_addhi3
;
612 rtx (*gen_cmp
)(rtx
,rtx
) = QImode
== mode
? gen_cmpqi3
: gen_cmphi3
;
614 emit_insn (gen_add (reg
, reg
, gen_int_mode (-low_idx
, mode
)));
615 emit_insn (gen_cmp (reg
, gen_int_mode (num_idx
, mode
)));
618 last1
= get_last_insn();
621 emit_insn_before (seq1
, insns
[1]);
623 // After the out-of-bounds test and corresponding branch, use a
624 // 16-bit index. If QImode is used, extend it to HImode first.
625 // This will replace insns[4].
630 reg
= force_reg (HImode
, gen_rtx_fmt_e (code
, HImode
, reg
));
632 rtx pat_4
= AVR_3_BYTE_PC
633 ? gen_movhi (xop
[7], reg
)
634 : gen_addhi3 (xop
[7], reg
, gen_rtx_LABEL_REF (VOIDmode
, xop
[3]));
639 last2
= get_last_insn();
642 emit_insn_after (seq2
, insns
[4]);
646 fprintf (dump_file
, ";; New insns: ");
648 for (rtx_insn
*insn
= seq1
; ; insn
= NEXT_INSN (insn
))
650 fprintf (dump_file
, "%d, ", INSN_UID (insn
));
654 for (rtx_insn
*insn
= seq2
; ; insn
= NEXT_INSN (insn
))
656 fprintf (dump_file
, "%d%s", INSN_UID (insn
),
657 insn
== last2
? ".\n\n" : ", ");
662 fprintf (dump_file
, ";; Deleting insns: %d, %d, %d.\n\n",
663 INSN_UID (insns
[1]), INSN_UID (insns
[2]), INSN_UID (insns
[4]));
666 // Pseudodelete the SImode and subreg of SImode insns. We don't care
667 // about the extension insns[0]: Its result is now unused and other
668 // passes will clean it up.
670 SET_INSN_DELETED (insns
[1]);
671 SET_INSN_DELETED (insns
[2]);
672 SET_INSN_DELETED (insns
[4]);
677 avr_pass_casesi::avr_rest_of_handle_casesi (function
*func
)
681 FOR_EACH_BB_FN (bb
, func
)
683 rtx_insn
*insn
, *insns
[6];
685 FOR_BB_INSNS (bb
, insn
)
687 if (avr_is_casesi_sequence (bb
, insn
, insns
))
689 avr_optimize_casesi (insns
, recog_data
.operand
);
696 /* Set `avr_arch' as specified by `-mmcu='.
697 Return true on success. */
700 avr_set_core_architecture (void)
702 /* Search for mcu core architecture. */
705 avr_mmcu
= AVR_MMCU_DEFAULT
;
707 avr_arch
= &avr_arch_types
[0];
709 for (const avr_mcu_t
*mcu
= avr_mcu_types
; ; mcu
++)
711 if (NULL
== mcu
->name
)
713 /* Reached the end of `avr_mcu_types'. This should actually never
714 happen as options are provided by device-specs. It could be a
715 typo in a device-specs or calling the compiler proper directly
716 with -mmcu=<device>. */
718 error ("unknown core architecture %qs specified with %qs",
720 avr_inform_core_architectures ();
723 else if (0 == strcmp (mcu
->name
, avr_mmcu
)
724 // Is this a proper architecture ?
725 && NULL
== mcu
->macro
)
727 avr_arch
= &avr_arch_types
[mcu
->arch_id
];
729 avr_n_flash
= mcu
->n_flash
;
739 /* Implement `TARGET_OPTION_OVERRIDE'. */
742 avr_option_override (void)
744 /* Disable -fdelete-null-pointer-checks option for AVR target.
745 This option compiler assumes that dereferencing of a null pointer
746 would halt the program. For AVR this assumption is not true and
747 programs can safely dereference null pointers. Changes made by this
748 option may not work properly for AVR. So disable this option. */
750 flag_delete_null_pointer_checks
= 0;
752 /* caller-save.c looks for call-clobbered hard registers that are assigned
753 to pseudos that cross calls and tries so save-restore them around calls
754 in order to reduce the number of stack slots needed.
756 This might lead to situations where reload is no more able to cope
757 with the challenge of AVR's very few address registers and fails to
758 perform the requested spills. */
761 flag_caller_saves
= 0;
763 /* Allow optimizer to introduce store data races. This used to be the
764 default - it was changed because bigger targets did not see any
765 performance decrease. For the AVR though, disallowing data races
766 introduces additional code in LIM and increases reg pressure. */
768 maybe_set_param_value (PARAM_ALLOW_STORE_DATA_RACES
, 1,
769 global_options
.x_param_values
,
770 global_options_set
.x_param_values
);
772 /* Unwind tables currently require a frame pointer for correctness,
773 see toplev.c:process_options(). */
775 if ((flag_unwind_tables
776 || flag_non_call_exceptions
777 || flag_asynchronous_unwind_tables
)
778 && !ACCUMULATE_OUTGOING_ARGS
)
780 flag_omit_frame_pointer
= 0;
784 warning (OPT_fpic
, "-fpic is not supported");
786 warning (OPT_fPIC
, "-fPIC is not supported");
788 warning (OPT_fpie
, "-fpie is not supported");
790 warning (OPT_fPIE
, "-fPIE is not supported");
792 if (!avr_set_core_architecture())
795 /* RAM addresses of some SFRs common to all devices in respective arch. */
797 /* SREG: Status Register containing flags like I (global IRQ) */
798 avr_addr
.sreg
= 0x3F + avr_arch
->sfr_offset
;
800 /* RAMPZ: Address' high part when loading via ELPM */
801 avr_addr
.rampz
= 0x3B + avr_arch
->sfr_offset
;
803 avr_addr
.rampy
= 0x3A + avr_arch
->sfr_offset
;
804 avr_addr
.rampx
= 0x39 + avr_arch
->sfr_offset
;
805 avr_addr
.rampd
= 0x38 + avr_arch
->sfr_offset
;
806 avr_addr
.ccp
= (AVR_TINY
? 0x3C : 0x34) + avr_arch
->sfr_offset
;
808 /* SP: Stack Pointer (SP_H:SP_L) */
809 avr_addr
.sp_l
= 0x3D + avr_arch
->sfr_offset
;
810 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
812 init_machine_status
= avr_init_machine_status
;
814 avr_log_set_avr_log();
817 /* Function to set up the backend function structure. */
819 static struct machine_function
*
820 avr_init_machine_status (void)
822 return ggc_cleared_alloc
<machine_function
> ();
826 /* Implement `INIT_EXPANDERS'. */
827 /* The function works like a singleton. */
830 avr_init_expanders (void)
834 for (regno
= 0; regno
< 32; regno
++)
835 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
837 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
838 tmp_reg_rtx
= all_regs_rtx
[AVR_TMP_REGNO
];
839 zero_reg_rtx
= all_regs_rtx
[AVR_ZERO_REGNO
];
841 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
843 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
844 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
845 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
846 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
847 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
849 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
850 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
852 /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
855 avr_have_dimode
= false;
859 /* Implement `REGNO_REG_CLASS'. */
860 /* Return register class for register R. */
863 avr_regno_reg_class (int r
)
865 static const enum reg_class reg_class_tab
[] =
869 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
870 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
871 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
872 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
874 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
875 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
877 ADDW_REGS
, ADDW_REGS
,
879 POINTER_X_REGS
, POINTER_X_REGS
,
881 POINTER_Y_REGS
, POINTER_Y_REGS
,
883 POINTER_Z_REGS
, POINTER_Z_REGS
,
889 return reg_class_tab
[r
];
895 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
898 avr_scalar_mode_supported_p (machine_mode mode
)
900 if (ALL_FIXED_POINT_MODE_P (mode
))
906 return default_scalar_mode_supported_p (mode
);
910 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
913 avr_decl_flash_p (tree decl
)
915 if (TREE_CODE (decl
) != VAR_DECL
916 || TREE_TYPE (decl
) == error_mark_node
)
921 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
925 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
926 address space and FALSE, otherwise. */
929 avr_decl_memx_p (tree decl
)
931 if (TREE_CODE (decl
) != VAR_DECL
932 || TREE_TYPE (decl
) == error_mark_node
)
937 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
941 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
944 avr_mem_flash_p (rtx x
)
947 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
951 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
952 address space and FALSE, otherwise. */
955 avr_mem_memx_p (rtx x
)
958 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
962 /* A helper for the subsequent function attribute used to dig for
963 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
966 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
968 if (FUNCTION_DECL
== TREE_CODE (func
))
970 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
975 func
= TREE_TYPE (func
);
978 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
979 || TREE_CODE (func
) == METHOD_TYPE
);
981 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
984 /* Return nonzero if FUNC is a naked function. */
987 avr_naked_function_p (tree func
)
989 return avr_lookup_function_attribute1 (func
, "naked");
992 /* Return nonzero if FUNC is an interrupt function as specified
993 by the "interrupt" attribute. */
996 avr_interrupt_function_p (tree func
)
998 return avr_lookup_function_attribute1 (func
, "interrupt");
1001 /* Return nonzero if FUNC is a signal function as specified
1002 by the "signal" attribute. */
1005 avr_signal_function_p (tree func
)
1007 return avr_lookup_function_attribute1 (func
, "signal");
1010 /* Return nonzero if FUNC is an OS_task function. */
1013 avr_OS_task_function_p (tree func
)
1015 return avr_lookup_function_attribute1 (func
, "OS_task");
1018 /* Return nonzero if FUNC is an OS_main function. */
1021 avr_OS_main_function_p (tree func
)
1023 return avr_lookup_function_attribute1 (func
, "OS_main");
1027 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
1028 /* Sanity cheching for above function attributes. */
1031 avr_set_current_function (tree decl
)
1036 if (decl
== NULL_TREE
1037 || current_function_decl
== NULL_TREE
1038 || current_function_decl
== error_mark_node
1040 || cfun
->machine
->attributes_checked_p
)
1043 loc
= DECL_SOURCE_LOCATION (decl
);
1045 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
1046 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
1047 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
1048 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
1049 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
1051 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
1053 /* Too much attributes make no sense as they request conflicting features. */
1055 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
1056 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
1057 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
1058 " exclusive", "OS_task", "OS_main", isr
);
1060 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
1062 if (cfun
->machine
->is_naked
1063 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1064 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
1065 " no effect on %qs function", "OS_task", "OS_main", "naked");
1067 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1069 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
1070 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
1073 name
= DECL_ASSEMBLER_NAME_SET_P (decl
)
1074 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))
1075 : IDENTIFIER_POINTER (DECL_NAME (decl
));
1077 /* Skip a leading '*' that might still prefix the assembler name,
1078 e.g. in non-LTO runs. */
1080 name
= default_strip_name_encoding (name
);
1082 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
1083 using this when it switched from SIGNAL and INTERRUPT to ISR. */
1085 if (cfun
->machine
->is_interrupt
)
1086 cfun
->machine
->is_signal
= 0;
1088 /* Interrupt handlers must be void __vector (void) functions. */
1090 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
1091 error_at (loc
, "%qs function cannot have arguments", isr
);
1093 if (TREE_CODE (ret
) != VOID_TYPE
)
1094 error_at (loc
, "%qs function cannot return a value", isr
);
1096 /* If the function has the 'signal' or 'interrupt' attribute, ensure
1097 that the name of the function is "__vector_NN" so as to catch
1098 when the user misspells the vector name. */
1100 if (!STR_PREFIX_P (name
, "__vector"))
1101 warning_at (loc
, OPT_Wmisspelled_isr
, "%qs appears to be a misspelled "
1102 "%s handler, missing __vector prefix", name
, isr
);
1105 /* Don't print the above diagnostics more than once. */
1107 cfun
->machine
->attributes_checked_p
= 1;
1111 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
1114 avr_accumulate_outgoing_args (void)
1117 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
1119 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
1120 what offset is correct. In some cases it is relative to
1121 virtual_outgoing_args_rtx and in others it is relative to
1122 virtual_stack_vars_rtx. For example code see
1123 gcc.c-torture/execute/built-in-setjmp.c
1124 gcc.c-torture/execute/builtins/sprintf-chk.c */
1126 return (TARGET_ACCUMULATE_OUTGOING_ARGS
1127 && !(cfun
->calls_setjmp
1128 || cfun
->has_nonlocal_label
));
1132 /* Report contribution of accumulated outgoing arguments to stack size. */
1135 avr_outgoing_args_size (void)
1137 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
1141 /* Implement `STARTING_FRAME_OFFSET'. */
1142 /* This is the offset from the frame pointer register to the first stack slot
1143 that contains a variable living in the frame. */
1146 avr_starting_frame_offset (void)
1148 return 1 + avr_outgoing_args_size ();
1152 /* Return the number of hard registers to push/pop in the prologue/epilogue
1153 of the current function, and optionally store these registers in SET. */
1156 avr_regs_to_save (HARD_REG_SET
*set
)
1159 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1162 CLEAR_HARD_REG_SET (*set
);
1165 /* No need to save any registers if the function never returns or
1166 has the "OS_task" or "OS_main" attribute. */
1168 if (TREE_THIS_VOLATILE (current_function_decl
)
1169 || cfun
->machine
->is_OS_task
1170 || cfun
->machine
->is_OS_main
)
1173 for (reg
= 0; reg
< 32; reg
++)
1175 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
1176 any global register variables. */
1178 if (fixed_regs
[reg
])
1181 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
1182 || (df_regs_ever_live_p (reg
)
1183 && (int_or_sig_p
|| !call_used_regs
[reg
])
1184 /* Don't record frame pointer registers here. They are treated
1185 indivitually in prologue. */
1186 && !(frame_pointer_needed
1187 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
1190 SET_HARD_REG_BIT (*set
, reg
);
1198 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
1201 avr_allocate_stack_slots_for_args (void)
1203 return !cfun
->machine
->is_naked
;
1207 /* Return true if register FROM can be eliminated via register TO. */
1210 avr_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1212 return ((frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
1213 || !frame_pointer_needed
);
1217 /* Implement `TARGET_WARN_FUNC_RETURN'. */
1220 avr_warn_func_return (tree decl
)
1222 /* Naked functions are implemented entirely in assembly, including the
1223 return sequence, so suppress warnings about this. */
1225 return !avr_naked_function_p (decl
);
1228 /* Compute offset between arg_pointer and frame_pointer. */
1231 avr_initial_elimination_offset (int from
, int to
)
1233 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1237 int offset
= frame_pointer_needed
? 2 : 0;
1238 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
1240 offset
+= avr_regs_to_save (NULL
);
1241 return (get_frame_size () + avr_outgoing_args_size()
1242 + avr_pc_size
+ 1 + offset
);
1247 /* Helper for the function below. */
1250 avr_adjust_type_node (tree
*node
, machine_mode mode
, int sat_p
)
1252 *node
= make_node (FIXED_POINT_TYPE
);
1253 TYPE_SATURATING (*node
) = sat_p
;
1254 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
1255 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
1256 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
1257 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
1258 SET_TYPE_ALIGN (*node
, 8);
1259 SET_TYPE_MODE (*node
, mode
);
1261 layout_type (*node
);
1265 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
1268 avr_build_builtin_va_list (void)
1270 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
1271 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
1272 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
1273 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
1274 to the long long accum modes instead of the desired [U]TAmode.
1276 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
1277 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
1278 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
1279 libgcc to detect IBIT and FBIT. */
1281 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
1282 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
1283 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
1284 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
1286 unsigned_long_long_accum_type_node
= uta_type_node
;
1287 long_long_accum_type_node
= ta_type_node
;
1288 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
1289 sat_long_long_accum_type_node
= sat_ta_type_node
;
1291 /* Dispatch to the default handler. */
1293 return std_build_builtin_va_list ();
1297 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
1298 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
1299 frame pointer by +STARTING_FRAME_OFFSET.
1300 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
1301 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* NOTE(review): extraction-corrupted span -- the trailing lines
   (presumably `return xval;' and closing brace) are missing; restore from
   upstream gcc/config/avr/avr.c before editing.  */
1304 avr_builtin_setjmp_frame_value (void)
1306 rtx xval
= gen_reg_rtx (Pmode
);
/* xval = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET, via subhi3.  */
1307 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
1308 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
1313 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
1314 This is return address of function. */
/* NOTE(review): extraction-corrupted span -- declarations, the COUNT
   check and the 2/3-byte-PC branching are missing original lines.
   Restore from upstream gcc/config/avr/avr.c before editing.  */
1317 avr_return_addr_rtx (int count
, rtx tem
)
1321 /* Can only return this function's return address. Others not supported. */
/* Offset +2 branch: presumably the 3-byte-PC case -- TODO confirm.  */
1327 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
1328 warning (0, "%<builtin_return_address%> contains only 2 bytes"
1332 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
/* Build MEM at frame pointer TEM plus the stack-usage symbol.  */
1334 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
1335 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
/* Return address is stored big-endian on the stack; byte-swap it.  */
1336 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
1340 /* Return 1 if the function epilogue is just a single "ret". */
1343 avr_simple_epilogue (void)
1345 return (! frame_pointer_needed
1346 && get_frame_size () == 0
1347 && avr_outgoing_args_size() == 0
1348 && avr_regs_to_save (NULL
) == 0
1349 && ! cfun
->machine
->is_interrupt
1350 && ! cfun
->machine
->is_signal
1351 && ! cfun
->machine
->is_naked
1352 && ! TREE_THIS_VOLATILE (current_function_decl
));
1355 /* This function checks sequence of live registers. */
/* NOTE(review): extraction-corrupted span -- the declarations of the
   sequence counters and most branch bodies are missing original lines.
   Restore from upstream gcc/config/avr/avr.c before editing.  */
1358 sequent_regs_live (void)
/* Scan the callee-saved registers for a contiguous live run.  */
1364 for (reg
= 0; reg
<= LAST_CALLEE_SAVED_REG
; ++reg
)
1366 if (fixed_regs
[reg
])
1368 /* Don't recognize sequences that contain global register
1377 if (!call_used_regs
[reg
])
1379 if (df_regs_ever_live_p (reg
))
/* Without a frame pointer, Y (r28/r29) participates in the sequence
   only if actually live -- TODO confirm against upstream.  */
1389 if (!frame_pointer_needed
)
1391 if (df_regs_ever_live_p (REG_Y
))
1399 if (df_regs_ever_live_p (REG_Y
+1))
/* Only an unbroken run counts; otherwise report 0.  */
1412 return (cur_seq
== live_seq
) ? live_seq
: 0;
1415 /* Obtain the length sequence of insns. */
/* NOTE(review): extraction-corrupted span -- local declarations and the
   final return (presumably `return length;') are missing original lines.
   Restore from upstream gcc/config/avr/avr.c before editing.  */
1418 get_sequence_length (rtx_insn
*insns
)
/* Sum the `length' attribute over every insn in the sequence.  */
1423 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
1424 length
+= get_attr_length (insn
);
1430 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
1433 avr_incoming_return_addr_rtx (void)
1435 /* The return address is at the top of the stack. Note that the push
1436 was via post-decrement, which means the actual address is off by one. */
1437 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
1440 /* Helper for expand_prologue. Emit a push of a byte register. */
1443 emit_push_byte (unsigned regno
, bool frame_related_p
)
1448 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
1449 mem
= gen_frame_mem (QImode
, mem
);
1450 reg
= gen_rtx_REG (QImode
, regno
);
1452 insn
= emit_insn (gen_rtx_SET (mem
, reg
));
1453 if (frame_related_p
)
1454 RTX_FRAME_RELATED_P (insn
) = 1;
1456 cfun
->machine
->stack_usage
++;
1460 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
1461 SFR is a MEM representing the memory location of the SFR.
1462 If CLR_P then clear the SFR after the push using zero_reg. */
/* NOTE(review): extraction-corrupted span -- the `if (clr_p)' guard
   around the clearing store appears to be among the missing original
   lines.  Restore from upstream gcc/config/avr/avr.c before editing.  */
1465 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
1469 gcc_assert (MEM_P (sfr
));
1471 /* IN __tmp_reg__, IO(SFR) */
1472 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
1473 if (frame_related_p
)
1474 RTX_FRAME_RELATED_P (insn
) = 1;
1476 /* PUSH __tmp_reg__ */
1477 emit_push_byte (AVR_TMP_REGNO
, frame_related_p
);
1481 /* OUT IO(SFR), __zero_reg__ */
1482 insn
= emit_move_insn (sfr
, const0_rtx
);
1483 if (frame_related_p
)
1484 RTX_FRAME_RELATED_P (insn
) = 1;
/* NOTE(review): extraction-corrupted span -- this is
   avr_prologue_setup_frame; many original lines (declarations, braces,
   parts of conditions) are missing and statements are split across
   lines.  Do not edit in place; restore from upstream
   gcc/config/avr/avr.c.  It emits the frame setup, either via the
   __prologue_saves__ libcall (minimize path) or by explicit pushes and
   SP/FP adjustment, picking the shorter of two adjustment sequences.  */
1489 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
1492 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1493 int live_seq
= sequent_regs_live ();
1495 HOST_WIDE_INT size_max
1496 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1498 bool minimize
= (TARGET_CALL_PROLOGUES
1502 && !cfun
->machine
->is_OS_task
1503 && !cfun
->machine
->is_OS_main
1507 && (frame_pointer_needed
1508 || avr_outgoing_args_size() > 8
1509 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1513 int first_reg
, reg
, offset
;
1515 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1516 gen_int_mode (size
, HImode
));
1518 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1519 gen_int_mode (live_seq
+size
, HImode
));
1520 insn
= emit_insn (pattern
);
1521 RTX_FRAME_RELATED_P (insn
) = 1;
1523 /* Describe the effect of the unspec_volatile call to prologue_saves.
1524 Note that this formulation assumes that add_reg_note pushes the
1525 notes to the front. Thus we build them in the reverse order of
1526 how we want dwarf2out to process them. */
1528 /* The function does always set frame_pointer_rtx, but whether that
1529 is going to be permanent in the function is frame_pointer_needed. */
1531 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1532 gen_rtx_SET ((frame_pointer_needed
1534 : stack_pointer_rtx
),
1535 plus_constant (Pmode
, stack_pointer_rtx
,
1536 -(size
+ live_seq
))));
1538 /* Note that live_seq always contains r28+r29, but the other
1539 registers to be saved are all below 18. */
1541 first_reg
= (LAST_CALLEE_SAVED_REG
+ 1) - (live_seq
- 2);
1543 for (reg
= 29, offset
= -live_seq
+ 1;
1545 reg
= (reg
== 28 ? LAST_CALLEE_SAVED_REG
: reg
- 1), ++offset
)
1549 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1551 r
= gen_rtx_REG (QImode
, reg
);
1552 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (m
, r
));
1555 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1557 else /* !minimize */
1561 for (reg
= 0; reg
< 32; ++reg
)
1562 if (TEST_HARD_REG_BIT (set
, reg
))
1563 emit_push_byte (reg
, true);
1565 if (frame_pointer_needed
1566 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1568 /* Push frame pointer. Always be consistent about the
1569 ordering of pushes -- epilogue_restores expects the
1570 register pair to be pushed low byte first. */
1572 emit_push_byte (REG_Y
, true);
1573 emit_push_byte (REG_Y
+ 1, true);
1576 if (frame_pointer_needed
1579 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1580 RTX_FRAME_RELATED_P (insn
) = 1;
1585 /* Creating a frame can be done by direct manipulation of the
1586 stack or via the frame pointer. These two methods are:
1593 the optimum method depends on function type, stack and
1594 frame size. To avoid a complex logic, both methods are
1595 tested and shortest is selected.
1597 There is also the case where SIZE != 0 and no frame pointer is
1598 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1599 In that case, insn (*) is not needed in that case.
1600 We use the X register as scratch. This is safe because in X
1602 In an interrupt routine, the case of SIZE != 0 together with
1603 !frame_pointer_needed can only occur if the function is not a
1604 leaf function and thus X has already been saved. */
1607 HOST_WIDE_INT size_cfa
= size
, neg_size
;
1608 rtx_insn
*fp_plus_insns
;
1611 gcc_assert (frame_pointer_needed
1615 fp
= my_fp
= (frame_pointer_needed
1617 : gen_rtx_REG (Pmode
, REG_X
));
1619 if (AVR_HAVE_8BIT_SP
)
1621 /* The high byte (r29) does not change:
1622 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1624 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1627 /* Cut down size and avoid size = 0 so that we don't run
1628 into ICE like PR52488 in the remainder. */
1630 if (size
> size_max
)
1632 /* Don't error so that insane code from newlib still compiles
1633 and does not break building newlib. As PR51345 is implemented
1634 now, there are multilib variants with -msp8.
1636 If user wants sanity checks he can use -Wstack-usage=
1639 For CFA we emit the original, non-saturated size so that
1640 the generic machinery is aware of the real stack usage and
1641 will print the above diagnostic as expected. */
1646 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1647 neg_size
= trunc_int_for_mode (-size
, GET_MODE (my_fp
));
1649 /************ Method 1: Adjust frame pointer ************/
1653 /* Normally, the dwarf2out frame-related-expr interpreter does
1654 not expect to have the CFA change once the frame pointer is
1655 set up. Thus, we avoid marking the move insn below and
1656 instead indicate that the entire operation is complete after
1657 the frame pointer subtraction is done. */
1659 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1660 if (frame_pointer_needed
)
1662 RTX_FRAME_RELATED_P (insn
) = 1;
1663 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1664 gen_rtx_SET (fp
, stack_pointer_rtx
));
1667 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1670 if (frame_pointer_needed
)
1672 RTX_FRAME_RELATED_P (insn
) = 1;
1673 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1674 gen_rtx_SET (fp
, plus_constant (Pmode
, fp
,
1678 /* Copy to stack pointer. Note that since we've already
1679 changed the CFA to the frame pointer this operation
1680 need not be annotated if frame pointer is needed.
1681 Always move through unspec, see PR50063.
1682 For meaning of irq_state see movhi_sp_r insn. */
1684 if (cfun
->machine
->is_interrupt
)
1687 if (TARGET_NO_INTERRUPTS
1688 || cfun
->machine
->is_signal
1689 || cfun
->machine
->is_OS_main
)
1692 if (AVR_HAVE_8BIT_SP
)
1695 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1696 fp
, GEN_INT (irq_state
)));
1697 if (!frame_pointer_needed
)
1699 RTX_FRAME_RELATED_P (insn
) = 1;
1700 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1701 gen_rtx_SET (stack_pointer_rtx
,
1702 plus_constant (Pmode
,
1707 fp_plus_insns
= get_insns ();
1710 /************ Method 2: Adjust Stack pointer ************/
1712 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1713 can only handle specific offsets. */
1715 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1717 rtx_insn
*sp_plus_insns
;
1721 insn
= emit_move_insn (stack_pointer_rtx
,
1722 plus_constant (Pmode
, stack_pointer_rtx
,
1724 RTX_FRAME_RELATED_P (insn
) = 1;
1725 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1726 gen_rtx_SET (stack_pointer_rtx
,
1727 plus_constant (Pmode
,
1730 if (frame_pointer_needed
)
1732 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1733 RTX_FRAME_RELATED_P (insn
) = 1;
1736 sp_plus_insns
= get_insns ();
1739 /************ Use shortest method ************/
1741 emit_insn (get_sequence_length (sp_plus_insns
)
1742 < get_sequence_length (fp_plus_insns
)
1748 emit_insn (fp_plus_insns
);
1751 cfun
->machine
->stack_usage
+= size_cfa
;
1752 } /* !minimize && size != 0 */
1757 /* Output function prologue. */
/* NOTE(review): extraction-corrupted span -- this is avr_expand_prologue;
   declarations and parts of the RAMPD/X/Y/Z conditions are missing
   original lines.  Restore from upstream gcc/config/avr/avr.c before
   editing.  Emits ISR register saves and delegates frame setup to
   avr_prologue_setup_frame.  */
1760 avr_expand_prologue (void)
1765 size
= get_frame_size() + avr_outgoing_args_size();
1767 cfun
->machine
->stack_usage
= 0;
1769 /* Prologue: naked. */
1770 if (cfun
->machine
->is_naked
)
1775 avr_regs_to_save (&set
);
1777 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1779 /* Enable interrupts. */
1780 if (cfun
->machine
->is_interrupt
)
1781 emit_insn (gen_enable_interrupt ());
1783 /* Push zero reg. */
1784 emit_push_byte (AVR_ZERO_REGNO
, true);
1787 emit_push_byte (AVR_TMP_REGNO
, true);
1790 /* ??? There's no dwarf2 column reserved for SREG. */
1791 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1793 /* Clear zero reg. */
1794 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1796 /* Prevent any attempt to delete the setting of ZERO_REG! */
1797 emit_use (zero_reg_rtx
);
1799 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1800 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1803 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1806 && TEST_HARD_REG_BIT (set
, REG_X
)
1807 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1809 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1813 && (frame_pointer_needed
1814 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1815 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1817 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1821 && TEST_HARD_REG_BIT (set
, REG_Z
)
1822 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1824 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1826 } /* is_interrupt is_signal */
1828 avr_prologue_setup_frame (size
, set
);
1830 if (flag_stack_usage_info
)
1831 current_function_static_stack_size
= cfun
->machine
->stack_usage
+ INCOMING_FRAME_SP_OFFSET
;
1835 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1836 /* Output summary at end of function prologue. */
/* NOTE(review): extraction-corrupted span -- braces, `else' lines and
   one fprintf argument (the frame size value) are missing original
   lines.  Restore from upstream gcc/config/avr/avr.c before editing.
   Prints prologue kind and size bookkeeping as assembler comments.  */
1839 avr_asm_function_end_prologue (FILE *file
)
1841 if (cfun
->machine
->is_naked
)
1843 fputs ("/* prologue: naked */\n", file
);
1847 if (cfun
->machine
->is_interrupt
)
1849 fputs ("/* prologue: Interrupt */\n", file
);
1851 else if (cfun
->machine
->is_signal
)
1853 fputs ("/* prologue: Signal */\n", file
);
1856 fputs ("/* prologue: function */\n", file
);
1859 if (ACCUMULATE_OUTGOING_ARGS
)
1860 fprintf (file
, "/* outgoing args size = %d */\n",
1861 avr_outgoing_args_size());
1863 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1865 fprintf (file
, "/* stack size = %d */\n",
1866 cfun
->machine
->stack_usage
);
1867 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1868 usage for offset so that SP + .L__stack_offset = return address. */
1869 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1873 /* Implement `EPILOGUE_USES'. */
/* NOTE(review): extraction-corrupted span -- the return statements for
   both branches are missing original lines (presumably return 1 inside
   the `if', 0 otherwise).  Restore from upstream gcc/config/avr/avr.c.  */
1876 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
/* After reload, ISR epilogues use every register they restore.  */
1878 if (reload_completed
1880 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1885 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1888 emit_pop_byte (unsigned regno
)
1892 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1893 mem
= gen_frame_mem (QImode
, mem
);
1894 reg
= gen_rtx_REG (QImode
, regno
);
1896 emit_insn (gen_rtx_SET (reg
, mem
));
1899 /* Output RTL epilogue. */
/* NOTE(review): extraction-corrupted span -- this is avr_expand_epilogue;
   declarations, many conditions (RAMP restore guards, minimize clause)
   and braces are missing original lines.  Restore from upstream
   gcc/config/avr/avr.c before editing.  Mirrors avr_expand_prologue:
   undo the frame (shortest of two methods), pop saved registers, restore
   SFRs/SREG for ISRs, then emit the return.  */
1902 avr_expand_epilogue (bool sibcall_p
)
1909 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1911 size
= get_frame_size() + avr_outgoing_args_size();
1913 /* epilogue: naked */
1914 if (cfun
->machine
->is_naked
)
1916 gcc_assert (!sibcall_p
);
1918 emit_jump_insn (gen_return ());
1922 avr_regs_to_save (&set
);
1923 live_seq
= sequent_regs_live ();
1925 minimize
= (TARGET_CALL_PROLOGUES
1928 && !cfun
->machine
->is_OS_task
1929 && !cfun
->machine
->is_OS_main
1934 || frame_pointer_needed
1937 /* Get rid of frame. */
1939 if (!frame_pointer_needed
)
1941 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1946 emit_move_insn (frame_pointer_rtx
,
1947 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1950 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1956 /* Try two methods to adjust stack and select shortest. */
1960 rtx_insn
*fp_plus_insns
;
1961 HOST_WIDE_INT size_max
;
1963 gcc_assert (frame_pointer_needed
1967 fp
= my_fp
= (frame_pointer_needed
1969 : gen_rtx_REG (Pmode
, REG_X
));
1971 if (AVR_HAVE_8BIT_SP
)
1973 /* The high byte (r29) does not change:
1974 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1976 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1979 /* For rationale see comment in prologue generation. */
1981 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1982 if (size
> size_max
)
1984 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1986 /********** Method 1: Adjust fp register **********/
1990 if (!frame_pointer_needed
)
1991 emit_move_insn (fp
, stack_pointer_rtx
);
1993 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1995 /* Copy to stack pointer. */
1997 if (TARGET_NO_INTERRUPTS
)
2000 if (AVR_HAVE_8BIT_SP
)
2003 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
2004 GEN_INT (irq_state
)));
2006 fp_plus_insns
= get_insns ();
2009 /********** Method 2: Adjust Stack pointer **********/
2011 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
2013 rtx_insn
*sp_plus_insns
;
2017 emit_move_insn (stack_pointer_rtx
,
2018 plus_constant (Pmode
, stack_pointer_rtx
, size
));
2020 sp_plus_insns
= get_insns ();
2023 /************ Use shortest method ************/
2025 emit_insn (get_sequence_length (sp_plus_insns
)
2026 < get_sequence_length (fp_plus_insns
)
2031 emit_insn (fp_plus_insns
);
2034 if (frame_pointer_needed
2035 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
2037 /* Restore previous frame_pointer. See avr_expand_prologue for
2038 rationale for not using pophi. */
2040 emit_pop_byte (REG_Y
+ 1);
2041 emit_pop_byte (REG_Y
);
2044 /* Restore used registers. */
2046 for (reg
= 31; reg
>= 0; --reg
)
2047 if (TEST_HARD_REG_BIT (set
, reg
))
2048 emit_pop_byte (reg
);
2052 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
2053 The conditions to restore them must be the same as in prologue. */
2056 && TEST_HARD_REG_BIT (set
, REG_Z
)
2057 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
2059 emit_pop_byte (TMP_REGNO
);
2060 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
2064 && (frame_pointer_needed
2065 || (TEST_HARD_REG_BIT (set
, REG_Y
)
2066 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
2068 emit_pop_byte (TMP_REGNO
);
2069 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
2073 && TEST_HARD_REG_BIT (set
, REG_X
)
2074 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
2076 emit_pop_byte (TMP_REGNO
);
2077 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
2082 emit_pop_byte (TMP_REGNO
);
2083 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
2086 /* Restore SREG using tmp_reg as scratch. */
2088 emit_pop_byte (AVR_TMP_REGNO
);
2089 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
2091 /* Restore tmp REG. */
2092 emit_pop_byte (AVR_TMP_REGNO
);
2094 /* Restore zero REG. */
2095 emit_pop_byte (AVR_ZERO_REGNO
);
2099 emit_jump_insn (gen_return ());
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
/* Print a marker comment into FILE at the start of the epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
2112 /* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'. */
/* NOTE(review): extraction-corrupted span -- the return statements of
   both branches are missing original lines.  Restore from upstream
   gcc/config/avr/avr.c before editing.  */
2115 avr_cannot_modify_jumps_p (void)
2118 /* Naked Functions must not have any instructions after
2119 their epilogue, see PR42240 */
2121 if (reload_completed
2123 && cfun
->machine
->is_naked
)
2132 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
2135 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
, addr_space_t as
)
2137 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
2138 This hook just serves to hack around PR rtl-optimization/52543 by
2139 claiming that non-generic addresses were mode-dependent so that
2140 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
2141 RTXes to probe SET and MEM costs and assumes that MEM is always in the
2142 generic address space which is not true. */
2144 return !ADDR_SPACE_GENERIC_P (as
);
2148 /* Return true if rtx X is a CONST_INT, CONST or SYMBOL_REF
2149 address with the `absdata' variable attribute, i.e. respective
2150 data can be read / written by LDS / STS instruction.
2151 This is used only for AVR_TINY. */
/* NOTE(review): extraction-corrupted span -- the CONST_INT test opening
   the final condition and the trailing returns are missing original
   lines.  Restore from upstream gcc/config/avr/avr.c before editing.  */
2154 avr_address_tiny_absdata_p (rtx x
, machine_mode mode
)
/* Strip (const (plus sym off)) down to the inner SYMBOL_REF.  */
2156 if (CONST
== GET_CODE (x
))
2157 x
= XEXP (XEXP (x
, 0), 0);
2159 if (SYMBOL_REF_P (x
))
2160 return SYMBOL_REF_FLAGS (x
) & AVR_SYMBOL_FLAG_TINY_ABSDATA
;
/* Literal addresses qualify if the whole access fits below 0xc0.  */
2163 && IN_RANGE (INTVAL (x
), 0, 0xc0 - GET_MODE_SIZE (mode
)))
2170 /* Helper function for `avr_legitimate_address_p'. */
/* NOTE(review): extraction-corrupted span -- the start of the return
   expression (REG_P test, strict/non-strict split) is missing original
   lines.  Restore from upstream gcc/config/avr/avr.c before editing.  */
2173 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
2174 RTX_CODE outer_code
, bool strict
)
/* A hard register is ok if it may serve as a base register for AS and
   OUTER_CODE; otherwise pseudos are accepted in non-strict mode.  */
2177 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
2178 as
, outer_code
, UNKNOWN
)
2180 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
2184 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
2185 machine for a memory operand of mode MODE. */
/* NOTE(review): extraction-corrupted span -- case labels of the switch,
   several conditions and braces are missing original lines.  Restore
   from upstream gcc/config/avr/avr.c before editing.  Classifies REG,
   POST_INC/PRE_DEC and REG+CONST addresses, with AVR_TINY absdata
   special-casing and optional debug dumping via avr_log.  */
2188 avr_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
2190 bool ok
= CONSTANT_ADDRESS_P (x
);
2192 switch (GET_CODE (x
))
2195 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
2199 && GET_MODE_SIZE (mode
) > 4
2200 && REG_X
== REGNO (x
))
2208 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
2209 GET_CODE (x
), strict
);
2214 rtx reg
= XEXP (x
, 0);
2215 rtx op1
= XEXP (x
, 1);
2218 && CONST_INT_P (op1
)
2219 && INTVAL (op1
) >= 0)
2221 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
2226 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
2229 if (reg
== frame_pointer_rtx
2230 || reg
== arg_pointer_rtx
)
2235 else if (frame_pointer_needed
2236 && reg
== frame_pointer_rtx
)
2249 && CONSTANT_ADDRESS_P (x
))
2251 /* avrtiny's load / store instructions only cover addresses 0..0xbf:
2252 IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf. */
2254 ok
= avr_address_tiny_absdata_p (x
, mode
);
2257 if (avr_log
.legitimate_address_p
)
2259 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
2260 "reload_completed=%d reload_in_progress=%d %s:",
2261 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
2262 reg_renumber
? "(reg_renumber)" : "");
2264 if (GET_CODE (x
) == PLUS
2265 && REG_P (XEXP (x
, 0))
2266 && CONST_INT_P (XEXP (x
, 1))
2267 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
2270 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
2271 true_regnum (XEXP (x
, 0)));
2274 avr_edump ("\n%r\n", x
);
2281 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
2282 now only a helper for avr_addr_space_legitimize_address. */
2283 /* Attempts to replace X with a valid
2284 memory address for an operand of mode MODE */
/* NOTE(review): extraction-corrupted span -- guard conditions (e.g. the
   AVR_TINY test around the first force_reg) and the final return are
   missing original lines.  Restore from upstream gcc/config/avr/avr.c
   before editing.  Forces REG+REG and oversized REG+CONST addresses
   into a register.  */
2287 avr_legitimize_address (rtx x
, rtx oldx
, machine_mode mode
)
2289 bool big_offset_p
= false;
2295 if (CONSTANT_ADDRESS_P (x
)
2296 && ! avr_address_tiny_absdata_p (x
, mode
))
2298 x
= force_reg (Pmode
, x
);
2302 if (GET_CODE (oldx
) == PLUS
2303 && REG_P (XEXP (oldx
, 0)))
2305 if (REG_P (XEXP (oldx
, 1)))
2306 x
= force_reg (GET_MODE (oldx
), oldx
);
2307 else if (CONST_INT_P (XEXP (oldx
, 1)))
2309 int offs
= INTVAL (XEXP (oldx
, 1));
2310 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
2311 && offs
> MAX_LD_OFFSET (mode
))
2313 big_offset_p
= true;
2314 x
= force_reg (GET_MODE (oldx
), oldx
);
2319 if (avr_log
.legitimize_address
)
2321 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
2324 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
2331 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
2332 /* This will allow register R26/27 to be used where it is no worse than normal
2333 base pointers R28/29 or R30/31. For example, if base offset is greater
2334 than 63 bytes or for R++ or --R addressing. */
/* NOTE(review): extraction-corrupted span -- declarations (e.g. the
   local `x'), braces and the return statements are missing original
   lines.  Restore from upstream gcc/config/avr/avr.c before editing.
   Pushes reloads for POST_INC/PRE_DEC and out-of-range REG+CONST
   addresses so X (r26/r27) can serve as base.  */
2337 avr_legitimize_reload_address (rtx
*px
, machine_mode mode
,
2338 int opnum
, int type
, int addr_type
,
2339 int ind_levels ATTRIBUTE_UNUSED
,
2340 rtx (*mk_memloc
)(rtx
,int))
2344 if (avr_log
.legitimize_reload_address
)
2345 avr_edump ("\n%?:%m %r\n", mode
, x
);
2347 if (1 && (GET_CODE (x
) == POST_INC
2348 || GET_CODE (x
) == PRE_DEC
))
2350 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
2351 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
2352 opnum
, RELOAD_OTHER
);
2354 if (avr_log
.legitimize_reload_address
)
2355 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
2356 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
2361 if (GET_CODE (x
) == PLUS
2362 && REG_P (XEXP (x
, 0))
2363 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
2364 && CONST_INT_P (XEXP (x
, 1))
2365 && INTVAL (XEXP (x
, 1)) >= 1)
2367 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
2371 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
2373 int regno
= REGNO (XEXP (x
, 0));
2374 rtx mem
= mk_memloc (x
, regno
);
2376 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
2377 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
2378 1, (enum reload_type
) addr_type
);
2380 if (avr_log
.legitimize_reload_address
)
2381 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2382 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
2384 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
2385 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
2386 opnum
, (enum reload_type
) type
);
2388 if (avr_log
.legitimize_reload_address
)
2389 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2390 BASE_POINTER_REGS
, mem
, NULL_RTX
);
2395 else if (! (frame_pointer_needed
2396 && XEXP (x
, 0) == frame_pointer_rtx
))
2398 push_reload (x
, NULL_RTX
, px
, NULL
,
2399 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
2400 opnum
, (enum reload_type
) type
);
2402 if (avr_log
.legitimize_reload_address
)
2403 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2404 POINTER_REGS
, x
, NULL_RTX
);
2414 /* Helper function to print assembler resp. track instruction
2415 sequence lengths. Always return "".
2418 Output assembler code from template TPL with operands supplied
2419 by OPERANDS. This is just forwarding to output_asm_insn.
2422 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2423 If N_WORDS < 0 Set *PLEN to -N_WORDS.
2424 Don't output anything.
/* NOTE(review): extraction-corrupted span -- the PLEN != NULL branch
   that updates the length counter and the `return "";' are missing
   original lines.  Restore from upstream gcc/config/avr/avr.c.  */
2428 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
2432 output_asm_insn (tpl
, operands
);
2446 /* Return a pointer register name as a string. */
/* NOTE(review): extraction-corrupted span -- the switch statement,
   default label and return are missing original lines.  Restore from
   upstream gcc/config/avr/avr.c before editing.  */
2449 ptrreg_to_str (int regno
)
2453 case REG_X
: return "X";
2454 case REG_Y
: return "Y";
2455 case REG_Z
: return "Z";
/* Any other register number is a constraint bug in the caller.  */
2457 output_operand_lossage ("address operand requires constraint for"
2458 " X, Y, or Z register");
2463 /* Return the condition name as a string.
2464 Used in conditional jump constructing */
/* NOTE(review): extraction-corrupted span -- the switch over CODE and
   all returned condition strings are missing original lines.  Restore
   from upstream gcc/config/avr/avr.c before editing.  The two visible
   tests pick a different branch mnemonic when the overflow flag from
   the previous insn is unusable.  */
2467 cond_string (enum rtx_code code
)
2476 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2481 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2497 /* Return true if rtx X is a CONST or SYMBOL_REF with progmem.
2498 This must be used for AVR_TINY only because on other cores
2499 the flash memory is not visible in the RAM address range and
2500 cannot be read by, say, LD instruction. */
/* NOTE(review): extraction-corrupted span -- the final fallthrough
   return (presumably `return false;') is missing original lines.
   Restore from upstream gcc/config/avr/avr.c before editing.  */
2503 avr_address_tiny_pm_p (rtx x
)
/* Strip (const (plus sym off)) down to the inner SYMBOL_REF.  */
2505 if (CONST
== GET_CODE (x
))
2506 x
= XEXP (XEXP (x
, 0), 0);
2508 if (SYMBOL_REF_P (x
))
2509 return SYMBOL_REF_FLAGS (x
) & AVR_SYMBOL_FLAG_TINY_PM
;
2514 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2515 /* Output ADDR to FILE as address. */
/* NOTE(review): extraction-corrupted span -- switch case labels, the
   AVR_TINY guard on the first test, braces and the local `x' binding
   are missing original lines.  Restore from upstream
   gcc/config/avr/avr.c before editing.  Prints REG, PRE_DEC, POST_INC
   and constant (possibly gs()-wrapped progmem) addresses.  */
2518 avr_print_operand_address (FILE *file
, machine_mode
/*mode*/, rtx addr
)
2521 && avr_address_tiny_pm_p (addr
))
2523 addr
= plus_constant (Pmode
, addr
, AVR_TINY_PM_OFFSET
);
2526 switch (GET_CODE (addr
))
2529 fprintf (file
, "%s", ptrreg_to_str (REGNO (addr
)));
2533 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2537 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2541 if (CONSTANT_ADDRESS_P (addr
)
2542 && text_segment_operand (addr
, VOIDmode
))
2545 if (GET_CODE (x
) == CONST
)
2547 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
2549 /* Assembler gs() will implant word address. Make offset
2550 a byte offset inside gs() for assembler. This is
2551 needed because the more logical (constant+gs(sym)) is not
2552 accepted by gas. For 128K and smaller devices this is ok.
2553 For large devices it will create a trampoline to offset
2554 from symbol which may not be what the user really wanted. */
2556 fprintf (file
, "gs(");
2557 output_addr_const (file
, XEXP (x
,0));
2558 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
2559 2 * INTVAL (XEXP (x
, 1)));
2561 if (warning (0, "pointer offset from symbol maybe incorrect"))
2563 output_addr_const (stderr
, addr
);
2564 fprintf(stderr
,"\n");
2569 fprintf (file
, "gs(");
2570 output_addr_const (file
, addr
);
2571 fprintf (file
, ")");
2575 output_addr_const (file
, addr
);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* Only `~' and `!' are recognized punctuation codes.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
2589 /* Implement `TARGET_PRINT_OPERAND'. */
2590 /* Output X as assembler operand to file FILE.
2591 For a description of supported %-codes, see top of avr.md. */
2594 avr_print_operand (FILE *file
, rtx x
, int code
)
2596 int abcd
= 0, ef
= 0, ij
= 0;
2598 if (code
>= 'A' && code
<= 'D')
2600 else if (code
== 'E' || code
== 'F')
2602 else if (code
== 'I' || code
== 'J')
2607 if (!AVR_HAVE_JMP_CALL
)
2610 else if (code
== '!')
2612 if (AVR_HAVE_EIJMP_EICALL
)
2615 else if (code
== 't'
2618 static int t_regno
= -1;
2619 static int t_nbits
= -1;
2621 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2623 t_regno
= REGNO (x
);
2624 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2626 else if (CONST_INT_P (x
) && t_regno
>= 0
2627 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2629 int bpos
= INTVAL (x
);
2631 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2633 fprintf (file
, ",%d", bpos
% 8);
2638 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2640 else if (code
== 'E' || code
== 'F')
2642 rtx op
= XEXP(x
, 0);
2643 fprintf (file
, "%s", reg_names
[REGNO (op
) + ef
]);
2645 else if (code
== 'I' || code
== 'J')
2647 rtx op
= XEXP(XEXP(x
, 0), 0);
2648 fprintf (file
, "%s", reg_names
[REGNO (op
) + ij
]);
2652 if (x
== zero_reg_rtx
)
2653 fprintf (file
, "__zero_reg__");
2654 else if (code
== 'r' && REGNO (x
) < 32)
2655 fprintf (file
, "%d", (int) REGNO (x
));
2657 fprintf (file
, "%s", reg_names
[REGNO (x
) + abcd
]);
2659 else if (CONST_INT_P (x
))
2661 HOST_WIDE_INT ival
= INTVAL (x
);
2664 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2665 else if (low_io_address_operand (x
, VOIDmode
)
2666 || high_io_address_operand (x
, VOIDmode
))
2668 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2669 fprintf (file
, "__RAMPZ__");
2670 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2671 fprintf (file
, "__RAMPY__");
2672 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2673 fprintf (file
, "__RAMPX__");
2674 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2675 fprintf (file
, "__RAMPD__");
2676 else if ((AVR_XMEGA
|| AVR_TINY
) && ival
== avr_addr
.ccp
)
2677 fprintf (file
, "__CCP__");
2678 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2679 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2680 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2683 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2684 ival
- avr_arch
->sfr_offset
);
2688 fatal_insn ("bad address, not an I/O address:", x
);
2692 rtx addr
= XEXP (x
, 0);
2696 if (!CONSTANT_P (addr
))
2697 fatal_insn ("bad address, not a constant:", addr
);
2698 /* Assembler template with m-code is data - not progmem section */
2699 if (text_segment_operand (addr
, VOIDmode
))
2700 if (warning (0, "accessing data memory with"
2701 " program memory address"))
2703 output_addr_const (stderr
, addr
);
2704 fprintf(stderr
,"\n");
2706 output_addr_const (file
, addr
);
2708 else if (code
== 'i')
2710 avr_print_operand (file
, addr
, 'i');
2712 else if (code
== 'o')
2714 if (GET_CODE (addr
) != PLUS
)
2715 fatal_insn ("bad address, not (reg+disp):", addr
);
2717 avr_print_operand (file
, XEXP (addr
, 1), 0);
2719 else if (code
== 'b')
2721 if (GET_CODE (addr
) != PLUS
)
2722 fatal_insn ("bad address, not (reg+disp):", addr
);
2724 avr_print_operand_address (file
, VOIDmode
, XEXP (addr
, 0));
2726 else if (code
== 'p' || code
== 'r')
2728 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2729 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2733 avr_print_operand_address (file
, VOIDmode
, XEXP (addr
, 0));
2735 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2737 else if (GET_CODE (addr
) == PLUS
)
2739 avr_print_operand_address (file
, VOIDmode
, XEXP (addr
,0));
2740 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2741 fatal_insn ("internal compiler error. Bad address:"
2744 avr_print_operand (file
, XEXP (addr
,1), code
);
2747 avr_print_operand_address (file
, VOIDmode
, addr
);
2749 else if (code
== 'i')
2751 if (GET_CODE (x
) == SYMBOL_REF
&& (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_IO
))
2752 avr_print_operand_address
2753 (file
, VOIDmode
, plus_constant (HImode
, x
, -avr_arch
->sfr_offset
));
2755 fatal_insn ("bad address, not an I/O address:", x
);
2757 else if (code
== 'x')
2759 /* Constant progmem address - like used in jmp or call */
2760 if (0 == text_segment_operand (x
, VOIDmode
))
2761 if (warning (0, "accessing program memory"
2762 " with data memory address"))
2764 output_addr_const (stderr
, x
);
2765 fprintf(stderr
,"\n");
2767 /* Use normal symbol for direct address no linker trampoline needed */
2768 output_addr_const (file
, x
);
2770 else if (CONST_FIXED_P (x
))
2772 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2774 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2776 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2778 else if (GET_CODE (x
) == CONST_DOUBLE
)
2781 if (GET_MODE (x
) != SFmode
)
2782 fatal_insn ("internal compiler error. Unknown mode:", x
);
2783 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x
), val
);
2784 fprintf (file
, "0x%lx", val
);
2786 else if (GET_CODE (x
) == CONST_STRING
)
2787 fputs (XSTR (x
, 0), file
);
2788 else if (code
== 'j')
2789 fputs (cond_string (GET_CODE (x
)), file
);
2790 else if (code
== 'k')
2791 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2793 avr_print_operand_address (file
, VOIDmode
, x
);
2797 /* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P. */
2799 /* Prefer sequence of loads/stores for moves of size upto
2800 two - two pairs of load/store instructions are always better
2801 than the 5 instruction sequence for a loop (1 instruction
2802 for loop counter setup, and 4 for the body of the loop). */
2805 avr_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size
,
2806 unsigned int align ATTRIBUTE_UNUSED
,
2807 enum by_pieces_operation op
,
2811 if (op
!= MOVE_BY_PIECES
|| (speed_p
&& (size
> (MOVE_MAX_PIECES
))))
2812 return default_use_by_pieces_infrastructure_p (size
, align
, op
, speed_p
);
2814 return size
<= (MOVE_MAX_PIECES
);
2818 /* Worker function for `NOTICE_UPDATE_CC'. */
2819 /* Update the condition code in the INSN. */
2822 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx_insn
*insn
)
2825 enum attr_cc cc
= get_attr_cc (insn
);
2835 rtx
*op
= recog_data
.operand
;
2838 /* Extract insn's operands. */
2839 extract_constrain_insn_cached (insn
);
2847 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2848 cc
= (enum attr_cc
) icc
;
2853 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2854 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2855 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2857 /* Any other "r,rL" combination does not alter cc0. */
2861 } /* inner switch */
2865 } /* outer swicth */
2870 /* Special values like CC_OUT_PLUS from above have been
2871 mapped to "standard" CC_* values so we never come here. */
2877 /* Insn does not affect CC at all, but it might set some registers
2878 that are stored in cc_status. If such a register is affected by
2879 the current insn, for example by means of a SET or a CLOBBER,
2880 then we must reset cc_status; cf. PR77326.
2882 Unfortunately, set_of cannot be used as reg_overlap_mentioned_p
2883 will abort on COMPARE (which might be found in cc_status.value1/2).
2884 Thus work out the registers set by the insn and regs mentioned
2885 in cc_status.value1/2. */
2887 if (cc_status
.value1
2888 || cc_status
.value2
)
2890 HARD_REG_SET regs_used
;
2891 HARD_REG_SET regs_set
;
2892 CLEAR_HARD_REG_SET (regs_used
);
2894 if (cc_status
.value1
2895 && !CONSTANT_P (cc_status
.value1
))
2897 find_all_hard_regs (cc_status
.value1
, ®s_used
);
2900 if (cc_status
.value2
2901 && !CONSTANT_P (cc_status
.value2
))
2903 find_all_hard_regs (cc_status
.value2
, ®s_used
);
2906 find_all_hard_reg_sets (insn
, ®s_set
, false);
2908 if (hard_reg_set_intersect_p (regs_used
, regs_set
))
2921 set
= single_set (insn
);
2925 cc_status
.flags
|= CC_NO_OVERFLOW
;
2926 cc_status
.value1
= SET_DEST (set
);
2931 /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
2932 of this combination, cf. also PR61055. */
2937 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2938 The V flag may or may not be known but that's ok because
2939 alter_cond will change tests to use EQ/NE. */
2940 set
= single_set (insn
);
2944 cc_status
.value1
= SET_DEST (set
);
2945 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2950 set
= single_set (insn
);
2953 cc_status
.value1
= SET_SRC (set
);
2957 /* Insn doesn't leave CC in a usable state. */
2963 /* Choose mode for jump insn:
2964 1 - relative jump in range -63 <= x <= 62 ;
2965 2 - relative jump in range -2046 <= x <= 2045 ;
2966 3 - absolute jump (only for ATmega[16]03). */
2969 avr_jump_mode (rtx x
, rtx_insn
*insn
)
2971 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2972 ? XEXP (x
, 0) : x
));
2973 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2974 int jump_distance
= cur_addr
- dest_addr
;
2976 if (-63 <= jump_distance
&& jump_distance
<= 62)
2978 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2980 else if (AVR_HAVE_JMP_CALL
)
2986 /* Return an AVR condition jump commands.
2987 X is a comparison RTX.
2988 LEN is a number returned by avr_jump_mode function.
2989 If REVERSE nonzero then condition code in X must be reversed. */
2992 ret_cond_branch (rtx x
, int len
, int reverse
)
2994 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2999 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
3000 return (len
== 1 ? ("breq .+2" CR_TAB
3002 len
== 2 ? ("breq .+4" CR_TAB
3010 return (len
== 1 ? ("breq .+2" CR_TAB
3012 len
== 2 ? ("breq .+4" CR_TAB
3019 return (len
== 1 ? ("breq .+2" CR_TAB
3021 len
== 2 ? ("breq .+4" CR_TAB
3028 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
3029 return (len
== 1 ? ("breq %0" CR_TAB
3031 len
== 2 ? ("breq .+2" CR_TAB
3038 return (len
== 1 ? ("breq %0" CR_TAB
3040 len
== 2 ? ("breq .+2" CR_TAB
3047 return (len
== 1 ? ("breq %0" CR_TAB
3049 len
== 2 ? ("breq .+2" CR_TAB
3063 return ("br%j1 .+2" CR_TAB
3066 return ("br%j1 .+4" CR_TAB
3077 return ("br%k1 .+2" CR_TAB
3080 return ("br%k1 .+4" CR_TAB
3089 /* Worker function for `FINAL_PRESCAN_INSN'. */
3090 /* Output insn cost for next insn. */
3093 avr_final_prescan_insn (rtx_insn
*insn
, rtx
*operand ATTRIBUTE_UNUSED
,
3094 int num_operands ATTRIBUTE_UNUSED
)
3096 if (avr_log
.rtx_costs
)
3098 rtx set
= single_set (insn
);
3101 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
3102 set_src_cost (SET_SRC (set
), GET_MODE (SET_DEST (set
)),
3103 optimize_insn_for_speed_p ()));
3105 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
3106 rtx_cost (PATTERN (insn
), VOIDmode
, INSN
, 0,
3107 optimize_insn_for_speed_p()));
3111 /* Return 0 if undefined, 1 if always true or always false. */
3114 avr_simplify_comparison_p (machine_mode mode
, RTX_CODE op
, rtx x
)
3116 unsigned int max
= (mode
== QImode
? 0xff :
3117 mode
== HImode
? 0xffff :
3118 mode
== PSImode
? 0xffffff :
3119 mode
== SImode
? 0xffffffff : 0);
3120 if (max
&& op
&& CONST_INT_P (x
))
3122 if (unsigned_condition (op
) != op
)
3125 if (max
!= (INTVAL (x
) & max
)
3126 && INTVAL (x
) != 0xff)
3133 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
3134 /* Returns nonzero if REGNO is the number of a hard
3135 register in which function arguments are sometimes passed. */
3138 avr_function_arg_regno_p(int r
)
3140 return (AVR_TINY
? r
>= 20 && r
<= 25 : r
>= 8 && r
<= 25);
3144 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
3145 /* Initializing the variable cum for the state at the beginning
3146 of the argument list. */
3149 avr_init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
3150 tree fndecl ATTRIBUTE_UNUSED
)
3152 cum
->nregs
= AVR_TINY
? 6 : 18;
3153 cum
->regno
= FIRST_CUM_REG
;
3154 if (!libname
&& stdarg_p (fntype
))
3157 /* Assume the calle may be tail called */
3159 cfun
->machine
->sibcall_fails
= 0;
3162 /* Returns the number of registers to allocate for a function argument. */
3165 avr_num_arg_regs (machine_mode mode
, const_tree type
)
3169 if (mode
== BLKmode
)
3170 size
= int_size_in_bytes (type
);
3172 size
= GET_MODE_SIZE (mode
);
3174 /* Align all function arguments to start in even-numbered registers.
3175 Odd-sized arguments leave holes above them. */
3177 return (size
+ 1) & ~1;
3181 /* Implement `TARGET_FUNCTION_ARG'. */
3182 /* Controls whether a function argument is passed
3183 in a register, and which register. */
3186 avr_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
3187 const_tree type
, bool named ATTRIBUTE_UNUSED
)
3189 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3190 int bytes
= avr_num_arg_regs (mode
, type
);
3192 if (cum
->nregs
&& bytes
<= cum
->nregs
)
3193 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
3199 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
3200 /* Update the summarizer variable CUM to advance past an argument
3201 in the argument list. */
/* NOTE(review): this region is corrupted -- original lines (return type,
   braces, and the guard conditions around the two diagnostic blocks)
   are missing; annotations below are best-effort.  */
3204 avr_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
3205 const_tree type
, bool named ATTRIBUTE_UNUSED
)
3207 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3208 int bytes
= avr_num_arg_regs (mode
, type
);
/* Registers are consumed downwards; see avr_function_arg.  */
3210 cum
->nregs
-= bytes
;
3211 cum
->regno
-= bytes
;
3213 /* A parameter is being passed in a call-saved register. As the original
3214 contents of these regs has to be restored before leaving the function,
3215 a function must not pass arguments in call-saved regs in order to get
/* NOTE(review): the condition preceding this test is missing from the
   corrupted text -- presumably it also checks cum->regno/cum->nregs
   bounds; confirm against upstream avr.c.  */
3220 && !call_used_regs
[cum
->regno
])
3222 /* FIXME: We ship info on failing tail-call in struct machine_function.
3223 This uses internals of calls.c:expand_call() and the way args_so_far
3224 is used. targetm.function_ok_for_sibcall() needs to be extended to
3225 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
3226 dependent so that such an extension is not wanted. */
3228 cfun
->machine
->sibcall_fails
= 1;
3231 /* Test if all registers needed by the ABI are actually available. If the
3232 user has fixed a GPR needed to pass an argument, an (implicit) function
3233 call will clobber that fixed register. See PR45099 for an example. */
3240 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
3241 if (fixed_regs
[regno
])
3242 warning (0, "fixed register %s used to pass parameter to function",
/* Once the registers are exhausted, reset the cursor for the
   remaining (stack-passed) arguments.  */
3246 if (cum
->nregs
<= 0)
3249 cum
->regno
= FIRST_CUM_REG
;
3253 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
3254 /* Decide whether we can make a sibling call to a function. DECL is the
3255 declaration of the function being targeted by the call and EXP is the
3256 CALL_EXPR representing the call. */
/* NOTE(review): corrupted region -- the return type, several `return'
   statements and conditionals are missing from the text below.  */
3259 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
3263 /* Tail-calling must fail if callee-saved regs are used to pass
3264 function args. We must not tail-call when `epilogue_restores'
3265 is used. Unfortunately, we cannot tell at this point if that
3266 actually will happen or not, and we cannot step back from
3267 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
3269 if (cfun
->machine
->sibcall_fails
3270 || TARGET_CALL_PROLOGUES
)
/* Derive the callee's FUNCTION_TYPE/METHOD_TYPE node, either from the
   declaration or, lacking one, from the CALL_EXPR's function
   expression.  */
3275 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
3279 decl_callee
= TREE_TYPE (decl_callee
);
3283 decl_callee
= fntype_callee
;
3285 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
3286 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
3288 decl_callee
= TREE_TYPE (decl_callee
);
3292 /* Ensure that caller and callee have compatible epilogues */
3294 if (cfun
->machine
->is_interrupt
3295 || cfun
->machine
->is_signal
3296 || cfun
->machine
->is_naked
3297 || avr_naked_function_p (decl_callee
)
3298 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
3299 || (avr_OS_task_function_p (decl_callee
)
3300 != cfun
->machine
->is_OS_task
)
3301 || (avr_OS_main_function_p (decl_callee
)
3302 != cfun
->machine
->is_OS_main
))
3310 /***********************************************************************
3311 Functions for outputting various mov's for a various modes
3312 ************************************************************************/
3314 /* Return true if a value of mode MODE is read from flash by
3315 __load_* function from libgcc. */
3318 avr_load_libgcc_p (rtx op
)
3320 machine_mode mode
= GET_MODE (op
);
3321 int n_bytes
= GET_MODE_SIZE (mode
);
3325 && avr_mem_flash_p (op
));
3328 /* Return true if a value of mode MODE is read by __xload_* function. */
3331 avr_xload_libgcc_p (machine_mode mode
)
3333 int n_bytes
= GET_MODE_SIZE (mode
);
3336 || avr_n_flash
> 1);
3340 /* Fixme: This is a hack because secondary reloads don't works as expected.
3342 Find an unused d-register to be used as scratch in INSN.
3343 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
3344 is a register, skip all possible return values that overlap EXCLUDE.
3345 The policy for the returned register is similar to that of
3346 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
3349 Return a QImode d-register or NULL_RTX if nothing found. */
/* NOTE(review): corrupted region -- the return type, loop-body braces
   and the `continue'/`return' statements are missing below.  */
3352 avr_find_unused_d_reg (rtx_insn
*insn
, rtx exclude
)
3355 bool isr_p
= (avr_interrupt_function_p (current_function_decl
)
3356 || avr_signal_function_p (current_function_decl
));
/* d-registers are R16..R31.  */
3358 for (regno
= 16; regno
< 32; regno
++)
3360 rtx reg
= all_regs_rtx
[regno
];
/* Skip candidates that overlap EXCLUDE or are fixed registers.  */
3363 && reg_overlap_mentioned_p (exclude
, reg
))
3364 || fixed_regs
[regno
])
3369 /* Try non-live register */
3371 if (!df_regs_ever_live_p (regno
)
3372 && (TREE_THIS_VOLATILE (current_function_decl
)
3373 || cfun
->machine
->is_OS_task
3374 || cfun
->machine
->is_OS_main
3375 || (!isr_p
&& call_used_regs
[regno
])))
3380 /* Any live register can be used if it is unused after.
3381 Prologue/epilogue will care for it as needed. */
3383 if (df_regs_ever_live_p (regno
)
3384 && reg_unused_after (insn
, reg
))
3394 /* Helper function for the next function in the case where only restricted
3395 version of LPM instruction is available. */
3398 avr_out_lpm_no_lpmx (rtx_insn
*insn
, rtx
*xop
, int *plen
)
3402 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
3405 regno_dest
= REGNO (dest
);
3407 /* The implicit target register of LPM. */
3408 xop
[3] = lpm_reg_rtx
;
3410 switch (GET_CODE (addr
))
3417 gcc_assert (REG_Z
== REGNO (addr
));
3425 avr_asm_len ("%4lpm", xop
, plen
, 1);
3427 if (regno_dest
!= LPM_REGNO
)
3428 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
3433 if (REGNO (dest
) == REG_Z
)
3434 return avr_asm_len ("%4lpm" CR_TAB
3439 "pop %A0", xop
, plen
, 6);
3441 avr_asm_len ("%4lpm" CR_TAB
3445 "mov %B0,%3", xop
, plen
, 5);
3447 if (!reg_unused_after (insn
, addr
))
3448 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3457 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
3460 if (regno_dest
== LPM_REGNO
)
3461 avr_asm_len ("%4lpm" CR_TAB
3462 "adiw %2,1", xop
, plen
, 2);
3464 avr_asm_len ("%4lpm" CR_TAB
3466 "adiw %2,1", xop
, plen
, 3);
3469 avr_asm_len ("%4lpm" CR_TAB
3471 "adiw %2,1", xop
, plen
, 3);
3474 avr_asm_len ("%4lpm" CR_TAB
3476 "adiw %2,1", xop
, plen
, 3);
3479 avr_asm_len ("%4lpm" CR_TAB
3481 "adiw %2,1", xop
, plen
, 3);
3483 break; /* POST_INC */
3485 } /* switch CODE (addr) */
3491 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
3492 OP[1] in AS1 to register OP[0].
3493 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3497 avr_out_lpm (rtx_insn
*insn
, rtx
*op
, int *plen
)
3501 rtx src
= SET_SRC (single_set (insn
));
3503 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
3506 addr_space_t as
= MEM_ADDR_SPACE (src
);
3513 warning (0, "writing to address space %qs not supported",
3514 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
3519 addr
= XEXP (src
, 0);
3520 code
= GET_CODE (addr
);
3522 gcc_assert (REG_P (dest
));
3523 gcc_assert (REG
== code
|| POST_INC
== code
);
3527 xop
[2] = lpm_addr_reg_rtx
;
3528 xop
[4] = xstring_empty
;
3529 xop
[5] = tmp_reg_rtx
;
3530 xop
[6] = XEXP (rampz_rtx
, 0);
3532 segment
= avr_addrspace
[as
].segment
;
3534 /* Set RAMPZ as needed. */
3538 xop
[4] = GEN_INT (segment
);
3539 xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
);
3541 if (xop
[3] != NULL_RTX
)
3543 avr_asm_len ("ldi %3,%4" CR_TAB
3544 "out %i6,%3", xop
, plen
, 2);
3546 else if (segment
== 1)
3548 avr_asm_len ("clr %5" CR_TAB
3550 "out %i6,%5", xop
, plen
, 3);
3554 avr_asm_len ("mov %5,%2" CR_TAB
3557 "mov %2,%5", xop
, plen
, 4);
3562 if (!AVR_HAVE_ELPMX
)
3563 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3565 else if (!AVR_HAVE_LPMX
)
3567 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3570 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
3572 switch (GET_CODE (addr
))
3579 gcc_assert (REG_Z
== REGNO (addr
));
3587 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
3590 if (REGNO (dest
) == REG_Z
)
3591 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3592 "%4lpm %B0,%a2" CR_TAB
3593 "mov %A0,%5", xop
, plen
, 3);
3596 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3597 "%4lpm %B0,%a2", xop
, plen
, 2);
3599 if (!reg_unused_after (insn
, addr
))
3600 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3607 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3608 "%4lpm %B0,%a2+" CR_TAB
3609 "%4lpm %C0,%a2", xop
, plen
, 3);
3611 if (!reg_unused_after (insn
, addr
))
3612 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
3618 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3619 "%4lpm %B0,%a2+", xop
, plen
, 2);
3621 if (REGNO (dest
) == REG_Z
- 2)
3622 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3623 "%4lpm %C0,%a2" CR_TAB
3624 "mov %D0,%5", xop
, plen
, 3);
3627 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3628 "%4lpm %D0,%a2", xop
, plen
, 2);
3630 if (!reg_unused_after (insn
, addr
))
3631 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
3641 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
3644 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
3645 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
3646 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
3647 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
3649 break; /* POST_INC */
3651 } /* switch CODE (addr) */
3653 if (xop
[4] == xstring_e
&& AVR_HAVE_RAMPD
)
3655 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3657 xop
[0] = zero_reg_rtx
;
3658 avr_asm_len ("out %i6,%0", xop
, plen
, 1);
3665 /* Worker function for xload_8 insn. */
/* NOTE(review): corrupted region -- the return type, local xop[]
   declaration, initial xop[0]/xop[1] assignments, braces and final
   `return' are missing below.  */
3668 avr_out_xload (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
/* xop[2] = Z pointer register (implicit LPM address).  */
3674 xop
[2] = lpm_addr_reg_rtx
;
/* With LPMX the destination can be loaded directly, otherwise LPM
   always targets R0 (lpm_reg_rtx) and we copy afterwards.  */
3675 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
3677 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, -1);
/* Bit 7 of the address selects RAM vs flash: if set, re-load the
   value from RAM, overwriting the flash read.  */
3679 avr_asm_len ("sbrc %1,7" CR_TAB
3680 "ld %3,%a2", xop
, plen
, 2);
/* Move to the real destination when LPM had to go through R0.  */
3682 if (REGNO (xop
[0]) != REGNO (xop
[3]))
3683 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
3690 output_movqi (rtx_insn
*insn
, rtx operands
[], int *plen
)
3692 rtx dest
= operands
[0];
3693 rtx src
= operands
[1];
3695 if (avr_mem_flash_p (src
)
3696 || avr_mem_flash_p (dest
))
3698 return avr_out_lpm (insn
, operands
, plen
);
3701 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
3705 if (REG_P (src
)) /* mov r,r */
3707 if (test_hard_reg_class (STACK_REG
, dest
))
3708 return avr_asm_len ("out %0,%1", operands
, plen
, -1);
3709 else if (test_hard_reg_class (STACK_REG
, src
))
3710 return avr_asm_len ("in %0,%1", operands
, plen
, -1);
3712 return avr_asm_len ("mov %0,%1", operands
, plen
, -1);
3714 else if (CONSTANT_P (src
))
3716 output_reload_in_const (operands
, NULL_RTX
, plen
, false);
3719 else if (MEM_P (src
))
3720 return out_movqi_r_mr (insn
, operands
, plen
); /* mov r,m */
3722 else if (MEM_P (dest
))
3727 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3729 return out_movqi_mr_r (insn
, xop
, plen
);
3737 output_movhi (rtx_insn
*insn
, rtx xop
[], int *plen
)
3742 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
3744 if (avr_mem_flash_p (src
)
3745 || avr_mem_flash_p (dest
))
3747 return avr_out_lpm (insn
, xop
, plen
);
3750 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
3754 if (REG_P (src
)) /* mov r,r */
3756 if (test_hard_reg_class (STACK_REG
, dest
))
3758 if (AVR_HAVE_8BIT_SP
)
3759 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
3762 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3763 "out __SP_H__,%B1", xop
, plen
, -2);
3765 /* Use simple load of SP if no interrupts are used. */
3767 return TARGET_NO_INTERRUPTS
3768 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3769 "out __SP_L__,%A1", xop
, plen
, -2)
3770 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3772 "out __SP_H__,%B1" CR_TAB
3773 "out __SREG__,__tmp_reg__" CR_TAB
3774 "out __SP_L__,%A1", xop
, plen
, -5);
3776 else if (test_hard_reg_class (STACK_REG
, src
))
3778 return !AVR_HAVE_SPH
3779 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3780 "clr %B0", xop
, plen
, -2)
3782 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3783 "in %B0,__SP_H__", xop
, plen
, -2);
3786 return AVR_HAVE_MOVW
3787 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
3789 : avr_asm_len ("mov %A0,%A1" CR_TAB
3790 "mov %B0,%B1", xop
, plen
, -2);
3792 else if (CONSTANT_P (src
))
3794 return output_reload_inhi (xop
, NULL
, plen
);
3796 else if (MEM_P (src
))
3798 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
3801 else if (MEM_P (dest
))
3806 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3808 return out_movhi_mr_r (insn
, xop
, plen
);
3811 fatal_insn ("invalid insn:", insn
);
3817 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
/* NOTE(review): corrupted region -- the return type, dest/src local
   declarations, braces and final `return' are missing below.  */
3820 avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
3824 rtx x
= XEXP (src
, 0);
/* Add the displacement into the pointer register pair, then load.  */
3826 avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3827 "ld %0,%b1" , op
, plen
, -3);
/* Restore the pointer unless the destination clobbered it or it is
   dead after this insn.  */
3829 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3830 && !reg_unused_after (insn
, XEXP (x
,0)))
3831 avr_asm_len (TINY_SBIW (%I1
, %J1
, %o1
), op
, plen
, 2);
3837 out_movqi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
3841 rtx x
= XEXP (src
, 0);
3843 if (CONSTANT_ADDRESS_P (x
))
3845 int n_words
= AVR_TINY
? 1 : 2;
3846 return optimize
> 0 && io_address_operand (x
, QImode
)
3847 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3848 : avr_asm_len ("lds %0,%m1", op
, plen
, -n_words
);
3851 if (GET_CODE (x
) == PLUS
3852 && REG_P (XEXP (x
, 0))
3853 && CONST_INT_P (XEXP (x
, 1)))
3855 /* memory access by reg+disp */
3857 int disp
= INTVAL (XEXP (x
, 1));
3860 return avr_out_movqi_r_mr_reg_disp_tiny (insn
, op
, plen
);
3862 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3864 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3865 fatal_insn ("incorrect insn:",insn
);
3867 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3868 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3869 "ldd %0,Y+63" CR_TAB
3870 "sbiw r28,%o1-63", op
, plen
, -3);
3872 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3873 "sbci r29,hi8(-%o1)" CR_TAB
3875 "subi r28,lo8(%o1)" CR_TAB
3876 "sbci r29,hi8(%o1)", op
, plen
, -5);
3878 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3880 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3881 it but I have this situation with extremal optimizing options. */
3883 avr_asm_len ("adiw r26,%o1" CR_TAB
3884 "ld %0,X", op
, plen
, -2);
3886 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3887 && !reg_unused_after (insn
, XEXP (x
,0)))
3889 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3895 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3898 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
3902 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3905 avr_out_movhi_r_mr_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
3909 rtx base
= XEXP (src
, 0);
3911 int reg_dest
= true_regnum (dest
);
3912 int reg_base
= true_regnum (base
);
3914 if (reg_dest
== reg_base
) /* R = (R) */
3915 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3917 "mov %A0,__tmp_reg__", op
, plen
, -3);
3919 avr_asm_len ("ld %A0,%1+" CR_TAB
3920 "ld %B0,%1", op
, plen
, -2);
3922 if (!reg_unused_after (insn
, base
))
3923 avr_asm_len (TINY_SBIW (%E1
, %F1
, 1), op
, plen
, 2);
3929 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3932 avr_out_movhi_r_mr_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
3936 rtx base
= XEXP (src
, 0);
3938 int reg_dest
= true_regnum (dest
);
3939 int reg_base
= true_regnum (XEXP (base
, 0));
3941 if (reg_base
== reg_dest
)
3943 return avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3944 "ld __tmp_reg__,%b1+" CR_TAB
3946 "mov %A0,__tmp_reg__", op
, plen
, -5);
3950 avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3951 "ld %A0,%b1+" CR_TAB
3952 "ld %B0,%b1", op
, plen
, -4);
3954 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3955 avr_asm_len (TINY_SBIW (%I1
, %J1
, %o1
+1), op
, plen
, 2);
3962 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3965 avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
3967 int mem_volatile_p
= 0;
3970 rtx base
= XEXP (src
, 0);
3972 /* "volatile" forces reading low byte first, even if less efficient,
3973 for correct operation with 16-bit I/O registers. */
3974 mem_volatile_p
= MEM_VOLATILE_P (src
);
3976 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3977 fatal_insn ("incorrect insn:", insn
);
3979 if (!mem_volatile_p
)
3980 return avr_asm_len ("ld %B0,%1" CR_TAB
3981 "ld %A0,%1", op
, plen
, -2);
3983 return avr_asm_len (TINY_SBIW (%I1
, %J1
, 2) CR_TAB
3984 "ld %A0,%p1+" CR_TAB
3986 TINY_SBIW (%I1
, %J1
, 1), op
, plen
, -6);
3991 out_movhi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
3995 rtx base
= XEXP (src
, 0);
3996 int reg_dest
= true_regnum (dest
);
3997 int reg_base
= true_regnum (base
);
3998 /* "volatile" forces reading low byte first, even if less efficient,
3999 for correct operation with 16-bit I/O registers. */
4000 int mem_volatile_p
= MEM_VOLATILE_P (src
);
4005 return avr_out_movhi_r_mr_reg_no_disp_tiny (insn
, op
, plen
);
4007 if (reg_dest
== reg_base
) /* R = (R) */
4008 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
4010 "mov %A0,__tmp_reg__", op
, plen
, -3);
4012 if (reg_base
!= REG_X
)
4013 return avr_asm_len ("ld %A0,%1" CR_TAB
4014 "ldd %B0,%1+1", op
, plen
, -2);
4016 avr_asm_len ("ld %A0,X+" CR_TAB
4017 "ld %B0,X", op
, plen
, -2);
4019 if (!reg_unused_after (insn
, base
))
4020 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4024 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4026 int disp
= INTVAL (XEXP (base
, 1));
4027 int reg_base
= true_regnum (XEXP (base
, 0));
4030 return avr_out_movhi_r_mr_reg_disp_tiny (insn
, op
, plen
);
4032 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
4034 if (REGNO (XEXP (base
, 0)) != REG_Y
)
4035 fatal_insn ("incorrect insn:",insn
);
4037 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
4038 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
4039 "ldd %A0,Y+62" CR_TAB
4040 "ldd %B0,Y+63" CR_TAB
4041 "sbiw r28,%o1-62", op
, plen
, -4)
4043 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
4044 "sbci r29,hi8(-%o1)" CR_TAB
4046 "ldd %B0,Y+1" CR_TAB
4047 "subi r28,lo8(%o1)" CR_TAB
4048 "sbci r29,hi8(%o1)", op
, plen
, -6);
4051 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
4052 it but I have this situation with extremal
4053 optimization options. */
4055 if (reg_base
== REG_X
)
4056 return reg_base
== reg_dest
4057 ? avr_asm_len ("adiw r26,%o1" CR_TAB
4058 "ld __tmp_reg__,X+" CR_TAB
4060 "mov %A0,__tmp_reg__", op
, plen
, -4)
4062 : avr_asm_len ("adiw r26,%o1" CR_TAB
4065 "sbiw r26,%o1+1", op
, plen
, -4);
4067 return reg_base
== reg_dest
4068 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
4069 "ldd %B0,%B1" CR_TAB
4070 "mov %A0,__tmp_reg__", op
, plen
, -3)
4072 : avr_asm_len ("ldd %A0,%A1" CR_TAB
4073 "ldd %B0,%B1", op
, plen
, -2);
4075 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4078 return avr_out_movhi_r_mr_pre_dec_tiny (insn
, op
, plen
);
4080 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
4081 fatal_insn ("incorrect insn:", insn
);
4083 if (!mem_volatile_p
)
4084 return avr_asm_len ("ld %B0,%1" CR_TAB
4085 "ld %A0,%1", op
, plen
, -2);
4087 return REGNO (XEXP (base
, 0)) == REG_X
4088 ? avr_asm_len ("sbiw r26,2" CR_TAB
4091 "sbiw r26,1", op
, plen
, -4)
4093 : avr_asm_len ("sbiw %r1,2" CR_TAB
4095 "ldd %B0,%p1+1", op
, plen
, -3);
4097 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4099 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
4100 fatal_insn ("incorrect insn:", insn
);
4102 return avr_asm_len ("ld %A0,%1" CR_TAB
4103 "ld %B0,%1", op
, plen
, -2);
4105 else if (CONSTANT_ADDRESS_P (base
))
4107 int n_words
= AVR_TINY
? 2 : 4;
4108 return optimize
> 0 && io_address_operand (base
, HImode
)
4109 ? avr_asm_len ("in %A0,%i1" CR_TAB
4110 "in %B0,%i1+1", op
, plen
, -2)
4112 : avr_asm_len ("lds %A0,%m1" CR_TAB
4113 "lds %B0,%m1+1", op
, plen
, -n_words
);
4116 fatal_insn ("unknown move insn:",insn
);
4121 avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
4125 rtx base
= XEXP (src
, 0);
4126 int reg_dest
= true_regnum (dest
);
4127 int reg_base
= true_regnum (base
);
4129 if (reg_dest
== reg_base
)
4131 /* "ld r26,-X" is undefined */
4132 return *l
= 9, (TINY_ADIW (%E1
, %F1
, 3) CR_TAB
4135 "ld __tmp_reg__,-%1" CR_TAB
4136 TINY_SBIW (%E1
, %F1
, 1) CR_TAB
4138 "mov %B0,__tmp_reg__");
4140 else if (reg_dest
== reg_base
- 2)
4142 return *l
= 5, ("ld %A0,%1+" CR_TAB
4144 "ld __tmp_reg__,%1+" CR_TAB
4146 "mov %C0,__tmp_reg__");
4148 else if (reg_unused_after (insn
, base
))
4150 return *l
= 4, ("ld %A0,%1+" CR_TAB
4157 return *l
= 6, ("ld %A0,%1+" CR_TAB
4161 TINY_SBIW (%E1
, %F1
, 3));
4167 avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
4171 rtx base
= XEXP (src
, 0);
4172 int reg_dest
= true_regnum (dest
);
4173 int reg_base
= true_regnum (XEXP (base
, 0));
4175 if (reg_dest
== reg_base
)
4177 /* "ld r26,-X" is undefined */
4178 return *l
= 9, (TINY_ADIW (%I1
, %J1
, %o1
+3) CR_TAB
4180 "ld %C0,-%b1" CR_TAB
4181 "ld __tmp_reg__,-%b1" CR_TAB
4182 TINY_SBIW (%I1
, %J1
, 1) CR_TAB
4184 "mov %B0,__tmp_reg__");
4186 else if (reg_dest
== reg_base
- 2)
4188 return *l
= 7, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4189 "ld %A0,%b1+" CR_TAB
4190 "ld %B0,%b1+" CR_TAB
4191 "ld __tmp_reg__,%b1+" CR_TAB
4193 "mov %C0,__tmp_reg__");
4195 else if (reg_unused_after (insn
, XEXP (base
, 0)))
4197 return *l
= 6, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4198 "ld %A0,%b1+" CR_TAB
4199 "ld %B0,%b1+" CR_TAB
4200 "ld %C0,%b1+" CR_TAB
4205 return *l
= 8, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4206 "ld %A0,%b1+" CR_TAB
4207 "ld %B0,%b1+" CR_TAB
4208 "ld %C0,%b1+" CR_TAB
4210 TINY_SBIW (%I1
, %J1
, %o1
+3));
4215 out_movsi_r_mr (rtx_insn
*insn
, rtx op
[], int *l
)
4219 rtx base
= XEXP (src
, 0);
4220 int reg_dest
= true_regnum (dest
);
4221 int reg_base
= true_regnum (base
);
4230 return avr_out_movsi_r_mr_reg_no_disp_tiny (insn
, op
, l
);
4232 if (reg_base
== REG_X
) /* (R26) */
4234 if (reg_dest
== REG_X
)
4235 /* "ld r26,-X" is undefined */
4236 return *l
=7, ("adiw r26,3" CR_TAB
4239 "ld __tmp_reg__,-X" CR_TAB
4242 "mov r27,__tmp_reg__");
4243 else if (reg_dest
== REG_X
- 2)
4244 return *l
=5, ("ld %A0,X+" CR_TAB
4246 "ld __tmp_reg__,X+" CR_TAB
4248 "mov %C0,__tmp_reg__");
4249 else if (reg_unused_after (insn
, base
))
4250 return *l
=4, ("ld %A0,X+" CR_TAB
4255 return *l
=5, ("ld %A0,X+" CR_TAB
4263 if (reg_dest
== reg_base
)
4264 return *l
=5, ("ldd %D0,%1+3" CR_TAB
4265 "ldd %C0,%1+2" CR_TAB
4266 "ldd __tmp_reg__,%1+1" CR_TAB
4268 "mov %B0,__tmp_reg__");
4269 else if (reg_base
== reg_dest
+ 2)
4270 return *l
=5, ("ld %A0,%1" CR_TAB
4271 "ldd %B0,%1+1" CR_TAB
4272 "ldd __tmp_reg__,%1+2" CR_TAB
4273 "ldd %D0,%1+3" CR_TAB
4274 "mov %C0,__tmp_reg__");
4276 return *l
=4, ("ld %A0,%1" CR_TAB
4277 "ldd %B0,%1+1" CR_TAB
4278 "ldd %C0,%1+2" CR_TAB
4282 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4284 int disp
= INTVAL (XEXP (base
, 1));
4287 return avr_out_movsi_r_mr_reg_disp_tiny (insn
, op
, l
);
4289 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
4291 if (REGNO (XEXP (base
, 0)) != REG_Y
)
4292 fatal_insn ("incorrect insn:",insn
);
4294 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
4295 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
4296 "ldd %A0,Y+60" CR_TAB
4297 "ldd %B0,Y+61" CR_TAB
4298 "ldd %C0,Y+62" CR_TAB
4299 "ldd %D0,Y+63" CR_TAB
4302 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
4303 "sbci r29,hi8(-%o1)" CR_TAB
4305 "ldd %B0,Y+1" CR_TAB
4306 "ldd %C0,Y+2" CR_TAB
4307 "ldd %D0,Y+3" CR_TAB
4308 "subi r28,lo8(%o1)" CR_TAB
4309 "sbci r29,hi8(%o1)");
4312 reg_base
= true_regnum (XEXP (base
, 0));
4313 if (reg_base
== REG_X
)
4316 if (reg_dest
== REG_X
)
4319 /* "ld r26,-X" is undefined */
4320 return ("adiw r26,%o1+3" CR_TAB
4323 "ld __tmp_reg__,-X" CR_TAB
4326 "mov r27,__tmp_reg__");
4329 if (reg_dest
== REG_X
- 2)
4330 return ("adiw r26,%o1" CR_TAB
4333 "ld __tmp_reg__,X+" CR_TAB
4335 "mov r26,__tmp_reg__");
4337 return ("adiw r26,%o1" CR_TAB
4344 if (reg_dest
== reg_base
)
4345 return *l
=5, ("ldd %D0,%D1" CR_TAB
4346 "ldd %C0,%C1" CR_TAB
4347 "ldd __tmp_reg__,%B1" CR_TAB
4348 "ldd %A0,%A1" CR_TAB
4349 "mov %B0,__tmp_reg__");
4350 else if (reg_dest
== reg_base
- 2)
4351 return *l
=5, ("ldd %A0,%A1" CR_TAB
4352 "ldd %B0,%B1" CR_TAB
4353 "ldd __tmp_reg__,%C1" CR_TAB
4354 "ldd %D0,%D1" CR_TAB
4355 "mov %C0,__tmp_reg__");
4356 return *l
=4, ("ldd %A0,%A1" CR_TAB
4357 "ldd %B0,%B1" CR_TAB
4358 "ldd %C0,%C1" CR_TAB
4361 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4362 return *l
=4, ("ld %D0,%1" CR_TAB
4366 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4367 return *l
=4, ("ld %A0,%1" CR_TAB
4371 else if (CONSTANT_ADDRESS_P (base
))
4373 if (io_address_operand (base
, SImode
))
4376 return ("in %A0,%i1" CR_TAB
4377 "in %B0,%i1+1" CR_TAB
4378 "in %C0,%i1+2" CR_TAB
4383 *l
= AVR_TINY
? 4 : 8;
4384 return ("lds %A0,%m1" CR_TAB
4385 "lds %B0,%m1+1" CR_TAB
4386 "lds %C0,%m1+2" CR_TAB
4391 fatal_insn ("unknown move insn:",insn
);
4396 avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
4400 rtx base
= XEXP (dest
, 0);
4401 int reg_base
= true_regnum (base
);
4402 int reg_src
= true_regnum (src
);
4404 if (reg_base
== reg_src
)
4406 /* "ld r26,-X" is undefined */
4407 if (reg_unused_after (insn
, base
))
4409 return *l
= 7, ("mov __tmp_reg__, %B1" CR_TAB
4411 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
4412 "st %0+,__tmp_reg__" CR_TAB
4418 return *l
= 9, ("mov __tmp_reg__, %B1" CR_TAB
4420 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
4421 "st %0+,__tmp_reg__" CR_TAB
4424 TINY_SBIW (%E0
, %F0
, 3));
4427 else if (reg_base
== reg_src
+ 2)
4429 if (reg_unused_after (insn
, base
))
4430 return *l
= 7, ("mov __zero_reg__,%C1" CR_TAB
4431 "mov __tmp_reg__,%D1" CR_TAB
4434 "st %0+,__zero_reg__" CR_TAB
4435 "st %0,__tmp_reg__" CR_TAB
4436 "clr __zero_reg__");
4438 return *l
= 9, ("mov __zero_reg__,%C1" CR_TAB
4439 "mov __tmp_reg__,%D1" CR_TAB
4442 "st %0+,__zero_reg__" CR_TAB
4443 "st %0,__tmp_reg__" CR_TAB
4444 "clr __zero_reg__" CR_TAB
4445 TINY_SBIW (%E0
, %F0
, 3));
4448 return *l
= 6, ("st %0+,%A1" CR_TAB
4452 TINY_SBIW (%E0
, %F0
, 3));
4456 avr_out_movsi_mr_r_reg_disp_tiny (rtx op
[], int *l
)
4460 rtx base
= XEXP (dest
, 0);
4461 int reg_base
= REGNO (XEXP (base
, 0));
4462 int reg_src
=true_regnum (src
);
4464 if (reg_base
== reg_src
)
4467 return ("mov __tmp_reg__,%A2" CR_TAB
4468 "mov __zero_reg__,%B2" CR_TAB
4469 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4470 "st %b0+,__tmp_reg__" CR_TAB
4471 "st %b0+,__zero_reg__" CR_TAB
4472 "st %b0+,%C2" CR_TAB
4474 "clr __zero_reg__" CR_TAB
4475 TINY_SBIW (%I0
, %J0
, %o0
+3));
4477 else if (reg_src
== reg_base
- 2)
4480 return ("mov __tmp_reg__,%C2" CR_TAB
4481 "mov __zero_reg__,%D2" CR_TAB
4482 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4483 "st %b0+,%A0" CR_TAB
4484 "st %b0+,%B0" CR_TAB
4485 "st %b0+,__tmp_reg__" CR_TAB
4486 "st %b0,__zero_reg__" CR_TAB
4487 "clr __zero_reg__" CR_TAB
4488 TINY_SBIW (%I0
, %J0
, %o0
+3));
4491 return (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4492 "st %b0+,%A1" CR_TAB
4493 "st %b0+,%B1" CR_TAB
4494 "st %b0+,%C1" CR_TAB
4496 TINY_SBIW (%I0
, %J0
, %o0
+3));
4500 out_movsi_mr_r (rtx_insn
*insn
, rtx op
[], int *l
)
4504 rtx base
= XEXP (dest
, 0);
4505 int reg_base
= true_regnum (base
);
4506 int reg_src
= true_regnum (src
);
4512 if (CONSTANT_ADDRESS_P (base
))
4514 if (io_address_operand (base
, SImode
))
4516 return *l
=4,("out %i0, %A1" CR_TAB
4517 "out %i0+1,%B1" CR_TAB
4518 "out %i0+2,%C1" CR_TAB
4523 *l
= AVR_TINY
? 4 : 8;
4524 return ("sts %m0,%A1" CR_TAB
4525 "sts %m0+1,%B1" CR_TAB
4526 "sts %m0+2,%C1" CR_TAB
4531 if (reg_base
> 0) /* (r) */
4534 return avr_out_movsi_mr_r_reg_no_disp_tiny (insn
, op
, l
);
4536 if (reg_base
== REG_X
) /* (R26) */
4538 if (reg_src
== REG_X
)
4540 /* "st X+,r26" is undefined */
4541 if (reg_unused_after (insn
, base
))
4542 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
4545 "st X+,__tmp_reg__" CR_TAB
4549 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
4552 "st X+,__tmp_reg__" CR_TAB
4557 else if (reg_base
== reg_src
+ 2)
4559 if (reg_unused_after (insn
, base
))
4560 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
4561 "mov __tmp_reg__,%D1" CR_TAB
4564 "st %0+,__zero_reg__" CR_TAB
4565 "st %0,__tmp_reg__" CR_TAB
4566 "clr __zero_reg__");
4568 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
4569 "mov __tmp_reg__,%D1" CR_TAB
4572 "st %0+,__zero_reg__" CR_TAB
4573 "st %0,__tmp_reg__" CR_TAB
4574 "clr __zero_reg__" CR_TAB
4577 return *l
=5, ("st %0+,%A1" CR_TAB
4584 return *l
=4, ("st %0,%A1" CR_TAB
4585 "std %0+1,%B1" CR_TAB
4586 "std %0+2,%C1" CR_TAB
4589 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4591 int disp
= INTVAL (XEXP (base
, 1));
4594 return avr_out_movsi_mr_r_reg_disp_tiny (op
, l
);
4596 reg_base
= REGNO (XEXP (base
, 0));
4597 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4599 if (reg_base
!= REG_Y
)
4600 fatal_insn ("incorrect insn:",insn
);
4602 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4603 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
4604 "std Y+60,%A1" CR_TAB
4605 "std Y+61,%B1" CR_TAB
4606 "std Y+62,%C1" CR_TAB
4607 "std Y+63,%D1" CR_TAB
4610 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
4611 "sbci r29,hi8(-%o0)" CR_TAB
4613 "std Y+1,%B1" CR_TAB
4614 "std Y+2,%C1" CR_TAB
4615 "std Y+3,%D1" CR_TAB
4616 "subi r28,lo8(%o0)" CR_TAB
4617 "sbci r29,hi8(%o0)");
4619 if (reg_base
== REG_X
)
4622 if (reg_src
== REG_X
)
4625 return ("mov __tmp_reg__,r26" CR_TAB
4626 "mov __zero_reg__,r27" CR_TAB
4627 "adiw r26,%o0" CR_TAB
4628 "st X+,__tmp_reg__" CR_TAB
4629 "st X+,__zero_reg__" CR_TAB
4632 "clr __zero_reg__" CR_TAB
4635 else if (reg_src
== REG_X
- 2)
4638 return ("mov __tmp_reg__,r26" CR_TAB
4639 "mov __zero_reg__,r27" CR_TAB
4640 "adiw r26,%o0" CR_TAB
4643 "st X+,__tmp_reg__" CR_TAB
4644 "st X,__zero_reg__" CR_TAB
4645 "clr __zero_reg__" CR_TAB
4649 return ("adiw r26,%o0" CR_TAB
4656 return *l
=4, ("std %A0,%A1" CR_TAB
4657 "std %B0,%B1" CR_TAB
4658 "std %C0,%C1" CR_TAB
4661 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4662 return *l
=4, ("st %0,%D1" CR_TAB
4666 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4667 return *l
=4, ("st %0,%A1" CR_TAB
4671 fatal_insn ("unknown move insn:",insn
);
4676 output_movsisf (rtx_insn
*insn
, rtx operands
[], int *l
)
4679 rtx dest
= operands
[0];
4680 rtx src
= operands
[1];
4683 if (avr_mem_flash_p (src
)
4684 || avr_mem_flash_p (dest
))
4686 return avr_out_lpm (insn
, operands
, real_l
);
4692 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
4695 if (REG_P (src
)) /* mov r,r */
4697 if (true_regnum (dest
) > true_regnum (src
))
4702 return ("movw %C0,%C1" CR_TAB
4706 return ("mov %D0,%D1" CR_TAB
4707 "mov %C0,%C1" CR_TAB
4708 "mov %B0,%B1" CR_TAB
4716 return ("movw %A0,%A1" CR_TAB
4720 return ("mov %A0,%A1" CR_TAB
4721 "mov %B0,%B1" CR_TAB
4722 "mov %C0,%C1" CR_TAB
4726 else if (CONSTANT_P (src
))
4728 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
4730 else if (MEM_P (src
))
4731 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
4733 else if (MEM_P (dest
))
4737 if (src
== CONST0_RTX (GET_MODE (dest
)))
4738 operands
[1] = zero_reg_rtx
;
4740 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
4743 output_asm_insn (templ
, operands
);
4748 fatal_insn ("invalid insn:", insn
);
/* Handle loads of 24-bit types from memory to register.  */
4756 avr_out_load_psi_reg_no_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
4760 rtx base
= XEXP (src
, 0);
4761 int reg_dest
= true_regnum (dest
);
4762 int reg_base
= true_regnum (base
);
4764 if (reg_base
== reg_dest
)
4766 return avr_asm_len (TINY_ADIW (%E1
, %F1
, 2) CR_TAB
4768 "ld __tmp_reg__,-%1" CR_TAB
4769 TINY_SBIW (%E1
, %F1
, 1) CR_TAB
4771 "mov %B0,__tmp_reg__", op
, plen
, -8);
4775 avr_asm_len ("ld %A0,%1+" CR_TAB
4777 "ld %C0,%1", op
, plen
, -3);
4779 if (reg_dest
!= reg_base
- 2
4780 && !reg_unused_after (insn
, base
))
4782 avr_asm_len (TINY_SBIW (%E1
, %F1
, 2), op
, plen
, 2);
4789 avr_out_load_psi_reg_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
4793 rtx base
= XEXP (src
, 0);
4794 int reg_dest
= true_regnum (dest
);
4795 int reg_base
= true_regnum (base
);
4797 reg_base
= true_regnum (XEXP (base
, 0));
4798 if (reg_base
== reg_dest
)
4800 return avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
+2) CR_TAB
4802 "ld __tmp_reg__,-%b1" CR_TAB
4803 TINY_SBIW (%I1
, %J1
, 1) CR_TAB
4805 "mov %B0,__tmp_reg__", op
, plen
, -8);
4809 avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4810 "ld %A0,%b1+" CR_TAB
4811 "ld %B0,%b1+" CR_TAB
4812 "ld %C0,%b1", op
, plen
, -5);
4814 if (reg_dest
!= reg_base
- 2
4815 && !reg_unused_after (insn
, XEXP (base
, 0)))
4816 avr_asm_len (TINY_SBIW (%I1
, %J1
, %o1
+2), op
, plen
, 2);
4823 avr_out_load_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4827 rtx base
= XEXP (src
, 0);
4828 int reg_dest
= true_regnum (dest
);
4829 int reg_base
= true_regnum (base
);
4834 return avr_out_load_psi_reg_no_disp_tiny (insn
, op
, plen
);
4836 if (reg_base
== REG_X
) /* (R26) */
4838 if (reg_dest
== REG_X
)
4839 /* "ld r26,-X" is undefined */
4840 return avr_asm_len ("adiw r26,2" CR_TAB
4842 "ld __tmp_reg__,-X" CR_TAB
4845 "mov r27,__tmp_reg__", op
, plen
, -6);
4848 avr_asm_len ("ld %A0,X+" CR_TAB
4850 "ld %C0,X", op
, plen
, -3);
4852 if (reg_dest
!= REG_X
- 2
4853 && !reg_unused_after (insn
, base
))
4855 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
4861 else /* reg_base != REG_X */
4863 if (reg_dest
== reg_base
)
4864 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
4865 "ldd __tmp_reg__,%1+1" CR_TAB
4867 "mov %B0,__tmp_reg__", op
, plen
, -4);
4869 return avr_asm_len ("ld %A0,%1" CR_TAB
4870 "ldd %B0,%1+1" CR_TAB
4871 "ldd %C0,%1+2", op
, plen
, -3);
4874 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4876 int disp
= INTVAL (XEXP (base
, 1));
4879 return avr_out_load_psi_reg_disp_tiny (insn
, op
, plen
);
4881 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
4883 if (REGNO (XEXP (base
, 0)) != REG_Y
)
4884 fatal_insn ("incorrect insn:",insn
);
4886 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
4887 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
4888 "ldd %A0,Y+61" CR_TAB
4889 "ldd %B0,Y+62" CR_TAB
4890 "ldd %C0,Y+63" CR_TAB
4891 "sbiw r28,%o1-61", op
, plen
, -5);
4893 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
4894 "sbci r29,hi8(-%o1)" CR_TAB
4896 "ldd %B0,Y+1" CR_TAB
4897 "ldd %C0,Y+2" CR_TAB
4898 "subi r28,lo8(%o1)" CR_TAB
4899 "sbci r29,hi8(%o1)", op
, plen
, -7);
4902 reg_base
= true_regnum (XEXP (base
, 0));
4903 if (reg_base
== REG_X
)
4906 if (reg_dest
== REG_X
)
4908 /* "ld r26,-X" is undefined */
4909 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
4911 "ld __tmp_reg__,-X" CR_TAB
4914 "mov r27,__tmp_reg__", op
, plen
, -6);
4917 avr_asm_len ("adiw r26,%o1" CR_TAB
4920 "ld %C0,X", op
, plen
, -4);
4922 if (reg_dest
!= REG_W
4923 && !reg_unused_after (insn
, XEXP (base
, 0)))
4924 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
4929 if (reg_dest
== reg_base
)
4930 return avr_asm_len ("ldd %C0,%C1" CR_TAB
4931 "ldd __tmp_reg__,%B1" CR_TAB
4932 "ldd %A0,%A1" CR_TAB
4933 "mov %B0,__tmp_reg__", op
, plen
, -4);
4935 return avr_asm_len ("ldd %A0,%A1" CR_TAB
4936 "ldd %B0,%B1" CR_TAB
4937 "ldd %C0,%C1", op
, plen
, -3);
4939 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4940 return avr_asm_len ("ld %C0,%1" CR_TAB
4942 "ld %A0,%1", op
, plen
, -3);
4943 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4944 return avr_asm_len ("ld %A0,%1" CR_TAB
4946 "ld %C0,%1", op
, plen
, -3);
4948 else if (CONSTANT_ADDRESS_P (base
))
4950 int n_words
= AVR_TINY
? 3 : 6;
4951 return avr_asm_len ("lds %A0,%m1" CR_TAB
4952 "lds %B0,%m1+1" CR_TAB
4953 "lds %C0,%m1+2", op
, plen
, -n_words
);
4956 fatal_insn ("unknown move insn:",insn
);
4962 avr_out_store_psi_reg_no_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
4966 rtx base
= XEXP (dest
, 0);
4967 int reg_base
= true_regnum (base
);
4968 int reg_src
= true_regnum (src
);
4970 if (reg_base
== reg_src
)
4972 avr_asm_len ("st %0,%A1" CR_TAB
4973 "mov __tmp_reg__,%B1" CR_TAB
4974 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
/* st X+, r27 is undefined */
4975 "st %0+,__tmp_reg__" CR_TAB
4976 "st %0,%C1", op
, plen
, -6);
4979 else if (reg_src
== reg_base
- 2)
4981 avr_asm_len ("st %0,%A1" CR_TAB
4982 "mov __tmp_reg__,%C1" CR_TAB
4983 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
4985 "st %0,__tmp_reg__", op
, plen
, 6);
4989 avr_asm_len ("st %0+,%A1" CR_TAB
4991 "st %0,%C1", op
, plen
, -3);
4994 if (!reg_unused_after (insn
, base
))
4995 avr_asm_len (TINY_SBIW (%E0
, %F0
, 2), op
, plen
, 2);
5001 avr_out_store_psi_reg_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
5005 rtx base
= XEXP (dest
, 0);
5006 int reg_base
= REGNO (XEXP (base
, 0));
5007 int reg_src
= true_regnum (src
);
5009 if (reg_src
== reg_base
)
5010 avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
5011 "mov __zero_reg__,%B1" CR_TAB
5012 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5013 "st %b0+,__tmp_reg__" CR_TAB
5014 "st %b0+,__zero_reg__" CR_TAB
5016 "clr __zero_reg__", op
, plen
, -8);
5017 else if (reg_src
== reg_base
- 2)
5018 avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
5019 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5020 "st %b0+,%A1" CR_TAB
5021 "st %b0+,%B1" CR_TAB
5022 "st %b0,__tmp_reg__", op
, plen
, -6);
5024 avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5025 "st %b0+,%A1" CR_TAB
5026 "st %b0+,%B1" CR_TAB
5027 "st %b0,%C1", op
, plen
, -5);
5029 if (!reg_unused_after (insn
, XEXP (base
, 0)))
5030 avr_asm_len (TINY_SBIW (%I0
, %J0
, %o0
+2), op
, plen
, 2);
/* Handle store of 24-bit type from register or zero to memory.  */
5038 avr_out_store_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5042 rtx base
= XEXP (dest
, 0);
5043 int reg_base
= true_regnum (base
);
5045 if (CONSTANT_ADDRESS_P (base
))
5047 int n_words
= AVR_TINY
? 3 : 6;
5048 return avr_asm_len ("sts %m0,%A1" CR_TAB
5049 "sts %m0+1,%B1" CR_TAB
5050 "sts %m0+2,%C1", op
, plen
, -n_words
);
5053 if (reg_base
> 0) /* (r) */
5056 return avr_out_store_psi_reg_no_disp_tiny (insn
, op
, plen
);
5058 if (reg_base
== REG_X
) /* (R26) */
5060 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
5062 avr_asm_len ("st %0+,%A1" CR_TAB
5064 "st %0,%C1", op
, plen
, -3);
5066 if (!reg_unused_after (insn
, base
))
5067 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
5072 return avr_asm_len ("st %0,%A1" CR_TAB
5073 "std %0+1,%B1" CR_TAB
5074 "std %0+2,%C1", op
, plen
, -3);
5076 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
5078 int disp
= INTVAL (XEXP (base
, 1));
5081 return avr_out_store_psi_reg_disp_tiny (insn
, op
, plen
);
5083 reg_base
= REGNO (XEXP (base
, 0));
5085 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
5087 if (reg_base
!= REG_Y
)
5088 fatal_insn ("incorrect insn:",insn
);
5090 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
5091 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
5092 "std Y+61,%A1" CR_TAB
5093 "std Y+62,%B1" CR_TAB
5094 "std Y+63,%C1" CR_TAB
5095 "sbiw r28,%o0-61", op
, plen
, -5);
5097 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5098 "sbci r29,hi8(-%o0)" CR_TAB
5100 "std Y+1,%B1" CR_TAB
5101 "std Y+2,%C1" CR_TAB
5102 "subi r28,lo8(%o0)" CR_TAB
5103 "sbci r29,hi8(%o0)", op
, plen
, -7);
5105 if (reg_base
== REG_X
)
5108 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
5110 avr_asm_len ("adiw r26,%o0" CR_TAB
5113 "st X,%C1", op
, plen
, -4);
5115 if (!reg_unused_after (insn
, XEXP (base
, 0)))
5116 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
5121 return avr_asm_len ("std %A0,%A1" CR_TAB
5122 "std %B0,%B1" CR_TAB
5123 "std %C0,%C1", op
, plen
, -3);
5125 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5126 return avr_asm_len ("st %0,%C1" CR_TAB
5128 "st %0,%A1", op
, plen
, -3);
5129 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5130 return avr_asm_len ("st %0,%A1" CR_TAB
5132 "st %0,%C1", op
, plen
, -3);
5134 fatal_insn ("unknown move insn:",insn
);
/* Move around 24-bit stuff.  */
5142 avr_out_movpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5147 if (avr_mem_flash_p (src
)
5148 || avr_mem_flash_p (dest
))
5150 return avr_out_lpm (insn
, op
, plen
);
5153 if (register_operand (dest
, VOIDmode
))
5155 if (register_operand (src
, VOIDmode
)) /* mov r,r */
5157 if (true_regnum (dest
) > true_regnum (src
))
5159 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
5162 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
5164 return avr_asm_len ("mov %B0,%B1" CR_TAB
5165 "mov %A0,%A1", op
, plen
, 2);
5170 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
5172 avr_asm_len ("mov %A0,%A1" CR_TAB
5173 "mov %B0,%B1", op
, plen
, -2);
5175 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
5178 else if (CONSTANT_P (src
))
5180 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
5182 else if (MEM_P (src
))
5183 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
5185 else if (MEM_P (dest
))
5190 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
5192 return avr_out_store_psi (insn
, xop
, plen
);
5195 fatal_insn ("invalid insn:", insn
);
5200 avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
5204 rtx x
= XEXP (dest
, 0);
5206 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
5208 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
5209 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5210 "st %b0,__tmp_reg__", op
, plen
, -4);
5214 avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
5215 "st %b0,%1", op
, plen
, -3);
5218 if (!reg_unused_after (insn
, XEXP (x
,0)))
5219 avr_asm_len (TINY_SBIW (%I0
, %J0
, %o0
), op
, plen
, 2);
5225 out_movqi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
5229 rtx x
= XEXP (dest
, 0);
5231 if (CONSTANT_ADDRESS_P (x
))
5233 int n_words
= AVR_TINY
? 1 : 2;
5234 return optimize
> 0 && io_address_operand (x
, QImode
)
5235 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
5236 : avr_asm_len ("sts %m0,%1", op
, plen
, -n_words
);
5238 else if (GET_CODE (x
) == PLUS
5239 && REG_P (XEXP (x
, 0))
5240 && CONST_INT_P (XEXP (x
, 1)))
5242 /* memory access by reg+disp */
5244 int disp
= INTVAL (XEXP (x
, 1));
5247 return avr_out_movqi_mr_r_reg_disp_tiny (insn
, op
, plen
);
5249 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
5251 if (REGNO (XEXP (x
, 0)) != REG_Y
)
5252 fatal_insn ("incorrect insn:",insn
);
5254 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
5255 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
5256 "std Y+63,%1" CR_TAB
5257 "sbiw r28,%o0-63", op
, plen
, -3);
5259 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5260 "sbci r29,hi8(-%o0)" CR_TAB
5262 "subi r28,lo8(%o0)" CR_TAB
5263 "sbci r29,hi8(%o0)", op
, plen
, -5);
5265 else if (REGNO (XEXP (x
,0)) == REG_X
)
5267 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
5269 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
5270 "adiw r26,%o0" CR_TAB
5271 "st X,__tmp_reg__", op
, plen
, -3);
5275 avr_asm_len ("adiw r26,%o0" CR_TAB
5276 "st X,%1", op
, plen
, -2);
5279 if (!reg_unused_after (insn
, XEXP (x
,0)))
5280 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
5285 return avr_asm_len ("std %0,%1", op
, plen
, -1);
5288 return avr_asm_len ("st %0,%1", op
, plen
, -1);
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  */
5296 avr_out_movhi_mr_r_xmega (rtx_insn
*insn
, rtx op
[], int *plen
)
5300 rtx base
= XEXP (dest
, 0);
5301 int reg_base
= true_regnum (base
);
5302 int reg_src
= true_regnum (src
);
5304 /* "volatile" forces writing low byte first, even if less efficient,
5305 for correct operation with 16-bit I/O registers like SP. */
5306 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
5308 if (CONSTANT_ADDRESS_P (base
))
5310 int n_words
= AVR_TINY
? 2 : 4;
5311 return optimize
> 0 && io_address_operand (base
, HImode
)
5312 ? avr_asm_len ("out %i0,%A1" CR_TAB
5313 "out %i0+1,%B1", op
, plen
, -2)
5315 : avr_asm_len ("sts %m0,%A1" CR_TAB
5316 "sts %m0+1,%B1", op
, plen
, -n_words
);
5321 if (reg_base
!= REG_X
)
5322 return avr_asm_len ("st %0,%A1" CR_TAB
5323 "std %0+1,%B1", op
, plen
, -2);
5325 if (reg_src
== REG_X
)
5326 /* "st X+,r26" and "st -X,r26" are undefined. */
5327 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5330 "st X,__tmp_reg__", op
, plen
, -4);
5332 avr_asm_len ("st X+,%A1" CR_TAB
5333 "st X,%B1", op
, plen
, -2);
5335 return reg_unused_after (insn
, base
)
5337 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
5339 else if (GET_CODE (base
) == PLUS
)
5341 int disp
= INTVAL (XEXP (base
, 1));
5342 reg_base
= REGNO (XEXP (base
, 0));
5343 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
5345 if (reg_base
!= REG_Y
)
5346 fatal_insn ("incorrect insn:",insn
);
5348 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
5349 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
5350 "std Y+62,%A1" CR_TAB
5351 "std Y+63,%B1" CR_TAB
5352 "sbiw r28,%o0-62", op
, plen
, -4)
5354 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5355 "sbci r29,hi8(-%o0)" CR_TAB
5357 "std Y+1,%B1" CR_TAB
5358 "subi r28,lo8(%o0)" CR_TAB
5359 "sbci r29,hi8(%o0)", op
, plen
, -6);
5362 if (reg_base
!= REG_X
)
5363 return avr_asm_len ("std %A0,%A1" CR_TAB
5364 "std %B0,%B1", op
, plen
, -2);
5366 return reg_src
== REG_X
5367 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
5368 "mov __zero_reg__,r27" CR_TAB
5369 "adiw r26,%o0" CR_TAB
5370 "st X+,__tmp_reg__" CR_TAB
5371 "st X,__zero_reg__" CR_TAB
5372 "clr __zero_reg__" CR_TAB
5373 "sbiw r26,%o0+1", op
, plen
, -7)
5375 : avr_asm_len ("adiw r26,%o0" CR_TAB
5378 "sbiw r26,%o0+1", op
, plen
, -4);
5380 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5382 if (!mem_volatile_p
)
5383 return avr_asm_len ("st %0,%B1" CR_TAB
5384 "st %0,%A1", op
, plen
, -2);
5386 return REGNO (XEXP (base
, 0)) == REG_X
5387 ? avr_asm_len ("sbiw r26,2" CR_TAB
5390 "sbiw r26,1", op
, plen
, -4)
5392 : avr_asm_len ("sbiw %r0,2" CR_TAB
5394 "std %p0+1,%B1", op
, plen
, -3);
5396 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5398 return avr_asm_len ("st %0,%A1" CR_TAB
5399 "st %0,%B1", op
, plen
, -2);
5402 fatal_insn ("unknown move insn:",insn
);
5407 avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
5411 rtx base
= XEXP (dest
, 0);
5412 int reg_base
= true_regnum (base
);
5413 int reg_src
= true_regnum (src
);
5414 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
5416 if (reg_base
== reg_src
)
5418 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
5419 ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
5421 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
5422 "st %0,__tmp_reg__", op
, plen
, -5)
5423 : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
5424 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
5425 "st %0,__tmp_reg__" CR_TAB
5426 TINY_SBIW (%E0
, %F0
, 1) CR_TAB
5427 "st %0, %A1", op
, plen
, -7);
5430 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
5431 ? avr_asm_len ("st %0+,%A1" CR_TAB
5432 "st %0,%B1", op
, plen
, -2)
5433 : avr_asm_len (TINY_ADIW (%E0
, %F0
, 1) CR_TAB
5435 "st -%0,%A1", op
, plen
, -4);
5439 avr_out_movhi_mr_r_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
5443 rtx base
= XEXP (dest
, 0);
5444 int reg_base
= REGNO (XEXP (base
, 0));
5445 int reg_src
= true_regnum (src
);
5447 if (reg_src
== reg_base
)
5448 avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
5449 "mov __zero_reg__,%B1" CR_TAB
5450 TINY_ADIW (%I0
, %J0
, %o0
+1) CR_TAB
5451 "st %b0,__zero_reg__" CR_TAB
5452 "st -%b0,__tmp_reg__" CR_TAB
5453 "clr __zero_reg__", op
, plen
, -7);
5455 avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
+1) CR_TAB
5457 "st -%b0,%A1", op
, plen
, -4);
5459 if (!reg_unused_after (insn
, XEXP (base
, 0)))
5460 avr_asm_len (TINY_SBIW (%I0
, %J0
, %o0
), op
, plen
, 2);
5466 avr_out_movhi_mr_r_post_inc_tiny (rtx op
[], int *plen
)
5468 return avr_asm_len (TINY_ADIW (%I0
, %J0
, 1) CR_TAB
5470 "st -%p0,%A1" CR_TAB
5471 TINY_ADIW (%I0
, %J0
, 2), op
, plen
, -6);
5475 out_movhi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
5479 rtx base
= XEXP (dest
, 0);
5480 int reg_base
= true_regnum (base
);
5481 int reg_src
= true_regnum (src
);
5484 /* "volatile" forces writing high-byte first (no-xmega) resp.
5485 low-byte first (xmega) even if less efficient, for correct
5486 operation with 16-bit I/O registers like. */
5489 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
5491 mem_volatile_p
= MEM_VOLATILE_P (dest
);
5493 if (CONSTANT_ADDRESS_P (base
))
5495 int n_words
= AVR_TINY
? 2 : 4;
5496 return optimize
> 0 && io_address_operand (base
, HImode
)
5497 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
5498 "out %i0,%A1", op
, plen
, -2)
5500 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
5501 "sts %m0,%A1", op
, plen
, -n_words
);
5507 return avr_out_movhi_mr_r_reg_no_disp_tiny (insn
, op
, plen
);
5509 if (reg_base
!= REG_X
)
5510 return avr_asm_len ("std %0+1,%B1" CR_TAB
5511 "st %0,%A1", op
, plen
, -2);
5513 if (reg_src
== REG_X
)
5514 /* "st X+,r26" and "st -X,r26" are undefined. */
5515 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
5516 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5519 "st X,__tmp_reg__", op
, plen
, -4)
5521 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5523 "st X,__tmp_reg__" CR_TAB
5525 "st X,r26", op
, plen
, -5);
5527 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
5528 ? avr_asm_len ("st X+,%A1" CR_TAB
5529 "st X,%B1", op
, plen
, -2)
5530 : avr_asm_len ("adiw r26,1" CR_TAB
5532 "st -X,%A1", op
, plen
, -3);
5534 else if (GET_CODE (base
) == PLUS
)
5536 int disp
= INTVAL (XEXP (base
, 1));
5539 return avr_out_movhi_mr_r_reg_disp_tiny (insn
, op
, plen
);
5541 reg_base
= REGNO (XEXP (base
, 0));
5542 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
5544 if (reg_base
!= REG_Y
)
5545 fatal_insn ("incorrect insn:",insn
);
5547 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
5548 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
5549 "std Y+63,%B1" CR_TAB
5550 "std Y+62,%A1" CR_TAB
5551 "sbiw r28,%o0-62", op
, plen
, -4)
5553 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5554 "sbci r29,hi8(-%o0)" CR_TAB
5555 "std Y+1,%B1" CR_TAB
5557 "subi r28,lo8(%o0)" CR_TAB
5558 "sbci r29,hi8(%o0)", op
, plen
, -6);
5561 if (reg_base
!= REG_X
)
5562 return avr_asm_len ("std %B0,%B1" CR_TAB
5563 "std %A0,%A1", op
, plen
, -2);
5565 return reg_src
== REG_X
5566 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
5567 "mov __zero_reg__,r27" CR_TAB
5568 "adiw r26,%o0+1" CR_TAB
5569 "st X,__zero_reg__" CR_TAB
5570 "st -X,__tmp_reg__" CR_TAB
5571 "clr __zero_reg__" CR_TAB
5572 "sbiw r26,%o0", op
, plen
, -7)
5574 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
5577 "sbiw r26,%o0", op
, plen
, -4);
5579 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5581 return avr_asm_len ("st %0,%B1" CR_TAB
5582 "st %0,%A1", op
, plen
, -2);
5584 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5586 if (!mem_volatile_p
)
5587 return avr_asm_len ("st %0,%A1" CR_TAB
5588 "st %0,%B1", op
, plen
, -2);
5591 return avr_out_movhi_mr_r_post_inc_tiny (op
, plen
);
5593 return REGNO (XEXP (base
, 0)) == REG_X
5594 ? avr_asm_len ("adiw r26,1" CR_TAB
5597 "adiw r26,2", op
, plen
, -4)
5599 : avr_asm_len ("std %p0+1,%B1" CR_TAB
5601 "adiw %r0,2", op
, plen
, -3);
5603 fatal_insn ("unknown move insn:",insn
);
/* Return 1 if frame pointer for current function required.  */
5610 avr_frame_pointer_required_p (void)
5612 return (cfun
->calls_alloca
5613 || cfun
->calls_setjmp
5614 || cfun
->has_nonlocal_label
5615 || crtl
->args
.info
.nregs
== 0
5616 || get_frame_size () > 0);
5619 /* Returns the condition of compare insn INSN, or UNKNOWN. */
5622 compare_condition (rtx_insn
*insn
)
5624 rtx_insn
*next
= next_real_insn (insn
);
5626 if (next
&& JUMP_P (next
))
5628 rtx pat
= PATTERN (next
);
5629 rtx src
= SET_SRC (pat
);
5631 if (IF_THEN_ELSE
== GET_CODE (src
))
5632 return GET_CODE (XEXP (src
, 0));
5639 /* Returns true iff INSN is a tst insn that only tests the sign. */
5642 compare_sign_p (rtx_insn
*insn
)
5644 RTX_CODE cond
= compare_condition (insn
);
5645 return (cond
== GE
|| cond
== LT
);
5649 /* Returns true iff the next insn is a JUMP_INSN with a condition
5650 that needs to be swapped (GT, GTU, LE, LEU). */
5653 compare_diff_p (rtx_insn
*insn
)
5655 RTX_CODE cond
= compare_condition (insn
);
5656 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
5659 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
5662 compare_eq_p (rtx_insn
*insn
)
5664 RTX_CODE cond
= compare_condition (insn
);
5665 return (cond
== EQ
|| cond
== NE
);
5669 /* Output compare instruction
5671 compare (XOP[0], XOP[1])
5673 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
5674 XOP[2] is an 8-bit scratch register as needed.
5676 PLEN == NULL: Output instructions.
5677 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
5678 Don't output anything. */
5681 avr_out_compare (rtx_insn
*insn
, rtx
*xop
, int *plen
)
5683 /* Register to compare and value to compare against. */
5687 /* MODE of the comparison. */
5690 /* Number of bytes to operate on. */
5691 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
5693 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
5694 int clobber_val
= -1;
5696 /* Map fixed mode operands to integer operands with the same binary
5697 representation. They are easier to handle in the remainder. */
5699 if (CONST_FIXED_P (xval
))
5701 xreg
= avr_to_int_mode (xop
[0]);
5702 xval
= avr_to_int_mode (xop
[1]);
5705 mode
= GET_MODE (xreg
);
5707 gcc_assert (REG_P (xreg
));
5708 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
5709 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
5714 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
5715 against 0 by ORing the bytes. This is one instruction shorter.
5716 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
5717 and therefore don't use this. */
5719 if (!test_hard_reg_class (LD_REGS
, xreg
)
5720 && compare_eq_p (insn
)
5721 && reg_unused_after (insn
, xreg
))
5723 if (xval
== const1_rtx
)
5725 avr_asm_len ("dec %A0" CR_TAB
5726 "or %A0,%B0", xop
, plen
, 2);
5729 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
5732 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
5736 else if (xval
== constm1_rtx
)
5739 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
5742 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
5744 return avr_asm_len ("and %A0,%B0" CR_TAB
5745 "com %A0", xop
, plen
, 2);
5749 /* Comparisons == -1 and != -1 of a d-register that's used after the
5750 comparison. (If it's unused after we use CPI / SBCI or ADIW sequence
5751 from below.) Instead of CPI Rlo,-1 / LDI Rx,-1 / CPC Rhi,Rx we can
5752 use CPI Rlo,-1 / CPC Rhi,Rlo which is 1 instruction shorter:
5753 If CPI is true then Rlo contains -1 and we can use Rlo instead of Rx
5754 when CPC'ing the high part. If CPI is false then CPC cannot render
5755 the result to true. This also works for the more generic case where
5756 the constant is of the form 0xabab. */
5759 && xval
!= const0_rtx
5760 && test_hard_reg_class (LD_REGS
, xreg
)
5761 && compare_eq_p (insn
)
5762 && !reg_unused_after (insn
, xreg
))
5764 rtx xlo8
= simplify_gen_subreg (QImode
, xval
, mode
, 0);
5765 rtx xhi8
= simplify_gen_subreg (QImode
, xval
, mode
, 1);
5767 if (INTVAL (xlo8
) == INTVAL (xhi8
))
5772 return avr_asm_len ("cpi %A0,%1" CR_TAB
5773 "cpc %B0,%A0", xop
, plen
, 2);
5777 for (i
= 0; i
< n_bytes
; i
++)
5779 /* We compare byte-wise. */
5780 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
5781 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
5783 /* 8-bit value to compare with this byte. */
5784 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
5786 /* Registers R16..R31 can operate with immediate. */
5787 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
5790 xop
[1] = gen_int_mode (val8
, QImode
);
5792 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
5795 && test_hard_reg_class (ADDW_REGS
, reg8
))
5797 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
5799 if (IN_RANGE (val16
, 0, 63)
5801 || reg_unused_after (insn
, xreg
)))
5804 avr_asm_len (TINY_SBIW (%A0
, %B0
, %1), xop
, plen
, 2);
5806 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
5813 && IN_RANGE (val16
, -63, -1)
5814 && compare_eq_p (insn
)
5815 && reg_unused_after (insn
, xreg
))
5818 ? avr_asm_len (TINY_ADIW (%A0
, %B0
, %n1
), xop
, plen
, 2)
5819 : avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
5823 /* Comparing against 0 is easy. */
5828 ? "cp %0,__zero_reg__"
5829 : "cpc %0,__zero_reg__", xop
, plen
, 1);
5833 /* Upper registers can compare and subtract-with-carry immediates.
5834 Notice that compare instructions do the same as respective subtract
5835 instruction; the only difference is that comparisons don't write
5836 the result back to the target register. */
5842 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
5845 else if (reg_unused_after (insn
, xreg
))
5847 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
5852 /* Must load the value into the scratch register. */
5854 gcc_assert (REG_P (xop
[2]));
5856 if (clobber_val
!= (int) val8
)
5857 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
5858 clobber_val
= (int) val8
;
5862 : "cpc %0,%2", xop
, plen
, 1);
5869 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
5872 avr_out_compare64 (rtx_insn
*insn
, rtx
*op
, int *plen
)
5876 xop
[0] = gen_rtx_REG (DImode
, 18);
5880 return avr_out_compare (insn
, xop
, plen
);
5883 /* Output test instruction for HImode. */
5886 avr_out_tsthi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5888 if (compare_sign_p (insn
))
5890 avr_asm_len ("tst %B0", op
, plen
, -1);
5892 else if (reg_unused_after (insn
, op
[0])
5893 && compare_eq_p (insn
))
5895 /* Faster than sbiw if we can clobber the operand. */
5896 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
5900 avr_out_compare (insn
, op
, plen
);
5907 /* Output test instruction for PSImode. */
5910 avr_out_tstpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5912 if (compare_sign_p (insn
))
5914 avr_asm_len ("tst %C0", op
, plen
, -1);
5916 else if (reg_unused_after (insn
, op
[0])
5917 && compare_eq_p (insn
))
5919 /* Faster than sbiw if we can clobber the operand. */
5920 avr_asm_len ("or %A0,%B0" CR_TAB
5921 "or %A0,%C0", op
, plen
, -2);
5925 avr_out_compare (insn
, op
, plen
);
5932 /* Output test instruction for SImode. */
5935 avr_out_tstsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5937 if (compare_sign_p (insn
))
5939 avr_asm_len ("tst %D0", op
, plen
, -1);
5941 else if (reg_unused_after (insn
, op
[0])
5942 && compare_eq_p (insn
))
5944 /* Faster than sbiw if we can clobber the operand. */
5945 avr_asm_len ("or %A0,%B0" CR_TAB
5947 "or %A0,%D0", op
, plen
, -3);
5951 avr_out_compare (insn
, op
, plen
);
5958 /* Generate asm equivalent for various shifts. This only handles cases
5959 that are not already carefully hand-optimized in ?sh??i3_out.
5961 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
5962 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
5963 OPERANDS[3] is a QImode scratch register from LD regs if
5964 available and SCRATCH, otherwise (no scratch available)
5966 TEMPL is an assembler template that shifts by one position.
5967 T_LEN is the length of this template. */
5970 out_shift_with_cnt (const char *templ
, rtx_insn
*insn
, rtx operands
[],
5971 int *plen
, int t_len
)
5973 bool second_label
= true;
5974 bool saved_in_tmp
= false;
5975 bool use_zero_reg
= false;
5978 op
[0] = operands
[0];
5979 op
[1] = operands
[1];
5980 op
[2] = operands
[2];
5981 op
[3] = operands
[3];
5986 if (CONST_INT_P (operands
[2]))
5988 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
5989 && REG_P (operands
[3]));
5990 int count
= INTVAL (operands
[2]);
5991 int max_len
= 10; /* If larger than this, always use a loop. */
5996 if (count
< 8 && !scratch
)
5997 use_zero_reg
= true;
6000 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
6002 if (t_len
* count
<= max_len
)
6004 /* Output shifts inline with no loop - faster. */
6007 avr_asm_len (templ
, op
, plen
, t_len
);
6014 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
6016 else if (use_zero_reg
)
6018 /* Hack to save one word: use __zero_reg__ as loop counter.
6019 Set one bit, then shift in a loop until it is 0 again. */
6021 op
[3] = zero_reg_rtx
;
6023 avr_asm_len ("set" CR_TAB
6024 "bld %3,%2-1", op
, plen
, 2);
6028 /* No scratch register available, use one from LD_REGS (saved in
6029 __tmp_reg__) that doesn't overlap with registers to shift. */
6031 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
6032 op
[4] = tmp_reg_rtx
;
6033 saved_in_tmp
= true;
6035 avr_asm_len ("mov %4,%3" CR_TAB
6036 "ldi %3,%2", op
, plen
, 2);
6039 second_label
= false;
6041 else if (MEM_P (op
[2]))
6045 op_mov
[0] = op
[3] = tmp_reg_rtx
;
6048 out_movqi_r_mr (insn
, op_mov
, plen
);
6050 else if (register_operand (op
[2], QImode
))
6054 if (!reg_unused_after (insn
, op
[2])
6055 || reg_overlap_mentioned_p (op
[0], op
[2]))
6057 op
[3] = tmp_reg_rtx
;
6058 avr_asm_len ("mov %3,%2", op
, plen
, 1);
6062 fatal_insn ("bad shift insn:", insn
);
6065 avr_asm_len ("rjmp 2f", op
, plen
, 1);
6067 avr_asm_len ("1:", op
, plen
, 0);
6068 avr_asm_len (templ
, op
, plen
, t_len
);
6071 avr_asm_len ("2:", op
, plen
, 0);
6073 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
6074 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
6077 avr_asm_len ("mov %3,%4", op
, plen
, 1);
6081 /* 8bit shift left ((char)x << i) */
6084 ashlqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6086 if (GET_CODE (operands
[2]) == CONST_INT
)
6093 switch (INTVAL (operands
[2]))
6096 if (INTVAL (operands
[2]) < 8)
6108 return ("lsl %0" CR_TAB
6113 return ("lsl %0" CR_TAB
6118 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6121 return ("swap %0" CR_TAB
6125 return ("lsl %0" CR_TAB
6131 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6134 return ("swap %0" CR_TAB
6139 return ("lsl %0" CR_TAB
6146 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6149 return ("swap %0" CR_TAB
6155 return ("lsl %0" CR_TAB
6164 return ("ror %0" CR_TAB
6169 else if (CONSTANT_P (operands
[2]))
6170 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
6172 out_shift_with_cnt ("lsl %0",
6173 insn
, operands
, len
, 1);
/* NOTE(review): the following block is a corrupted extraction — statements
   are split across lines, original-file line numbers are fused into the
   text, and several asm sequences are missing entirely.  The code is left
   byte-identical; restore this function from version control rather than
   editing it here.  */
6178 /* 16bit shift left ((short)x << i) */
6181 ashlhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6183 if (GET_CODE (operands
[2]) == CONST_INT
)
6185 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
6186 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
6193 switch (INTVAL (operands
[2]))
6196 if (INTVAL (operands
[2]) < 16)
6200 return ("clr %B0" CR_TAB
6204 if (optimize_size
&& scratch
)
6209 return ("swap %A0" CR_TAB
6211 "andi %B0,0xf0" CR_TAB
6212 "eor %B0,%A0" CR_TAB
6213 "andi %A0,0xf0" CR_TAB
6219 return ("swap %A0" CR_TAB
6221 "ldi %3,0xf0" CR_TAB
6223 "eor %B0,%A0" CR_TAB
6227 break;  /* optimize_size ? 6 : 8 */
6231 break;  /* scratch ? 5 : 6 */
6235 return ("lsl %A0" CR_TAB
6239 "andi %B0,0xf0" CR_TAB
6240 "eor %B0,%A0" CR_TAB
6241 "andi %A0,0xf0" CR_TAB
6247 return ("lsl %A0" CR_TAB
6251 "ldi %3,0xf0" CR_TAB
6253 "eor %B0,%A0" CR_TAB
6261 break;  /* scratch ? 5 : 6 */
6263 return ("clr __tmp_reg__" CR_TAB
6266 "ror __tmp_reg__" CR_TAB
6269 "ror __tmp_reg__" CR_TAB
6270 "mov %B0,%A0" CR_TAB
6271 "mov %A0,__tmp_reg__");
6275 return ("lsr %B0" CR_TAB
6276 "mov %B0,%A0" CR_TAB
6282 return *len
= 2, ("mov %B0,%A1" CR_TAB
6287 return ("mov %B0,%A0" CR_TAB
6293 return ("mov %B0,%A0" CR_TAB
6300 return ("mov %B0,%A0" CR_TAB
6310 return ("mov %B0,%A0" CR_TAB
6318 return ("mov %B0,%A0" CR_TAB
6321 "ldi %3,0xf0" CR_TAB
6325 return ("mov %B0,%A0" CR_TAB
6336 return ("mov %B0,%A0" CR_TAB
6342 if (AVR_HAVE_MUL
&& scratch
)
6345 return ("ldi %3,0x20" CR_TAB
6349 "clr __zero_reg__");
6351 if (optimize_size
&& scratch
)
6356 return ("mov %B0,%A0" CR_TAB
6360 "ldi %3,0xe0" CR_TAB
6366 return ("set" CR_TAB
6371 "clr __zero_reg__");
6374 return ("mov %B0,%A0" CR_TAB
6383 if (AVR_HAVE_MUL
&& ldi_ok
)
6386 return ("ldi %B0,0x40" CR_TAB
6387 "mul %A0,%B0" CR_TAB
6390 "clr __zero_reg__");
6392 if (AVR_HAVE_MUL
&& scratch
)
6395 return ("ldi %3,0x40" CR_TAB
6399 "clr __zero_reg__");
6401 if (optimize_size
&& ldi_ok
)
6404 return ("mov %B0,%A0" CR_TAB
6405 "ldi %A0,6" "\n1:\t"
6410 if (optimize_size
&& scratch
)
6413 return ("clr %B0" CR_TAB
6422 return ("clr %B0" CR_TAB
6429 out_shift_with_cnt ("lsl %A0" CR_TAB
6430 "rol %B0", insn
, operands
, len
, 2);
6435 /* 24-bit shift left */
6438 avr_out_ashlpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6443 if (CONST_INT_P (op
[2]))
6445 switch (INTVAL (op
[2]))
6448 if (INTVAL (op
[2]) < 24)
6451 return avr_asm_len ("clr %A0" CR_TAB
6453 "clr %C0", op
, plen
, 3);
6457 int reg0
= REGNO (op
[0]);
6458 int reg1
= REGNO (op
[1]);
6461 return avr_asm_len ("mov %C0,%B1" CR_TAB
6462 "mov %B0,%A1" CR_TAB
6463 "clr %A0", op
, plen
, 3);
6465 return avr_asm_len ("clr %A0" CR_TAB
6466 "mov %B0,%A1" CR_TAB
6467 "mov %C0,%B1", op
, plen
, 3);
6472 int reg0
= REGNO (op
[0]);
6473 int reg1
= REGNO (op
[1]);
6475 if (reg0
+ 2 != reg1
)
6476 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
6478 return avr_asm_len ("clr %B0" CR_TAB
6479 "clr %A0", op
, plen
, 2);
6483 return avr_asm_len ("clr %C0" CR_TAB
6487 "clr %A0", op
, plen
, 5);
6491 out_shift_with_cnt ("lsl %A0" CR_TAB
6493 "rol %C0", insn
, op
, plen
, 3);
/* NOTE(review): the following block is a corrupted extraction — statements
   are split across lines, original-file line numbers are fused into the
   text, and several asm sequences are missing entirely.  The code is left
   byte-identical; restore this function from version control rather than
   editing it here.  */
6498 /* 32bit shift left ((long)x << i) */
6501 ashlsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6503 if (GET_CODE (operands
[2]) == CONST_INT
)
6511 switch (INTVAL (operands
[2]))
6514 if (INTVAL (operands
[2]) < 32)
6518 return *len
= 3, ("clr %D0" CR_TAB
6522 return ("clr %D0" CR_TAB
6529 int reg0
= true_regnum (operands
[0]);
6530 int reg1
= true_regnum (operands
[1]);
6533 return ("mov %D0,%C1" CR_TAB
6534 "mov %C0,%B1" CR_TAB
6535 "mov %B0,%A1" CR_TAB
6538 return ("clr %A0" CR_TAB
6539 "mov %B0,%A1" CR_TAB
6540 "mov %C0,%B1" CR_TAB
6546 int reg0
= true_regnum (operands
[0]);
6547 int reg1
= true_regnum (operands
[1]);
6548 if (reg0
+ 2 == reg1
)
6549 return *len
= 2, ("clr %B0" CR_TAB
6552 return *len
= 3, ("movw %C0,%A1" CR_TAB
6556 return *len
= 4, ("mov %C0,%A1" CR_TAB
6557 "mov %D0,%B1" CR_TAB
6564 return ("mov %D0,%A1" CR_TAB
6571 return ("clr %D0" CR_TAB
6580 out_shift_with_cnt ("lsl %A0" CR_TAB
6583 "rol %D0", insn
, operands
, len
, 4);
6587 /* 8bit arithmetic shift right ((signed char)x >> i) */
6590 ashrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6592 if (GET_CODE (operands
[2]) == CONST_INT
)
6599 switch (INTVAL (operands
[2]))
6607 return ("asr %0" CR_TAB
6612 return ("asr %0" CR_TAB
6618 return ("asr %0" CR_TAB
6625 return ("asr %0" CR_TAB
6633 return ("bst %0,6" CR_TAB
6639 if (INTVAL (operands
[2]) < 8)
6646 return ("lsl %0" CR_TAB
6650 else if (CONSTANT_P (operands
[2]))
6651 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
6653 out_shift_with_cnt ("asr %0",
6654 insn
, operands
, len
, 1);
/* NOTE(review): the following block is a corrupted extraction — statements
   are split across lines, original-file line numbers are fused into the
   text, and several asm sequences are missing entirely.  The code is left
   byte-identical; restore this function from version control rather than
   editing it here.  */
6659 /* 16bit arithmetic shift right ((signed short)x >> i) */
6662 ashrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6664 if (GET_CODE (operands
[2]) == CONST_INT
)
6666 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
6667 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
6674 switch (INTVAL (operands
[2]))
6678 /* XXX try to optimize this too? */
6683 break;  /* scratch ? 5 : 6 */
6685 return ("mov __tmp_reg__,%A0" CR_TAB
6686 "mov %A0,%B0" CR_TAB
6687 "lsl __tmp_reg__" CR_TAB
6689 "sbc %B0,%B0" CR_TAB
6690 "lsl __tmp_reg__" CR_TAB
6696 return ("lsl %A0" CR_TAB
6697 "mov %A0,%B0" CR_TAB
6703 int reg0
= true_regnum (operands
[0]);
6704 int reg1
= true_regnum (operands
[1]);
6707 return *len
= 3, ("mov %A0,%B0" CR_TAB
6711 return *len
= 4, ("mov %A0,%B1" CR_TAB
6719 return ("mov %A0,%B0" CR_TAB
6721 "sbc %B0,%B0" CR_TAB
6726 return ("mov %A0,%B0" CR_TAB
6728 "sbc %B0,%B0" CR_TAB
6733 if (AVR_HAVE_MUL
&& ldi_ok
)
6736 return ("ldi %A0,0x20" CR_TAB
6737 "muls %B0,%A0" CR_TAB
6739 "sbc %B0,%B0" CR_TAB
6740 "clr __zero_reg__");
6742 if (optimize_size
&& scratch
)
6745 return ("mov %A0,%B0" CR_TAB
6747 "sbc %B0,%B0" CR_TAB
6753 if (AVR_HAVE_MUL
&& ldi_ok
)
6756 return ("ldi %A0,0x10" CR_TAB
6757 "muls %B0,%A0" CR_TAB
6759 "sbc %B0,%B0" CR_TAB
6760 "clr __zero_reg__");
6762 if (optimize_size
&& scratch
)
6765 return ("mov %A0,%B0" CR_TAB
6767 "sbc %B0,%B0" CR_TAB
6774 if (AVR_HAVE_MUL
&& ldi_ok
)
6777 return ("ldi %A0,0x08" CR_TAB
6778 "muls %B0,%A0" CR_TAB
6780 "sbc %B0,%B0" CR_TAB
6781 "clr __zero_reg__");
6784 break;  /* scratch ? 5 : 7 */
6786 return ("mov %A0,%B0" CR_TAB
6788 "sbc %B0,%B0" CR_TAB
6797 return ("lsl %B0" CR_TAB
6798 "sbc %A0,%A0" CR_TAB
6800 "mov %B0,%A0" CR_TAB
6804 if (INTVAL (operands
[2]) < 16)
6810 return *len
= 3, ("lsl %B0" CR_TAB
6811 "sbc %A0,%A0" CR_TAB
6816 out_shift_with_cnt ("asr %B0" CR_TAB
6817 "ror %A0", insn
, operands
, len
, 2);
6822 /* 24-bit arithmetic shift right */
6825 avr_out_ashrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6827 int dest
= REGNO (op
[0]);
6828 int src
= REGNO (op
[1]);
6830 if (CONST_INT_P (op
[2]))
6835 switch (INTVAL (op
[2]))
6839 return avr_asm_len ("mov %A0,%B1" CR_TAB
6840 "mov %B0,%C1" CR_TAB
6843 "dec %C0", op
, plen
, 5);
6845 return avr_asm_len ("clr %C0" CR_TAB
6848 "mov %B0,%C1" CR_TAB
6849 "mov %A0,%B1", op
, plen
, 5);
6852 if (dest
!= src
+ 2)
6853 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
6855 return avr_asm_len ("clr %B0" CR_TAB
6858 "mov %C0,%B0", op
, plen
, 4);
6861 if (INTVAL (op
[2]) < 24)
6867 return avr_asm_len ("lsl %C0" CR_TAB
6868 "sbc %A0,%A0" CR_TAB
6869 "mov %B0,%A0" CR_TAB
6870 "mov %C0,%A0", op
, plen
, 4);
6874 out_shift_with_cnt ("asr %C0" CR_TAB
6876 "ror %A0", insn
, op
, plen
, 3);
/* NOTE(review): the following block is a corrupted extraction — statements
   are split across lines, original-file line numbers are fused into the
   text, and several asm sequences are missing entirely.  The code is left
   byte-identical; restore this function from version control rather than
   editing it here.  */
6881 /* 32-bit arithmetic shift right ((signed long)x >> i) */
6884 ashrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6886 if (GET_CODE (operands
[2]) == CONST_INT
)
6894 switch (INTVAL (operands
[2]))
6898 int reg0
= true_regnum (operands
[0]);
6899 int reg1
= true_regnum (operands
[1]);
6902 return ("mov %A0,%B1" CR_TAB
6903 "mov %B0,%C1" CR_TAB
6904 "mov %C0,%D1" CR_TAB
6909 return ("clr %D0" CR_TAB
6912 "mov %C0,%D1" CR_TAB
6913 "mov %B0,%C1" CR_TAB
6919 int reg0
= true_regnum (operands
[0]);
6920 int reg1
= true_regnum (operands
[1]);
6922 if (reg0
== reg1
+ 2)
6923 return *len
= 4, ("clr %D0" CR_TAB
6928 return *len
= 5, ("movw %A0,%C1" CR_TAB
6934 return *len
= 6, ("mov %B0,%D1" CR_TAB
6935 "mov %A0,%C1" CR_TAB
6943 return *len
= 6, ("mov %A0,%D1" CR_TAB
6947 "mov %B0,%D0" CR_TAB
6951 if (INTVAL (operands
[2]) < 32)
6958 return *len
= 4, ("lsl %D0" CR_TAB
6959 "sbc %A0,%A0" CR_TAB
6960 "mov %B0,%A0" CR_TAB
6963 return *len
= 5, ("lsl %D0" CR_TAB
6964 "sbc %A0,%A0" CR_TAB
6965 "mov %B0,%A0" CR_TAB
6966 "mov %C0,%A0" CR_TAB
6971 out_shift_with_cnt ("asr %D0" CR_TAB
6974 "ror %A0", insn
, operands
, len
, 4);
6978 /* 8-bit logic shift right ((unsigned char)x >> i) */
6981 lshrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6983 if (GET_CODE (operands
[2]) == CONST_INT
)
6990 switch (INTVAL (operands
[2]))
6993 if (INTVAL (operands
[2]) < 8)
7005 return ("lsr %0" CR_TAB
7009 return ("lsr %0" CR_TAB
7014 if (test_hard_reg_class (LD_REGS
, operands
[0]))
7017 return ("swap %0" CR_TAB
7021 return ("lsr %0" CR_TAB
7027 if (test_hard_reg_class (LD_REGS
, operands
[0]))
7030 return ("swap %0" CR_TAB
7035 return ("lsr %0" CR_TAB
7042 if (test_hard_reg_class (LD_REGS
, operands
[0]))
7045 return ("swap %0" CR_TAB
7051 return ("lsr %0" CR_TAB
7060 return ("rol %0" CR_TAB
7065 else if (CONSTANT_P (operands
[2]))
7066 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
7068 out_shift_with_cnt ("lsr %0",
7069 insn
, operands
, len
, 1);
/* NOTE(review): the following block is a corrupted extraction — statements
   are split across lines, original-file line numbers are fused into the
   text, and several asm sequences are missing entirely.  The code is left
   byte-identical; restore this function from version control rather than
   editing it here.  */
7073 /* 16-bit logic shift right ((unsigned short)x >> i) */
7076 lshrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7078 if (GET_CODE (operands
[2]) == CONST_INT
)
7080 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
7081 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
7088 switch (INTVAL (operands
[2]))
7091 if (INTVAL (operands
[2]) < 16)
7095 return ("clr %B0" CR_TAB
7099 if (optimize_size
&& scratch
)
7104 return ("swap %B0" CR_TAB
7106 "andi %A0,0x0f" CR_TAB
7107 "eor %A0,%B0" CR_TAB
7108 "andi %B0,0x0f" CR_TAB
7114 return ("swap %B0" CR_TAB
7116 "ldi %3,0x0f" CR_TAB
7118 "eor %A0,%B0" CR_TAB
7122 break;  /* optimize_size ? 6 : 8 */
7126 break;  /* scratch ? 5 : 6 */
7130 return ("lsr %B0" CR_TAB
7134 "andi %A0,0x0f" CR_TAB
7135 "eor %A0,%B0" CR_TAB
7136 "andi %B0,0x0f" CR_TAB
7142 return ("lsr %B0" CR_TAB
7146 "ldi %3,0x0f" CR_TAB
7148 "eor %A0,%B0" CR_TAB
7156 break;  /* scratch ? 5 : 6 */
7158 return ("clr __tmp_reg__" CR_TAB
7161 "rol __tmp_reg__" CR_TAB
7164 "rol __tmp_reg__" CR_TAB
7165 "mov %A0,%B0" CR_TAB
7166 "mov %B0,__tmp_reg__");
7170 return ("lsl %A0" CR_TAB
7171 "mov %A0,%B0" CR_TAB
7173 "sbc %B0,%B0" CR_TAB
7177 return *len
= 2, ("mov %A0,%B1" CR_TAB
7182 return ("mov %A0,%B0" CR_TAB
7188 return ("mov %A0,%B0" CR_TAB
7195 return ("mov %A0,%B0" CR_TAB
7205 return ("mov %A0,%B0" CR_TAB
7213 return ("mov %A0,%B0" CR_TAB
7216 "ldi %3,0x0f" CR_TAB
7220 return ("mov %A0,%B0" CR_TAB
7231 return ("mov %A0,%B0" CR_TAB
7237 if (AVR_HAVE_MUL
&& scratch
)
7240 return ("ldi %3,0x08" CR_TAB
7244 "clr __zero_reg__");
7246 if (optimize_size
&& scratch
)
7251 return ("mov %A0,%B0" CR_TAB
7255 "ldi %3,0x07" CR_TAB
7261 return ("set" CR_TAB
7266 "clr __zero_reg__");
7269 return ("mov %A0,%B0" CR_TAB
7278 if (AVR_HAVE_MUL
&& ldi_ok
)
7281 return ("ldi %A0,0x04" CR_TAB
7282 "mul %B0,%A0" CR_TAB
7285 "clr __zero_reg__");
7287 if (AVR_HAVE_MUL
&& scratch
)
7290 return ("ldi %3,0x04" CR_TAB
7294 "clr __zero_reg__");
7296 if (optimize_size
&& ldi_ok
)
7299 return ("mov %A0,%B0" CR_TAB
7300 "ldi %B0,6" "\n1:\t"
7305 if (optimize_size
&& scratch
)
7308 return ("clr %A0" CR_TAB
7317 return ("clr %A0" CR_TAB
7324 out_shift_with_cnt ("lsr %B0" CR_TAB
7325 "ror %A0", insn
, operands
, len
, 2);
7330 /* 24-bit logic shift right */
7333 avr_out_lshrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
7335 int dest
= REGNO (op
[0]);
7336 int src
= REGNO (op
[1]);
7338 if (CONST_INT_P (op
[2]))
7343 switch (INTVAL (op
[2]))
7347 return avr_asm_len ("mov %A0,%B1" CR_TAB
7348 "mov %B0,%C1" CR_TAB
7349 "clr %C0", op
, plen
, 3);
7351 return avr_asm_len ("clr %C0" CR_TAB
7352 "mov %B0,%C1" CR_TAB
7353 "mov %A0,%B1", op
, plen
, 3);
7356 if (dest
!= src
+ 2)
7357 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
7359 return avr_asm_len ("clr %B0" CR_TAB
7360 "clr %C0", op
, plen
, 2);
7363 if (INTVAL (op
[2]) < 24)
7369 return avr_asm_len ("clr %A0" CR_TAB
7373 "clr %C0", op
, plen
, 5);
7377 out_shift_with_cnt ("lsr %C0" CR_TAB
7379 "ror %A0", insn
, op
, plen
, 3);
/* NOTE(review): the following block is a corrupted extraction — statements
   are split across lines, original-file line numbers are fused into the
   text, and several asm sequences are missing entirely.  The code is left
   byte-identical; restore this function from version control rather than
   editing it here.  */
7384 /* 32-bit logic shift right ((unsigned int)x >> i) */
7387 lshrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
7389 if (GET_CODE (operands
[2]) == CONST_INT
)
7397 switch (INTVAL (operands
[2]))
7400 if (INTVAL (operands
[2]) < 32)
7404 return *len
= 3, ("clr %D0" CR_TAB
7408 return ("clr %D0" CR_TAB
7415 int reg0
= true_regnum (operands
[0]);
7416 int reg1
= true_regnum (operands
[1]);
7419 return ("mov %A0,%B1" CR_TAB
7420 "mov %B0,%C1" CR_TAB
7421 "mov %C0,%D1" CR_TAB
7424 return ("clr %D0" CR_TAB
7425 "mov %C0,%D1" CR_TAB
7426 "mov %B0,%C1" CR_TAB
7432 int reg0
= true_regnum (operands
[0]);
7433 int reg1
= true_regnum (operands
[1]);
7435 if (reg0
== reg1
+ 2)
7436 return *len
= 2, ("clr %C0" CR_TAB
7439 return *len
= 3, ("movw %A0,%C1" CR_TAB
7443 return *len
= 4, ("mov %B0,%D1" CR_TAB
7444 "mov %A0,%C1" CR_TAB
7450 return *len
= 4, ("mov %A0,%D1" CR_TAB
7457 return ("clr %A0" CR_TAB
7466 out_shift_with_cnt ("lsr %D0" CR_TAB
7469 "ror %A0", insn
, operands
, len
, 4);
/* NOTE(review): the following block (avr_out_plus_1, addition/subtraction
   with optional saturation) is a corrupted extraction — statements are
   split across lines, original-file line numbers are fused into the text,
   and many guard conditions and intermediate lines are missing entirely.
   The code is left byte-identical; restore this function from version
   control rather than editing it here.  */
7474 /* Output addition of register XOP[0] and compile time constant XOP[2].
7475 CODE == PLUS: perform addition by using ADD instructions or
7476 CODE == MINUS: perform addition by using SUB instructions:
7478 XOP[0] = XOP[0] + XOP[2]
7480 Or perform addition/subtraction with register XOP[2] depending on CODE:
7482 XOP[0] = XOP[0] +/- XOP[2]
7484 If PLEN == NULL, print assembler instructions to perform the operation;
7485 otherwise, set *PLEN to the length of the instruction sequence (in words)
7486 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
7487 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7489 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7490 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7491 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
7492 the subtrahend in the original insn, provided it is a compile time constant.
7493 In all other cases, SIGN is 0.
7495 If OUT_LABEL is true, print the final 0: label which is needed for
7496 saturated addition / subtraction. The only case where OUT_LABEL = false
7497 is useful is for saturated addition / subtraction performed during
7498 fixed-point rounding, cf. `avr_out_round'. */
7501 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
7502 enum rtx_code code_sat
, int sign
, bool out_label
)
7504 /* MODE of the operation. */
7505 machine_mode mode
= GET_MODE (xop
[0]);
7507 /* INT_MODE of the same size. */
7508 machine_mode imode
= int_mode_for_mode (mode
);
7510 /* Number of bytes to operate on. */
7511 int i
, n_bytes
= GET_MODE_SIZE (mode
);
7513 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7514 int clobber_val
= -1;
7516 /* op[0]: 8-bit destination register
7517 op[1]: 8-bit const int
7518 op[2]: 8-bit scratch register */
7521 /* Started the operation? Before starting the operation we may skip
7522 adding 0. This is no more true after the operation started because
7523 carry must be taken into account. */
7524 bool started
= false;
7526 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
7529 /* Output a BRVC instruction. Only needed with saturation. */
7530 bool out_brvc
= true;
7537 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_CLOBBER
;
7539 for (i
= 0; i
< n_bytes
; i
++)
7541 /* We operate byte-wise on the destination. */
7542 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
7543 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
7546 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
7549 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
7553 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
7555 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
7564 /* Except in the case of ADIW with 16-bit register (see below)
7565 addition does not set cc0 in a usable way. */
7567 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
7569 if (CONST_FIXED_P (xval
))
7570 xval
= avr_to_int_mode (xval
);
7572 /* Adding/Subtracting zero is a no-op. */
7574 if (xval
== const0_rtx
)
7581 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
7585 if (SS_PLUS
== code_sat
&& MINUS
== code
7587 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
7588 & GET_MODE_MASK (QImode
)))
7590 /* We compute x + 0x80 by means of SUB instructions. We negated the
7591 constant subtrahend above and are left with x - (-128) so that we
7592 need something like SUBI r,128 which does not exist because SUBI sets
7593 V according to the sign of the subtrahend. Notice the only case
7594 where this must be done is when NEG overflowed in case [2s] because
7595 the V computation needs the right sign of the subtrahend. */
7597 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
7599 avr_asm_len ("subi %0,128" CR_TAB
7600 "brmi 0f", &msb
, plen
, 2);
7606 for (i
= 0; i
< n_bytes
; i
++)
7608 /* We operate byte-wise on the destination. */
7609 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
7610 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
7612 /* 8-bit value to operate with this byte. */
7613 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
7615 /* Registers R16..R31 can operate with immediate. */
7616 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
7619 op
[1] = gen_int_mode (val8
, QImode
);
7621 /* To get usable cc0 no low-bytes must have been skipped. */
7629 && test_hard_reg_class (ADDW_REGS
, reg8
))
7631 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
7632 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
7634 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
7635 i.e. operate word-wise. */
7642 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
7645 if (n_bytes
== 2 && PLUS
== code
)
7657 avr_asm_len (code
== PLUS
7658 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
7662 else if ((val8
== 1 || val8
== 0xff)
7663 && UNKNOWN
== code_sat
7665 && i
== n_bytes
- 1)
7667 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
7677 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
7679 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
7681 /* This belongs to the x + 0x80 corner case. The code with
7682 ADD instruction is not smaller, thus make this case
7683 expensive so that the caller won't pick it. */
7689 if (clobber_val
!= (int) val8
)
7690 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7691 clobber_val
= (int) val8
;
7693 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
7700 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
7703 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
7705 if (clobber_val
!= (int) val8
)
7706 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7707 clobber_val
= (int) val8
;
7709 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
7721 } /* for all sub-bytes */
7725 if (UNKNOWN
== code_sat
)
7728 *pcc
= (int) CC_CLOBBER
;
7730 /* Vanilla addition/subtraction is done. We are left with saturation.
7732 We have to compute A = A <op> B where A is a register and
7733 B is a register or a non-zero compile time constant CONST.
7734 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
7735 B stands for the original operand $2 in INSN. In the case of B = CONST,
7736 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
7738 CODE is the instruction flavor we use in the asm sequence to perform <op>.
7742 operation | code | sat if | b is | sat value | case
7743 -----------------+-------+----------+--------------+-----------+-------
7744 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
7745 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
7746 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
7747 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
7751 operation | code | sat if | b is | sat value | case
7752 -----------------+-------+----------+--------------+-----------+-------
7753 + as a + b | add | V == 1 | const, reg | s+ | [1s]
7754 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
7755 - as a - b | sub | V == 1 | const, reg | s- | [3s]
7756 - as a + (-b) | add | V == 1 | const | s- | [4s]
7758 s+ = b < 0 ? -0x80 : 0x7f
7759 s- = b < 0 ? 0x7f : -0x80
7761 The cases a - b actually perform a - (-(-b)) if B is CONST.
7764 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
7766 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
7769 bool need_copy
= true;
7770 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
7781 avr_asm_len ("brvc 0f", op
, plen
, 1);
7783 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
7788 avr_asm_len ("ldi %0,0x7f" CR_TAB
7789 "adc %0,__zero_reg__", op
, plen
, 2);
7791 avr_asm_len ("ldi %0,0x7f" CR_TAB
7792 "ldi %1,0xff" CR_TAB
7793 "adc %1,__zero_reg__" CR_TAB
7794 "adc %0,__zero_reg__", op
, plen
, 4);
7796 else if (sign
== 0 && PLUS
== code
)
7800 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
7803 avr_asm_len ("ldi %0,0x80" CR_TAB
7805 "dec %0", op
, plen
, 3);
7807 avr_asm_len ("ldi %0,0x80" CR_TAB
7810 "sbci %0,0", op
, plen
, 4);
7812 else if (sign
== 0 && MINUS
== code
)
7816 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
7819 avr_asm_len ("ldi %0,0x7f" CR_TAB
7821 "inc %0", op
, plen
, 3);
7823 avr_asm_len ("ldi %0,0x7f" CR_TAB
7826 "sbci %0,-1", op
, plen
, 4);
7828 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
7830 /* [1s,const,B < 0] [2s,B < 0] */
7831 /* [3s,const,B > 0] [4s,B > 0] */
7835 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
7839 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
7840 if (n_bytes
> 1 && need_copy
)
7841 avr_asm_len ("clr %1", op
, plen
, 1);
7843 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
7845 /* [1s,const,B > 0] [2s,B > 0] */
7846 /* [3s,const,B < 0] [4s,B < 0] */
7850 avr_asm_len ("sec" CR_TAB
7851 "%~call __sbc_8", op
, plen
, 1 + len_call
);
7855 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
7856 if (n_bytes
> 1 && need_copy
)
7857 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
7867 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
7872 avr_asm_len ("sec", op
, plen
, 1);
7873 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
7879 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
7880 avr_asm_len ("sec" CR_TAB
7881 "sbc %0,%0", op
, plen
, 2);
7883 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
7886 break; /* US_PLUS */
7891 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
7895 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
7899 avr_asm_len ("clr %0", op
, plen
, 1);
7904 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
7905 Now copy the right value to the LSBs. */
7907 if (need_copy
&& n_bytes
> 1)
7909 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
7911 avr_asm_len ("mov %1,%0", op
, plen
, 1);
7917 avr_asm_len ("movw %0,%1", op
, plen
, 1);
7919 avr_asm_len ("mov %A0,%1" CR_TAB
7920 "mov %B0,%1", op
, plen
, 2);
7923 else if (n_bytes
> 2)
7926 avr_asm_len ("mov %A0,%1" CR_TAB
7927 "mov %B0,%1", op
, plen
, 2);
7931 if (need_copy
&& n_bytes
== 8)
7934 avr_asm_len ("movw %r0+2,%0" CR_TAB
7935 "movw %r0+4,%0", xop
, plen
, 2);
7937 avr_asm_len ("mov %r0+2,%0" CR_TAB
7938 "mov %r0+3,%0" CR_TAB
7939 "mov %r0+4,%0" CR_TAB
7940 "mov %r0+5,%0", xop
, plen
, 4);
7944 avr_asm_len ("0:", op
, plen
, 0);
7948 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
7949 is ont a compile-time constant:
7951 XOP[0] = XOP[0] +/- XOP[2]
7953 This is a helper for the function below. The only insns that need this
7954 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
7957 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
7959 machine_mode mode
= GET_MODE (xop
[0]);
7961 /* Only pointer modes want to add symbols. */
7963 gcc_assert (mode
== HImode
|| mode
== PSImode
);
7965 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
7967 avr_asm_len (PLUS
== code
7968 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
7969 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
7972 if (PSImode
== mode
)
7973 avr_asm_len (PLUS
== code
7974 ? "sbci %C0,hlo8(-(%2))"
7975 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
7980 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
7982 INSN is a single_set insn or an insn pattern with a binary operation as
7983 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
7985 XOP are the operands of INSN. In the case of 64-bit operations with
7986 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
7987 The non-saturating insns up to 32 bits may or may not supply a "d" class
7990 If PLEN == NULL output the instructions.
7991 If PLEN != NULL set *PLEN to the length of the sequence in words.
7993 PCC is a pointer to store the instructions' effect on cc0.
7996 PLEN and PCC default to NULL.
7998 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
8003 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
, bool out_label
)
8005 int cc_plus
, cc_minus
, cc_dummy
;
8006 int len_plus
, len_minus
;
8008 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
8009 rtx xdest
= SET_DEST (xpattern
);
8010 machine_mode mode
= GET_MODE (xdest
);
8011 machine_mode imode
= int_mode_for_mode (mode
);
8012 int n_bytes
= GET_MODE_SIZE (mode
);
8013 enum rtx_code code_sat
= GET_CODE (SET_SRC (xpattern
));
8015 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
8021 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
8023 if (PLUS
== code_sat
|| MINUS
== code_sat
)
8026 if (n_bytes
<= 4 && REG_P (xop
[2]))
8028 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
, 0, out_label
);
8034 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
8035 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
8036 op
[2] = avr_to_int_mode (xop
[0]);
8041 && !CONST_INT_P (xop
[2])
8042 && !CONST_FIXED_P (xop
[2]))
8044 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
8047 op
[0] = avr_to_int_mode (xop
[0]);
8048 op
[1] = avr_to_int_mode (xop
[1]);
8049 op
[2] = avr_to_int_mode (xop
[2]);
8052 /* Saturations and 64-bit operations don't have a clobber operand.
8053 For the other cases, the caller will provide a proper XOP[3]. */
8055 xpattern
= INSN_P (insn
) ? PATTERN (insn
) : insn
;
8056 op
[3] = PARALLEL
== GET_CODE (xpattern
) ? xop
[3] : NULL_RTX
;
8058 /* Saturation will need the sign of the original operand. */
8060 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
8061 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
8063 /* If we subtract and the subtrahend is a constant, then negate it
8064 so that avr_out_plus_1 can be used. */
8067 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
8069 /* Work out the shortest sequence. */
8071 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_minus
, code_sat
, sign
, out_label
);
8072 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_plus
, code_sat
, sign
, out_label
);
8076 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
8077 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
8079 else if (len_minus
<= len_plus
)
8080 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
, out_label
);
8082 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
, out_label
);
8088 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
8089 time constant XOP[2]:
8091 XOP[0] = XOP[0] <op> XOP[2]
8093 and return "". If PLEN == NULL, print assembler instructions to perform the
8094 operation; otherwise, set *PLEN to the length of the instruction sequence
8095 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
8096 register or SCRATCH if no clobber register is needed for the operation.
8097 INSN is an INSN_P or a pattern of an insn. */
8100 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
8102 /* CODE and MODE of the operation. */
8103 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
8104 enum rtx_code code
= GET_CODE (SET_SRC (xpattern
));
8105 machine_mode mode
= GET_MODE (xop
[0]);
8107 /* Number of bytes to operate on. */
8108 int i
, n_bytes
= GET_MODE_SIZE (mode
);
8110 /* Value of T-flag (0 or 1) or -1 if unknow. */
8113 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
8114 int clobber_val
= -1;
8116 /* op[0]: 8-bit destination register
8117 op[1]: 8-bit const int
8118 op[2]: 8-bit clobber register, SCRATCH or NULL_RTX.
8119 op[3]: 8-bit register containing 0xff or NULL_RTX */
8122 op
[2] = QImode
== mode
? NULL_RTX
: xop
[3];
8128 for (i
= 0; i
< n_bytes
; i
++)
8130 /* We operate byte-wise on the destination. */
8131 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
8132 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
8134 /* 8-bit value to operate with this byte. */
8135 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
8137 /* Number of bits set in the current byte of the constant. */
8138 int pop8
= avr_popcount (val8
);
8140 /* Registers R16..R31 can operate with immediate. */
8141 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
8144 op
[1] = GEN_INT (val8
);
8153 avr_asm_len ("ori %0,%1", op
, plen
, 1);
8157 avr_asm_len ("set", op
, plen
, 1);
8160 op
[1] = GEN_INT (exact_log2 (val8
));
8161 avr_asm_len ("bld %0,%1", op
, plen
, 1);
8165 if (op
[3] != NULL_RTX
)
8166 avr_asm_len ("mov %0,%3", op
, plen
, 1);
8168 avr_asm_len ("clr %0" CR_TAB
8169 "dec %0", op
, plen
, 2);
8175 if (clobber_val
!= (int) val8
)
8176 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
8177 clobber_val
= (int) val8
;
8179 avr_asm_len ("or %0,%2", op
, plen
, 1);
8189 avr_asm_len ("clr %0", op
, plen
, 1);
8191 avr_asm_len ("andi %0,%1", op
, plen
, 1);
8195 avr_asm_len ("clt", op
, plen
, 1);
8198 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
8199 avr_asm_len ("bld %0,%1", op
, plen
, 1);
8203 if (clobber_val
!= (int) val8
)
8204 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
8205 clobber_val
= (int) val8
;
8207 avr_asm_len ("and %0,%2", op
, plen
, 1);
8217 avr_asm_len ("com %0", op
, plen
, 1);
8218 else if (ld_reg_p
&& val8
== (1 << 7))
8219 avr_asm_len ("subi %0,%1", op
, plen
, 1);
8222 if (clobber_val
!= (int) val8
)
8223 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
8224 clobber_val
= (int) val8
;
8226 avr_asm_len ("eor %0,%2", op
, plen
, 1);
8232 /* Unknown rtx_code */
8235 } /* for all sub-bytes */
8241 /* Output sign extension from XOP[1] to XOP[0] and return "".
8242 If PLEN == NULL, print assembler instructions to perform the operation;
8243 otherwise, set *PLEN to the length of the instruction sequence (in words)
8244 as printed with PLEN == NULL. */
8247 avr_out_sign_extend (rtx_insn
*insn
, rtx
*xop
, int *plen
)
8249 // Size in bytes of source resp. destination operand.
8250 unsigned n_src
= GET_MODE_SIZE (GET_MODE (xop
[1]));
8251 unsigned n_dest
= GET_MODE_SIZE (GET_MODE (xop
[0]));
8252 rtx r_msb
= all_regs_rtx
[REGNO (xop
[1]) + n_src
- 1];
8257 // Copy destination to source
8259 if (REGNO (xop
[0]) != REGNO (xop
[1]))
8261 gcc_assert (n_src
<= 2);
8264 avr_asm_len (AVR_HAVE_MOVW
8266 : "mov %B0,%B1", xop
, plen
, 1);
8267 if (n_src
== 1 || !AVR_HAVE_MOVW
)
8268 avr_asm_len ("mov %A0,%A1", xop
, plen
, 1);
8271 // Set Carry to the sign bit MSB.7...
8273 if (REGNO (xop
[0]) == REGNO (xop
[1])
8274 || !reg_unused_after (insn
, r_msb
))
8276 avr_asm_len ("mov __tmp_reg__,%0", &r_msb
, plen
, 1);
8277 r_msb
= tmp_reg_rtx
;
8280 avr_asm_len ("lsl %0", &r_msb
, plen
, 1);
8282 // ...and propagate it to all the new sign bits
8284 for (unsigned n
= n_src
; n
< n_dest
; n
++)
8285 avr_asm_len ("sbc %0,%0", &all_regs_rtx
[REGNO (xop
[0]) + n
], plen
, 1);
8291 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
8292 PLEN != NULL: Set *PLEN to the length of that sequence.
8296 avr_out_addto_sp (rtx
*op
, int *plen
)
8298 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
8299 int addend
= INTVAL (op
[0]);
8306 if (flag_verbose_asm
|| flag_print_asm_name
)
8307 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
8309 while (addend
<= -pc_len
)
8312 avr_asm_len ("rcall .", op
, plen
, 1);
8315 while (addend
++ < 0)
8316 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
8318 else if (addend
> 0)
8320 if (flag_verbose_asm
|| flag_print_asm_name
)
8321 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
8323 while (addend
-- > 0)
8324 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
8331 /* Output instructions to insert an inverted bit into OPERANDS[0]:
8332 $0.$1 = ~$2.$3 if XBITNO = NULL
8333 $0.$1 = ~$2.XBITNO if XBITNO != NULL.
8334 If PLEN = NULL then output the respective instruction sequence which
8335 is a combination of BST / BLD and some instruction(s) to invert the bit.
8336 If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
8340 avr_out_insert_notbit (rtx_insn
*insn
, rtx operands
[], rtx xbitno
, int *plen
)
8342 rtx op
[4] = { operands
[0], operands
[1], operands
[2],
8343 xbitno
== NULL_RTX
? operands
[3] : xbitno
};
8345 if (INTVAL (op
[1]) == 7
8346 && test_hard_reg_class (LD_REGS
, op
[0]))
8348 /* If the inserted bit number is 7 and we have a d-reg, then invert
8349 the bit after the insertion by means of SUBI *,0x80. */
8351 if (INTVAL (op
[3]) == 7
8352 && REGNO (op
[0]) == REGNO (op
[2]))
8354 avr_asm_len ("subi %0,0x80", op
, plen
, -1);
8358 avr_asm_len ("bst %2,%3" CR_TAB
8360 "subi %0,0x80", op
, plen
, -3);
8363 else if (test_hard_reg_class (LD_REGS
, op
[0])
8364 && (INTVAL (op
[1]) != INTVAL (op
[3])
8365 || !reg_overlap_mentioned_p (op
[0], op
[2])))
8367 /* If the destination bit is in a d-reg we can jump depending
8368 on the source bit and use ANDI / ORI. This just applies if we
8369 have not an early-clobber situation with the bit. */
8371 avr_asm_len ("andi %0,~(1<<%1)" CR_TAB
8373 "ori %0,1<<%1", op
, plen
, -3);
8377 /* Otherwise, invert the bit by means of COM before we store it with
8378 BST and then undo the COM if needed. */
8380 avr_asm_len ("com %2" CR_TAB
8381 "bst %2,%3", op
, plen
, -2);
8383 if (!reg_unused_after (insn
, op
[2])
8384 // A simple 'reg_unused_after' is not enough because that function
8385 // assumes that the destination register is overwritten completely
8386 // and hence is in order for our purpose. This is not the case
8387 // with BLD which just changes one bit of the destination.
8388 || reg_overlap_mentioned_p (op
[0], op
[2]))
8390 /* Undo the COM from above. */
8391 avr_asm_len ("com %2", op
, plen
, 1);
8394 avr_asm_len ("bld %0,%1", op
, plen
, 1);
8401 /* Outputs instructions needed for fixed point type conversion.
8402 This includes converting between any fixed point type, as well
8403 as converting to any integer type. Conversion between integer
8404 types is not supported.
8406 Converting signed fractional types requires a bit shift if converting
8407 to or from any unsigned fractional type because the decimal place is
8408 shifted by 1 bit. When the destination is a signed fractional, the sign
8409 is stored in either the carry or T bit. */
8412 avr_out_fract (rtx_insn
*insn
, rtx operands
[], bool intsigned
, int *plen
)
8416 RTX_CODE shift
= UNKNOWN
;
8417 bool sign_in_carry
= false;
8418 bool msb_in_carry
= false;
8419 bool lsb_in_tmp_reg
= false;
8420 bool lsb_in_carry
= false;
8421 bool frac_rounded
= false;
8422 const char *code_ashift
= "lsl %0";
8425 #define MAY_CLOBBER(RR) \
8426 /* Shorthand used below. */ \
8428 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
8429 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
8430 || (reg_unused_after (insn, all_regs_rtx[RR]) \
8431 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
8435 /* bytes : Length of operand in bytes.
8436 ibyte : Length of integral part in bytes.
8437 fbyte, fbit : Length of fractional part in bytes, bits. */
8440 unsigned fbit
, bytes
, ibyte
, fbyte
;
8441 unsigned regno
, regno_msb
;
8442 } dest
, src
, *val
[2] = { &dest
, &src
};
8447 /* Step 0: Determine information on source and destination operand we
8448 ====== will need in the remainder. */
8450 for (i
= 0; i
< sizeof (val
) / sizeof (*val
); i
++)
8454 xop
[i
] = operands
[i
];
8456 mode
= GET_MODE (xop
[i
]);
8458 val
[i
]->bytes
= GET_MODE_SIZE (mode
);
8459 val
[i
]->regno
= REGNO (xop
[i
]);
8460 val
[i
]->regno_msb
= REGNO (xop
[i
]) + val
[i
]->bytes
- 1;
8462 if (SCALAR_INT_MODE_P (mode
))
8464 val
[i
]->sbit
= intsigned
;
8467 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
8469 val
[i
]->sbit
= SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
8470 val
[i
]->fbit
= GET_MODE_FBIT (mode
);
8473 fatal_insn ("unsupported fixed-point conversion", insn
);
8475 val
[i
]->fbyte
= (1 + val
[i
]->fbit
) / BITS_PER_UNIT
;
8476 val
[i
]->ibyte
= val
[i
]->bytes
- val
[i
]->fbyte
;
8479 // Byte offset of the decimal point taking into account different place
8480 // of the decimal point in input and output and different register numbers
8481 // of input and output.
8482 int offset
= dest
.regno
- src
.regno
+ dest
.fbyte
- src
.fbyte
;
8484 // Number of destination bytes that will come from sign / zero extension.
8485 int sign_bytes
= (dest
.ibyte
- src
.ibyte
) * (dest
.ibyte
> src
.ibyte
);
8487 // Number of bytes at the low end to be filled with zeros.
8488 int zero_bytes
= (dest
.fbyte
- src
.fbyte
) * (dest
.fbyte
> src
.fbyte
);
8490 // Do we have a 16-Bit register that is cleared?
8491 rtx clrw
= NULL_RTX
;
8493 bool sign_extend
= src
.sbit
&& sign_bytes
;
8495 if (0 == dest
.fbit
% 8 && 7 == src
.fbit
% 8)
8497 else if (7 == dest
.fbit
% 8 && 0 == src
.fbit
% 8)
8499 else if (dest
.fbit
% 8 == src
.fbit
% 8)
8504 /* If we need to round the fraction part, we might need to save/round it
8505 before clobbering any of it in Step 1. Also, we might want to do
8506 the rounding now to make use of LD_REGS. */
8507 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
8508 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
8509 && !TARGET_FRACT_CONV_TRUNC
)
8513 (offset
? dest
.regno_msb
- sign_bytes
: dest
.regno
+ zero_bytes
- 1)
8514 && dest
.regno
- offset
-1 >= dest
.regno
);
8515 unsigned s0
= dest
.regno
- offset
-1;
8516 bool use_src
= true;
8518 unsigned copied_msb
= src
.regno_msb
;
8519 bool have_carry
= false;
8521 if (src
.ibyte
> dest
.ibyte
)
8522 copied_msb
-= src
.ibyte
- dest
.ibyte
;
8524 for (sn
= s0
; sn
<= copied_msb
; sn
++)
8525 if (!IN_RANGE (sn
, dest
.regno
, dest
.regno_msb
)
8526 && !reg_unused_after (insn
, all_regs_rtx
[sn
]))
8528 if (use_src
&& TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
))
8530 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
8531 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
8535 if (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], sn
))
8536 avr_asm_len ("cpi %0,1", &all_regs_rtx
[sn
], plen
, 1);
8538 avr_asm_len ("sec" CR_TAB
8539 "cpc %0,__zero_reg__",
8540 &all_regs_rtx
[sn
], plen
, 2);
8544 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8546 avr_asm_len (have_carry
? "sbci %0,128" : "subi %0,129",
8547 &all_regs_rtx
[s0
], plen
, 1);
8548 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
8549 avr_asm_len ("sbci %0,255", &all_regs_rtx
[sn
], plen
, 1);
8550 avr_asm_len ("\n0:", NULL
, plen
, 0);
8551 frac_rounded
= true;
8553 else if (use_src
&& overlap
)
8555 avr_asm_len ("clr __tmp_reg__" CR_TAB
8557 "dec __tmp_reg__", xop
, plen
, 1);
8561 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
8566 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
8569 avr_asm_len ("clt" CR_TAB
8570 "bld __tmp_reg__,7" CR_TAB
8571 "adc %0,__tmp_reg__",
8572 &all_regs_rtx
[s0
], plen
, 1);
8574 avr_asm_len ("lsr __tmp_reg" CR_TAB
8575 "add %0,__tmp_reg__",
8576 &all_regs_rtx
[s0
], plen
, 2);
8577 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
8578 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8579 frac_rounded
= true;
8584 = (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
)
8585 && (IN_RANGE (s0
, dest
.regno
, dest
.regno_msb
)
8586 || reg_unused_after (insn
, all_regs_rtx
[s0
])));
8587 xop
[2] = all_regs_rtx
[s0
];
8588 unsigned sn
= src
.regno
;
8589 if (!use_src
|| sn
== s0
)
8590 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
8591 /* We need to consider to-be-discarded bits
8592 if the value is negative. */
8595 avr_asm_len ("tst %0" CR_TAB
8597 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
8598 /* Test to-be-discarded bytes for any nozero bits.
8599 ??? Could use OR or SBIW to test two registers at once. */
8601 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8604 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8605 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
8607 avr_asm_len ("breq 0f" CR_TAB
8609 "\n0:\t" "mov __tmp_reg__,%2",
8612 avr_asm_len ("breq 0f" CR_TAB
8614 "bld __tmp_reg__,0\n0:",
8617 lsb_in_tmp_reg
= true;
8621 /* Step 1: Clear bytes at the low end and copy payload bits from source
8622 ====== to destination. */
8624 int step
= offset
< 0 ? 1 : -1;
8625 unsigned d0
= offset
< 0 ? dest
.regno
: dest
.regno_msb
;
8627 // We cleared at least that number of registers.
8630 for (; d0
>= dest
.regno
&& d0
<= dest
.regno_msb
; d0
+= step
)
8632 // Next regno of destination is needed for MOVW
8633 unsigned d1
= d0
+ step
;
8635 // Current and next regno of source
8636 signed s0
= d0
- offset
;
8637 signed s1
= s0
+ step
;
8639 // Must current resp. next regno be CLRed? This applies to the low
8640 // bytes of the destination that have no associated source bytes.
8641 bool clr0
= s0
< (signed) src
.regno
;
8642 bool clr1
= s1
< (signed) src
.regno
&& d1
>= dest
.regno
;
8644 // First gather what code to emit (if any) and additional step to
8645 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
8646 // is the source rtx for the current loop iteration.
8647 const char *code
= NULL
;
8652 if (AVR_HAVE_MOVW
&& clr1
&& clrw
)
8654 xop
[2] = all_regs_rtx
[d0
& ~1];
8656 code
= "movw %2,%3";
8661 xop
[2] = all_regs_rtx
[d0
];
8666 && d0
% 2 == (step
> 0))
8668 clrw
= all_regs_rtx
[d0
& ~1];
8672 else if (offset
&& s0
<= (signed) src
.regno_msb
)
8674 int movw
= AVR_HAVE_MOVW
&& offset
% 2 == 0
8675 && d0
% 2 == (offset
> 0)
8676 && d1
<= dest
.regno_msb
&& d1
>= dest
.regno
8677 && s1
<= (signed) src
.regno_msb
&& s1
>= (signed) src
.regno
;
8679 xop
[2] = all_regs_rtx
[d0
& ~movw
];
8680 xop
[3] = all_regs_rtx
[s0
& ~movw
];
8681 code
= movw
? "movw %2,%3" : "mov %2,%3";
8682 stepw
= step
* movw
;
8687 if (sign_extend
&& shift
!= ASHIFT
&& !sign_in_carry
8688 && (d0
== src
.regno_msb
|| d0
+ stepw
== src
.regno_msb
))
8690 /* We are going to override the sign bit. If we sign-extend,
8691 store the sign in the Carry flag. This is not needed if
8692 the destination will be ASHIFT in the remainder because
8693 the ASHIFT will set Carry without extra instruction. */
8695 avr_asm_len ("lsl %0", &all_regs_rtx
[src
.regno_msb
], plen
, 1);
8696 sign_in_carry
= true;
8699 unsigned src_msb
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
8701 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
8702 && src
.ibyte
> dest
.ibyte
8703 && (d0
== src_msb
|| d0
+ stepw
== src_msb
))
8705 /* We are going to override the MSB. If we shift right,
8706 store the MSB in the Carry flag. This is only needed if
8707 we don't sign-extend becaue with sign-extension the MSB
8708 (the sign) will be produced by the sign extension. */
8710 avr_asm_len ("lsr %0", &all_regs_rtx
[src_msb
], plen
, 1);
8711 msb_in_carry
= true;
8714 unsigned src_lsb
= dest
.regno
- offset
-1;
8716 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
8718 && (d0
== src_lsb
|| d0
+ stepw
== src_lsb
))
8720 /* We are going to override the new LSB; store it into carry. */
8722 avr_asm_len ("lsl %0", &all_regs_rtx
[src_lsb
], plen
, 1);
8723 code_ashift
= "rol %0";
8724 lsb_in_carry
= true;
8727 avr_asm_len (code
, xop
, plen
, 1);
8732 /* Step 2: Shift destination left by 1 bit position. This might be needed
8733 ====== for signed input and unsigned output. */
8735 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
)
8737 unsigned s0
= dest
.regno
- offset
-1;
8739 /* n1169 4.1.4 says:
8740 "Conversions from a fixed-point to an integer type round toward zero."
8741 Hence, converting a fract type to integer only gives a non-zero result
8743 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
8744 && SCALAR_FRACT_MODE_P (GET_MODE (xop
[1]))
8745 && !TARGET_FRACT_CONV_TRUNC
)
8747 gcc_assert (s0
== src
.regno_msb
);
8748 /* Check if the input is -1. We do that by checking if negating
8749 the input causes an integer overflow. */
8750 unsigned sn
= src
.regno
;
8751 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
8753 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
8755 /* Overflow goes with set carry. Clear carry otherwise. */
8756 avr_asm_len ("brvs 0f" CR_TAB
8757 "clc\n0:", NULL
, plen
, 2);
8759 /* Likewise, when converting from accumulator types to integer, we
8760 need to round up negative values. */
8761 else if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
8762 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
8763 && !TARGET_FRACT_CONV_TRUNC
8766 bool have_carry
= false;
8768 xop
[2] = all_regs_rtx
[s0
];
8769 if (!lsb_in_tmp_reg
&& !MAY_CLOBBER (s0
))
8770 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
8771 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
8772 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
8773 if (!lsb_in_tmp_reg
)
8775 unsigned sn
= src
.regno
;
8778 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
],
8783 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
], plen
, 1);
8784 lsb_in_tmp_reg
= !MAY_CLOBBER (s0
);
8786 /* Add in C and the rounding value 127. */
8787 /* If the destination msb is a sign byte, and in LD_REGS,
8788 grab it as a temporary. */
8790 && TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
],
8793 xop
[3] = all_regs_rtx
[dest
.regno_msb
];
8794 avr_asm_len ("ldi %3,127", xop
, plen
, 1);
8795 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
? "adc __tmp_reg__,%3"
8796 : have_carry
? "adc %2,%3"
8797 : lsb_in_tmp_reg
? "add __tmp_reg__,%3"
8803 /* Fall back to use __zero_reg__ as a temporary. */
8804 avr_asm_len ("dec __zero_reg__", NULL
, plen
, 1);
8806 avr_asm_len ("clt" CR_TAB
8807 "bld __zero_reg__,7", NULL
, plen
, 2);
8809 avr_asm_len ("lsr __zero_reg__", NULL
, plen
, 1);
8810 avr_asm_len (have_carry
&& lsb_in_tmp_reg
8811 ? "adc __tmp_reg__,__zero_reg__"
8812 : have_carry
? "adc %2,__zero_reg__"
8813 : lsb_in_tmp_reg
? "add __tmp_reg__,__zero_reg__"
8814 : "add %2,__zero_reg__",
8816 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL
, plen
, 1);
8819 for (d0
= dest
.regno
+ zero_bytes
;
8820 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
8821 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[d0
], plen
, 1);
8823 avr_asm_len (lsb_in_tmp_reg
8824 ? "\n0:\t" "lsl __tmp_reg__"
8825 : "\n0:\t" "lsl %2",
8828 else if (MAY_CLOBBER (s0
))
8829 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
8831 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8832 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
8834 code_ashift
= "rol %0";
8835 lsb_in_carry
= true;
8838 if (shift
== ASHIFT
)
8840 for (d0
= dest
.regno
+ zero_bytes
;
8841 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
8843 avr_asm_len (code_ashift
, &all_regs_rtx
[d0
], plen
, 1);
8844 code_ashift
= "rol %0";
8847 lsb_in_carry
= false;
8848 sign_in_carry
= true;
8851 /* Step 4a: Store MSB in carry if we don't already have it or will produce
8852 ======= it in sign-extension below. */
8854 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
8855 && src
.ibyte
> dest
.ibyte
)
8857 unsigned s0
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
8859 if (MAY_CLOBBER (s0
))
8860 avr_asm_len ("lsr %0", &all_regs_rtx
[s0
], plen
, 1);
8862 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8863 "lsr __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
8865 msb_in_carry
= true;
8868 /* Step 3: Sign-extend or zero-extend the destination as needed.
8871 if (sign_extend
&& !sign_in_carry
)
8873 unsigned s0
= src
.regno_msb
;
8875 if (MAY_CLOBBER (s0
))
8876 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
8878 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8879 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
8881 sign_in_carry
= true;
8884 gcc_assert (sign_in_carry
+ msb_in_carry
+ lsb_in_carry
<= 1);
8886 unsigned copies
= 0;
8887 rtx movw
= sign_extend
? NULL_RTX
: clrw
;
8889 for (d0
= dest
.regno_msb
- sign_bytes
+ 1; d0
<= dest
.regno_msb
; d0
++)
8891 if (AVR_HAVE_MOVW
&& movw
8892 && d0
% 2 == 0 && d0
+ 1 <= dest
.regno_msb
)
8894 xop
[2] = all_regs_rtx
[d0
];
8896 avr_asm_len ("movw %2,%3", xop
, plen
, 1);
8901 avr_asm_len (sign_extend
? "sbc %0,%0" : "clr %0",
8902 &all_regs_rtx
[d0
], plen
, 1);
8904 if (++copies
>= 2 && !movw
&& d0
% 2 == 1)
8905 movw
= all_regs_rtx
[d0
-1];
8910 /* Step 4: Right shift the destination. This might be needed for
8911 ====== conversions from unsigned to signed. */
8913 if (shift
== ASHIFTRT
)
8915 const char *code_ashiftrt
= "lsr %0";
8917 if (sign_extend
|| msb_in_carry
)
8918 code_ashiftrt
= "ror %0";
8920 if (src
.sbit
&& src
.ibyte
== dest
.ibyte
)
8921 code_ashiftrt
= "asr %0";
8923 for (d0
= dest
.regno_msb
- sign_bytes
;
8924 d0
>= dest
.regno
+ zero_bytes
- 1 && d0
>= dest
.regno
; d0
--)
8926 avr_asm_len (code_ashiftrt
, &all_regs_rtx
[d0
], plen
, 1);
8927 code_ashiftrt
= "ror %0";
8937 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
8938 XOP[2] is the rounding point, a CONST_INT. The function prints the
8939 instruction sequence if PLEN = NULL and computes the length in words
8940 of the sequence if PLEN != NULL. Most of this function deals with
8941 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
8944 avr_out_round (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*xop
, int *plen
)
8946 machine_mode mode
= GET_MODE (xop
[0]);
8947 machine_mode imode
= int_mode_for_mode (mode
);
8948 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
8949 int fbit
= (int) GET_MODE_FBIT (mode
);
8950 double_int i_add
= double_int_zero
.set_bit (fbit
-1 - INTVAL (xop
[2]));
8951 wide_int wi_add
= wi::set_bit_in_zero (fbit
-1 - INTVAL (xop
[2]),
8952 GET_MODE_PRECISION (imode
));
8953 // Lengths of PLUS and AND parts.
8954 int len_add
= 0, *plen_add
= plen
? &len_add
: NULL
;
8955 int len_and
= 0, *plen_and
= plen
? &len_and
: NULL
;
8957 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
8958 // the saturated addition so that we can emit the "rjmp 1f" before the
8961 rtx xadd
= const_fixed_from_double_int (i_add
, mode
);
8962 rtx xpattern
, xsrc
, op
[4];
8964 xsrc
= SIGNED_FIXED_POINT_MODE_P (mode
)
8965 ? gen_rtx_SS_PLUS (mode
, xop
[1], xadd
)
8966 : gen_rtx_US_PLUS (mode
, xop
[1], xadd
);
8967 xpattern
= gen_rtx_SET (xop
[0], xsrc
);
8972 avr_out_plus (xpattern
, op
, plen_add
, NULL
, false /* Don't print "0:" */);
8974 avr_asm_len ("rjmp 1f" CR_TAB
8975 "0:", NULL
, plen_add
, 1);
8977 // Keep all bits from RP and higher: ... 2^(-RP)
8978 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
8979 // Rounding point ^^^^^^^
8980 // Added above ^^^^^^^^^
8981 rtx xreg
= simplify_gen_subreg (imode
, xop
[0], mode
, 0);
8982 rtx xmask
= immed_wide_int_const (-wi_add
- wi_add
, imode
);
8984 xpattern
= gen_rtx_SET (xreg
, gen_rtx_AND (imode
, xreg
, xmask
));
8989 op
[3] = gen_rtx_SCRATCH (QImode
);
8990 avr_out_bitop (xpattern
, op
, plen_and
);
8991 avr_asm_len ("1:", NULL
, plen
, 0);
8994 *plen
= len_add
+ len_and
;
9000 /* Create RTL split patterns for byte sized rotate expressions. This
9001 produces a series of move instructions and considers overlap situations.
9002 Overlapping non-HImode operands need a scratch register. */
9005 avr_rotate_bytes (rtx operands
[])
9008 machine_mode mode
= GET_MODE (operands
[0]);
9009 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
9010 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
9011 int num
= INTVAL (operands
[2]);
9012 rtx scratch
= operands
[3];
9013 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
9014 Word move if no scratch is needed, otherwise use size of scratch. */
9015 machine_mode move_mode
= QImode
;
9016 int move_size
, offset
, size
;
9020 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
9023 move_mode
= GET_MODE (scratch
);
9025 /* Force DI rotate to use QI moves since other DI moves are currently split
9026 into QI moves so forward propagation works better. */
9029 /* Make scratch smaller if needed. */
9030 if (SCRATCH
!= GET_CODE (scratch
)
9031 && HImode
== GET_MODE (scratch
)
9032 && QImode
== move_mode
)
9033 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
9035 move_size
= GET_MODE_SIZE (move_mode
);
9036 /* Number of bytes/words to rotate. */
9037 offset
= (num
>> 3) / move_size
;
9038 /* Number of moves needed. */
9039 size
= GET_MODE_SIZE (mode
) / move_size
;
9040 /* Himode byte swap is special case to avoid a scratch register. */
9041 if (mode
== HImode
&& same_reg
)
9043 /* HImode byte swap, using xor. This is as quick as using scratch. */
9045 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
9046 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
9047 if (!rtx_equal_p (dst
, src
))
9049 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
9050 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
9051 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
9056 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
9057 /* Create linked list of moves to determine move order. */
9061 } move
[MAX_SIZE
+ 8];
9064 gcc_assert (size
<= MAX_SIZE
);
9065 /* Generate list of subreg moves. */
9066 for (i
= 0; i
< size
; i
++)
9069 int to
= (from
+ offset
) % size
;
9070 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
9071 mode
, from
* move_size
);
9072 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
9073 mode
, to
* move_size
);
9076 /* Mark dependence where a dst of one move is the src of another move.
9077 The first move is a conflict as it must wait until second is
9078 performed. We ignore moves to self - we catch this later. */
9080 for (i
= 0; i
< size
; i
++)
9081 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
9082 for (j
= 0; j
< size
; j
++)
9083 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
9085 /* The dst of move i is the src of move j. */
9092 /* Go through move list and perform non-conflicting moves. As each
9093 non-overlapping move is made, it may remove other conflicts
9094 so the process is repeated until no conflicts remain. */
9099 /* Emit move where dst is not also a src or we have used that
9101 for (i
= 0; i
< size
; i
++)
9102 if (move
[i
].src
!= NULL_RTX
)
9104 if (move
[i
].links
== -1
9105 || move
[move
[i
].links
].src
== NULL_RTX
)
9108 /* Ignore NOP moves to self. */
9109 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
9110 emit_move_insn (move
[i
].dst
, move
[i
].src
);
9112 /* Remove conflict from list. */
9113 move
[i
].src
= NULL_RTX
;
9119 /* Check for deadlock. This is when no moves occurred and we have
9120 at least one blocked move. */
9121 if (moves
== 0 && blocked
!= -1)
9123 /* Need to use scratch register to break deadlock.
9124 Add move to put dst of blocked move into scratch.
9125 When this move occurs, it will break chain deadlock.
9126 The scratch register is substituted for real move. */
9128 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
9130 move
[size
].src
= move
[blocked
].dst
;
9131 move
[size
].dst
= scratch
;
9132 /* Scratch move is never blocked. */
9133 move
[size
].links
= -1;
9134 /* Make sure we have valid link. */
9135 gcc_assert (move
[blocked
].links
!= -1);
9136 /* Replace src of blocking move with scratch reg. */
9137 move
[move
[blocked
].links
].src
= scratch
;
9138 /* Make dependent on scratch move occurring. */
9139 move
[blocked
].links
= size
;
9143 while (blocked
!= -1);
9149 /* Worker function for `ADJUST_INSN_LENGTH'. */
9150 /* Modifies the length assigned to instruction INSN
9151 LEN is the initially computed length of the insn. */
9154 avr_adjust_insn_length (rtx_insn
*insn
, int len
)
9156 rtx
*op
= recog_data
.operand
;
9157 enum attr_adjust_len adjust_len
;
9159 /* Some complex insns don't need length adjustment and therefore
9160 the length need not/must not be adjusted for these insns.
9161 It is easier to state this in an insn attribute "adjust_len" than
9162 to clutter up code here... */
9164 if (!NONDEBUG_INSN_P (insn
)
9165 || -1 == recog_memoized (insn
))
9170 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
9172 adjust_len
= get_attr_adjust_len (insn
);
9174 if (adjust_len
== ADJUST_LEN_NO
)
9176 /* Nothing to adjust: The length from attribute "length" is fine.
9177 This is the default. */
9182 /* Extract insn's operands. */
9184 extract_constrain_insn_cached (insn
);
9186 /* Dispatch to right function. */
9190 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
9191 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
9192 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
9194 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
9196 case ADJUST_LEN_PLUS
: avr_out_plus (insn
, op
, &len
); break;
9197 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
9199 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
9200 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
9201 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
9202 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
9203 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
9204 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
9205 case ADJUST_LEN_SEXT
: avr_out_sign_extend (insn
, op
, &len
); break;
9207 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
9208 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
9209 case ADJUST_LEN_ROUND
: avr_out_round (insn
, op
, &len
); break;
9211 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
9212 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
9213 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
9214 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
9215 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
9217 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
9218 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
9219 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
9221 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
9222 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
9223 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
9225 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
9226 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
9227 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
9229 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
9230 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
9231 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
9233 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
9235 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
9237 case ADJUST_LEN_INSV_NOTBIT
:
9238 avr_out_insert_notbit (insn
, op
, NULL_RTX
, &len
);
9240 case ADJUST_LEN_INSV_NOTBIT_0
:
9241 avr_out_insert_notbit (insn
, op
, const0_rtx
, &len
);
9243 case ADJUST_LEN_INSV_NOTBIT_7
:
9244 avr_out_insert_notbit (insn
, op
, GEN_INT (7), &len
);
9254 /* Return nonzero if register REG dead after INSN. */
9257 reg_unused_after (rtx_insn
*insn
, rtx reg
)
9259 return (dead_or_set_p (insn
, reg
)
9260 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
9263 /* Return nonzero if REG is not used after INSN.
9264 We assume REG is a reload reg, and therefore does
9265 not live past labels. It may live past calls or jumps though. */
9268 _reg_unused_after (rtx_insn
*insn
, rtx reg
)
9273 /* If the reg is set by this instruction, then it is safe for our
9274 case. Disregard the case where this is a store to memory, since
9275 we are checking a register used in the store address. */
9276 set
= single_set (insn
);
9277 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
9278 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
9281 while ((insn
= NEXT_INSN (insn
)))
9284 code
= GET_CODE (insn
);
9287 /* If this is a label that existed before reload, then the register
9288 if dead here. However, if this is a label added by reorg, then
9289 the register may still be live here. We can't tell the difference,
9290 so we just ignore labels completely. */
9291 if (code
== CODE_LABEL
)
9299 if (code
== JUMP_INSN
)
9302 /* If this is a sequence, we must handle them all at once.
9303 We could have for instance a call that sets the target register,
9304 and an insn in a delay slot that uses the register. In this case,
9305 we must return 0. */
9306 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
9308 rtx_sequence
*seq
= as_a
<rtx_sequence
*> (PATTERN (insn
));
9312 for (i
= 0; i
< seq
->len (); i
++)
9314 rtx_insn
*this_insn
= seq
->insn (i
);
9315 rtx set
= single_set (this_insn
);
9317 if (CALL_P (this_insn
))
9319 else if (JUMP_P (this_insn
))
9321 if (INSN_ANNULLED_BRANCH_P (this_insn
))
9326 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
9328 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
9330 if (GET_CODE (SET_DEST (set
)) != MEM
)
9336 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
9341 else if (code
== JUMP_INSN
)
9345 if (code
== CALL_INSN
)
9348 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
9349 if (GET_CODE (XEXP (tem
, 0)) == USE
9350 && REG_P (XEXP (XEXP (tem
, 0), 0))
9351 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
9353 if (call_used_regs
[REGNO (reg
)])
9357 set
= single_set (insn
);
9359 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
9361 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
9362 return GET_CODE (SET_DEST (set
)) != MEM
;
9363 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
9370 /* Implement `TARGET_ASM_INTEGER'. */
9371 /* Target hook for assembling integer objects. The AVR version needs
9372 special handling for references to certain labels. */
9375 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
9377 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
9378 && text_segment_operand (x
, VOIDmode
))
9380 fputs ("\t.word\tgs(", asm_out_file
);
9381 output_addr_const (asm_out_file
, x
);
9382 fputs (")\n", asm_out_file
);
9386 else if (GET_MODE (x
) == PSImode
)
9388 /* This needs binutils 2.23+, see PR binutils/13503 */
9390 fputs ("\t.byte\tlo8(", asm_out_file
);
9391 output_addr_const (asm_out_file
, x
);
9392 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
9394 fputs ("\t.byte\thi8(", asm_out_file
);
9395 output_addr_const (asm_out_file
, x
);
9396 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
9398 fputs ("\t.byte\thh8(", asm_out_file
);
9399 output_addr_const (asm_out_file
, x
);
9400 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
9404 else if (CONST_FIXED_P (x
))
9408 /* varasm fails to handle big fixed modes that don't fit in hwi. */
9410 for (n
= 0; n
< size
; n
++)
9412 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
9413 default_assemble_integer (xn
, 1, aligned_p
);
9420 && avr_address_tiny_pm_p (x
))
9422 x
= plus_constant (Pmode
, x
, AVR_TINY_PM_OFFSET
);
9425 return default_assemble_integer (x
, size
, aligned_p
);
9429 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
9430 /* Return value is nonzero if pseudos that have been
9431 assigned to registers of class CLASS would likely be spilled
9432 because registers of CLASS are needed for spill registers. */
9435 avr_class_likely_spilled_p (reg_class_t c
)
9437 return (c
!= ALL_REGS
&&
9438 (AVR_TINY
? 1 : c
!= ADDW_REGS
));
9442 /* Valid attributes:
9443 progmem - Put data to program memory.
9444 signal - Make a function to be hardware interrupt.
9445 After function prologue interrupts remain disabled.
9446 interrupt - Make a function to be hardware interrupt. Before function
9447 prologue interrupts are enabled by means of SEI.
9448 naked - Don't generate function prologue/epilogue and RET
9451 /* Handle a "progmem" attribute; arguments as in
9452 struct attribute_spec.handler. */
9455 avr_handle_progmem_attribute (tree
*node
, tree name
,
9456 tree args ATTRIBUTE_UNUSED
,
9457 int flags ATTRIBUTE_UNUSED
,
9462 if (TREE_CODE (*node
) == TYPE_DECL
)
9464 /* This is really a decl attribute, not a type attribute,
9465 but try to handle it for GCC 3.0 backwards compatibility. */
9467 tree type
= TREE_TYPE (*node
);
9468 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
9469 tree newtype
= build_type_attribute_variant (type
, attr
);
9471 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
9472 TREE_TYPE (*node
) = newtype
;
9473 *no_add_attrs
= true;
9475 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
9477 *no_add_attrs
= false;
9481 warning (OPT_Wattributes
, "%qE attribute ignored",
9483 *no_add_attrs
= true;
9490 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
9491 struct attribute_spec.handler. */
9494 avr_handle_fndecl_attribute (tree
*node
, tree name
,
9495 tree args ATTRIBUTE_UNUSED
,
9496 int flags ATTRIBUTE_UNUSED
,
9499 if (TREE_CODE (*node
) != FUNCTION_DECL
)
9501 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
9503 *no_add_attrs
= true;
9510 avr_handle_fntype_attribute (tree
*node
, tree name
,
9511 tree args ATTRIBUTE_UNUSED
,
9512 int flags ATTRIBUTE_UNUSED
,
9515 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
9517 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
9519 *no_add_attrs
= true;
9526 avr_handle_absdata_attribute (tree
*node
, tree name
, tree
/* args */,
9527 int /* flags */, bool *no_add
)
9529 location_t loc
= DECL_SOURCE_LOCATION (*node
);
9533 if (TREE_CODE (*node
) != VAR_DECL
9534 || (!TREE_STATIC (*node
) && !DECL_EXTERNAL (*node
)))
9536 warning_at (loc
, OPT_Wattributes
, "%qE attribute only applies to"
9537 " variables in static storage", name
);
9543 warning_at (loc
, OPT_Wattributes
, "%qE attribute only supported"
9544 " for reduced Tiny cores", name
);
9552 avr_handle_addr_attribute (tree
*node
, tree name
, tree args
,
9553 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
9555 bool io_p
= (strncmp (IDENTIFIER_POINTER (name
), "io", 2) == 0);
9556 location_t loc
= DECL_SOURCE_LOCATION (*node
);
9558 if (TREE_CODE (*node
) != VAR_DECL
)
9560 warning_at (loc
, 0, "%qE attribute only applies to variables", name
);
9564 if (args
!= NULL_TREE
)
9566 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
9567 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
9568 tree arg
= TREE_VALUE (args
);
9569 if (TREE_CODE (arg
) != INTEGER_CST
)
9571 warning (0, "%qE attribute allows only an integer constant argument",
9576 && (!tree_fits_shwi_p (arg
)
9577 || !(strcmp (IDENTIFIER_POINTER (name
), "io_low") == 0
9578 ? low_io_address_operand
: io_address_operand
)
9579 (GEN_INT (TREE_INT_CST_LOW (arg
)), QImode
)))
9581 warning_at (loc
, 0, "%qE attribute address out of range", name
);
9586 tree attribs
= DECL_ATTRIBUTES (*node
);
9587 const char *names
[] = { "io", "io_low", "address", NULL
} ;
9588 for (const char **p
= names
; *p
; p
++)
9590 tree other
= lookup_attribute (*p
, attribs
);
9591 if (other
&& TREE_VALUE (other
))
9594 "both %s and %qE attribute provide address",
9603 if (*no_add
== false && io_p
&& !TREE_THIS_VOLATILE (*node
))
9604 warning_at (loc
, 0, "%qE attribute on non-volatile variable", name
);
9610 avr_eval_addr_attrib (rtx x
)
9612 if (GET_CODE (x
) == SYMBOL_REF
9613 && (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_ADDRESS
))
9615 tree decl
= SYMBOL_REF_DECL (x
);
9616 tree attr
= NULL_TREE
;
9618 if (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_IO
)
9620 attr
= lookup_attribute ("io", DECL_ATTRIBUTES (decl
));
9621 if (!attr
|| !TREE_VALUE (attr
))
9622 attr
= lookup_attribute ("io_low", DECL_ATTRIBUTES (decl
));
9625 if (!attr
|| !TREE_VALUE (attr
))
9626 attr
= lookup_attribute ("address", DECL_ATTRIBUTES (decl
));
9627 gcc_assert (attr
&& TREE_VALUE (attr
) && TREE_VALUE (TREE_VALUE (attr
)));
9628 return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
))));
9634 /* AVR attributes. */
9635 static const struct attribute_spec
9636 avr_attribute_table
[] =
9638 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
9639 affects_type_identity } */
9640 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
9642 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
9644 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
9646 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
9648 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
9650 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
9652 { "io", 0, 1, false, false, false, avr_handle_addr_attribute
,
9654 { "io_low", 0, 1, false, false, false, avr_handle_addr_attribute
,
9656 { "address", 1, 1, false, false, false, avr_handle_addr_attribute
,
9658 { "absdata", 0, 0, true, false, false, avr_handle_absdata_attribute
,
9660 { NULL
, 0, 0, false, false, false, NULL
, false }
9664 /* Return true if we support address space AS for the architecture in effect
9665 and false, otherwise. If LOC is not UNKNOWN_LOCATION then also issue
9666 a respective error. */
9669 avr_addr_space_supported_p (addr_space_t as
, location_t loc
)
9673 if (loc
!= UNKNOWN_LOCATION
)
9674 error_at (loc
, "address spaces are not supported for reduced "
9678 else if (avr_addrspace
[as
].segment
>= avr_n_flash
)
9680 if (loc
!= UNKNOWN_LOCATION
)
9681 error_at (loc
, "address space %qs not supported for devices with "
9682 "flash size up to %d KiB", avr_addrspace
[as
].name
,
9691 /* Implement `TARGET_ADDR_SPACE_DIAGNOSE_USAGE'. */
9694 avr_addr_space_diagnose_usage (addr_space_t as
, location_t loc
)
9696 (void) avr_addr_space_supported_p (as
, loc
);
9700 /* Look if DECL shall be placed in program memory space by
9701 means of attribute `progmem' or some address-space qualifier.
9702 Return non-zero if DECL is data that must end up in Flash and
9703 zero if the data lives in RAM (.bss, .data, .rodata, ...).
9705 Return 2 if DECL is located in 24-bit flash address-space
9706 Return 1 if DECL is located in 16-bit flash address-space
9707 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
9708 Return 0 otherwise */
9711 avr_progmem_p (tree decl
, tree attributes
)
9715 if (TREE_CODE (decl
) != VAR_DECL
)
9718 if (avr_decl_memx_p (decl
))
9721 if (avr_decl_flash_p (decl
))
9725 != lookup_attribute ("progmem", attributes
))
9732 while (TREE_CODE (a
) == ARRAY_TYPE
);
9734 if (a
== error_mark_node
)
9737 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
9744 /* Return true if DECL has attribute `absdata' set. This function should
9745 only be used for AVR_TINY. */
9748 avr_decl_absdata_p (tree decl
, tree attributes
)
9750 return (TREE_CODE (decl
) == VAR_DECL
9751 && NULL_TREE
!= lookup_attribute ("absdata", attributes
));
9755 /* Scan type TYP for pointer references to address space ASn.
9756 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
9757 the AS are also declared to be CONST.
9758 Otherwise, return the respective address space, i.e. a value != 0. */
9761 avr_nonconst_pointer_addrspace (tree typ
)
9763 while (ARRAY_TYPE
== TREE_CODE (typ
))
9764 typ
= TREE_TYPE (typ
);
9766 if (POINTER_TYPE_P (typ
))
9769 tree target
= TREE_TYPE (typ
);
9771 /* Pointer to function: Test the function's return type. */
9773 if (FUNCTION_TYPE
== TREE_CODE (target
))
9774 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
9776 /* "Ordinary" pointers... */
9778 while (TREE_CODE (target
) == ARRAY_TYPE
)
9779 target
= TREE_TYPE (target
);
9781 /* Pointers to non-generic address space must be const. */
9783 as
= TYPE_ADDR_SPACE (target
);
9785 if (!ADDR_SPACE_GENERIC_P (as
)
9786 && !TYPE_READONLY (target
)
9787 && avr_addr_space_supported_p (as
))
9792 /* Scan pointer's target type. */
9794 return avr_nonconst_pointer_addrspace (target
);
9797 return ADDR_SPACE_GENERIC
;
9801 /* Sanity check NODE so that all pointers targeting non-generic address spaces
9802 go along with CONST qualifier. Writing to these address spaces should
9803 be detected and complained about as early as possible. */
9806 avr_pgm_check_var_decl (tree node
)
9808 const char *reason
= NULL
;
9810 addr_space_t as
= ADDR_SPACE_GENERIC
;
9812 gcc_assert (as
== 0);
9814 if (avr_log
.progmem
)
9815 avr_edump ("%?: %t\n", node
);
9817 switch (TREE_CODE (node
))
9823 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
9824 reason
= "variable";
9828 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
9829 reason
= "function parameter";
9833 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
9834 reason
= "structure field";
9838 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
9840 reason
= "return type of function";
9844 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
9852 error ("pointer targeting address space %qs must be const in %qT",
9853 avr_addrspace
[as
].name
, node
);
9855 error ("pointer targeting address space %qs must be const"
9857 avr_addrspace
[as
].name
, reason
, node
);
9860 return reason
== NULL
;
9864 /* Add the section attribute if the variable is in progmem. */
9867 avr_insert_attributes (tree node
, tree
*attributes
)
9869 avr_pgm_check_var_decl (node
);
9871 if (TREE_CODE (node
) == VAR_DECL
9872 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
9873 && avr_progmem_p (node
, *attributes
))
9878 /* For C++, we have to peel arrays in order to get correct
9879 determination of readonlyness. */
9882 node0
= TREE_TYPE (node0
);
9883 while (TREE_CODE (node0
) == ARRAY_TYPE
);
9885 if (error_mark_node
== node0
)
9888 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
9890 if (!TYPE_READONLY (node0
)
9891 && !TREE_READONLY (node
))
9893 const char *reason
= "__attribute__((progmem))";
9895 if (!ADDR_SPACE_GENERIC_P (as
))
9896 reason
= avr_addrspace
[as
].name
;
9898 if (avr_log
.progmem
)
9899 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
9901 error ("variable %q+D must be const in order to be put into"
9902 " read-only section by means of %qs", node
, reason
);
9908 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
9909 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
9910 /* Track need of __do_clear_bss. */
9913 avr_asm_output_aligned_decl_common (FILE * stream
,
9916 unsigned HOST_WIDE_INT size
,
9917 unsigned int align
, bool local_p
)
9919 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
9922 if (mem
!= NULL_RTX
&& MEM_P (mem
)
9923 && GET_CODE ((symbol
= XEXP (mem
, 0))) == SYMBOL_REF
9924 && (SYMBOL_REF_FLAGS (symbol
) & (SYMBOL_FLAG_IO
| SYMBOL_FLAG_ADDRESS
)))
9929 fprintf (stream
, "\t.globl\t");
9930 assemble_name (stream
, name
);
9931 fprintf (stream
, "\n");
9933 if (SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_ADDRESS
)
9935 assemble_name (stream
, name
);
9936 fprintf (stream
, " = %ld\n",
9937 (long) INTVAL (avr_eval_addr_attrib (symbol
)));
9940 error_at (DECL_SOURCE_LOCATION (decl
),
9941 "static IO declaration for %q+D needs an address", decl
);
9945 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
9946 There is no need to trigger __do_clear_bss code for them. */
9948 if (!STR_PREFIX_P (name
, "__gnu_lto"))
9949 avr_need_clear_bss_p
= true;
9952 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
9954 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
9958 avr_asm_asm_output_aligned_bss (FILE *file
, tree decl
, const char *name
,
9959 unsigned HOST_WIDE_INT size
, int align
,
9960 void (*default_func
)
9961 (FILE *, tree
, const char *,
9962 unsigned HOST_WIDE_INT
, int))
9964 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
9967 if (mem
!= NULL_RTX
&& MEM_P (mem
)
9968 && GET_CODE ((symbol
= XEXP (mem
, 0))) == SYMBOL_REF
9969 && (SYMBOL_REF_FLAGS (symbol
) & (SYMBOL_FLAG_IO
| SYMBOL_FLAG_ADDRESS
)))
9971 if (!(SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_ADDRESS
))
9972 error_at (DECL_SOURCE_LOCATION (decl
),
9973 "IO definition for %q+D needs an address", decl
);
9974 avr_asm_output_aligned_decl_common (file
, decl
, name
, size
, align
, false);
9977 default_func (file
, decl
, name
, size
, align
);
9981 /* Unnamed section callback for data_section
9982 to track need of __do_copy_data. */
9985 avr_output_data_section_asm_op (const void *data
)
9987 avr_need_copy_data_p
= true;
9989 /* Dispatch to default. */
9990 output_section_asm_op (data
);
9994 /* Unnamed section callback for bss_section
9995 to track need of __do_clear_bss. */
9998 avr_output_bss_section_asm_op (const void *data
)
10000 avr_need_clear_bss_p
= true;
10002 /* Dispatch to default. */
10003 output_section_asm_op (data
);
10007 /* Unnamed section callback for progmem*.data sections. */
10010 avr_output_progmem_section_asm_op (const void *data
)
10012 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
10013 (const char*) data
);
10017 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
10020 avr_asm_init_sections (void)
10022 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
10023 resp. `avr_need_copy_data_p'. */
10025 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
10026 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
10027 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
10031 /* Implement `TARGET_ASM_NAMED_SECTION'. */
10032 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
10035 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
10037 if (flags
& AVR_SECTION_PROGMEM
)
10039 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
10040 const char *old_prefix
= ".rodata";
10041 const char *new_prefix
= avr_addrspace
[as
].section_name
;
10043 if (STR_PREFIX_P (name
, old_prefix
))
10045 const char *sname
= ACONCAT ((new_prefix
,
10046 name
+ strlen (old_prefix
), NULL
));
10047 default_elf_asm_named_section (sname
, flags
, decl
);
10051 default_elf_asm_named_section (new_prefix
, flags
, decl
);
10055 if (!avr_need_copy_data_p
)
10056 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
10057 || STR_PREFIX_P (name
, ".rodata")
10058 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
10060 if (!avr_need_clear_bss_p
)
10061 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
10063 default_elf_asm_named_section (name
, flags
, decl
);
10067 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
10069 static unsigned int
10070 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
10072 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
10074 if (STR_PREFIX_P (name
, ".noinit"))
10076 if (decl
&& TREE_CODE (decl
) == VAR_DECL
10077 && DECL_INITIAL (decl
) == NULL_TREE
)
10078 flags
|= SECTION_BSS
; /* @nobits */
10080 warning (0, "only uninitialized variables can be placed in the "
10081 ".noinit section");
10084 if (decl
&& DECL_P (decl
)
10085 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
10087 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
10089 /* Attribute progmem puts data in generic address space.
10090 Set section flags as if it was in __flash to get the right
10091 section prefix in the remainder. */
10093 if (ADDR_SPACE_GENERIC_P (as
))
10094 as
= ADDR_SPACE_FLASH
;
10096 flags
|= as
* SECTION_MACH_DEP
;
10097 flags
&= ~SECTION_WRITE
;
10098 flags
&= ~SECTION_BSS
;
10105 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
10108 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
10110 tree addr_attr
= NULL_TREE
;
10112 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
10113 readily available, see PR34734. So we postpone the warning
10114 about uninitialized data in program memory section until here. */
10117 && decl
&& DECL_P (decl
)
10118 && NULL_TREE
== DECL_INITIAL (decl
)
10119 && !DECL_EXTERNAL (decl
)
10120 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
10122 warning (OPT_Wuninitialized
,
10123 "uninitialized variable %q+D put into "
10124 "program memory area", decl
);
10127 default_encode_section_info (decl
, rtl
, new_decl_p
);
10129 if (decl
&& DECL_P (decl
)
10130 && TREE_CODE (decl
) != FUNCTION_DECL
10132 && SYMBOL_REF_P (XEXP (rtl
, 0)))
10134 rtx sym
= XEXP (rtl
, 0);
10135 tree type
= TREE_TYPE (decl
);
10136 tree attr
= DECL_ATTRIBUTES (decl
);
10137 if (type
== error_mark_node
)
10140 addr_space_t as
= TYPE_ADDR_SPACE (type
);
10142 /* PSTR strings are in generic space but located in flash:
10143 patch address space. */
10146 && -1 == avr_progmem_p (decl
, attr
))
10147 as
= ADDR_SPACE_FLASH
;
10149 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
10151 tree io_low_attr
= lookup_attribute ("io_low", attr
);
10152 tree io_attr
= lookup_attribute ("io", attr
);
10155 && TREE_VALUE (io_low_attr
) && TREE_VALUE (TREE_VALUE (io_low_attr
)))
10156 addr_attr
= io_attr
;
10158 && TREE_VALUE (io_attr
) && TREE_VALUE (TREE_VALUE (io_attr
)))
10159 addr_attr
= io_attr
;
10161 addr_attr
= lookup_attribute ("address", attr
);
10163 || (io_attr
&& addr_attr
10164 && low_io_address_operand
10165 (GEN_INT (TREE_INT_CST_LOW
10166 (TREE_VALUE (TREE_VALUE (addr_attr
)))), QImode
)))
10167 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_IO_LOW
;
10168 if (io_attr
|| io_low_attr
)
10169 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_IO
;
10170 /* If we have an (io) address attribute specification, but the variable
10171 is external, treat the address as only a tentative definition
10172 to be used to determine if an io port is in the lower range, but
10173 don't use the exact value for constant propagation. */
10174 if (addr_attr
&& !DECL_EXTERNAL (decl
))
10175 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_ADDRESS
;
10180 && VAR_DECL
== TREE_CODE (decl
)
10182 && SYMBOL_REF_P (XEXP (rtl
, 0)))
10184 rtx sym
= XEXP (rtl
, 0);
10185 bool progmem_p
= -1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
));
10189 // Tag symbols for later addition of 0x4000 (AVR_TINY_PM_OFFSET).
10190 SYMBOL_REF_FLAGS (sym
) |= AVR_SYMBOL_FLAG_TINY_PM
;
10193 if (avr_decl_absdata_p (decl
, DECL_ATTRIBUTES (decl
))
10198 // If addr_attr is non-null, it has an argument. Peek into it.
10199 && TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (addr_attr
))) < 0xc0))
10201 // May be accessed by LDS / STS.
10202 SYMBOL_REF_FLAGS (sym
) |= AVR_SYMBOL_FLAG_TINY_ABSDATA
;
10206 && avr_decl_absdata_p (decl
, DECL_ATTRIBUTES (decl
)))
10208 error ("%q+D has incompatible attributes %qs and %qs",
10209 decl
, "progmem", "absdata");
10215 /* Implement `TARGET_ASM_SELECT_SECTION' */
10218 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
10220 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
10222 if (decl
&& DECL_P (decl
)
10223 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
10225 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
10227 /* __progmem__ goes in generic space but shall be allocated to
10230 if (ADDR_SPACE_GENERIC_P (as
))
10231 as
= ADDR_SPACE_FLASH
;
10233 if (sect
->common
.flags
& SECTION_NAMED
)
10235 const char * name
= sect
->named
.name
;
10236 const char * old_prefix
= ".rodata";
10237 const char * new_prefix
= avr_addrspace
[as
].section_name
;
10239 if (STR_PREFIX_P (name
, old_prefix
))
10241 const char *sname
= ACONCAT ((new_prefix
,
10242 name
+ strlen (old_prefix
), NULL
));
10243 return get_section (sname
,
10244 sect
->common
.flags
& ~SECTION_DECLARED
,
10249 if (!progmem_section
[as
])
10251 progmem_section
[as
]
10252 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
10253 avr_addrspace
[as
].section_name
);
10256 return progmem_section
[as
];
10262 /* Implement `TARGET_ASM_FILE_START'. */
10263 /* Outputs some text at the start of each assembler file. */
10266 avr_file_start (void)
10268 int sfr_offset
= avr_arch
->sfr_offset
;
10270 if (avr_arch
->asm_only
)
10271 error ("architecture %qs supported for assembler only", avr_mmcu
);
10273 default_file_start ();
10275 /* Print I/O addresses of some SFRs used with IN and OUT. */
10278 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
10280 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
10281 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
10282 if (AVR_HAVE_RAMPZ
)
10283 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
10284 if (AVR_HAVE_RAMPY
)
10285 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
10286 if (AVR_HAVE_RAMPX
)
10287 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
10288 if (AVR_HAVE_RAMPD
)
10289 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
10290 if (AVR_XMEGA
|| AVR_TINY
)
10291 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
10292 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", AVR_TMP_REGNO
);
10293 fprintf (asm_out_file
, "__zero_reg__ = %d\n", AVR_ZERO_REGNO
);
10297 /* Implement `TARGET_ASM_FILE_END'. */
10298 /* Outputs to the stdio stream FILE some
10299 appropriate text to go at the end of an assembler file. */
10302 avr_file_end (void)
10304 /* Output these only if there is anything in the
10305 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
10306 input section(s) - some code size can be saved by not
10307 linking in the initialization code from libgcc if resp.
10308 sections are empty, see PR18145. */
10310 if (avr_need_copy_data_p
)
10311 fputs (".global __do_copy_data\n", asm_out_file
);
10313 if (avr_need_clear_bss_p
)
10314 fputs (".global __do_clear_bss\n", asm_out_file
);
10318 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
10319 /* Choose the order in which to allocate hard registers for
10320 pseudo-registers local to a basic block.
10322 Store the desired register order in the array `reg_alloc_order'.
10323 Element 0 should be the register to allocate first; element 1, the
10324 next register; and so on. */
10327 avr_adjust_reg_alloc_order (void)
10330 static const int order_0
[] =
10333 18, 19, 20, 21, 22, 23,
10336 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10340 static const int tiny_order_0
[] = {
10350 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
10352 static const int order_1
[] =
10354 18, 19, 20, 21, 22, 23, 24, 25,
10357 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10361 static const int tiny_order_1
[] = {
10370 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
10372 static const int order_2
[] =
10374 25, 24, 23, 22, 21, 20, 19, 18,
10377 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
10382 /* Select specific register allocation order.
10383 Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
10384 so different allocation order should be used. */
10386 const int *order
= (TARGET_ORDER_1
? (AVR_TINY
? tiny_order_1
: order_1
)
10387 : TARGET_ORDER_2
? (AVR_TINY
? tiny_order_0
: order_2
)
10388 : (AVR_TINY
? tiny_order_0
: order_0
));
10390 for (i
= 0; i
< ARRAY_SIZE (order_0
); ++i
)
10391 reg_alloc_order
[i
] = order
[i
];
10395 /* Implement `TARGET_REGISTER_MOVE_COST' */
10398 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
10399 reg_class_t from
, reg_class_t to
)
10401 return (from
== STACK_REG
? 6
10402 : to
== STACK_REG
? 12
10407 /* Implement `TARGET_MEMORY_MOVE_COST' */
10410 avr_memory_move_cost (machine_mode mode
,
10411 reg_class_t rclass ATTRIBUTE_UNUSED
,
10412 bool in ATTRIBUTE_UNUSED
)
10414 return (mode
== QImode
? 2
10415 : mode
== HImode
? 4
10416 : mode
== SImode
? 8
10417 : mode
== SFmode
? 8
10422 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
10423 cost of an RTX operand given its context. X is the rtx of the
10424 operand, MODE is its mode, and OUTER is the rtx_code of this
10425 operand's parent operator. */
10428 avr_operand_rtx_cost (rtx x
, machine_mode mode
, enum rtx_code outer
,
10429 int opno
, bool speed
)
10431 enum rtx_code code
= GET_CODE (x
);
10443 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10450 avr_rtx_costs (x
, mode
, outer
, opno
, &total
, speed
);
10454 /* Worker function for AVR backend's rtx_cost function.
10455 X is rtx expression whose cost is to be calculated.
10456 Return true if the complete cost has been computed.
10457 Return false if subexpressions should be scanned.
10458 In either case, *TOTAL contains the cost result. */
10461 avr_rtx_costs_1 (rtx x
, machine_mode mode
, int outer_code ATTRIBUTE_UNUSED
,
10462 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
10464 enum rtx_code code
= GET_CODE (x
);
10475 /* Immediate constants are as cheap as registers. */
10480 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10488 *total
= COSTS_N_INSNS (1);
10494 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
10500 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10508 *total
= COSTS_N_INSNS (1);
10514 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10518 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10519 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10523 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
10524 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
10525 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
10530 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
10531 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
10532 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
10541 && MULT
== GET_CODE (XEXP (x
, 0))
10542 && register_operand (XEXP (x
, 1), QImode
))
10545 *total
= COSTS_N_INSNS (speed
? 4 : 3);
10546 /* multiply-add with constant: will be split and load constant. */
10547 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
10548 *total
= COSTS_N_INSNS (1) + *total
;
10551 *total
= COSTS_N_INSNS (1);
10552 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10553 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
10558 && (MULT
== GET_CODE (XEXP (x
, 0))
10559 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
10560 && register_operand (XEXP (x
, 1), HImode
)
10561 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
10562 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
10565 *total
= COSTS_N_INSNS (speed
? 5 : 4);
10566 /* multiply-add with constant: will be split and load constant. */
10567 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
10568 *total
= COSTS_N_INSNS (1) + *total
;
10571 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10573 *total
= COSTS_N_INSNS (2);
10574 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10577 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
10578 *total
= COSTS_N_INSNS (1);
10580 *total
= COSTS_N_INSNS (2);
10584 if (!CONST_INT_P (XEXP (x
, 1)))
10586 *total
= COSTS_N_INSNS (3);
10587 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10590 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
10591 *total
= COSTS_N_INSNS (2);
10593 *total
= COSTS_N_INSNS (3);
10597 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10599 *total
= COSTS_N_INSNS (4);
10600 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10603 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
10604 *total
= COSTS_N_INSNS (1);
10606 *total
= COSTS_N_INSNS (4);
10612 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10618 && register_operand (XEXP (x
, 0), QImode
)
10619 && MULT
== GET_CODE (XEXP (x
, 1)))
10622 *total
= COSTS_N_INSNS (speed
? 4 : 3);
10623 /* multiply-sub with constant: will be split and load constant. */
10624 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
10625 *total
= COSTS_N_INSNS (1) + *total
;
10630 && register_operand (XEXP (x
, 0), HImode
)
10631 && (MULT
== GET_CODE (XEXP (x
, 1))
10632 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
10633 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
10634 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
10637 *total
= COSTS_N_INSNS (speed
? 5 : 4);
10638 /* multiply-sub with constant: will be split and load constant. */
10639 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
10640 *total
= COSTS_N_INSNS (1) + *total
;
10646 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10647 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10648 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10649 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
10653 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10654 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10655 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
10663 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
10665 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
10673 rtx op0
= XEXP (x
, 0);
10674 rtx op1
= XEXP (x
, 1);
10675 enum rtx_code code0
= GET_CODE (op0
);
10676 enum rtx_code code1
= GET_CODE (op1
);
10677 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
10678 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
10681 && (u8_operand (op1
, HImode
)
10682 || s8_operand (op1
, HImode
)))
10684 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
10688 && register_operand (op1
, HImode
))
10690 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
10693 else if (ex0
|| ex1
)
10695 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
10698 else if (register_operand (op0
, HImode
)
10699 && (u8_operand (op1
, HImode
)
10700 || s8_operand (op1
, HImode
)))
10702 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
10706 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
10709 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
10716 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
10727 /* Add some additional costs besides CALL like moves etc. */
10729 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
10733 /* Just a rough estimate. Even with -O2 we don't want bulky
10734 code expanded inline. */
10736 *total
= COSTS_N_INSNS (25);
10742 *total
= COSTS_N_INSNS (300);
10744 /* Add some additional costs besides CALL like moves etc. */
10745 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
10748 if (mode
== DImode
)
10756 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10757 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
10765 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
10767 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
10768 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10769 /* For div/mod with const-int divisor we have at least the cost of
10770 loading the divisor. */
10771 if (CONST_INT_P (XEXP (x
, 1)))
10772 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10773 /* Add some overall penaly for clobbering and moving around registers */
10774 *total
+= COSTS_N_INSNS (2);
10781 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
10782 *total
= COSTS_N_INSNS (1);
10787 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
10788 *total
= COSTS_N_INSNS (3);
10793 if (CONST_INT_P (XEXP (x
, 1)))
10794 switch (INTVAL (XEXP (x
, 1)))
10798 *total
= COSTS_N_INSNS (5);
10801 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
10809 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10816 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10818 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
10819 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10824 val
= INTVAL (XEXP (x
, 1));
10826 *total
= COSTS_N_INSNS (3);
10827 else if (val
>= 0 && val
<= 7)
10828 *total
= COSTS_N_INSNS (val
);
10830 *total
= COSTS_N_INSNS (1);
10837 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
10838 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
10839 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
10841 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
10846 if (const1_rtx
== (XEXP (x
, 1))
10847 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
10849 *total
= COSTS_N_INSNS (2);
10853 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10855 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10856 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10860 switch (INTVAL (XEXP (x
, 1)))
10867 *total
= COSTS_N_INSNS (2);
10870 *total
= COSTS_N_INSNS (3);
10876 *total
= COSTS_N_INSNS (4);
10881 *total
= COSTS_N_INSNS (5);
10884 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
10887 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
10890 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
10893 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10894 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10900 if (!CONST_INT_P (XEXP (x
, 1)))
10902 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
10905 switch (INTVAL (XEXP (x
, 1)))
10913 *total
= COSTS_N_INSNS (3);
10916 *total
= COSTS_N_INSNS (5);
10919 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
10925 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10927 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
10928 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10932 switch (INTVAL (XEXP (x
, 1)))
10938 *total
= COSTS_N_INSNS (3);
10943 *total
= COSTS_N_INSNS (4);
10946 *total
= COSTS_N_INSNS (6);
10949 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
10952 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
10953 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10961 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10968 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10970 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
10971 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10976 val
= INTVAL (XEXP (x
, 1));
10978 *total
= COSTS_N_INSNS (4);
10980 *total
= COSTS_N_INSNS (2);
10981 else if (val
>= 0 && val
<= 7)
10982 *total
= COSTS_N_INSNS (val
);
10984 *total
= COSTS_N_INSNS (1);
10989 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10991 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10992 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10996 switch (INTVAL (XEXP (x
, 1)))
11002 *total
= COSTS_N_INSNS (2);
11005 *total
= COSTS_N_INSNS (3);
11011 *total
= COSTS_N_INSNS (4);
11015 *total
= COSTS_N_INSNS (5);
11018 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
11021 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
11025 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
11028 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
11029 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11035 if (!CONST_INT_P (XEXP (x
, 1)))
11037 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
11040 switch (INTVAL (XEXP (x
, 1)))
11046 *total
= COSTS_N_INSNS (3);
11050 *total
= COSTS_N_INSNS (5);
11053 *total
= COSTS_N_INSNS (4);
11056 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
11062 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
11064 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
11065 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11069 switch (INTVAL (XEXP (x
, 1)))
11075 *total
= COSTS_N_INSNS (4);
11080 *total
= COSTS_N_INSNS (6);
11083 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
11086 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
11089 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
11090 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11098 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11105 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
11107 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
11108 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11113 val
= INTVAL (XEXP (x
, 1));
11115 *total
= COSTS_N_INSNS (3);
11116 else if (val
>= 0 && val
<= 7)
11117 *total
= COSTS_N_INSNS (val
);
11119 *total
= COSTS_N_INSNS (1);
11124 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
11126 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
11127 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11131 switch (INTVAL (XEXP (x
, 1)))
11138 *total
= COSTS_N_INSNS (2);
11141 *total
= COSTS_N_INSNS (3);
11146 *total
= COSTS_N_INSNS (4);
11150 *total
= COSTS_N_INSNS (5);
11156 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
11159 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
11163 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
11166 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
11167 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11173 if (!CONST_INT_P (XEXP (x
, 1)))
11175 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
11178 switch (INTVAL (XEXP (x
, 1)))
11186 *total
= COSTS_N_INSNS (3);
11189 *total
= COSTS_N_INSNS (5);
11192 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
11198 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
11200 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
11201 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11205 switch (INTVAL (XEXP (x
, 1)))
11211 *total
= COSTS_N_INSNS (4);
11214 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
11219 *total
= COSTS_N_INSNS (4);
11222 *total
= COSTS_N_INSNS (6);
11225 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
11226 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
11234 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
11238 switch (GET_MODE (XEXP (x
, 0)))
11241 *total
= COSTS_N_INSNS (1);
11242 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
11243 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), QImode
, code
,
11248 *total
= COSTS_N_INSNS (2);
11249 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
11250 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), HImode
, code
,
11252 else if (INTVAL (XEXP (x
, 1)) != 0)
11253 *total
+= COSTS_N_INSNS (1);
11257 *total
= COSTS_N_INSNS (3);
11258 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
11259 *total
+= COSTS_N_INSNS (2);
11263 *total
= COSTS_N_INSNS (4);
11264 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
11265 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), SImode
, code
,
11267 else if (INTVAL (XEXP (x
, 1)) != 0)
11268 *total
+= COSTS_N_INSNS (3);
11274 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
11280 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
11281 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
11282 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
11284 if (QImode
== mode
|| HImode
== mode
)
11286 *total
= COSTS_N_INSNS (2);
11299 /* Implement `TARGET_RTX_COSTS'. */
11302 avr_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
11303 int opno
, int *total
, bool speed
)
11305 bool done
= avr_rtx_costs_1 (x
, mode
, outer_code
,
11306 opno
, total
, speed
);
11308 if (avr_log
.rtx_costs
)
11310 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
11311 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
11318 /* Implement `TARGET_ADDRESS_COST'. */
11321 avr_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
11322 addr_space_t as ATTRIBUTE_UNUSED
,
11323 bool speed ATTRIBUTE_UNUSED
)
11327 if (GET_CODE (x
) == PLUS
11328 && CONST_INT_P (XEXP (x
, 1))
11329 && (REG_P (XEXP (x
, 0))
11330 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
11332 if (INTVAL (XEXP (x
, 1)) > MAX_LD_OFFSET(mode
))
11335 else if (CONSTANT_ADDRESS_P (x
))
11338 && io_address_operand (x
, QImode
))
11342 && avr_address_tiny_absdata_p (x
, QImode
))
11346 if (avr_log
.address_cost
)
11347 avr_edump ("\n%?: %d = %r\n", cost
, x
);
11352 /* Test for extra memory constraint 'Q'.
11353 It's a memory address based on Y or Z pointer with valid displacement. */
11356 extra_constraint_Q (rtx x
)
11360 if (GET_CODE (XEXP (x
,0)) == PLUS
11361 && REG_P (XEXP (XEXP (x
,0), 0))
11362 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
11363 && (INTVAL (XEXP (XEXP (x
,0), 1))
11364 <= MAX_LD_OFFSET (GET_MODE (x
))))
11366 rtx xx
= XEXP (XEXP (x
,0), 0);
11367 int regno
= REGNO (xx
);
11369 ok
= (/* allocate pseudos */
11370 regno
>= FIRST_PSEUDO_REGISTER
11371 /* strictly check */
11372 || regno
== REG_Z
|| regno
== REG_Y
11373 /* XXX frame & arg pointer checks */
11374 || xx
== frame_pointer_rtx
11375 || xx
== arg_pointer_rtx
);
11377 if (avr_log
.constraints
)
11378 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
11379 ok
, reload_completed
, reload_in_progress
, x
);
11385 /* Convert condition code CONDITION to the valid AVR condition code. */
11388 avr_normalize_condition (RTX_CODE condition
)
11401 gcc_unreachable ();
11405 /* Helper function for `avr_reorg'. */
11408 avr_compare_pattern (rtx_insn
*insn
)
11410 rtx pattern
= single_set (insn
);
11413 && NONJUMP_INSN_P (insn
)
11414 && SET_DEST (pattern
) == cc0_rtx
11415 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
11417 machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
11418 machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
11420 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
11421 They must not be swapped, thus skip them. */
11423 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
11424 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
11431 /* Helper function for `avr_reorg'. */
11433 /* Expansion of switch/case decision trees leads to code like
11435 cc0 = compare (Reg, Num)
11439 cc0 = compare (Reg, Num)
11443 The second comparison is superfluous and can be deleted.
11444 The second jump condition can be transformed from a
11445 "difficult" one to a "simple" one because "cc0 > 0" and
11446 "cc0 >= 0" will have the same effect here.
11448 This function relies on the way switch/case is being expaned
11449 as binary decision tree. For example code see PR 49903.
11451 Return TRUE if optimization performed.
11452 Return FALSE if nothing changed.
11454 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
11456 We don't want to do this in text peephole because it is
11457 tedious to work out jump offsets there and the second comparison
11458 might have been transormed by `avr_reorg'.
11460 RTL peephole won't do because peephole2 does not scan across
11464 avr_reorg_remove_redundant_compare (rtx_insn
*insn1
)
11466 rtx comp1
, ifelse1
, xcond1
;
11468 rtx comp2
, ifelse2
, xcond2
;
11469 rtx_insn
*branch2
, *insn2
;
11470 enum rtx_code code
;
11474 /* Look out for: compare1 - branch1 - compare2 - branch2 */
11476 branch1
= next_nonnote_nondebug_insn (insn1
);
11477 if (!branch1
|| !JUMP_P (branch1
))
11480 insn2
= next_nonnote_nondebug_insn (branch1
);
11481 if (!insn2
|| !avr_compare_pattern (insn2
))
11484 branch2
= next_nonnote_nondebug_insn (insn2
);
11485 if (!branch2
|| !JUMP_P (branch2
))
11488 comp1
= avr_compare_pattern (insn1
);
11489 comp2
= avr_compare_pattern (insn2
);
11490 xcond1
= single_set (branch1
);
11491 xcond2
= single_set (branch2
);
11493 if (!comp1
|| !comp2
11494 || !rtx_equal_p (comp1
, comp2
)
11495 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
11496 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
11497 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
11498 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
11503 comp1
= SET_SRC (comp1
);
11504 ifelse1
= SET_SRC (xcond1
);
11505 ifelse2
= SET_SRC (xcond2
);
11507 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
11509 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
11510 || !REG_P (XEXP (comp1
, 0))
11511 || !CONST_INT_P (XEXP (comp1
, 1))
11512 || XEXP (ifelse1
, 2) != pc_rtx
11513 || XEXP (ifelse2
, 2) != pc_rtx
11514 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
11515 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
11516 || !COMPARISON_P (XEXP (ifelse2
, 0))
11517 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
11518 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
11519 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
11520 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
11525 /* We filtered the insn sequence to look like
11531 (if_then_else (eq (cc0)
11540 (if_then_else (CODE (cc0)
11546 code
= GET_CODE (XEXP (ifelse2
, 0));
11548 /* Map GT/GTU to GE/GEU which is easier for AVR.
11549 The first two instructions compare/branch on EQ
11550 so we may replace the difficult
11552 if (x == VAL) goto L1;
11553 if (x > VAL) goto L2;
11557 if (x == VAL) goto L1;
11558 if (x >= VAL) goto L2;
11560 Similarly, replace LE/LEU by LT/LTU. */
11571 code
= avr_normalize_condition (code
);
11578 /* Wrap the branches into UNSPECs so they won't be changed or
11579 optimized in the remainder. */
11581 target
= XEXP (XEXP (ifelse1
, 1), 0);
11582 cond
= XEXP (ifelse1
, 0);
11583 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
11585 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
11587 target
= XEXP (XEXP (ifelse2
, 1), 0);
11588 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
11589 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
11591 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
11593 /* The comparisons in insn1 and insn2 are exactly the same;
11594 insn2 is superfluous so delete it. */
11596 delete_insn (insn2
);
11597 delete_insn (branch1
);
11598 delete_insn (branch2
);
11604 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
11605 /* Optimize conditional jumps. */
11610 rtx_insn
*insn
= get_insns();
11612 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
11614 rtx pattern
= avr_compare_pattern (insn
);
11620 && avr_reorg_remove_redundant_compare (insn
))
11625 if (compare_diff_p (insn
))
11627 /* Now we work under compare insn with difficult branch. */
11629 rtx_insn
*next
= next_real_insn (insn
);
11630 rtx pat
= PATTERN (next
);
11632 pattern
= SET_SRC (pattern
);
11634 if (true_regnum (XEXP (pattern
, 0)) >= 0
11635 && true_regnum (XEXP (pattern
, 1)) >= 0)
11637 rtx x
= XEXP (pattern
, 0);
11638 rtx src
= SET_SRC (pat
);
11639 rtx t
= XEXP (src
,0);
11640 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
11641 XEXP (pattern
, 0) = XEXP (pattern
, 1);
11642 XEXP (pattern
, 1) = x
;
11643 INSN_CODE (next
) = -1;
11645 else if (true_regnum (XEXP (pattern
, 0)) >= 0
11646 && XEXP (pattern
, 1) == const0_rtx
)
11648 /* This is a tst insn, we can reverse it. */
11649 rtx src
= SET_SRC (pat
);
11650 rtx t
= XEXP (src
,0);
11652 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
11653 XEXP (pattern
, 1) = XEXP (pattern
, 0);
11654 XEXP (pattern
, 0) = const0_rtx
;
11655 INSN_CODE (next
) = -1;
11656 INSN_CODE (insn
) = -1;
11658 else if (true_regnum (XEXP (pattern
, 0)) >= 0
11659 && CONST_INT_P (XEXP (pattern
, 1)))
11661 rtx x
= XEXP (pattern
, 1);
11662 rtx src
= SET_SRC (pat
);
11663 rtx t
= XEXP (src
,0);
11664 machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
11666 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
11668 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
11669 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
11670 INSN_CODE (next
) = -1;
11671 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.
   On AVR the return value starts in R24 (R25:R24 for 16 bit).  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
/* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */
/* Return true iff REGNO is the (first) register used for function
   return values.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return (regno == avr_ret_register ());
}
11696 /* Implement `TARGET_LIBCALL_VALUE'. */
11697 /* Create an RTX representing the place where a
11698 library function returns a value of mode MODE. */
11701 avr_libcall_value (machine_mode mode
,
11702 const_rtx func ATTRIBUTE_UNUSED
)
11704 int offs
= GET_MODE_SIZE (mode
);
11707 offs
= (offs
+ 1) & ~1;
11709 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
11713 /* Implement `TARGET_FUNCTION_VALUE'. */
11714 /* Create an RTX representing the place where a
11715 function returns a value of data type VALTYPE. */
11718 avr_function_value (const_tree type
,
11719 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
11720 bool outgoing ATTRIBUTE_UNUSED
)
11724 if (TYPE_MODE (type
) != BLKmode
)
11725 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
11727 offs
= int_size_in_bytes (type
);
11730 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
11731 offs
= GET_MODE_SIZE (SImode
);
11732 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
11733 offs
= GET_MODE_SIZE (DImode
);
11735 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
11739 test_hard_reg_class (enum reg_class rclass
, rtx x
)
11741 int regno
= true_regnum (x
);
11745 if (TEST_HARD_REG_CLASS (rclass
, regno
))
11752 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
11753 and thus is suitable to be skipped by CPSE, SBRC, etc. */
11756 avr_2word_insn_p (rtx_insn
*insn
)
11758 if (TARGET_SKIP_BUG
11760 || 2 != get_attr_length (insn
))
11765 switch (INSN_CODE (insn
))
11770 case CODE_FOR_movqi_insn
:
11771 case CODE_FOR_movuqq_insn
:
11772 case CODE_FOR_movqq_insn
:
11774 rtx set
= single_set (insn
);
11775 rtx src
= SET_SRC (set
);
11776 rtx dest
= SET_DEST (set
);
11778 /* Factor out LDS and STS from movqi_insn. */
11781 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
11783 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
11785 else if (REG_P (dest
)
11788 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
11794 case CODE_FOR_call_insn
:
11795 case CODE_FOR_call_value_insn
:
11802 jump_over_one_insn_p (rtx_insn
*insn
, rtx dest
)
11804 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
11807 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
11808 int dest_addr
= INSN_ADDRESSES (uid
);
11809 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
11811 return (jump_offset
== 1
11812 || (jump_offset
== 2
11813 && avr_2word_insn_p (next_active_insn (insn
))));
11817 /* Worker function for `HARD_REGNO_MODE_OK'. */
11818 /* Returns 1 if a value of mode MODE can be stored starting with hard
11819 register number REGNO. On the enhanced core, anything larger than
11820 1 byte must start in even numbered register for "movw" to work
11821 (this way we don't have to check for odd registers everywhere). */
11824 avr_hard_regno_mode_ok (int regno
, machine_mode mode
)
11826 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
11827 Disallowing QI et al. in these regs might lead to code like
11828 (set (subreg:QI (reg:HI 28) n) ...)
11829 which will result in wrong code because reload does not
11830 handle SUBREGs of hard regsisters like this.
11831 This could be fixed in reload. However, it appears
11832 that fixing reload is not wanted by reload people. */
11834 /* Any GENERAL_REGS register can hold 8-bit values. */
11836 if (GET_MODE_SIZE (mode
) == 1)
11839 /* FIXME: Ideally, the following test is not needed.
11840 However, it turned out that it can reduce the number
11841 of spill fails. AVR and it's poor endowment with
11842 address registers is extreme stress test for reload. */
11844 if (GET_MODE_SIZE (mode
) >= 4
11848 /* All modes larger than 8 bits should start in an even register. */
11850 return !(regno
& 1);
11854 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
11857 avr_hard_regno_call_part_clobbered (unsigned regno
, machine_mode mode
)
11859 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
11860 represent valid hard registers like, e.g. HI:29. Returning TRUE
11861 for such registers can lead to performance degradation as mentioned
11862 in PR53595. Thus, report invalid hard registers as FALSE. */
11864 if (!avr_hard_regno_mode_ok (regno
, mode
))
11867 /* Return true if any of the following boundaries is crossed:
11868 17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30. */
11870 return ((regno
<= LAST_CALLEE_SAVED_REG
&&
11871 regno
+ GET_MODE_SIZE (mode
) > (LAST_CALLEE_SAVED_REG
+ 1))
11872 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
11873 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
11877 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
11880 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED
,
11881 addr_space_t as
, RTX_CODE outer_code
,
11882 RTX_CODE index_code ATTRIBUTE_UNUSED
)
11884 if (!ADDR_SPACE_GENERIC_P (as
))
11886 return POINTER_Z_REGS
;
11890 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
11892 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
11896 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
11899 avr_regno_mode_code_ok_for_base_p (int regno
,
11900 machine_mode mode ATTRIBUTE_UNUSED
,
11901 addr_space_t as ATTRIBUTE_UNUSED
,
11902 RTX_CODE outer_code
,
11903 RTX_CODE index_code ATTRIBUTE_UNUSED
)
11907 if (!ADDR_SPACE_GENERIC_P (as
))
11909 if (regno
< FIRST_PSEUDO_REGISTER
11917 regno
= reg_renumber
[regno
];
11919 if (regno
== REG_Z
)
11928 if (regno
< FIRST_PSEUDO_REGISTER
11932 || regno
== ARG_POINTER_REGNUM
))
11936 else if (reg_renumber
)
11938 regno
= reg_renumber
[regno
];
11943 || regno
== ARG_POINTER_REGNUM
)
11950 && PLUS
== outer_code
11960 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
11961 /* Set 32-bit register OP[0] to compile-time constant OP[1].
11962 CLOBBER_REG is a QI clobber register or NULL_RTX.
11963 LEN == NULL: output instructions.
11964 LEN != NULL: set *LEN to the length of the instruction sequence
11965 (in words) printed with LEN = NULL.
11966 If CLEAR_P is true, OP[0] had been cleard to Zero already.
11967 If CLEAR_P is false, nothing is known about OP[0].
11969 The effect on cc0 is as follows:
11971 Load 0 to any register except ZERO_REG : NONE
11972 Load ld register with any value : NONE
11973 Anything else: : CLOBBER */
11976 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
11980 rtx xval
, xdest
[4];
11982 int clobber_val
= 1234;
11983 bool cooked_clobber_p
= false;
11984 bool set_p
= false;
11985 machine_mode mode
= GET_MODE (dest
);
11986 int n
, n_bytes
= GET_MODE_SIZE (mode
);
11988 gcc_assert (REG_P (dest
)
11989 && CONSTANT_P (src
));
11994 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
11995 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
11997 if (REGNO (dest
) < 16
11998 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
12000 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
12003 /* We might need a clobber reg but don't have one. Look at the value to
12004 be loaded more closely. A clobber is only needed if it is a symbol
12005 or contains a byte that is neither 0, -1 or a power of 2. */
12007 if (NULL_RTX
== clobber_reg
12008 && !test_hard_reg_class (LD_REGS
, dest
)
12009 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
12010 || !avr_popcount_each_byte (src
, n_bytes
,
12011 (1 << 0) | (1 << 1) | (1 << 8))))
12013 /* We have no clobber register but need one. Cook one up.
12014 That's cheaper than loading from constant pool. */
12016 cooked_clobber_p
= true;
12017 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
12018 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
12021 /* Now start filling DEST from LSB to MSB. */
12023 for (n
= 0; n
< n_bytes
; n
++)
12026 bool done_byte
= false;
12030 /* Crop the n-th destination byte. */
12032 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
12033 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
12035 if (!CONST_INT_P (src
)
12036 && !CONST_FIXED_P (src
)
12037 && !CONST_DOUBLE_P (src
))
12039 static const char* const asm_code
[][2] =
12041 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
12042 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
12043 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
12044 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
12049 xop
[2] = clobber_reg
;
12051 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
12056 /* Crop the n-th source byte. */
12058 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
12059 ival
[n
] = INTVAL (xval
);
12061 /* Look if we can reuse the low word by means of MOVW. */
12067 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
12068 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
12070 if (INTVAL (lo16
) == INTVAL (hi16
))
12072 if (0 != INTVAL (lo16
)
12075 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
12082 /* Don't use CLR so that cc0 is set as expected. */
12087 avr_asm_len (ldreg_p
? "ldi %0,0"
12088 : AVR_ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
12089 : "mov %0,__zero_reg__",
12090 &xdest
[n
], len
, 1);
12094 if (clobber_val
== ival
[n
]
12095 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
12100 /* LD_REGS can use LDI to move a constant value */
12106 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
12110 /* Try to reuse value already loaded in some lower byte. */
12112 for (j
= 0; j
< n
; j
++)
12113 if (ival
[j
] == ival
[n
])
12118 avr_asm_len ("mov %0,%1", xop
, len
, 1);
12126 /* Need no clobber reg for -1: Use CLR/DEC */
12131 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
12133 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
12136 else if (1 == ival
[n
])
12139 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
12141 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
12145 /* Use T flag or INC to manage powers of 2 if we have
12148 if (NULL_RTX
== clobber_reg
12149 && single_one_operand (xval
, QImode
))
12152 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
12154 gcc_assert (constm1_rtx
!= xop
[1]);
12159 avr_asm_len ("set", xop
, len
, 1);
12163 avr_asm_len ("clr %0", xop
, len
, 1);
12165 avr_asm_len ("bld %0,%1", xop
, len
, 1);
12169 /* We actually need the LD_REGS clobber reg. */
12171 gcc_assert (NULL_RTX
!= clobber_reg
);
12175 xop
[2] = clobber_reg
;
12176 clobber_val
= ival
[n
];
12178 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
12179 "mov %0,%2", xop
, len
, 2);
12182 /* If we cooked up a clobber reg above, restore it. */
12184 if (cooked_clobber_p
)
12186 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
12191 /* Reload the constant OP[1] into the HI register OP[0].
12192 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12193 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
12194 need a clobber reg or have to cook one up.
12196 PLEN == NULL: Output instructions.
12197 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
12198 by the insns printed.
12203 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
12205 output_reload_in_const (op
, clobber_reg
, plen
, false);
12210 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
12211 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12212 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
12213 need a clobber reg or have to cook one up.
12215 LEN == NULL: Output instructions.
12217 LEN != NULL: Output nothing. Set *LEN to number of words occupied
12218 by the insns printed.
12223 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
12226 && !test_hard_reg_class (LD_REGS
, op
[0])
12227 && (CONST_INT_P (op
[1])
12228 || CONST_FIXED_P (op
[1])
12229 || CONST_DOUBLE_P (op
[1])))
12231 int len_clr
, len_noclr
;
12233 /* In some cases it is better to clear the destination beforehand, e.g.
12235 CLR R2 CLR R3 MOVW R4,R2 INC R2
12239 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
12241 We find it too tedious to work that out in the print function.
12242 Instead, we call the print function twice to get the lengths of
12243 both methods and use the shortest one. */
12245 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
12246 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
12248 if (len_noclr
- len_clr
== 4)
12250 /* Default needs 4 CLR instructions: clear register beforehand. */
12252 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
12253 "mov %B0,__zero_reg__" CR_TAB
12254 "movw %C0,%A0", &op
[0], len
, 3);
12256 output_reload_in_const (op
, clobber_reg
, len
, true);
12265 /* Default: destination not pre-cleared. */
12267 output_reload_in_const (op
, clobber_reg
, len
, false);
12272 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
12274 output_reload_in_const (op
, clobber_reg
, len
, false);
12279 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
12282 avr_output_addr_vec_elt (FILE *stream
, int value
)
12284 if (AVR_HAVE_JMP_CALL
)
12285 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
12287 fprintf (stream
, "\trjmp .L%d\n", value
);
12291 avr_conditional_register_usage(void)
12297 const int tiny_reg_alloc_order
[] = {
12306 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
12309 /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
12310 - R0-R15 are not available in Tiny Core devices
12311 - R16 and R17 are fixed registers. */
12313 for (i
= 0; i
<= 17; i
++)
12316 call_used_regs
[i
] = 1;
12319 /* Set R18 to R21 as callee saved registers
12320 - R18, R19, R20 and R21 are the callee saved registers in
12321 Tiny Core devices */
12323 for (i
= 18; i
<= LAST_CALLEE_SAVED_REG
; i
++)
12325 call_used_regs
[i
] = 0;
12328 /* Update register allocation order for Tiny Core devices */
12330 for (i
= 0; i
< ARRAY_SIZE (tiny_reg_alloc_order
); i
++)
12332 reg_alloc_order
[i
] = tiny_reg_alloc_order
[i
];
12335 CLEAR_HARD_REG_SET (reg_class_contents
[(int) ADDW_REGS
]);
12336 CLEAR_HARD_REG_SET (reg_class_contents
[(int) NO_LD_REGS
]);
12340 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
12341 /* Returns true if SCRATCH are safe to be allocated as a scratch
12342 registers (for a define_peephole2) in the current function. */
12345 avr_hard_regno_scratch_ok (unsigned int regno
)
12347 /* Interrupt functions can only use registers that have already been saved
12348 by the prologue, even if they would normally be call-clobbered. */
12350 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
12351 && !df_regs_ever_live_p (regno
))
12354 /* Don't allow hard registers that might be part of the frame pointer.
12355 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12356 and don't care for a frame pointer that spans more than one register. */
12358 if ((!reload_completed
|| frame_pointer_needed
)
12359 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
12368 /* Worker function for `HARD_REGNO_RENAME_OK'. */
12369 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
12372 avr_hard_regno_rename_ok (unsigned int old_reg
,
12373 unsigned int new_reg
)
12375 /* Interrupt functions can only use registers that have already been
12376 saved by the prologue, even if they would normally be
12379 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
12380 && !df_regs_ever_live_p (new_reg
))
12383 /* Don't allow hard registers that might be part of the frame pointer.
12384 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12385 and don't care for a frame pointer that spans more than one register. */
12387 if ((!reload_completed
|| frame_pointer_needed
)
12388 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
12389 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
12397 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
12398 or memory location in the I/O space (QImode only).
12400 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
12401 Operand 1: register operand to test, or CONST_INT memory address.
12402 Operand 2: bit number.
12403 Operand 3: label to jump to if the test is true. */
12406 avr_out_sbxx_branch (rtx_insn
*insn
, rtx operands
[])
12408 enum rtx_code comp
= GET_CODE (operands
[0]);
12409 bool long_jump
= get_attr_length (insn
) >= 4;
12410 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
12414 else if (comp
== LT
)
12418 comp
= reverse_condition (comp
);
12420 switch (GET_CODE (operands
[1]))
12429 if (low_io_address_operand (operands
[1], QImode
))
12432 output_asm_insn ("sbis %i1,%2", operands
);
12434 output_asm_insn ("sbic %i1,%2", operands
);
12438 gcc_assert (io_address_operand (operands
[1], QImode
));
12439 output_asm_insn ("in __tmp_reg__,%i1", operands
);
12441 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
12443 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
12446 break; /* CONST_INT */
12451 output_asm_insn ("sbrs %T1%T2", operands
);
12453 output_asm_insn ("sbrc %T1%T2", operands
);
12459 return ("rjmp .+4" CR_TAB
12468 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
12471 avr_asm_out_ctor (rtx symbol
, int priority
)
12473 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
12474 default_ctor_section_asm_out_constructor (symbol
, priority
);
12478 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
12481 avr_asm_out_dtor (rtx symbol
, int priority
)
12483 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
12484 default_dtor_section_asm_out_destructor (symbol
, priority
);
12488 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
12491 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
12493 HOST_WIDE_INT size
= int_size_in_bytes (type
);
12494 HOST_WIDE_INT ret_size_limit
= AVR_TINY
? 4 : 8;
12496 /* In avr, there are 8 return registers. But, for Tiny Core
12497 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
12498 Return true if size is unknown or greater than the limit. */
12500 if (size
== -1 || size
> ret_size_limit
)
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
12527 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
12529 static machine_mode
12530 avr_addr_space_address_mode (addr_space_t as
)
12532 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
12536 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
12538 static machine_mode
12539 avr_addr_space_pointer_mode (addr_space_t as
)
12541 return avr_addr_space_address_mode (as
);
12545 /* Helper for following function. */
12548 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
12550 gcc_assert (REG_P (reg
));
12554 return REGNO (reg
) == REG_Z
;
12557 /* Avoid combine to propagate hard regs. */
12559 if (can_create_pseudo_p()
12560 && REGNO (reg
) < REG_Z
)
12569 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
12572 avr_addr_space_legitimate_address_p (machine_mode mode
, rtx x
,
12573 bool strict
, addr_space_t as
)
12582 case ADDR_SPACE_GENERIC
:
12583 return avr_legitimate_address_p (mode
, x
, strict
);
12585 case ADDR_SPACE_FLASH
:
12586 case ADDR_SPACE_FLASH1
:
12587 case ADDR_SPACE_FLASH2
:
12588 case ADDR_SPACE_FLASH3
:
12589 case ADDR_SPACE_FLASH4
:
12590 case ADDR_SPACE_FLASH5
:
12592 switch (GET_CODE (x
))
12595 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
12599 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
12608 case ADDR_SPACE_MEMX
:
12611 && can_create_pseudo_p());
12613 if (LO_SUM
== GET_CODE (x
))
12615 rtx hi
= XEXP (x
, 0);
12616 rtx lo
= XEXP (x
, 1);
12619 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
12621 && REGNO (lo
) == REG_Z
);
12627 if (avr_log
.legitimate_address_p
)
12629 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
12630 "reload_completed=%d reload_in_progress=%d %s:",
12631 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
12632 reg_renumber
? "(reg_renumber)" : "");
12634 if (GET_CODE (x
) == PLUS
12635 && REG_P (XEXP (x
, 0))
12636 && CONST_INT_P (XEXP (x
, 1))
12637 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
12640 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
12641 true_regnum (XEXP (x
, 0)));
12644 avr_edump ("\n%r\n", x
);
12651 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
12654 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
12655 machine_mode mode
, addr_space_t as
)
12657 if (ADDR_SPACE_GENERIC_P (as
))
12658 return avr_legitimize_address (x
, old_x
, mode
);
12660 if (avr_log
.legitimize_address
)
12662 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
12669 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
12672 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
12674 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
12675 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
12677 if (avr_log
.progmem
)
12678 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
12679 src
, type_from
, type_to
);
12681 /* Up-casting from 16-bit to 24-bit pointer. */
12683 if (as_from
!= ADDR_SPACE_MEMX
12684 && as_to
== ADDR_SPACE_MEMX
)
12688 rtx reg
= gen_reg_rtx (PSImode
);
12690 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
12691 sym
= XEXP (sym
, 0);
12693 /* Look at symbol flags: avr_encode_section_info set the flags
12694 also if attribute progmem was seen so that we get the right
12695 promotion for, e.g. PSTR-like strings that reside in generic space
12696 but are located in flash. In that case we patch the incoming
12699 if (SYMBOL_REF
== GET_CODE (sym
)
12700 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
12702 as_from
= ADDR_SPACE_FLASH
;
12705 /* Linearize memory: RAM has bit 23 set. */
12707 msb
= ADDR_SPACE_GENERIC_P (as_from
)
12709 : avr_addrspace
[as_from
].segment
;
12711 src
= force_reg (Pmode
, src
);
12713 emit_insn (msb
== 0
12714 ? gen_zero_extendhipsi2 (reg
, src
)
12715 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
12720 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
12722 if (as_from
== ADDR_SPACE_MEMX
12723 && as_to
!= ADDR_SPACE_MEMX
)
12725 rtx new_src
= gen_reg_rtx (Pmode
);
12727 src
= force_reg (PSImode
, src
);
12729 emit_move_insn (new_src
,
12730 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
12738 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
12741 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
12742 addr_space_t superset ATTRIBUTE_UNUSED
)
12744 /* Allow any kind of pointer mess. */
12750 /* Implement `TARGET_CONVERT_TO_TYPE'. */
12753 avr_convert_to_type (tree type
, tree expr
)
12755 /* Print a diagnose for pointer conversion that changes the address
12756 space of the pointer target to a non-enclosing address space,
12757 provided -Waddr-space-convert is on.
12759 FIXME: Filter out cases where the target object is known to
12760 be located in the right memory, like in
12762 (const __flash*) PSTR ("text")
12764 Also try to distinguish between explicit casts requested by
12765 the user and implicit casts like
12767 void f (const __flash char*);
12769 void g (const char *p)
12771 f ((const __flash*) p);
12774 under the assumption that an explicit casts means that the user
12775 knows what he is doing, e.g. interface with PSTR or old style
12776 code with progmem and pgm_read_xxx.
12779 if (avr_warn_addr_space_convert
12780 && expr
!= error_mark_node
12781 && POINTER_TYPE_P (type
)
12782 && POINTER_TYPE_P (TREE_TYPE (expr
)))
12784 addr_space_t as_old
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr
)));
12785 addr_space_t as_new
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
12787 if (avr_log
.progmem
)
12788 avr_edump ("%?: type = %t\nexpr = %t\n\n", type
, expr
);
12790 if (as_new
!= ADDR_SPACE_MEMX
12791 && as_new
!= as_old
)
12793 location_t loc
= EXPR_LOCATION (expr
);
12794 const char *name_old
= avr_addrspace
[as_old
].name
;
12795 const char *name_new
= avr_addrspace
[as_new
].name
;
12797 warning (OPT_Waddr_space_convert
,
12798 "conversion from address space %qs to address space %qs",
12799 ADDR_SPACE_GENERIC_P (as_old
) ? "generic" : name_old
,
12800 ADDR_SPACE_GENERIC_P (as_new
) ? "generic" : name_new
);
12802 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, expr
);
12810 /* PR63633: The middle-end might come up with hard regs as input operands.
12812 RMASK is a bit mask representing a subset of hard registers R0...R31:
12813 Rn is an element of that set iff bit n of RMASK is set.
12814 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
12815 OP[n] has to be fixed; otherwise OP[n] is left alone.
12817 For each element of OPMASK which is a hard register overlapping RMASK,
12818 replace OP[n] with a newly created pseudo register
12820 HREG == 0: Also emit a move insn that copies the contents of that
12821 hard register into the new pseudo.
12823 HREG != 0: Also set HREG[n] to the hard register. */
12826 avr_fix_operands (rtx
*op
, rtx
*hreg
, unsigned opmask
, unsigned rmask
)
12828 for (; opmask
; opmask
>>= 1, op
++)
12837 && REGNO (reg
) < FIRST_PSEUDO_REGISTER
12838 // This hard-reg overlaps other prohibited hard regs?
12839 && (rmask
& regmask (GET_MODE (reg
), REGNO (reg
))))
12841 *op
= gen_reg_rtx (GET_MODE (reg
));
12843 emit_move_insn (*op
, reg
);
12855 avr_fix_inputs (rtx
*op
, unsigned opmask
, unsigned rmask
)
12857 avr_fix_operands (op
, NULL
, opmask
, rmask
);
12861 /* Helper for the function below: If bit n of MASK is set and
12862 HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
12863 Otherwise do nothing for that n. Return TRUE. */
12866 avr_move_fixed_operands (rtx
*op
, rtx
*hreg
, unsigned mask
)
12868 for (; mask
; mask
>>= 1, op
++, hreg
++)
12871 emit_move_insn (*hreg
, *op
);
12877 /* PR63633: The middle-end might come up with hard regs as output operands.
12879 GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
12880 RMASK is a bit mask representing a subset of hard registers R0...R31:
12881 Rn is an element of that set iff bit n of RMASK is set.
12882 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
12883 OP[n] has to be fixed; otherwise OP[n] is left alone.
12885 Emit the insn sequence as generated by GEN() with all elements of OPMASK
12886 which are hard registers overlapping RMASK replaced by newly created
12887 pseudo registers. After the sequence has been emitted, emit insns that
12888 move the contents of respective pseudos to their hard regs. */
12891 avr_emit3_fix_outputs (rtx (*gen
)(rtx
,rtx
,rtx
), rtx
*op
,
12892 unsigned opmask
, unsigned rmask
)
12897 /* It is letigimate for GEN to call this function, and in order not to
12898 get self-recursive we use the following static kludge. This is the
12899 only way not to duplicate all expanders and to avoid ugly and
12900 hard-to-maintain C-code instead of the much more appreciated RTL
12901 representation as supplied by define_expand. */
12902 static bool lock
= false;
12904 gcc_assert (opmask
< (1u << n
));
12909 avr_fix_operands (op
, hreg
, opmask
, rmask
);
12912 emit_insn (gen (op
[0], op
[1], op
[2]));
12915 return avr_move_fixed_operands (op
, hreg
, opmask
);
12919 /* Worker function for movmemhi expander.
12920 XOP[0] Destination as MEM:BLK
12922 XOP[2] # Bytes to copy
12924 Return TRUE if the expansion is accomplished.
12925 Return FALSE if the operand compination is not supported. */
12928 avr_emit_movmemhi (rtx
*xop
)
12930 HOST_WIDE_INT count
;
12931 machine_mode loop_mode
;
12932 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
12933 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
12934 rtx a_hi8
= NULL_RTX
;
12936 if (avr_mem_flash_p (xop
[0]))
12939 if (!CONST_INT_P (xop
[2]))
12942 count
= INTVAL (xop
[2]);
12946 a_src
= XEXP (xop
[1], 0);
12947 a_dest
= XEXP (xop
[0], 0);
12949 if (PSImode
== GET_MODE (a_src
))
12951 gcc_assert (as
== ADDR_SPACE_MEMX
);
12953 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
12954 loop_reg
= gen_rtx_REG (loop_mode
, 24);
12955 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
12957 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
12958 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
12962 int segment
= avr_addrspace
[as
].segment
;
12965 && avr_n_flash
> 1)
12967 a_hi8
= GEN_INT (segment
);
12968 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
12970 else if (!ADDR_SPACE_GENERIC_P (as
))
12972 as
= ADDR_SPACE_FLASH
;
12977 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
12978 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
12981 xas
= GEN_INT (as
);
12983 /* FIXME: Register allocator might come up with spill fails if it is left
12984 on its own. Thus, we allocate the pointer registers by hand:
12986 X = destination address */
12988 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
12989 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
12991 /* FIXME: Register allocator does a bad job and might spill address
12992 register(s) inside the loop leading to additional move instruction
12993 to/from stack which could clobber tmp_reg. Thus, do *not* emit
12994 load and store as separate insns. Instead, we perform the copy
12995 by means of one monolithic insn. */
12997 gcc_assert (TMP_REGNO
== LPM_REGNO
);
12999 if (as
!= ADDR_SPACE_MEMX
)
13001 /* Load instruction ([E]LPM or LD) is known at compile time:
13002 Do the copy-loop inline. */
13004 rtx (*fun
) (rtx
, rtx
, rtx
)
13005 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
13007 insn
= fun (xas
, loop_reg
, loop_reg
);
13011 rtx (*fun
) (rtx
, rtx
)
13012 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
13014 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
13016 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
13019 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
13026 /* Print assembler for movmem_qi, movmem_hi insns...
13028 $1, $2 : Loop register
13030 X : Destination address
13034 avr_out_movmem (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
13036 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
13037 machine_mode loop_mode
= GET_MODE (op
[1]);
13038 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
13046 xop
[2] = tmp_reg_rtx
;
13050 avr_asm_len ("0:", xop
, plen
, 0);
13052 /* Load with post-increment */
13059 case ADDR_SPACE_GENERIC
:
13061 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
13064 case ADDR_SPACE_FLASH
:
13067 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
13069 avr_asm_len ("lpm" CR_TAB
13070 "adiw r30,1", xop
, plen
, 2);
13073 case ADDR_SPACE_FLASH1
:
13074 case ADDR_SPACE_FLASH2
:
13075 case ADDR_SPACE_FLASH3
:
13076 case ADDR_SPACE_FLASH4
:
13077 case ADDR_SPACE_FLASH5
:
13079 if (AVR_HAVE_ELPMX
)
13080 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
13082 avr_asm_len ("elpm" CR_TAB
13083 "adiw r30,1", xop
, plen
, 2);
13087 /* Store with post-increment */
13089 avr_asm_len ("st X+,%2", xop
, plen
, 1);
13091 /* Decrement loop-counter and set Z-flag */
13093 if (QImode
== loop_mode
)
13095 avr_asm_len ("dec %1", xop
, plen
, 1);
13099 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
13103 avr_asm_len ("subi %A1,1" CR_TAB
13104 "sbci %B1,0", xop
, plen
, 2);
13107 /* Loop until zero */
13109 return avr_asm_len ("brne 0b", xop
, plen
, 1);
13114 /* Helper for __builtin_avr_delay_cycles */
13117 avr_mem_clobber (void)
13119 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
13120 MEM_VOLATILE_P (mem
) = 1;
13125 avr_expand_delay_cycles (rtx operands0
)
13127 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
13128 unsigned HOST_WIDE_INT cycles_used
;
13129 unsigned HOST_WIDE_INT loop_count
;
13131 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
13133 loop_count
= ((cycles
- 9) / 6) + 1;
13134 cycles_used
= ((loop_count
- 1) * 6) + 9;
13135 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
13136 avr_mem_clobber()));
13137 cycles
-= cycles_used
;
13140 if (IN_RANGE (cycles
, 262145, 83886081))
13142 loop_count
= ((cycles
- 7) / 5) + 1;
13143 if (loop_count
> 0xFFFFFF)
13144 loop_count
= 0xFFFFFF;
13145 cycles_used
= ((loop_count
- 1) * 5) + 7;
13146 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
13147 avr_mem_clobber()));
13148 cycles
-= cycles_used
;
13151 if (IN_RANGE (cycles
, 768, 262144))
13153 loop_count
= ((cycles
- 5) / 4) + 1;
13154 if (loop_count
> 0xFFFF)
13155 loop_count
= 0xFFFF;
13156 cycles_used
= ((loop_count
- 1) * 4) + 5;
13157 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
13158 avr_mem_clobber()));
13159 cycles
-= cycles_used
;
13162 if (IN_RANGE (cycles
, 6, 767))
13164 loop_count
= cycles
/ 3;
13165 if (loop_count
> 255)
13167 cycles_used
= loop_count
* 3;
13168 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
13169 avr_mem_clobber()));
13170 cycles
-= cycles_used
;
13173 while (cycles
>= 2)
13175 emit_insn (gen_nopv (GEN_INT(2)));
13181 emit_insn (gen_nopv (GEN_INT(1)));
13188 avr_expand_nops (rtx operands0
)
13190 unsigned HOST_WIDE_INT n_nops
= UINTVAL (operands0
) & GET_MODE_MASK (HImode
);
13194 emit_insn (gen_nopv (const1_rtx
));
/* Compute the image of x under f, i.e. perform   x --> f(x):
   F encodes a map on { 0 ... 7 } as 8 nibbles, nibble X holding f(X).
   Arguments X outside { 0 ... 7 } map to 0.  */

static int
avr_map (unsigned int f, int x)
{
  return x < 8 ? (f >> (4 * x)) & 0xf : 0;
}
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      if (mode == MAP_FIXED_0_7)
        metric += ai == i;
      else if (mode == MAP_NONFIXED_0_7)
        metric += ai < 8 && ai != i;
      else if (mode == MAP_MASK_FIXED_0_7)
        metric |= ((unsigned) (ai == i)) << i;
      else if (mode == MAP_PREIMAGE_0_7)
        metric += ai < 8;
      else if (mode == MAP_MASK_PREIMAGE_F)
        metric |= ((unsigned) (ai == 0xf)) << i;
      else
        break;
    }

  return metric;
}
13255 /* Return true if IVAL has a 0xf in its hexadecimal representation
13256 and false, otherwise. Only nibbles 0..7 are taken into account.
13257 Used as constraint helper for C0f and Cxf. */
13260 avr_has_nibble_0xf (rtx ival
)
13262 unsigned int map
= UINTVAL (ival
) & GET_MODE_MASK (SImode
);
13263 return 0 != avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
13267 /* We have a set of bits that are mapped by a function F.
13268 Try to decompose F by means of a second function G so that
13274 cost (F o G^-1) + cost (G) < cost (F)
13276 Example: Suppose builtin insert_bits supplies us with the map
13277 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
13278 nibble of the result, we can just as well rotate the bits before inserting
13279 them and use the map 0x7654ffff which is cheaper than the original map.
13280 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
13284 /* tree code of binary function G */
13285 enum tree_code code
;
13287 /* The constant second argument of G */
13290 /* G^-1, the inverse of G (*, arg) */
13293 /* The cost of appplying G (*, arg) */
13296 /* The composition F o G^-1 (*, arg) for some function F */
13299 /* For debug purpose only */
13303 static const avr_map_op_t avr_map_op
[] =
13305 { LROTATE_EXPR
, 0, 0x76543210, 0, 0, "id" },
13306 { LROTATE_EXPR
, 1, 0x07654321, 2, 0, "<<<" },
13307 { LROTATE_EXPR
, 2, 0x10765432, 4, 0, "<<<" },
13308 { LROTATE_EXPR
, 3, 0x21076543, 4, 0, "<<<" },
13309 { LROTATE_EXPR
, 4, 0x32107654, 1, 0, "<<<" },
13310 { LROTATE_EXPR
, 5, 0x43210765, 3, 0, "<<<" },
13311 { LROTATE_EXPR
, 6, 0x54321076, 5, 0, "<<<" },
13312 { LROTATE_EXPR
, 7, 0x65432107, 3, 0, "<<<" },
13313 { RSHIFT_EXPR
, 1, 0x6543210c, 1, 0, ">>" },
13314 { RSHIFT_EXPR
, 1, 0x7543210c, 1, 0, ">>" },
13315 { RSHIFT_EXPR
, 2, 0x543210cc, 2, 0, ">>" },
13316 { RSHIFT_EXPR
, 2, 0x643210cc, 2, 0, ">>" },
13317 { RSHIFT_EXPR
, 2, 0x743210cc, 2, 0, ">>" },
13318 { LSHIFT_EXPR
, 1, 0xc7654321, 1, 0, "<<" },
13319 { LSHIFT_EXPR
, 2, 0xcc765432, 2, 0, "<<" }
13323 /* Try to decompose F as F = (F o G^-1) o G as described above.
13324 The result is a struct representing F o G^-1 and G.
13325 If result.cost < 0 then such a decomposition does not exist. */
13327 static avr_map_op_t
13328 avr_map_decompose (unsigned int f
, const avr_map_op_t
*g
, bool val_const_p
)
13331 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
13332 avr_map_op_t f_ginv
= *g
;
13333 unsigned int ginv
= g
->ginv
;
13337 /* Step 1: Computing F o G^-1 */
13339 for (i
= 7; i
>= 0; i
--)
13341 int x
= avr_map (f
, i
);
13345 x
= avr_map (ginv
, x
);
13347 /* The bit is no element of the image of G: no avail (cost = -1) */
13353 f_ginv
.map
= (f_ginv
.map
<< 4) + x
;
13356 /* Step 2: Compute the cost of the operations.
13357 The overall cost of doing an operation prior to the insertion is
13358 the cost of the insertion plus the cost of the operation. */
13360 /* Step 2a: Compute cost of F o G^-1 */
13362 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
13364 /* The mapping consists only of fixed points and can be folded
13365 to AND/OR logic in the remainder. Reasonable cost is 3. */
13367 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
13373 /* Get the cost of the insn by calling the output worker with some
13374 fake values. Mimic effect of reloading xop[3]: Unused operands
13375 are mapped to 0 and used operands are reloaded to xop[0]. */
13377 xop
[0] = all_regs_rtx
[24];
13378 xop
[1] = gen_int_mode (f_ginv
.map
, SImode
);
13379 xop
[2] = all_regs_rtx
[25];
13380 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
13382 avr_out_insert_bits (xop
, &f_ginv
.cost
);
13384 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
13387 /* Step 2b: Add cost of G */
13389 f_ginv
.cost
+= g
->cost
;
13391 if (avr_log
.builtin
)
13392 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
13398 /* Insert bits from XOP[1] into XOP[0] according to MAP.
13399 XOP[0] and XOP[1] don't overlap.
13400 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
13401 If FIXP_P = false: Just move the bit if its position in the destination
13402 is different to its source position. */
13405 avr_move_bits (rtx
*xop
, unsigned int map
, bool fixp_p
, int *plen
)
13409 /* T-flag contains this bit of the source, i.e. of XOP[1] */
13410 int t_bit_src
= -1;
13412 /* We order the operations according to the requested source bit b. */
13414 for (b
= 0; b
< 8; b
++)
13415 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
13417 int bit_src
= avr_map (map
, bit_dest
);
13421 /* Same position: No need to copy as requested by FIXP_P. */
13422 || (bit_dest
== bit_src
&& !fixp_p
))
13425 if (t_bit_src
!= bit_src
)
13427 /* Source bit is not yet in T: Store it to T. */
13429 t_bit_src
= bit_src
;
13431 xop
[3] = GEN_INT (bit_src
);
13432 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
13435 /* Load destination bit with T. */
13437 xop
[3] = GEN_INT (bit_dest
);
13438 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
13443 /* PLEN == 0: Print assembler code for `insert_bits'.
13444 PLEN != 0: Compute code length in bytes.
13447 OP[1]: The mapping composed of nibbles. If nibble no. N is
13448 0: Bit N of result is copied from bit OP[2].0
13450 7: Bit N of result is copied from bit OP[2].7
13451 0xf: Bit N of result is copied from bit OP[3].N
13452 OP[2]: Bits to be inserted
13453 OP[3]: Target value */
13456 avr_out_insert_bits (rtx
*op
, int *plen
)
13458 unsigned int map
= UINTVAL (op
[1]) & GET_MODE_MASK (SImode
);
13459 unsigned mask_fixed
;
13460 bool fixp_p
= true;
13467 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
13471 else if (flag_print_asm_name
)
13472 fprintf (asm_out_file
, ASM_COMMENT_START
"map = 0x%08x\n", map
);
13474 /* If MAP has fixed points it might be better to initialize the result
13475 with the bits to be inserted instead of moving all bits by hand. */
13477 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
13479 if (REGNO (xop
[0]) == REGNO (xop
[1]))
13481 /* Avoid early-clobber conflicts */
13483 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
13484 xop
[1] = tmp_reg_rtx
;
13488 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
13490 /* XOP[2] is used and reloaded to XOP[0] already */
13492 int n_fix
= 0, n_nofix
= 0;
13494 gcc_assert (REG_P (xop
[2]));
13496 /* Get the code size of the bit insertions; once with all bits
13497 moved and once with fixed points omitted. */
13499 avr_move_bits (xop
, map
, true, &n_fix
);
13500 avr_move_bits (xop
, map
, false, &n_nofix
);
13502 if (fixp_p
&& n_fix
- n_nofix
> 3)
13504 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
13506 avr_asm_len ("eor %0,%1" CR_TAB
13507 "andi %0,%3" CR_TAB
13508 "eor %0,%1", xop
, plen
, 3);
13514 /* XOP[2] is unused */
13516 if (fixp_p
&& mask_fixed
)
13518 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
13523 /* Move/insert remaining bits. */
13525 avr_move_bits (xop
, map
, fixp_p
, plen
);
13531 /* IDs for all the AVR builtins. */
13533 enum avr_builtin_id
13535 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
13536 AVR_BUILTIN_ ## NAME,
13537 #include "builtins.def"
13543 struct GTY(()) avr_builtin_description
13545 enum insn_code icode
;
13551 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
13552 that a built-in's ID can be used to access the built-in by means of
13555 static GTY(()) struct avr_builtin_description
13556 avr_bdesc
[AVR_BUILTIN_COUNT
] =
13558 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
13559 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
13560 #include "builtins.def"
13565 /* Implement `TARGET_BUILTIN_DECL'. */
13568 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
13570 if (id
< AVR_BUILTIN_COUNT
)
13571 return avr_bdesc
[id
].fndecl
;
13573 return error_mark_node
;
13578 avr_init_builtin_int24 (void)
13580 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
13581 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
13583 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
13584 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
13588 /* Implement `TARGET_INIT_BUILTINS' */
13589 /* Set up all builtin functions for this target. */
13592 avr_init_builtins (void)
13594 tree void_ftype_void
13595 = build_function_type_list (void_type_node
, NULL_TREE
);
13596 tree uchar_ftype_uchar
13597 = build_function_type_list (unsigned_char_type_node
,
13598 unsigned_char_type_node
,
13600 tree uint_ftype_uchar_uchar
13601 = build_function_type_list (unsigned_type_node
,
13602 unsigned_char_type_node
,
13603 unsigned_char_type_node
,
13605 tree int_ftype_char_char
13606 = build_function_type_list (integer_type_node
,
13610 tree int_ftype_char_uchar
13611 = build_function_type_list (integer_type_node
,
13613 unsigned_char_type_node
,
13615 tree void_ftype_ulong
13616 = build_function_type_list (void_type_node
,
13617 long_unsigned_type_node
,
13620 tree uchar_ftype_ulong_uchar_uchar
13621 = build_function_type_list (unsigned_char_type_node
,
13622 long_unsigned_type_node
,
13623 unsigned_char_type_node
,
13624 unsigned_char_type_node
,
13627 tree const_memx_void_node
13628 = build_qualified_type (void_type_node
,
13630 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
13632 tree const_memx_ptr_type_node
13633 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
13635 tree char_ftype_const_memx_ptr
13636 = build_function_type_list (char_type_node
,
13637 const_memx_ptr_type_node
,
13641 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
13643 #define FX_FTYPE_FX(fx) \
13644 tree fx##r_ftype_##fx##r \
13645 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
13646 tree fx##k_ftype_##fx##k \
13647 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
13649 #define FX_FTYPE_FX_INT(fx) \
13650 tree fx##r_ftype_##fx##r_int \
13651 = build_function_type_list (node_##fx##r, node_##fx##r, \
13652 integer_type_node, NULL); \
13653 tree fx##k_ftype_##fx##k_int \
13654 = build_function_type_list (node_##fx##k, node_##fx##k, \
13655 integer_type_node, NULL)
13657 #define INT_FTYPE_FX(fx) \
13658 tree int_ftype_##fx##r \
13659 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
13660 tree int_ftype_##fx##k \
13661 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
13663 #define INTX_FTYPE_FX(fx) \
13664 tree int##fx##r_ftype_##fx##r \
13665 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
13666 tree int##fx##k_ftype_##fx##k \
13667 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
13669 #define FX_FTYPE_INTX(fx) \
13670 tree fx##r_ftype_int##fx##r \
13671 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
13672 tree fx##k_ftype_int##fx##k \
13673 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
13675 tree node_hr
= short_fract_type_node
;
13676 tree node_nr
= fract_type_node
;
13677 tree node_lr
= long_fract_type_node
;
13678 tree node_llr
= long_long_fract_type_node
;
13680 tree node_uhr
= unsigned_short_fract_type_node
;
13681 tree node_unr
= unsigned_fract_type_node
;
13682 tree node_ulr
= unsigned_long_fract_type_node
;
13683 tree node_ullr
= unsigned_long_long_fract_type_node
;
13685 tree node_hk
= short_accum_type_node
;
13686 tree node_nk
= accum_type_node
;
13687 tree node_lk
= long_accum_type_node
;
13688 tree node_llk
= long_long_accum_type_node
;
13690 tree node_uhk
= unsigned_short_accum_type_node
;
13691 tree node_unk
= unsigned_accum_type_node
;
13692 tree node_ulk
= unsigned_long_accum_type_node
;
13693 tree node_ullk
= unsigned_long_long_accum_type_node
;
13696 /* For absfx builtins. */
13703 /* For roundfx builtins. */
13705 FX_FTYPE_FX_INT (h
);
13706 FX_FTYPE_FX_INT (n
);
13707 FX_FTYPE_FX_INT (l
);
13708 FX_FTYPE_FX_INT (ll
);
13710 FX_FTYPE_FX_INT (uh
);
13711 FX_FTYPE_FX_INT (un
);
13712 FX_FTYPE_FX_INT (ul
);
13713 FX_FTYPE_FX_INT (ull
);
13715 /* For countlsfx builtins. */
13725 INT_FTYPE_FX (ull
);
13727 /* For bitsfx builtins. */
13732 INTX_FTYPE_FX (ll
);
13734 INTX_FTYPE_FX (uh
);
13735 INTX_FTYPE_FX (un
);
13736 INTX_FTYPE_FX (ul
);
13737 INTX_FTYPE_FX (ull
);
13739 /* For fxbits builtins. */
13744 FX_FTYPE_INTX (ll
);
13746 FX_FTYPE_INTX (uh
);
13747 FX_FTYPE_INTX (un
);
13748 FX_FTYPE_INTX (ul
);
13749 FX_FTYPE_INTX (ull
);
13752 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
13754 int id = AVR_BUILTIN_ ## NAME; \
13755 const char *Name = "__builtin_avr_" #NAME; \
13756 char *name = (char*) alloca (1 + strlen (Name)); \
13758 gcc_assert (id < AVR_BUILTIN_COUNT); \
13759 avr_bdesc[id].fndecl \
13760 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
13761 BUILT_IN_MD, LIBNAME, NULL_TREE); \
13763 #include "builtins.def"
13766 avr_init_builtin_int24 ();
13770 /* Subroutine of avr_expand_builtin to expand vanilla builtins
13771 with non-void result and 1 ... 3 arguments. */
13774 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
13777 int n
, n_args
= call_expr_nargs (exp
);
13778 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
13780 gcc_assert (n_args
>= 1 && n_args
<= 3);
13782 if (target
== NULL_RTX
13783 || GET_MODE (target
) != tmode
13784 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
13786 target
= gen_reg_rtx (tmode
);
13789 for (n
= 0; n
< n_args
; n
++)
13791 tree arg
= CALL_EXPR_ARG (exp
, n
);
13792 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13793 machine_mode opmode
= GET_MODE (op
);
13794 machine_mode mode
= insn_data
[icode
].operand
[n
+1].mode
;
13796 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
13799 op
= gen_lowpart (HImode
, op
);
13802 /* In case the insn wants input operands in modes different from
13803 the result, abort. */
13805 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
13807 if (!insn_data
[icode
].operand
[n
+1].predicate (op
, mode
))
13808 op
= copy_to_mode_reg (mode
, op
);
13815 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
13816 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
13817 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
13823 if (pat
== NULL_RTX
)
13832 /* Implement `TARGET_EXPAND_BUILTIN'. */
13833 /* Expand an expression EXP that calls a built-in function,
13834 with result going to TARGET if that's convenient
13835 (and in mode MODE if that's convenient).
13836 SUBTARGET may be used as the target for computing one of EXP's operands.
13837 IGNORE is nonzero if the value is to be ignored. */
13840 avr_expand_builtin (tree exp
, rtx target
,
13841 rtx subtarget ATTRIBUTE_UNUSED
,
13842 machine_mode mode ATTRIBUTE_UNUSED
,
13845 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
13846 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
13847 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
13848 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
13852 gcc_assert (id
< AVR_BUILTIN_COUNT
);
13856 case AVR_BUILTIN_NOP
:
13857 emit_insn (gen_nopv (GEN_INT(1)));
13860 case AVR_BUILTIN_DELAY_CYCLES
:
13862 arg0
= CALL_EXPR_ARG (exp
, 0);
13863 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13865 if (!CONST_INT_P (op0
))
13866 error ("%s expects a compile time integer constant", bname
);
13868 avr_expand_delay_cycles (op0
);
13873 case AVR_BUILTIN_NOPS
:
13875 arg0
= CALL_EXPR_ARG (exp
, 0);
13876 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13878 if (!CONST_INT_P (op0
))
13879 error ("%s expects a compile time integer constant", bname
);
13881 avr_expand_nops (op0
);
13886 case AVR_BUILTIN_INSERT_BITS
:
13888 arg0
= CALL_EXPR_ARG (exp
, 0);
13889 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13891 if (!CONST_INT_P (op0
))
13893 error ("%s expects a compile time long integer constant"
13894 " as first argument", bname
);
13901 case AVR_BUILTIN_ROUNDHR
: case AVR_BUILTIN_ROUNDUHR
:
13902 case AVR_BUILTIN_ROUNDR
: case AVR_BUILTIN_ROUNDUR
:
13903 case AVR_BUILTIN_ROUNDLR
: case AVR_BUILTIN_ROUNDULR
:
13904 case AVR_BUILTIN_ROUNDLLR
: case AVR_BUILTIN_ROUNDULLR
:
13906 case AVR_BUILTIN_ROUNDHK
: case AVR_BUILTIN_ROUNDUHK
:
13907 case AVR_BUILTIN_ROUNDK
: case AVR_BUILTIN_ROUNDUK
:
13908 case AVR_BUILTIN_ROUNDLK
: case AVR_BUILTIN_ROUNDULK
:
13909 case AVR_BUILTIN_ROUNDLLK
: case AVR_BUILTIN_ROUNDULLK
:
13911 /* Warn about odd rounding. Rounding points >= FBIT will have
13914 if (TREE_CODE (CALL_EXPR_ARG (exp
, 1)) != INTEGER_CST
)
13917 int rbit
= (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1));
13919 if (rbit
>= (int) GET_MODE_FBIT (mode
))
13921 warning (OPT_Wextra
, "rounding to %d bits has no effect for "
13922 "fixed-point value with %d fractional bits",
13923 rbit
, GET_MODE_FBIT (mode
));
13925 return expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
, mode
,
13928 else if (rbit
<= - (int) GET_MODE_IBIT (mode
))
13930 warning (0, "rounding result will always be 0");
13931 return CONST0_RTX (mode
);
13934 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
13936 TR 18037 only specifies results for RP > 0. However, the
13937 remaining cases of -IBIT < RP <= 0 can easily be supported
13938 without any additional overhead. */
13943 /* No fold found and no insn: Call support function from libgcc. */
13945 if (d
->icode
== CODE_FOR_nothing
13946 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp
)) != NULL_TREE
)
13948 return expand_call (exp
, target
, ignore
);
13951 /* No special treatment needed: vanilla expand. */
13953 gcc_assert (d
->icode
!= CODE_FOR_nothing
);
13954 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
13956 if (d
->n_args
== 0)
13958 emit_insn ((GEN_FCN (d
->icode
)) (target
));
13962 return avr_default_expand_builtin (d
->icode
, exp
, target
);
13966 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
13969 avr_fold_absfx (tree tval
)
13971 if (FIXED_CST
!= TREE_CODE (tval
))
13974 /* Our fixed-points have no padding: Use double_int payload directly. */
13976 FIXED_VALUE_TYPE fval
= TREE_FIXED_CST (tval
);
13977 unsigned int bits
= GET_MODE_BITSIZE (fval
.mode
);
13978 double_int ival
= fval
.data
.sext (bits
);
13980 if (!ival
.is_negative())
13983 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
13985 fval
.data
= (ival
== double_int::min_value (bits
, false).sext (bits
))
13986 ? double_int::max_value (bits
, false)
13989 return build_fixed (TREE_TYPE (tval
), fval
);
13993 /* Implement `TARGET_FOLD_BUILTIN'. */
13996 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
13997 bool ignore ATTRIBUTE_UNUSED
)
13999 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
14000 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
14010 case AVR_BUILTIN_SWAP
:
14012 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
14013 build_int_cst (val_type
, 4));
14016 case AVR_BUILTIN_ABSHR
:
14017 case AVR_BUILTIN_ABSR
:
14018 case AVR_BUILTIN_ABSLR
:
14019 case AVR_BUILTIN_ABSLLR
:
14021 case AVR_BUILTIN_ABSHK
:
14022 case AVR_BUILTIN_ABSK
:
14023 case AVR_BUILTIN_ABSLK
:
14024 case AVR_BUILTIN_ABSLLK
:
14025 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
14027 return avr_fold_absfx (arg
[0]);
14029 case AVR_BUILTIN_BITSHR
: case AVR_BUILTIN_HRBITS
:
14030 case AVR_BUILTIN_BITSHK
: case AVR_BUILTIN_HKBITS
:
14031 case AVR_BUILTIN_BITSUHR
: case AVR_BUILTIN_UHRBITS
:
14032 case AVR_BUILTIN_BITSUHK
: case AVR_BUILTIN_UHKBITS
:
14034 case AVR_BUILTIN_BITSR
: case AVR_BUILTIN_RBITS
:
14035 case AVR_BUILTIN_BITSK
: case AVR_BUILTIN_KBITS
:
14036 case AVR_BUILTIN_BITSUR
: case AVR_BUILTIN_URBITS
:
14037 case AVR_BUILTIN_BITSUK
: case AVR_BUILTIN_UKBITS
:
14039 case AVR_BUILTIN_BITSLR
: case AVR_BUILTIN_LRBITS
:
14040 case AVR_BUILTIN_BITSLK
: case AVR_BUILTIN_LKBITS
:
14041 case AVR_BUILTIN_BITSULR
: case AVR_BUILTIN_ULRBITS
:
14042 case AVR_BUILTIN_BITSULK
: case AVR_BUILTIN_ULKBITS
:
14044 case AVR_BUILTIN_BITSLLR
: case AVR_BUILTIN_LLRBITS
:
14045 case AVR_BUILTIN_BITSLLK
: case AVR_BUILTIN_LLKBITS
:
14046 case AVR_BUILTIN_BITSULLR
: case AVR_BUILTIN_ULLRBITS
:
14047 case AVR_BUILTIN_BITSULLK
: case AVR_BUILTIN_ULLKBITS
:
14049 gcc_assert (TYPE_PRECISION (val_type
)
14050 == TYPE_PRECISION (TREE_TYPE (arg
[0])));
14052 return build1 (VIEW_CONVERT_EXPR
, val_type
, arg
[0]);
14054 case AVR_BUILTIN_INSERT_BITS
:
14056 tree tbits
= arg
[1];
14057 tree tval
= arg
[2];
14059 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
14061 bool changed
= false;
14063 avr_map_op_t best_g
;
14065 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
14067 /* No constant as first argument: Don't fold this and run into
14068 error in avr_expand_builtin. */
14073 tmap
= wide_int_to_tree (map_type
, arg
[0]);
14074 map
= TREE_INT_CST_LOW (tmap
);
14076 if (TREE_CODE (tval
) != INTEGER_CST
14077 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
14079 /* There are no F in the map, i.e. 3rd operand is unused.
14080 Replace that argument with some constant to render
14081 respective input unused. */
14083 tval
= build_int_cst (val_type
, 0);
14087 if (TREE_CODE (tbits
) != INTEGER_CST
14088 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
14090 /* Similar for the bits to be inserted. If they are unused,
14091 we can just as well pass 0. */
14093 tbits
= build_int_cst (val_type
, 0);
14096 if (TREE_CODE (tbits
) == INTEGER_CST
)
14098 /* Inserting bits known at compile time is easy and can be
14099 performed by AND and OR with appropriate masks. */
14101 int bits
= TREE_INT_CST_LOW (tbits
);
14102 int mask_ior
= 0, mask_and
= 0xff;
14104 for (i
= 0; i
< 8; i
++)
14106 int mi
= avr_map (map
, i
);
14110 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
14111 else mask_and
&= ~(1 << i
);
14115 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
14116 build_int_cst (val_type
, mask_ior
));
14117 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
14118 build_int_cst (val_type
, mask_and
));
14122 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
14124 /* If bits don't change their position we can use vanilla logic
14125 to merge the two arguments. */
14127 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
14129 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
14130 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
14132 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
14133 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
14134 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
14137 /* Try to decomposing map to reduce overall cost. */
14139 if (avr_log
.builtin
)
14140 avr_edump ("\n%?: %x\n%?: ROL cost: ", map
);
14142 best_g
= avr_map_op
[0];
14143 best_g
.cost
= 1000;
14145 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
14148 = avr_map_decompose (map
, avr_map_op
+ i
,
14149 TREE_CODE (tval
) == INTEGER_CST
);
14151 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
14155 if (avr_log
.builtin
)
14158 if (best_g
.arg
== 0)
14159 /* No optimization found */
14162 /* Apply operation G to the 2nd argument. */
14164 if (avr_log
.builtin
)
14165 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
14166 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
14168 /* Do right-shifts arithmetically: They copy the MSB instead of
14169 shifting in a non-usable value (0) as with logic right-shift. */
14171 tbits
= fold_convert (signed_char_type_node
, tbits
);
14172 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
14173 build_int_cst (val_type
, best_g
.arg
));
14174 tbits
= fold_convert (val_type
, tbits
);
14176 /* Use map o G^-1 instead of original map to undo the effect of G. */
14178 tmap
= wide_int_to_tree (map_type
, best_g
.map
);
14180 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
14181 } /* AVR_BUILTIN_INSERT_BITS */
14189 /* Initialize the GCC target structure. */
14191 #undef TARGET_ASM_ALIGNED_HI_OP
14192 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
14193 #undef TARGET_ASM_ALIGNED_SI_OP
14194 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
14195 #undef TARGET_ASM_UNALIGNED_HI_OP
14196 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
14197 #undef TARGET_ASM_UNALIGNED_SI_OP
14198 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
14199 #undef TARGET_ASM_INTEGER
14200 #define TARGET_ASM_INTEGER avr_assemble_integer
14201 #undef TARGET_ASM_FILE_START
14202 #define TARGET_ASM_FILE_START avr_file_start
14203 #undef TARGET_ASM_FILE_END
14204 #define TARGET_ASM_FILE_END avr_file_end
14206 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
14207 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
14208 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
14209 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
14211 #undef TARGET_FUNCTION_VALUE
14212 #define TARGET_FUNCTION_VALUE avr_function_value
14213 #undef TARGET_LIBCALL_VALUE
14214 #define TARGET_LIBCALL_VALUE avr_libcall_value
14215 #undef TARGET_FUNCTION_VALUE_REGNO_P
14216 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
14218 #undef TARGET_ATTRIBUTE_TABLE
14219 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
14220 #undef TARGET_INSERT_ATTRIBUTES
14221 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
14222 #undef TARGET_SECTION_TYPE_FLAGS
14223 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
14225 #undef TARGET_ASM_NAMED_SECTION
14226 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
14227 #undef TARGET_ASM_INIT_SECTIONS
14228 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
14229 #undef TARGET_ENCODE_SECTION_INFO
14230 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
14231 #undef TARGET_ASM_SELECT_SECTION
14232 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
14234 #undef TARGET_REGISTER_MOVE_COST
14235 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
14236 #undef TARGET_MEMORY_MOVE_COST
14237 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
14238 #undef TARGET_RTX_COSTS
14239 #define TARGET_RTX_COSTS avr_rtx_costs
14240 #undef TARGET_ADDRESS_COST
14241 #define TARGET_ADDRESS_COST avr_address_cost
14242 #undef TARGET_MACHINE_DEPENDENT_REORG
14243 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
14244 #undef TARGET_FUNCTION_ARG
14245 #define TARGET_FUNCTION_ARG avr_function_arg
14246 #undef TARGET_FUNCTION_ARG_ADVANCE
14247 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
14249 #undef TARGET_SET_CURRENT_FUNCTION
14250 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
14252 #undef TARGET_RETURN_IN_MEMORY
14253 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
14255 #undef TARGET_STRICT_ARGUMENT_NAMING
14256 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
14258 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
14259 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
14261 #undef TARGET_CONDITIONAL_REGISTER_USAGE
14262 #define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage
14264 #undef TARGET_HARD_REGNO_SCRATCH_OK
14265 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
14266 #undef TARGET_CASE_VALUES_THRESHOLD
14267 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
14269 #undef TARGET_FRAME_POINTER_REQUIRED
14270 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
14271 #undef TARGET_CAN_ELIMINATE
14272 #define TARGET_CAN_ELIMINATE avr_can_eliminate
14274 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
14275 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
14277 #undef TARGET_WARN_FUNC_RETURN
14278 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
14280 #undef TARGET_CLASS_LIKELY_SPILLED_P
14281 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
14283 #undef TARGET_OPTION_OVERRIDE
14284 #define TARGET_OPTION_OVERRIDE avr_option_override
14286 #undef TARGET_CANNOT_MODIFY_JUMPS_P
14287 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
14289 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
14290 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
14292 #undef TARGET_INIT_BUILTINS
14293 #define TARGET_INIT_BUILTINS avr_init_builtins
14295 #undef TARGET_BUILTIN_DECL
14296 #define TARGET_BUILTIN_DECL avr_builtin_decl
14298 #undef TARGET_EXPAND_BUILTIN
14299 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
14301 #undef TARGET_FOLD_BUILTIN
14302 #define TARGET_FOLD_BUILTIN avr_fold_builtin
14304 #undef TARGET_SCALAR_MODE_SUPPORTED_P
14305 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
14307 #undef TARGET_BUILD_BUILTIN_VA_LIST
14308 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
14310 #undef TARGET_FIXED_POINT_SUPPORTED_P
14311 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
14313 #undef TARGET_CONVERT_TO_TYPE
14314 #define TARGET_CONVERT_TO_TYPE avr_convert_to_type
14316 #undef TARGET_LRA_P
14317 #define TARGET_LRA_P hook_bool_void_false
14319 #undef TARGET_ADDR_SPACE_SUBSET_P
14320 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
14322 #undef TARGET_ADDR_SPACE_CONVERT
14323 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
14325 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
14326 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
14328 #undef TARGET_ADDR_SPACE_POINTER_MODE
14329 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
14331 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
14332 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
14333 avr_addr_space_legitimate_address_p
14335 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
14336 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
14338 #undef TARGET_ADDR_SPACE_DIAGNOSE_USAGE
14339 #define TARGET_ADDR_SPACE_DIAGNOSE_USAGE avr_addr_space_diagnose_usage
14341 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
14342 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
14344 #undef TARGET_PRINT_OPERAND
14345 #define TARGET_PRINT_OPERAND avr_print_operand
14346 #undef TARGET_PRINT_OPERAND_ADDRESS
14347 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
14348 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
14349 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
14351 #undef TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
14352 #define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
14353 avr_use_by_pieces_infrastructure_p
14355 struct gcc_target targetm
= TARGET_INITIALIZER
;
14358 #include "gt-avr.h"