1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
/* Return true iff rtx X is a CONST_FIXED.  */
#define CONST_FIXED_P(X) (GET_CODE (X) == CONST_FIXED)
/* Maximal allowed offset for an address in the LD command.
   The displacement field addresses 0..63, so subtract the size of
   MODE so that the last byte of the object is still reachable.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))
/* Return true if STR starts with PREFIX and false, otherwise.
   Macro-hygiene: arguments are parenthesized so that expression
   arguments expand safely.  Note that PREFIX is evaluated twice,
   so it must not have side effects.  */
#define STR_PREFIX_P(STR, PREFIX) \
  (strncmp ((STR), (PREFIX), strlen ((PREFIX))) == 0)
62 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
63 address space where data is to be located.
64 As the only non-generic address spaces are all located in Flash,
65 this can be used to test if data shall go into some .progmem* section.
66 This must be the rightmost field of machine dependent section flags. */
67 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
69 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
70 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fix: the body must reference the macro parameter SYM, not a
   caller-local identifier `sym', otherwise the argument is ignored.
   Wrapped in do { } while (0) so the multi-statement expansion is
   safe inside unbraced if/else.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM, AS)                      \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)
/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Fix: the body must reference the macro parameter SYM, not a
   caller-local identifier `sym', otherwise the argument is ignored
   and the macro only works where a variable named `sym' happens to
   be in scope.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
85 /* Known address spaces. The order must be the same as in the respective
86 enum from avr.h (or designated initialized must be used). */
87 const avr_addrspace_t avr_addrspace
[] =
89 { ADDR_SPACE_RAM
, 0, 2, "" , 0 },
90 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0 },
91 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1 },
92 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2 },
93 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3 },
94 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4 },
95 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5 },
96 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0 },
100 /* Map 64-k Flash segment to section prefix. */
101 static const char* const progmem_section_prefix
[6] =
111 /* Holding RAM addresses of some SFRs used by the compiler and that
112 are unique over all devices in an architecture like 'avr4'. */
116 /* SREG: The processor status */
119 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
125 /* RAMPZ: The high byte of 24-bit address used with ELPM */
128 /* SP: The stack pointer and its low and high byte */
133 static avr_addr_t avr_addr
;
136 /* Prototypes for local helper functions. */
138 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
139 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
140 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
141 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
142 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
143 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
145 static int get_sequence_length (rtx insns
);
146 static int sequent_regs_live (void);
147 static const char *ptrreg_to_str (int);
148 static const char *cond_string (enum rtx_code
);
149 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
150 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
152 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
153 static struct machine_function
* avr_init_machine_status (void);
156 /* Prototypes for hook implementors if needed before their implementation. */
158 static bool avr_rtx_costs (rtx
, int, int, int, int *, bool);
161 /* Allocate registers from r25 to r8 for parameters for function calls. */
162 #define FIRST_CUM_REG 26
164 /* Implicit target register of LPM instruction (R0) */
165 extern GTY(()) rtx lpm_reg_rtx
;
168 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
169 extern GTY(()) rtx lpm_addr_reg_rtx
;
170 rtx lpm_addr_reg_rtx
;
172 /* Temporary register RTX (reg:QI TMP_REGNO) */
173 extern GTY(()) rtx tmp_reg_rtx
;
176 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
177 extern GTY(()) rtx zero_reg_rtx
;
180 /* RTXs for all general purpose registers as QImode */
181 extern GTY(()) rtx all_regs_rtx
[32];
182 rtx all_regs_rtx
[32];
184 /* SREG, the processor status */
185 extern GTY(()) rtx sreg_rtx
;
188 /* RAMP* special function registers */
189 extern GTY(()) rtx rampd_rtx
;
190 extern GTY(()) rtx rampx_rtx
;
191 extern GTY(()) rtx rampy_rtx
;
192 extern GTY(()) rtx rampz_rtx
;
198 /* RTX containing the strings "" and "e", respectively */
199 static GTY(()) rtx xstring_empty
;
200 static GTY(()) rtx xstring_e
;
202 /* Preprocessor macros to define depending on MCU type. */
203 const char *avr_extra_arch_macro
;
205 /* Current architecture. */
206 const struct base_arch_s
*avr_current_arch
;
208 /* Current device. */
209 const struct mcu_type_s
*avr_current_device
;
211 /* Section to put switch tables in. */
212 static GTY(()) section
*progmem_swtable_section
;
214 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
215 or to address space __flash*. */
216 static GTY(()) section
*progmem_section
[6];
218 /* Condition for insns/expanders from avr-dimode.md. */
219 bool avr_have_dimode
= true;
221 /* To track if code will use .bss and/or .data. */
222 bool avr_need_clear_bss_p
= false;
223 bool avr_need_copy_data_p
= false;
227 /* Custom function to count number of set bits. */
230 avr_popcount (unsigned int val
)
244 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
245 Return true if the least significant N_BYTES bytes of XVAL all have a
246 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
247 of integers which contains an integer N iff bit N of POP_MASK is set. */
250 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
254 enum machine_mode mode
= GET_MODE (xval
);
256 if (VOIDmode
== mode
)
259 for (i
= 0; i
< n_bytes
; i
++)
261 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
262 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
264 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
272 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
273 the bit representation of X by "casting" it to CONST_INT. */
276 avr_to_int_mode (rtx x
)
278 enum machine_mode mode
= GET_MODE (x
);
280 return VOIDmode
== mode
282 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
286 /* Implement `TARGET_OPTION_OVERRIDE'. */
289 avr_option_override (void)
291 flag_delete_null_pointer_checks
= 0;
293 /* caller-save.c looks for call-clobbered hard registers that are assigned
294 to pseudos that cross calls and tries to save-restore them around calls
295 in order to reduce the number of stack slots needed.
297 This might lead to situations where reload is no longer able to cope
298 with the challenge of AVR's very few address registers and fails to
299 perform the requested spills. */
302 flag_caller_saves
= 0;
304 /* Unwind tables currently require a frame pointer for correctness,
305 see toplev.c:process_options(). */
307 if ((flag_unwind_tables
308 || flag_non_call_exceptions
309 || flag_asynchronous_unwind_tables
)
310 && !ACCUMULATE_OUTGOING_ARGS
)
312 flag_omit_frame_pointer
= 0;
315 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
316 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
317 avr_extra_arch_macro
= avr_current_device
->macro
;
319 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
321 /* SREG: Status Register containing flags like I (global IRQ) */
322 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
324 /* RAMPZ: Address' high part when loading via ELPM */
325 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
327 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
328 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
329 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
330 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
332 /* SP: Stack Pointer (SP_H:SP_L) */
333 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
334 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
336 init_machine_status
= avr_init_machine_status
;
338 avr_log_set_avr_log();
341 /* Function to set up the backend function structure. */
343 static struct machine_function
*
344 avr_init_machine_status (void)
346 return ggc_alloc_cleared_machine_function ();
350 /* Implement `INIT_EXPANDERS'. */
351 /* The function works like a singleton. */
354 avr_init_expanders (void)
358 for (regno
= 0; regno
< 32; regno
++)
359 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
361 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
362 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
363 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
365 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
367 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
368 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
369 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
370 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
371 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
373 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
374 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
378 /* Return register class for register R. */
381 avr_regno_reg_class (int r
)
383 static const enum reg_class reg_class_tab
[] =
387 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
388 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
389 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
390 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
392 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
393 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
395 ADDW_REGS
, ADDW_REGS
,
397 POINTER_X_REGS
, POINTER_X_REGS
,
399 POINTER_Y_REGS
, POINTER_Y_REGS
,
401 POINTER_Z_REGS
, POINTER_Z_REGS
,
407 return reg_class_tab
[r
];
413 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
416 avr_scalar_mode_supported_p (enum machine_mode mode
)
418 if (ALL_FIXED_POINT_MODE_P (mode
))
424 return default_scalar_mode_supported_p (mode
);
428 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
431 avr_decl_flash_p (tree decl
)
433 if (TREE_CODE (decl
) != VAR_DECL
434 || TREE_TYPE (decl
) == error_mark_node
)
439 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
443 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
444 address space and FALSE, otherwise. */
447 avr_decl_memx_p (tree decl
)
449 if (TREE_CODE (decl
) != VAR_DECL
450 || TREE_TYPE (decl
) == error_mark_node
)
455 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
459 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
462 avr_mem_flash_p (rtx x
)
465 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
469 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
470 address space and FALSE, otherwise. */
473 avr_mem_memx_p (rtx x
)
476 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
480 /* A helper for the subsequent function attribute used to dig for
481 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
484 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
486 if (FUNCTION_DECL
== TREE_CODE (func
))
488 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
493 func
= TREE_TYPE (func
);
496 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
497 || TREE_CODE (func
) == METHOD_TYPE
);
499 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
502 /* Return nonzero if FUNC is a naked function. */
505 avr_naked_function_p (tree func
)
507 return avr_lookup_function_attribute1 (func
, "naked");
510 /* Return nonzero if FUNC is an interrupt function as specified
511 by the "interrupt" attribute. */
514 avr_interrupt_function_p (tree func
)
516 return avr_lookup_function_attribute1 (func
, "interrupt");
519 /* Return nonzero if FUNC is a signal function as specified
520 by the "signal" attribute. */
523 avr_signal_function_p (tree func
)
525 return avr_lookup_function_attribute1 (func
, "signal");
528 /* Return nonzero if FUNC is an OS_task function. */
531 avr_OS_task_function_p (tree func
)
533 return avr_lookup_function_attribute1 (func
, "OS_task");
536 /* Return nonzero if FUNC is an OS_main function. */
539 avr_OS_main_function_p (tree func
)
541 return avr_lookup_function_attribute1 (func
, "OS_main");
545 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
546 /* Sanity checking for above function attributes. */
549 avr_set_current_function (tree decl
)
554 if (decl
== NULL_TREE
555 || current_function_decl
== NULL_TREE
556 || current_function_decl
== error_mark_node
557 || cfun
->machine
->attributes_checked_p
)
560 loc
= DECL_SOURCE_LOCATION (decl
);
562 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
563 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
564 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
565 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
566 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
568 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
570 /* Too many attributes make no sense as they request conflicting features. */
572 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
573 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
574 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
575 " exclusive", "OS_task", "OS_main", isr
);
577 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
579 if (cfun
->machine
->is_naked
580 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
581 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
582 " no effect on %qs function", "OS_task", "OS_main", "naked");
584 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
586 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
587 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
588 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
590 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC started
591 using this when it switched from SIGNAL and INTERRUPT to ISR. */
593 if (cfun
->machine
->is_interrupt
)
594 cfun
->machine
->is_signal
= 0;
596 /* Interrupt handlers must be void __vector (void) functions. */
598 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
599 error_at (loc
, "%qs function cannot have arguments", isr
);
601 if (TREE_CODE (ret
) != VOID_TYPE
)
602 error_at (loc
, "%qs function cannot return a value", isr
);
604 /* If the function has the 'signal' or 'interrupt' attribute, ensure
605 that the name of the function is "__vector_NN" so as to catch
606 when the user misspells the vector name. */
608 if (!STR_PREFIX_P (name
, "__vector"))
609 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
613 /* Avoid the above diagnosis to be printed more than once. */
615 cfun
->machine
->attributes_checked_p
= 1;
619 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
622 avr_accumulate_outgoing_args (void)
625 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
627 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
628 what offset is correct. In some cases it is relative to
629 virtual_outgoing_args_rtx and in others it is relative to
630 virtual_stack_vars_rtx. For example code see
631 gcc.c-torture/execute/built-in-setjmp.c
632 gcc.c-torture/execute/builtins/sprintf-chk.c */
634 return (TARGET_ACCUMULATE_OUTGOING_ARGS
635 && !(cfun
->calls_setjmp
636 || cfun
->has_nonlocal_label
));
640 /* Report contribution of accumulated outgoing arguments to stack size. */
643 avr_outgoing_args_size (void)
645 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
649 /* Implement `STARTING_FRAME_OFFSET'. */
650 /* This is the offset from the frame pointer register to the first stack slot
651 that contains a variable living in the frame. */
654 avr_starting_frame_offset (void)
656 return 1 + avr_outgoing_args_size ();
660 /* Return the number of hard registers to push/pop in the prologue/epilogue
661 of the current function, and optionally store these registers in SET. */
664 avr_regs_to_save (HARD_REG_SET
*set
)
667 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
670 CLEAR_HARD_REG_SET (*set
);
673 /* No need to save any registers if the function never returns or
674 has the "OS_task" or "OS_main" attribute. */
675 if (TREE_THIS_VOLATILE (current_function_decl
)
676 || cfun
->machine
->is_OS_task
677 || cfun
->machine
->is_OS_main
)
680 for (reg
= 0; reg
< 32; reg
++)
682 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
683 any global register variables. */
687 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
688 || (df_regs_ever_live_p (reg
)
689 && (int_or_sig_p
|| !call_used_regs
[reg
])
690 /* Don't record frame pointer registers here. They are treated
691 individually in prologue. */
692 && !(frame_pointer_needed
693 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
696 SET_HARD_REG_BIT (*set
, reg
);
704 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
707 avr_allocate_stack_slots_for_args (void)
709 return !cfun
->machine
->is_naked
;
713 /* Return true if register FROM can be eliminated via register TO. */
716 avr_can_eliminate (const int from
, const int to
)
718 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
719 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
720 || ((from
== FRAME_POINTER_REGNUM
721 || from
== FRAME_POINTER_REGNUM
+ 1)
722 && !frame_pointer_needed
));
726 /* Implement TARGET_WARN_FUNC_RETURN. */
729 avr_warn_func_return (tree decl
)
731 /* Naked functions are implemented entirely in assembly, including the
732 return sequence, so suppress warnings about this. */
733 return !avr_naked_function_p (decl
);
736 /* Compute offset between arg_pointer and frame_pointer. */
739 avr_initial_elimination_offset (int from
, int to
)
741 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
745 int offset
= frame_pointer_needed
? 2 : 0;
746 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
748 offset
+= avr_regs_to_save (NULL
);
749 return (get_frame_size () + avr_outgoing_args_size()
750 + avr_pc_size
+ 1 + offset
);
755 /* Helper for the function below. */
758 avr_adjust_type_node (tree
*node
, enum machine_mode mode
, int sat_p
)
760 *node
= make_node (FIXED_POINT_TYPE
);
761 TYPE_SATURATING (*node
) = sat_p
;
762 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
763 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
764 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
765 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
766 TYPE_ALIGN (*node
) = 8;
767 SET_TYPE_MODE (*node
, mode
);
773 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
776 avr_build_builtin_va_list (void)
778 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
779 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
780 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
781 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
782 to the long long accum modes instead of the desired [U]TAmode.
784 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
785 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
786 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
787 libgcc to detect IBIT and FBIT. */
789 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
790 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
791 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
792 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
794 unsigned_long_long_accum_type_node
= uta_type_node
;
795 long_long_accum_type_node
= ta_type_node
;
796 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
797 sat_long_long_accum_type_node
= sat_ta_type_node
;
799 /* Dispatch to the default handler. */
801 return std_build_builtin_va_list ();
805 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
806 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
807 frame pointer by +STARTING_FRAME_OFFSET.
808 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
809 avoids creating add/sub of offset in nonlocal goto and setjmp. */
812 avr_builtin_setjmp_frame_value (void)
814 rtx xval
= gen_reg_rtx (Pmode
);
815 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
816 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
821 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
822 This is return address of function. */
824 avr_return_addr_rtx (int count
, rtx tem
)
828 /* Can only return this function's return address. Others not supported. */
834 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
835 warning (0, "'builtin_return_address' contains only 2 bytes of address");
838 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
840 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
841 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
842 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
846 /* Return 1 if the function epilogue is just a single "ret". */
849 avr_simple_epilogue (void)
851 return (! frame_pointer_needed
852 && get_frame_size () == 0
853 && avr_outgoing_args_size() == 0
854 && avr_regs_to_save (NULL
) == 0
855 && ! cfun
->machine
->is_interrupt
856 && ! cfun
->machine
->is_signal
857 && ! cfun
->machine
->is_naked
858 && ! TREE_THIS_VOLATILE (current_function_decl
));
861 /* This function checks sequence of live registers. */
864 sequent_regs_live (void)
870 for (reg
= 0; reg
< 18; ++reg
)
874 /* Don't recognize sequences that contain global register
883 if (!call_used_regs
[reg
])
885 if (df_regs_ever_live_p (reg
))
895 if (!frame_pointer_needed
)
897 if (df_regs_ever_live_p (REG_Y
))
905 if (df_regs_ever_live_p (REG_Y
+1))
918 return (cur_seq
== live_seq
) ? live_seq
: 0;
921 /* Obtain the length sequence of insns. */
924 get_sequence_length (rtx insns
)
929 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
930 length
+= get_attr_length (insn
);
935 /* Implement INCOMING_RETURN_ADDR_RTX. */
938 avr_incoming_return_addr_rtx (void)
940 /* The return address is at the top of the stack. Note that the push
941 was via post-decrement, which means the actual address is off by one. */
942 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
945 /* Helper for expand_prologue. Emit a push of a byte register. */
948 emit_push_byte (unsigned regno
, bool frame_related_p
)
952 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
953 mem
= gen_frame_mem (QImode
, mem
);
954 reg
= gen_rtx_REG (QImode
, regno
);
956 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
958 RTX_FRAME_RELATED_P (insn
) = 1;
960 cfun
->machine
->stack_usage
++;
964 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
965 SFR is a MEM representing the memory location of the SFR.
966 If CLR_P then clear the SFR after the push using zero_reg. */
969 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
973 gcc_assert (MEM_P (sfr
));
975 /* IN __tmp_reg__, IO(SFR) */
976 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
978 RTX_FRAME_RELATED_P (insn
) = 1;
980 /* PUSH __tmp_reg__ */
981 emit_push_byte (TMP_REGNO
, frame_related_p
);
985 /* OUT IO(SFR), __zero_reg__ */
986 insn
= emit_move_insn (sfr
, const0_rtx
);
988 RTX_FRAME_RELATED_P (insn
) = 1;
993 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
996 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
997 int live_seq
= sequent_regs_live ();
999 HOST_WIDE_INT size_max
1000 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1002 bool minimize
= (TARGET_CALL_PROLOGUES
1006 && !cfun
->machine
->is_OS_task
1007 && !cfun
->machine
->is_OS_main
);
1010 && (frame_pointer_needed
1011 || avr_outgoing_args_size() > 8
1012 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1016 int first_reg
, reg
, offset
;
1018 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1019 gen_int_mode (size
, HImode
));
1021 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1022 gen_int_mode (live_seq
+size
, HImode
));
1023 insn
= emit_insn (pattern
);
1024 RTX_FRAME_RELATED_P (insn
) = 1;
1026 /* Describe the effect of the unspec_volatile call to prologue_saves.
1027 Note that this formulation assumes that add_reg_note pushes the
1028 notes to the front. Thus we build them in the reverse order of
1029 how we want dwarf2out to process them. */
1031 /* The function does always set frame_pointer_rtx, but whether that
1032 is going to be permanent in the function is frame_pointer_needed. */
1034 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1035 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
1037 : stack_pointer_rtx
),
1038 plus_constant (Pmode
, stack_pointer_rtx
,
1039 -(size
+ live_seq
))));
1041 /* Note that live_seq always contains r28+r29, but the other
1042 registers to be saved are all below 18. */
1044 first_reg
= 18 - (live_seq
- 2);
1046 for (reg
= 29, offset
= -live_seq
+ 1;
1048 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1052 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1054 r
= gen_rtx_REG (QImode
, reg
);
1055 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1058 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1060 else /* !minimize */
1064 for (reg
= 0; reg
< 32; ++reg
)
1065 if (TEST_HARD_REG_BIT (set
, reg
))
1066 emit_push_byte (reg
, true);
1068 if (frame_pointer_needed
1069 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1071 /* Push frame pointer. Always be consistent about the
1072 ordering of pushes -- epilogue_restores expects the
1073 register pair to be pushed low byte first. */
1075 emit_push_byte (REG_Y
, true);
1076 emit_push_byte (REG_Y
+ 1, true);
1079 if (frame_pointer_needed
1082 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1083 RTX_FRAME_RELATED_P (insn
) = 1;
1088 /* Creating a frame can be done by direct manipulation of the
1089 stack or via the frame pointer. These two methods are:
1096 the optimum method depends on function type, stack and
1097 frame size. To avoid a complex logic, both methods are
1098 tested and shortest is selected.
1100 There is also the case where SIZE != 0 and no frame pointer is
1101 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1102 In that case, insn (*) is not needed.
1103 We use the X register as scratch. This is safe because in X
1105 In an interrupt routine, the case of SIZE != 0 together with
1106 !frame_pointer_needed can only occur if the function is not a
1107 leaf function and thus X has already been saved. */
1110 HOST_WIDE_INT size_cfa
= size
;
1111 rtx fp_plus_insns
, fp
, my_fp
;
1113 gcc_assert (frame_pointer_needed
1117 fp
= my_fp
= (frame_pointer_needed
1119 : gen_rtx_REG (Pmode
, REG_X
));
1121 if (AVR_HAVE_8BIT_SP
)
1123 /* The high byte (r29) does not change:
1124 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1126 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1129 /* Cut down size and avoid size = 0 so that we don't run
1130 into ICE like PR52488 in the remainder. */
1132 if (size
> size_max
)
1134 /* Don't error so that insane code from newlib still compiles
1135 and does not break building newlib. As PR51345 is implemented
1136 now, there are multilib variants with -msp8.
1138 If user wants sanity checks he can use -Wstack-usage=
1141 For CFA we emit the original, non-saturated size so that
1142 the generic machinery is aware of the real stack usage and
1143 will print the above diagnostic as expected. */
1148 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1150 /************ Method 1: Adjust frame pointer ************/
1154 /* Normally, the dwarf2out frame-related-expr interpreter does
1155 not expect to have the CFA change once the frame pointer is
1156 set up. Thus, we avoid marking the move insn below and
1157 instead indicate that the entire operation is complete after
1158 the frame pointer subtraction is done. */
1160 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1161 if (frame_pointer_needed
)
1163 RTX_FRAME_RELATED_P (insn
) = 1;
1164 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1165 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1168 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1170 if (frame_pointer_needed
)
1172 RTX_FRAME_RELATED_P (insn
) = 1;
1173 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1174 gen_rtx_SET (VOIDmode
, fp
,
1175 plus_constant (Pmode
, fp
,
1179 /* Copy to stack pointer. Note that since we've already
1180 changed the CFA to the frame pointer this operation
1181 need not be annotated if frame pointer is needed.
1182 Always move through unspec, see PR50063.
1183 For meaning of irq_state see movhi_sp_r insn. */
1185 if (cfun
->machine
->is_interrupt
)
1188 if (TARGET_NO_INTERRUPTS
1189 || cfun
->machine
->is_signal
1190 || cfun
->machine
->is_OS_main
)
1193 if (AVR_HAVE_8BIT_SP
)
1196 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1197 fp
, GEN_INT (irq_state
)));
1198 if (!frame_pointer_needed
)
1200 RTX_FRAME_RELATED_P (insn
) = 1;
1201 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1202 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1203 plus_constant (Pmode
,
1208 fp_plus_insns
= get_insns ();
1211 /************ Method 2: Adjust Stack pointer ************/
1213 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1214 can only handle specific offsets. */
1216 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1222 insn
= emit_move_insn (stack_pointer_rtx
,
1223 plus_constant (Pmode
, stack_pointer_rtx
,
1225 RTX_FRAME_RELATED_P (insn
) = 1;
1226 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1227 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1228 plus_constant (Pmode
,
1231 if (frame_pointer_needed
)
1233 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1234 RTX_FRAME_RELATED_P (insn
) = 1;
1237 sp_plus_insns
= get_insns ();
1240 /************ Use shortest method ************/
1242 emit_insn (get_sequence_length (sp_plus_insns
)
1243 < get_sequence_length (fp_plus_insns
)
1249 emit_insn (fp_plus_insns
);
1252 cfun
->machine
->stack_usage
+= size_cfa
;
1253 } /* !minimize && size != 0 */
1258 /* Output function prologue. */
1261 expand_prologue (void)
1266 size
= get_frame_size() + avr_outgoing_args_size();
1268 cfun
->machine
->stack_usage
= 0;
1270 /* Prologue: naked. */
1271 if (cfun
->machine
->is_naked
)
1276 avr_regs_to_save (&set
);
1278 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1280 /* Enable interrupts. */
1281 if (cfun
->machine
->is_interrupt
)
1282 emit_insn (gen_enable_interrupt ());
1284 /* Push zero reg. */
1285 emit_push_byte (ZERO_REGNO
, true);
1288 emit_push_byte (TMP_REGNO
, true);
1291 /* ??? There's no dwarf2 column reserved for SREG. */
1292 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1294 /* Clear zero reg. */
1295 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1297 /* Prevent any attempt to delete the setting of ZERO_REG! */
1298 emit_use (zero_reg_rtx
);
1300 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1301 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1304 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1307 && TEST_HARD_REG_BIT (set
, REG_X
)
1308 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1310 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1314 && (frame_pointer_needed
1315 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1316 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1318 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1322 && TEST_HARD_REG_BIT (set
, REG_Z
)
1323 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1325 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1327 } /* is_interrupt is_signal */
1329 avr_prologue_setup_frame (size
, set
);
1331 if (flag_stack_usage_info
)
1332 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1335 /* Output summary at end of function prologue. */
1338 avr_asm_function_end_prologue (FILE *file
)
1340 if (cfun
->machine
->is_naked
)
1342 fputs ("/* prologue: naked */\n", file
);
1346 if (cfun
->machine
->is_interrupt
)
1348 fputs ("/* prologue: Interrupt */\n", file
);
1350 else if (cfun
->machine
->is_signal
)
1352 fputs ("/* prologue: Signal */\n", file
);
1355 fputs ("/* prologue: function */\n", file
);
1358 if (ACCUMULATE_OUTGOING_ARGS
)
1359 fprintf (file
, "/* outgoing args size = %d */\n",
1360 avr_outgoing_args_size());
1362 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1364 fprintf (file
, "/* stack size = %d */\n",
1365 cfun
->machine
->stack_usage
);
1366 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1367 usage for offset so that SP + .L__stack_offset = return address. */
1368 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1372 /* Implement EPILOGUE_USES. */
1375 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1377 if (reload_completed
1379 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1384 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1387 emit_pop_byte (unsigned regno
)
1391 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1392 mem
= gen_frame_mem (QImode
, mem
);
1393 reg
= gen_rtx_REG (QImode
, regno
);
1395 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1398 /* Output RTL epilogue. */
1401 expand_epilogue (bool sibcall_p
)
1408 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1410 size
= get_frame_size() + avr_outgoing_args_size();
1412 /* epilogue: naked */
1413 if (cfun
->machine
->is_naked
)
1415 gcc_assert (!sibcall_p
);
1417 emit_jump_insn (gen_return ());
1421 avr_regs_to_save (&set
);
1422 live_seq
= sequent_regs_live ();
1424 minimize
= (TARGET_CALL_PROLOGUES
1427 && !cfun
->machine
->is_OS_task
1428 && !cfun
->machine
->is_OS_main
);
1432 || frame_pointer_needed
1435 /* Get rid of frame. */
1437 if (!frame_pointer_needed
)
1439 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1444 emit_move_insn (frame_pointer_rtx
,
1445 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1448 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1454 /* Try two methods to adjust stack and select shortest. */
1459 HOST_WIDE_INT size_max
;
1461 gcc_assert (frame_pointer_needed
1465 fp
= my_fp
= (frame_pointer_needed
1467 : gen_rtx_REG (Pmode
, REG_X
));
1469 if (AVR_HAVE_8BIT_SP
)
1471 /* The high byte (r29) does not change:
1472 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1474 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1477 /* For rationale see comment in prologue generation. */
1479 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1480 if (size
> size_max
)
1482 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1484 /********** Method 1: Adjust fp register **********/
1488 if (!frame_pointer_needed
)
1489 emit_move_insn (fp
, stack_pointer_rtx
);
1491 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1493 /* Copy to stack pointer. */
1495 if (TARGET_NO_INTERRUPTS
)
1498 if (AVR_HAVE_8BIT_SP
)
1501 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1502 GEN_INT (irq_state
)));
1504 fp_plus_insns
= get_insns ();
1507 /********** Method 2: Adjust Stack pointer **********/
1509 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1515 emit_move_insn (stack_pointer_rtx
,
1516 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1518 sp_plus_insns
= get_insns ();
1521 /************ Use shortest method ************/
1523 emit_insn (get_sequence_length (sp_plus_insns
)
1524 < get_sequence_length (fp_plus_insns
)
1529 emit_insn (fp_plus_insns
);
1532 if (frame_pointer_needed
1533 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1535 /* Restore previous frame_pointer. See expand_prologue for
1536 rationale for not using pophi. */
1538 emit_pop_byte (REG_Y
+ 1);
1539 emit_pop_byte (REG_Y
);
1542 /* Restore used registers. */
1544 for (reg
= 31; reg
>= 0; --reg
)
1545 if (TEST_HARD_REG_BIT (set
, reg
))
1546 emit_pop_byte (reg
);
1550 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1551 The conditions to restore them must be tha same as in prologue. */
1554 && TEST_HARD_REG_BIT (set
, REG_Z
)
1555 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1557 emit_pop_byte (TMP_REGNO
);
1558 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1562 && (frame_pointer_needed
1563 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1564 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1566 emit_pop_byte (TMP_REGNO
);
1567 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1571 && TEST_HARD_REG_BIT (set
, REG_X
)
1572 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1574 emit_pop_byte (TMP_REGNO
);
1575 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1580 emit_pop_byte (TMP_REGNO
);
1581 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1584 /* Restore SREG using tmp_reg as scratch. */
1586 emit_pop_byte (TMP_REGNO
);
1587 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1589 /* Restore tmp REG. */
1590 emit_pop_byte (TMP_REGNO
);
1592 /* Restore zero REG. */
1593 emit_pop_byte (ZERO_REGNO
);
1597 emit_jump_insn (gen_return ());
1600 /* Output summary messages at beginning of function epilogue. */
1603 avr_asm_function_begin_epilogue (FILE *file
)
1605 fprintf (file
, "/* epilogue start */\n");
1609 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1612 avr_cannot_modify_jumps_p (void)
1615 /* Naked Functions must not have any instructions after
1616 their epilogue, see PR42240 */
1618 if (reload_completed
1620 && cfun
->machine
->is_naked
)
1629 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1631 /* FIXME: PSImode addresses are not mode-dependent in themselves.
1632 This hook just serves to hack around PR rtl-optimization/52543 by
1633 claiming that PSImode addresses (which are used for the 24-bit
1634 address space __memx) were mode-dependent so that lower-subreg.s
1635 will skip these addresses. See also the similar FIXME comment along
1636 with mov<mode> expanders in avr.md. */
1639 avr_mode_dependent_address_p (const_rtx addr
)
1641 return GET_MODE (addr
) != Pmode
;
1645 /* Helper function for `avr_legitimate_address_p'. */
1648 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1649 RTX_CODE outer_code
, bool strict
)
1652 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1653 as
, outer_code
, UNKNOWN
)
1655 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1659 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1660 machine for a memory operand of mode MODE. */
1663 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1665 bool ok
= CONSTANT_ADDRESS_P (x
);
1667 switch (GET_CODE (x
))
1670 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1674 && GET_MODE_SIZE (mode
) > 4
1675 && REG_X
== REGNO (x
))
1683 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1684 GET_CODE (x
), strict
);
1689 rtx reg
= XEXP (x
, 0);
1690 rtx op1
= XEXP (x
, 1);
1693 && CONST_INT_P (op1
)
1694 && INTVAL (op1
) >= 0)
1696 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1701 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1704 if (reg
== frame_pointer_rtx
1705 || reg
== arg_pointer_rtx
)
1710 else if (frame_pointer_needed
1711 && reg
== frame_pointer_rtx
)
1723 if (avr_log
.legitimate_address_p
)
1725 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1726 "reload_completed=%d reload_in_progress=%d %s:",
1727 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1728 reg_renumber
? "(reg_renumber)" : "");
1730 if (GET_CODE (x
) == PLUS
1731 && REG_P (XEXP (x
, 0))
1732 && CONST_INT_P (XEXP (x
, 1))
1733 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1736 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1737 true_regnum (XEXP (x
, 0)));
1740 avr_edump ("\n%r\n", x
);
1747 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1748 now only a helper for avr_addr_space_legitimize_address. */
1749 /* Attempts to replace X with a valid
1750 memory address for an operand of mode MODE */
1753 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1755 bool big_offset_p
= false;
1759 if (GET_CODE (oldx
) == PLUS
1760 && REG_P (XEXP (oldx
, 0)))
1762 if (REG_P (XEXP (oldx
, 1)))
1763 x
= force_reg (GET_MODE (oldx
), oldx
);
1764 else if (CONST_INT_P (XEXP (oldx
, 1)))
1766 int offs
= INTVAL (XEXP (oldx
, 1));
1767 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1768 && offs
> MAX_LD_OFFSET (mode
))
1770 big_offset_p
= true;
1771 x
= force_reg (GET_MODE (oldx
), oldx
);
1776 if (avr_log
.legitimize_address
)
1778 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1781 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1788 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1789 /* This will allow register R26/27 to be used where it is no worse than normal
1790 base pointers R28/29 or R30/31. For example, if base offset is greater
1791 than 63 bytes or for R++ or --R addressing. */
1794 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1795 int opnum
, int type
, int addr_type
,
1796 int ind_levels ATTRIBUTE_UNUSED
,
1797 rtx (*mk_memloc
)(rtx
,int))
1801 if (avr_log
.legitimize_reload_address
)
1802 avr_edump ("\n%?:%m %r\n", mode
, x
);
1804 if (1 && (GET_CODE (x
) == POST_INC
1805 || GET_CODE (x
) == PRE_DEC
))
1807 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1808 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1809 opnum
, RELOAD_OTHER
);
1811 if (avr_log
.legitimize_reload_address
)
1812 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1813 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1818 if (GET_CODE (x
) == PLUS
1819 && REG_P (XEXP (x
, 0))
1820 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1821 && CONST_INT_P (XEXP (x
, 1))
1822 && INTVAL (XEXP (x
, 1)) >= 1)
1824 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1828 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1830 int regno
= REGNO (XEXP (x
, 0));
1831 rtx mem
= mk_memloc (x
, regno
);
1833 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1834 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1835 1, (enum reload_type
) addr_type
);
1837 if (avr_log
.legitimize_reload_address
)
1838 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1839 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1841 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1842 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1843 opnum
, (enum reload_type
) type
);
1845 if (avr_log
.legitimize_reload_address
)
1846 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1847 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1852 else if (! (frame_pointer_needed
1853 && XEXP (x
, 0) == frame_pointer_rtx
))
1855 push_reload (x
, NULL_RTX
, px
, NULL
,
1856 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1857 opnum
, (enum reload_type
) type
);
1859 if (avr_log
.legitimize_reload_address
)
1860 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1861 POINTER_REGS
, x
, NULL_RTX
);
1871 /* Helper function to print assembler resp. track instruction
1872 sequence lengths. Always return "".
1875 Output assembler code from template TPL with operands supplied
1876 by OPERANDS. This is just forwarding to output_asm_insn.
1879 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1880 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1881 Don't output anything.
1885 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1889 output_asm_insn (tpl
, operands
);
1903 /* Return a pointer register name as a string. */
1906 ptrreg_to_str (int regno
)
1910 case REG_X
: return "X";
1911 case REG_Y
: return "Y";
1912 case REG_Z
: return "Z";
1914 output_operand_lossage ("address operand requires constraint for"
1915 " X, Y, or Z register");
1920 /* Return the condition name as a string.
1921 Used in conditional jump constructing */
1924 cond_string (enum rtx_code code
)
1933 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1938 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1954 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1955 /* Output ADDR to FILE as address. */
1958 avr_print_operand_address (FILE *file
, rtx addr
)
1960 switch (GET_CODE (addr
))
1963 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1967 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1971 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1975 if (CONSTANT_ADDRESS_P (addr
)
1976 && text_segment_operand (addr
, VOIDmode
))
1979 if (GET_CODE (x
) == CONST
)
1981 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1983 /* Assembler gs() will implant word address. Make offset
1984 a byte offset inside gs() for assembler. This is
1985 needed because the more logical (constant+gs(sym)) is not
1986 accepted by gas. For 128K and lower devices this is ok.
1987 For large devices it will create a Trampoline to offset
1988 from symbol which may not be what the user really wanted. */
1989 fprintf (file
, "gs(");
1990 output_addr_const (file
, XEXP (x
,0));
1991 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
1992 2 * INTVAL (XEXP (x
, 1)));
1994 if (warning (0, "pointer offset from symbol maybe incorrect"))
1996 output_addr_const (stderr
, addr
);
1997 fprintf(stderr
,"\n");
2002 fprintf (file
, "gs(");
2003 output_addr_const (file
, addr
);
2004 fprintf (file
, ")");
2008 output_addr_const (file
, addr
);
2013 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2016 avr_print_operand_punct_valid_p (unsigned char code
)
2018 return code
== '~' || code
== '!';
2022 /* Implement `TARGET_PRINT_OPERAND'. */
2023 /* Output X as assembler operand to file FILE.
2024 For a description of supported %-codes, see top of avr.md. */
2027 avr_print_operand (FILE *file
, rtx x
, int code
)
2031 if (code
>= 'A' && code
<= 'D')
2036 if (!AVR_HAVE_JMP_CALL
)
2039 else if (code
== '!')
2041 if (AVR_HAVE_EIJMP_EICALL
)
2044 else if (code
== 't'
2047 static int t_regno
= -1;
2048 static int t_nbits
= -1;
2050 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2052 t_regno
= REGNO (x
);
2053 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2055 else if (CONST_INT_P (x
) && t_regno
>= 0
2056 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2058 int bpos
= INTVAL (x
);
2060 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2062 fprintf (file
, ",%d", bpos
% 8);
2067 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2071 if (x
== zero_reg_rtx
)
2072 fprintf (file
, "__zero_reg__");
2074 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
2076 else if (CONST_INT_P (x
))
2078 HOST_WIDE_INT ival
= INTVAL (x
);
2081 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2082 else if (low_io_address_operand (x
, VOIDmode
)
2083 || high_io_address_operand (x
, VOIDmode
))
2085 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2086 fprintf (file
, "__RAMPZ__");
2087 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2088 fprintf (file
, "__RAMPY__");
2089 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2090 fprintf (file
, "__RAMPX__");
2091 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2092 fprintf (file
, "__RAMPD__");
2093 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2094 fprintf (file
, "__CCP__");
2095 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2096 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2097 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2100 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2101 ival
- avr_current_arch
->sfr_offset
);
2105 fatal_insn ("bad address, not an I/O address:", x
);
2109 rtx addr
= XEXP (x
, 0);
2113 if (!CONSTANT_P (addr
))
2114 fatal_insn ("bad address, not a constant:", addr
);
2115 /* Assembler template with m-code is data - not progmem section */
2116 if (text_segment_operand (addr
, VOIDmode
))
2117 if (warning (0, "accessing data memory with"
2118 " program memory address"))
2120 output_addr_const (stderr
, addr
);
2121 fprintf(stderr
,"\n");
2123 output_addr_const (file
, addr
);
2125 else if (code
== 'i')
2127 avr_print_operand (file
, addr
, 'i');
2129 else if (code
== 'o')
2131 if (GET_CODE (addr
) != PLUS
)
2132 fatal_insn ("bad address, not (reg+disp):", addr
);
2134 avr_print_operand (file
, XEXP (addr
, 1), 0);
2136 else if (code
== 'p' || code
== 'r')
2138 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2139 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2142 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2144 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2146 else if (GET_CODE (addr
) == PLUS
)
2148 avr_print_operand_address (file
, XEXP (addr
,0));
2149 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2150 fatal_insn ("internal compiler error. Bad address:"
2153 avr_print_operand (file
, XEXP (addr
,1), code
);
2156 avr_print_operand_address (file
, addr
);
2158 else if (code
== 'i')
2160 fatal_insn ("bad address, not an I/O address:", x
);
2162 else if (code
== 'x')
2164 /* Constant progmem address - like used in jmp or call */
2165 if (0 == text_segment_operand (x
, VOIDmode
))
2166 if (warning (0, "accessing program memory"
2167 " with data memory address"))
2169 output_addr_const (stderr
, x
);
2170 fprintf(stderr
,"\n");
2172 /* Use normal symbol for direct address no linker trampoline needed */
2173 output_addr_const (file
, x
);
2175 else if (GET_CODE (x
) == CONST_FIXED
)
2177 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2179 output_operand_lossage ("Unsupported code '%c'for fixed-point:",
2181 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2183 else if (GET_CODE (x
) == CONST_DOUBLE
)
2187 if (GET_MODE (x
) != SFmode
)
2188 fatal_insn ("internal compiler error. Unknown mode:", x
);
2189 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2190 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2191 fprintf (file
, "0x%lx", val
);
2193 else if (GET_CODE (x
) == CONST_STRING
)
2194 fputs (XSTR (x
, 0), file
);
2195 else if (code
== 'j')
2196 fputs (cond_string (GET_CODE (x
)), file
);
2197 else if (code
== 'k')
2198 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2200 avr_print_operand_address (file
, x
);
2203 /* Update the condition code in the INSN. */
2206 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2209 enum attr_cc cc
= get_attr_cc (insn
);
2217 case CC_OUT_PLUS_NOCLOBBER
:
2221 rtx
*op
= recog_data
.operand
;
2224 /* Extract insn's operands. */
2225 extract_constrain_insn_cached (insn
);
2233 avr_out_plus (op
, &len_dummy
, &icc
);
2234 cc
= (enum attr_cc
) icc
;
2237 case CC_OUT_PLUS_NOCLOBBER
:
2238 avr_out_plus_noclobber (op
, &len_dummy
, &icc
);
2239 cc
= (enum attr_cc
) icc
;
2243 avr_out_minus (op
, &len_dummy
, &icc
);
2244 cc
= (enum attr_cc
) icc
;
2249 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2250 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2251 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2253 /* Any other "r,rL" combination does not alter cc0. */
2257 } /* inner switch */
2261 } /* outer swicth */
2266 /* Special values like CC_OUT_PLUS from above have been
2267 mapped to "standard" CC_* values so we never come here. */
2273 /* Insn does not affect CC at all. */
2281 set
= single_set (insn
);
2285 cc_status
.flags
|= CC_NO_OVERFLOW
;
2286 cc_status
.value1
= SET_DEST (set
);
2291 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2292 The V flag may or may not be known but that's ok because
2293 alter_cond will change tests to use EQ/NE. */
2294 set
= single_set (insn
);
2298 cc_status
.value1
= SET_DEST (set
);
2299 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2304 set
= single_set (insn
);
2307 cc_status
.value1
= SET_SRC (set
);
2311 /* Insn doesn't leave CC in a usable state. */
2317 /* Choose mode for jump insn:
2318 1 - relative jump in range -63 <= x <= 62 ;
2319 2 - relative jump in range -2046 <= x <= 2045 ;
2320 3 - absolute jump (only for ATmega[16]03). */
2323 avr_jump_mode (rtx x
, rtx insn
)
2325 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2326 ? XEXP (x
, 0) : x
));
2327 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2328 int jump_distance
= cur_addr
- dest_addr
;
2330 if (-63 <= jump_distance
&& jump_distance
<= 62)
2332 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2334 else if (AVR_HAVE_JMP_CALL
)
2340 /* return an AVR condition jump commands.
2341 X is a comparison RTX.
2342 LEN is a number returned by avr_jump_mode function.
2343 if REVERSE nonzero then condition code in X must be reversed. */
2346 ret_cond_branch (rtx x
, int len
, int reverse
)
2348 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2353 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2354 return (len
== 1 ? ("breq .+2" CR_TAB
2356 len
== 2 ? ("breq .+4" CR_TAB
2364 return (len
== 1 ? ("breq .+2" CR_TAB
2366 len
== 2 ? ("breq .+4" CR_TAB
2373 return (len
== 1 ? ("breq .+2" CR_TAB
2375 len
== 2 ? ("breq .+4" CR_TAB
2382 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2383 return (len
== 1 ? ("breq %0" CR_TAB
2385 len
== 2 ? ("breq .+2" CR_TAB
2392 return (len
== 1 ? ("breq %0" CR_TAB
2394 len
== 2 ? ("breq .+2" CR_TAB
2401 return (len
== 1 ? ("breq %0" CR_TAB
2403 len
== 2 ? ("breq .+2" CR_TAB
2417 return ("br%j1 .+2" CR_TAB
2420 return ("br%j1 .+4" CR_TAB
2431 return ("br%k1 .+2" CR_TAB
2434 return ("br%k1 .+4" CR_TAB
2442 /* Output insn cost for next insn. */
2445 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2446 int num_operands ATTRIBUTE_UNUSED
)
2448 if (avr_log
.rtx_costs
)
2450 rtx set
= single_set (insn
);
2453 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2454 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2456 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2457 rtx_cost (PATTERN (insn
), INSN
, 0,
2458 optimize_insn_for_speed_p()));
2462 /* Return 0 if undefined, 1 if always true or always false. */
2465 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2467 unsigned int max
= (mode
== QImode
? 0xff :
2468 mode
== HImode
? 0xffff :
2469 mode
== PSImode
? 0xffffff :
2470 mode
== SImode
? 0xffffffff : 0);
2471 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
2473 if (unsigned_condition (op
) != op
)
2476 if (max
!= (INTVAL (x
) & max
)
2477 && INTVAL (x
) != 0xff)
2484 /* Returns nonzero if REGNO is the number of a hard
2485 register in which function arguments are sometimes passed. */
2488 function_arg_regno_p(int r
)
2490 return (r
>= 8 && r
<= 25);
2493 /* Initializing the variable cum for the state at the beginning
2494 of the argument list. */
2497 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2498 tree fndecl ATTRIBUTE_UNUSED
)
2501 cum
->regno
= FIRST_CUM_REG
;
2502 if (!libname
&& stdarg_p (fntype
))
2505 /* Assume the calle may be tail called */
2507 cfun
->machine
->sibcall_fails
= 0;
2510 /* Returns the number of registers to allocate for a function argument. */
2513 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2517 if (mode
== BLKmode
)
2518 size
= int_size_in_bytes (type
);
2520 size
= GET_MODE_SIZE (mode
);
2522 /* Align all function arguments to start in even-numbered registers.
2523 Odd-sized arguments leave holes above them. */
2525 return (size
+ 1) & ~1;
2528 /* Controls whether a function argument is passed
2529 in a register, and which register. */
2532 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2533 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2535 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2536 int bytes
= avr_num_arg_regs (mode
, type
);
2538 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2539 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2544 /* Update the summarizer variable CUM to advance past an argument
2545 in the argument list. */
2548 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2549 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2551 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2552 int bytes
= avr_num_arg_regs (mode
, type
);
2554 cum
->nregs
-= bytes
;
2555 cum
->regno
-= bytes
;
2557 /* A parameter is being passed in a call-saved register. As the original
2558 contents of these regs has to be restored before leaving the function,
2559 a function must not pass arguments in call-saved regs in order to get
2564 && !call_used_regs
[cum
->regno
])
2566 /* FIXME: We ship info on failing tail-call in struct machine_function.
2567 This uses internals of calls.c:expand_call() and the way args_so_far
2568 is used. targetm.function_ok_for_sibcall() needs to be extended to
2569 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2570 dependent so that such an extension is not wanted. */
2572 cfun
->machine
->sibcall_fails
= 1;
2575 /* Test if all registers needed by the ABI are actually available. If the
2576 user has fixed a GPR needed to pass an argument, an (implicit) function
2577 call will clobber that fixed register. See PR45099 for an example. */
2584 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2585 if (fixed_regs
[regno
])
2586 warning (0, "fixed register %s used to pass parameter to function",
2590 if (cum
->nregs
<= 0)
2593 cum
->regno
= FIRST_CUM_REG
;
2597 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2598 /* Decide whether we can make a sibling call to a function. DECL is the
2599 declaration of the function being targeted by the call and EXP is the
2600 CALL_EXPR representing the call. */
2603 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2607 /* Tail-calling must fail if callee-saved regs are used to pass
2608 function args. We must not tail-call when `epilogue_restores'
2609 is used. Unfortunately, we cannot tell at this point if that
2610 actually will happen or not, and we cannot step back from
2611 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2613 if (cfun
->machine
->sibcall_fails
2614 || TARGET_CALL_PROLOGUES
)
2619 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2623 decl_callee
= TREE_TYPE (decl_callee
);
2627 decl_callee
= fntype_callee
;
2629 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2630 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2632 decl_callee
= TREE_TYPE (decl_callee
);
2636 /* Ensure that caller and callee have compatible epilogues */
2638 if (cfun
->machine
->is_interrupt
2639 || cfun
->machine
->is_signal
2640 || cfun
->machine
->is_naked
2641 || avr_naked_function_p (decl_callee
)
2642 /* FIXME: For OS_task and OS_main, we are over-conservative.
2643 This is due to missing documentation of these attributes
2644 and what they actually should do and should not do. */
2645 || (avr_OS_task_function_p (decl_callee
)
2646 != cfun
->machine
->is_OS_task
)
2647 || (avr_OS_main_function_p (decl_callee
)
2648 != cfun
->machine
->is_OS_main
))
2656 /***********************************************************************
2657 Functions for outputting various mov's for a various modes
2658 ************************************************************************/
2660 /* Return true if a value of mode MODE is read from flash by
2661 __load_* function from libgcc. */
2664 avr_load_libgcc_p (rtx op
)
2666 enum machine_mode mode
= GET_MODE (op
);
2667 int n_bytes
= GET_MODE_SIZE (mode
);
2672 && MEM_ADDR_SPACE (op
) == ADDR_SPACE_FLASH
);
2675 /* Return true if a value of mode MODE is read by __xload_* function. */
2678 avr_xload_libgcc_p (enum machine_mode mode
)
2680 int n_bytes
= GET_MODE_SIZE (mode
);
2683 || avr_current_device
->n_flash
> 1);
2687 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2688 OP[1] in AS1 to register OP[0].
2689 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2693 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2697 rtx src
= SET_SRC (single_set (insn
));
2699 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2701 addr_space_t as
= MEM_ADDR_SPACE (src
);
2708 warning (0, "writing to address space %qs not supported",
2709 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2714 addr
= XEXP (src
, 0);
2715 code
= GET_CODE (addr
);
2717 gcc_assert (REG_P (dest
));
2718 gcc_assert (REG
== code
|| POST_INC
== code
);
2720 /* Only 1-byte moves from __flash are representes as open coded
2721 mov insns. All other loads from flash are not handled here but
2722 by some UNSPEC instead, see respective FIXME in machine description. */
2724 gcc_assert (as
== ADDR_SPACE_FLASH
);
2725 gcc_assert (n_bytes
== 1);
2728 xop
[1] = lpm_addr_reg_rtx
;
2729 xop
[2] = lpm_reg_rtx
;
2738 gcc_assert (REG_Z
== REGNO (addr
));
2740 return AVR_HAVE_LPMX
2741 ? avr_asm_len ("lpm %0,%a1", xop
, plen
, 1)
2742 : avr_asm_len ("lpm" CR_TAB
2743 "mov %0,%2", xop
, plen
, 2);
2747 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0)));
2749 return AVR_HAVE_LPMX
2750 ? avr_asm_len ("lpm %0,%a1+", xop
, plen
, 1)
2751 : avr_asm_len ("lpm" CR_TAB
2753 "mov %0,%2", xop
, plen
, 3);
2760 /* If PLEN == NULL: Ouput instructions to load $0 with a value from
2761 flash address $1:Z. If $1 = 0 we can use LPM to read, otherwise
2763 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2767 avr_load_lpm (rtx insn
, rtx
*op
, int *plen
)
2770 int n
, n_bytes
= GET_MODE_SIZE (GET_MODE (op
[0]));
2771 rtx xsegment
= op
[1];
2772 bool clobber_z
= PARALLEL
== GET_CODE (PATTERN (insn
));
2773 bool r30_in_tmp
= false;
2778 xop
[1] = lpm_addr_reg_rtx
;
2779 xop
[2] = lpm_reg_rtx
;
2780 xop
[3] = xstring_empty
;
2782 /* Set RAMPZ as needed. */
2784 if (REG_P (xsegment
))
2786 avr_asm_len ("out __RAMPZ__,%0", &xsegment
, plen
, 1);
2790 /* Load the individual bytes from LSB to MSB. */
2792 for (n
= 0; n
< n_bytes
; n
++)
2794 xop
[0] = all_regs_rtx
[REGNO (op
[0]) + n
];
2796 if ((CONST_INT_P (xsegment
) && AVR_HAVE_LPMX
)
2797 || (REG_P (xsegment
) && AVR_HAVE_ELPMX
))
2800 avr_asm_len ("%3lpm %0,%a1", xop
, plen
, 1);
2801 else if (REGNO (xop
[0]) == REG_Z
)
2803 avr_asm_len ("%3lpm %2,%a1+", xop
, plen
, 1);
2807 avr_asm_len ("%3lpm %0,%a1+", xop
, plen
, 1);
2811 gcc_assert (clobber_z
);
2813 avr_asm_len ("%3lpm" CR_TAB
2814 "mov %0,%2", xop
, plen
, 2);
2817 avr_asm_len ("adiw %1,1", xop
, plen
, 1);
2822 avr_asm_len ("mov %1,%2", xop
, plen
, 1);
2826 && !reg_unused_after (insn
, lpm_addr_reg_rtx
)
2827 && !reg_overlap_mentioned_p (op
[0], lpm_addr_reg_rtx
))
2829 xop
[2] = GEN_INT (n_bytes
-1);
2830 avr_asm_len ("sbiw %1,%2", xop
, plen
, 1);
2833 if (REG_P (xsegment
) && AVR_HAVE_RAMPD
)
2835 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2837 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop
, plen
, 1);
2844 /* Worker function for xload_8 insn. */
2847 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
2853 xop
[2] = lpm_addr_reg_rtx
;
2854 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
2859 avr_asm_len ("sbrc %1,7" CR_TAB
2861 "sbrs %1,7", xop
, plen
, 3);
2863 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, 1);
2865 if (REGNO (xop
[0]) != REGNO (xop
[3]))
2866 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2873 output_movqi (rtx insn
, rtx operands
[], int *real_l
)
2875 rtx dest
= operands
[0];
2876 rtx src
= operands
[1];
2878 if (avr_mem_flash_p (src
)
2879 || avr_mem_flash_p (dest
))
2881 return avr_out_lpm (insn
, operands
, real_l
);
2887 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
2891 if (REG_P (src
)) /* mov r,r */
2893 if (test_hard_reg_class (STACK_REG
, dest
))
2895 else if (test_hard_reg_class (STACK_REG
, src
))
2900 else if (CONSTANT_P (src
))
2902 output_reload_in_const (operands
, NULL_RTX
, real_l
, false);
2905 else if (MEM_P (src
))
2906 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2908 else if (MEM_P (dest
))
2913 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
2915 return out_movqi_mr_r (insn
, xop
, real_l
);
2922 output_movhi (rtx insn
, rtx xop
[], int *plen
)
2927 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
2929 if (avr_mem_flash_p (src
)
2930 || avr_mem_flash_p (dest
))
2932 return avr_out_lpm (insn
, xop
, plen
);
2935 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
2939 if (REG_P (src
)) /* mov r,r */
2941 if (test_hard_reg_class (STACK_REG
, dest
))
2943 if (AVR_HAVE_8BIT_SP
)
2944 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
2947 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2948 "out __SP_H__,%B1", xop
, plen
, -2);
2950 /* Use simple load of SP if no interrupts are used. */
2952 return TARGET_NO_INTERRUPTS
2953 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2954 "out __SP_L__,%A1", xop
, plen
, -2)
2955 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2957 "out __SP_H__,%B1" CR_TAB
2958 "out __SREG__,__tmp_reg__" CR_TAB
2959 "out __SP_L__,%A1", xop
, plen
, -5);
2961 else if (test_hard_reg_class (STACK_REG
, src
))
2963 return !AVR_HAVE_SPH
2964 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2965 "clr %B0", xop
, plen
, -2)
2967 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2968 "in %B0,__SP_H__", xop
, plen
, -2);
2971 return AVR_HAVE_MOVW
2972 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
2974 : avr_asm_len ("mov %A0,%A1" CR_TAB
2975 "mov %B0,%B1", xop
, plen
, -2);
2977 else if (CONSTANT_P (src
))
2979 return output_reload_inhi (xop
, NULL
, plen
);
2981 else if (MEM_P (src
))
2983 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
2986 else if (MEM_P (dest
))
2991 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
2993 return out_movhi_mr_r (insn
, xop
, plen
);
2996 fatal_insn ("invalid insn:", insn
);
3002 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
3006 rtx x
= XEXP (src
, 0);
3008 if (CONSTANT_ADDRESS_P (x
))
3010 return optimize
> 0 && io_address_operand (x
, QImode
)
3011 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3012 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3014 else if (GET_CODE (x
) == PLUS
3015 && REG_P (XEXP (x
, 0))
3016 && CONST_INT_P (XEXP (x
, 1)))
3018 /* memory access by reg+disp */
3020 int disp
= INTVAL (XEXP (x
, 1));
3022 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3024 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3025 fatal_insn ("incorrect insn:",insn
);
3027 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3028 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3029 "ldd %0,Y+63" CR_TAB
3030 "sbiw r28,%o1-63", op
, plen
, -3);
3032 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3033 "sbci r29,hi8(-%o1)" CR_TAB
3035 "subi r28,lo8(%o1)" CR_TAB
3036 "sbci r29,hi8(%o1)", op
, plen
, -5);
3038 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3040 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3041 it but I have this situation with extremal optimizing options. */
3043 avr_asm_len ("adiw r26,%o1" CR_TAB
3044 "ld %0,X", op
, plen
, -2);
3046 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3047 && !reg_unused_after (insn
, XEXP (x
,0)))
3049 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3055 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3058 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
3062 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
3066 rtx base
= XEXP (src
, 0);
3067 int reg_dest
= true_regnum (dest
);
3068 int reg_base
= true_regnum (base
);
3069 /* "volatile" forces reading low byte first, even if less efficient,
3070 for correct operation with 16-bit I/O registers. */
3071 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3075 if (reg_dest
== reg_base
) /* R = (R) */
3076 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3078 "mov %A0,__tmp_reg__", op
, plen
, -3);
3080 if (reg_base
!= REG_X
)
3081 return avr_asm_len ("ld %A0,%1" CR_TAB
3082 "ldd %B0,%1+1", op
, plen
, -2);
3084 avr_asm_len ("ld %A0,X+" CR_TAB
3085 "ld %B0,X", op
, plen
, -2);
3087 if (!reg_unused_after (insn
, base
))
3088 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3092 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3094 int disp
= INTVAL (XEXP (base
, 1));
3095 int reg_base
= true_regnum (XEXP (base
, 0));
3097 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3099 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3100 fatal_insn ("incorrect insn:",insn
);
3102 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3103 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3104 "ldd %A0,Y+62" CR_TAB
3105 "ldd %B0,Y+63" CR_TAB
3106 "sbiw r28,%o1-62", op
, plen
, -4)
3108 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3109 "sbci r29,hi8(-%o1)" CR_TAB
3111 "ldd %B0,Y+1" CR_TAB
3112 "subi r28,lo8(%o1)" CR_TAB
3113 "sbci r29,hi8(%o1)", op
, plen
, -6);
3116 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3117 it but I have this situation with extremal
3118 optimization options. */
3120 if (reg_base
== REG_X
)
3121 return reg_base
== reg_dest
3122 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3123 "ld __tmp_reg__,X+" CR_TAB
3125 "mov %A0,__tmp_reg__", op
, plen
, -4)
3127 : avr_asm_len ("adiw r26,%o1" CR_TAB
3130 "sbiw r26,%o1+1", op
, plen
, -4);
3132 return reg_base
== reg_dest
3133 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3134 "ldd %B0,%B1" CR_TAB
3135 "mov %A0,__tmp_reg__", op
, plen
, -3)
3137 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3138 "ldd %B0,%B1", op
, plen
, -2);
3140 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3142 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3143 fatal_insn ("incorrect insn:", insn
);
3145 if (!mem_volatile_p
)
3146 return avr_asm_len ("ld %B0,%1" CR_TAB
3147 "ld %A0,%1", op
, plen
, -2);
3149 return REGNO (XEXP (base
, 0)) == REG_X
3150 ? avr_asm_len ("sbiw r26,2" CR_TAB
3153 "sbiw r26,1", op
, plen
, -4)
3155 : avr_asm_len ("sbiw %r1,2" CR_TAB
3157 "ldd %B0,%p1+1", op
, plen
, -3);
3159 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3161 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3162 fatal_insn ("incorrect insn:", insn
);
3164 return avr_asm_len ("ld %A0,%1" CR_TAB
3165 "ld %B0,%1", op
, plen
, -2);
3167 else if (CONSTANT_ADDRESS_P (base
))
3169 return optimize
> 0 && io_address_operand (base
, HImode
)
3170 ? avr_asm_len ("in %A0,%i1" CR_TAB
3171 "in %B0,%i1+1", op
, plen
, -2)
3173 : avr_asm_len ("lds %A0,%m1" CR_TAB
3174 "lds %B0,%m1+1", op
, plen
, -4);
3177 fatal_insn ("unknown move insn:",insn
);
3182 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3186 rtx base
= XEXP (src
, 0);
3187 int reg_dest
= true_regnum (dest
);
3188 int reg_base
= true_regnum (base
);
3196 if (reg_base
== REG_X
) /* (R26) */
3198 if (reg_dest
== REG_X
)
3199 /* "ld r26,-X" is undefined */
3200 return *l
=7, ("adiw r26,3" CR_TAB
3203 "ld __tmp_reg__,-X" CR_TAB
3206 "mov r27,__tmp_reg__");
3207 else if (reg_dest
== REG_X
- 2)
3208 return *l
=5, ("ld %A0,X+" CR_TAB
3210 "ld __tmp_reg__,X+" CR_TAB
3212 "mov %C0,__tmp_reg__");
3213 else if (reg_unused_after (insn
, base
))
3214 return *l
=4, ("ld %A0,X+" CR_TAB
3219 return *l
=5, ("ld %A0,X+" CR_TAB
3227 if (reg_dest
== reg_base
)
3228 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3229 "ldd %C0,%1+2" CR_TAB
3230 "ldd __tmp_reg__,%1+1" CR_TAB
3232 "mov %B0,__tmp_reg__");
3233 else if (reg_base
== reg_dest
+ 2)
3234 return *l
=5, ("ld %A0,%1" CR_TAB
3235 "ldd %B0,%1+1" CR_TAB
3236 "ldd __tmp_reg__,%1+2" CR_TAB
3237 "ldd %D0,%1+3" CR_TAB
3238 "mov %C0,__tmp_reg__");
3240 return *l
=4, ("ld %A0,%1" CR_TAB
3241 "ldd %B0,%1+1" CR_TAB
3242 "ldd %C0,%1+2" CR_TAB
3246 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3248 int disp
= INTVAL (XEXP (base
, 1));
3250 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3252 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3253 fatal_insn ("incorrect insn:",insn
);
3255 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3256 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3257 "ldd %A0,Y+60" CR_TAB
3258 "ldd %B0,Y+61" CR_TAB
3259 "ldd %C0,Y+62" CR_TAB
3260 "ldd %D0,Y+63" CR_TAB
3263 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3264 "sbci r29,hi8(-%o1)" CR_TAB
3266 "ldd %B0,Y+1" CR_TAB
3267 "ldd %C0,Y+2" CR_TAB
3268 "ldd %D0,Y+3" CR_TAB
3269 "subi r28,lo8(%o1)" CR_TAB
3270 "sbci r29,hi8(%o1)");
3273 reg_base
= true_regnum (XEXP (base
, 0));
3274 if (reg_base
== REG_X
)
3277 if (reg_dest
== REG_X
)
3280 /* "ld r26,-X" is undefined */
3281 return ("adiw r26,%o1+3" CR_TAB
3284 "ld __tmp_reg__,-X" CR_TAB
3287 "mov r27,__tmp_reg__");
3290 if (reg_dest
== REG_X
- 2)
3291 return ("adiw r26,%o1" CR_TAB
3294 "ld __tmp_reg__,X+" CR_TAB
3296 "mov r26,__tmp_reg__");
3298 return ("adiw r26,%o1" CR_TAB
3305 if (reg_dest
== reg_base
)
3306 return *l
=5, ("ldd %D0,%D1" CR_TAB
3307 "ldd %C0,%C1" CR_TAB
3308 "ldd __tmp_reg__,%B1" CR_TAB
3309 "ldd %A0,%A1" CR_TAB
3310 "mov %B0,__tmp_reg__");
3311 else if (reg_dest
== reg_base
- 2)
3312 return *l
=5, ("ldd %A0,%A1" CR_TAB
3313 "ldd %B0,%B1" CR_TAB
3314 "ldd __tmp_reg__,%C1" CR_TAB
3315 "ldd %D0,%D1" CR_TAB
3316 "mov %C0,__tmp_reg__");
3317 return *l
=4, ("ldd %A0,%A1" CR_TAB
3318 "ldd %B0,%B1" CR_TAB
3319 "ldd %C0,%C1" CR_TAB
3322 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3323 return *l
=4, ("ld %D0,%1" CR_TAB
3327 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3328 return *l
=4, ("ld %A0,%1" CR_TAB
3332 else if (CONSTANT_ADDRESS_P (base
))
3333 return *l
=8, ("lds %A0,%m1" CR_TAB
3334 "lds %B0,%m1+1" CR_TAB
3335 "lds %C0,%m1+2" CR_TAB
3338 fatal_insn ("unknown move insn:",insn
);
3343 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3347 rtx base
= XEXP (dest
, 0);
3348 int reg_base
= true_regnum (base
);
3349 int reg_src
= true_regnum (src
);
3355 if (CONSTANT_ADDRESS_P (base
))
3356 return *l
=8,("sts %m0,%A1" CR_TAB
3357 "sts %m0+1,%B1" CR_TAB
3358 "sts %m0+2,%C1" CR_TAB
3360 if (reg_base
> 0) /* (r) */
3362 if (reg_base
== REG_X
) /* (R26) */
3364 if (reg_src
== REG_X
)
3366 /* "st X+,r26" is undefined */
3367 if (reg_unused_after (insn
, base
))
3368 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3371 "st X+,__tmp_reg__" CR_TAB
3375 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3378 "st X+,__tmp_reg__" CR_TAB
3383 else if (reg_base
== reg_src
+ 2)
3385 if (reg_unused_after (insn
, base
))
3386 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3387 "mov __tmp_reg__,%D1" CR_TAB
3390 "st %0+,__zero_reg__" CR_TAB
3391 "st %0,__tmp_reg__" CR_TAB
3392 "clr __zero_reg__");
3394 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3395 "mov __tmp_reg__,%D1" CR_TAB
3398 "st %0+,__zero_reg__" CR_TAB
3399 "st %0,__tmp_reg__" CR_TAB
3400 "clr __zero_reg__" CR_TAB
3403 return *l
=5, ("st %0+,%A1" CR_TAB
3410 return *l
=4, ("st %0,%A1" CR_TAB
3411 "std %0+1,%B1" CR_TAB
3412 "std %0+2,%C1" CR_TAB
3415 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3417 int disp
= INTVAL (XEXP (base
, 1));
3418 reg_base
= REGNO (XEXP (base
, 0));
3419 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3421 if (reg_base
!= REG_Y
)
3422 fatal_insn ("incorrect insn:",insn
);
3424 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3425 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3426 "std Y+60,%A1" CR_TAB
3427 "std Y+61,%B1" CR_TAB
3428 "std Y+62,%C1" CR_TAB
3429 "std Y+63,%D1" CR_TAB
3432 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3433 "sbci r29,hi8(-%o0)" CR_TAB
3435 "std Y+1,%B1" CR_TAB
3436 "std Y+2,%C1" CR_TAB
3437 "std Y+3,%D1" CR_TAB
3438 "subi r28,lo8(%o0)" CR_TAB
3439 "sbci r29,hi8(%o0)");
3441 if (reg_base
== REG_X
)
3444 if (reg_src
== REG_X
)
3447 return ("mov __tmp_reg__,r26" CR_TAB
3448 "mov __zero_reg__,r27" CR_TAB
3449 "adiw r26,%o0" CR_TAB
3450 "st X+,__tmp_reg__" CR_TAB
3451 "st X+,__zero_reg__" CR_TAB
3454 "clr __zero_reg__" CR_TAB
3457 else if (reg_src
== REG_X
- 2)
3460 return ("mov __tmp_reg__,r26" CR_TAB
3461 "mov __zero_reg__,r27" CR_TAB
3462 "adiw r26,%o0" CR_TAB
3465 "st X+,__tmp_reg__" CR_TAB
3466 "st X,__zero_reg__" CR_TAB
3467 "clr __zero_reg__" CR_TAB
3471 return ("adiw r26,%o0" CR_TAB
3478 return *l
=4, ("std %A0,%A1" CR_TAB
3479 "std %B0,%B1" CR_TAB
3480 "std %C0,%C1" CR_TAB
3483 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3484 return *l
=4, ("st %0,%D1" CR_TAB
3488 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3489 return *l
=4, ("st %0,%A1" CR_TAB
3493 fatal_insn ("unknown move insn:",insn
);
3498 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3501 rtx dest
= operands
[0];
3502 rtx src
= operands
[1];
3505 if (avr_mem_flash_p (src
)
3506 || avr_mem_flash_p (dest
))
3508 return avr_out_lpm (insn
, operands
, real_l
);
3514 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
3517 if (REG_P (src
)) /* mov r,r */
3519 if (true_regnum (dest
) > true_regnum (src
))
3524 return ("movw %C0,%C1" CR_TAB
3528 return ("mov %D0,%D1" CR_TAB
3529 "mov %C0,%C1" CR_TAB
3530 "mov %B0,%B1" CR_TAB
3538 return ("movw %A0,%A1" CR_TAB
3542 return ("mov %A0,%A1" CR_TAB
3543 "mov %B0,%B1" CR_TAB
3544 "mov %C0,%C1" CR_TAB
3548 else if (CONSTANT_P (src
))
3550 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3552 else if (MEM_P (src
))
3553 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3555 else if (MEM_P (dest
))
3559 if (src
== CONST0_RTX (GET_MODE (dest
)))
3560 operands
[1] = zero_reg_rtx
;
3562 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3565 output_asm_insn (templ
, operands
);
3570 fatal_insn ("invalid insn:", insn
);
3575 /* Handle loads of 24-bit types from memory to register. */
3578 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3582 rtx base
= XEXP (src
, 0);
3583 int reg_dest
= true_regnum (dest
);
3584 int reg_base
= true_regnum (base
);
3588 if (reg_base
== REG_X
) /* (R26) */
3590 if (reg_dest
== REG_X
)
3591 /* "ld r26,-X" is undefined */
3592 return avr_asm_len ("adiw r26,2" CR_TAB
3594 "ld __tmp_reg__,-X" CR_TAB
3597 "mov r27,__tmp_reg__", op
, plen
, -6);
3600 avr_asm_len ("ld %A0,X+" CR_TAB
3602 "ld %C0,X", op
, plen
, -3);
3604 if (reg_dest
!= REG_X
- 2
3605 && !reg_unused_after (insn
, base
))
3607 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3613 else /* reg_base != REG_X */
3615 if (reg_dest
== reg_base
)
3616 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3617 "ldd __tmp_reg__,%1+1" CR_TAB
3619 "mov %B0,__tmp_reg__", op
, plen
, -4);
3621 return avr_asm_len ("ld %A0,%1" CR_TAB
3622 "ldd %B0,%1+1" CR_TAB
3623 "ldd %C0,%1+2", op
, plen
, -3);
3626 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3628 int disp
= INTVAL (XEXP (base
, 1));
3630 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3632 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3633 fatal_insn ("incorrect insn:",insn
);
3635 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3636 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3637 "ldd %A0,Y+61" CR_TAB
3638 "ldd %B0,Y+62" CR_TAB
3639 "ldd %C0,Y+63" CR_TAB
3640 "sbiw r28,%o1-61", op
, plen
, -5);
3642 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3643 "sbci r29,hi8(-%o1)" CR_TAB
3645 "ldd %B0,Y+1" CR_TAB
3646 "ldd %C0,Y+2" CR_TAB
3647 "subi r28,lo8(%o1)" CR_TAB
3648 "sbci r29,hi8(%o1)", op
, plen
, -7);
3651 reg_base
= true_regnum (XEXP (base
, 0));
3652 if (reg_base
== REG_X
)
3655 if (reg_dest
== REG_X
)
3657 /* "ld r26,-X" is undefined */
3658 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3660 "ld __tmp_reg__,-X" CR_TAB
3663 "mov r27,__tmp_reg__", op
, plen
, -6);
3666 avr_asm_len ("adiw r26,%o1" CR_TAB
3669 "ld %C0,X", op
, plen
, -4);
3671 if (reg_dest
!= REG_W
3672 && !reg_unused_after (insn
, XEXP (base
, 0)))
3673 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3678 if (reg_dest
== reg_base
)
3679 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3680 "ldd __tmp_reg__,%B1" CR_TAB
3681 "ldd %A0,%A1" CR_TAB
3682 "mov %B0,__tmp_reg__", op
, plen
, -4);
3684 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3685 "ldd %B0,%B1" CR_TAB
3686 "ldd %C0,%C1", op
, plen
, -3);
3688 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3689 return avr_asm_len ("ld %C0,%1" CR_TAB
3691 "ld %A0,%1", op
, plen
, -3);
3692 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3693 return avr_asm_len ("ld %A0,%1" CR_TAB
3695 "ld %C0,%1", op
, plen
, -3);
3697 else if (CONSTANT_ADDRESS_P (base
))
3698 return avr_asm_len ("lds %A0,%m1" CR_TAB
3699 "lds %B0,%m1+1" CR_TAB
3700 "lds %C0,%m1+2", op
, plen
, -6);
3702 fatal_insn ("unknown move insn:",insn
);
3706 /* Handle store of 24-bit type from register or zero to memory. */
3709 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3713 rtx base
= XEXP (dest
, 0);
3714 int reg_base
= true_regnum (base
);
3716 if (CONSTANT_ADDRESS_P (base
))
3717 return avr_asm_len ("sts %m0,%A1" CR_TAB
3718 "sts %m0+1,%B1" CR_TAB
3719 "sts %m0+2,%C1", op
, plen
, -6);
3721 if (reg_base
> 0) /* (r) */
3723 if (reg_base
== REG_X
) /* (R26) */
3725 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3727 avr_asm_len ("st %0+,%A1" CR_TAB
3729 "st %0,%C1", op
, plen
, -3);
3731 if (!reg_unused_after (insn
, base
))
3732 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3737 return avr_asm_len ("st %0,%A1" CR_TAB
3738 "std %0+1,%B1" CR_TAB
3739 "std %0+2,%C1", op
, plen
, -3);
3741 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3743 int disp
= INTVAL (XEXP (base
, 1));
3744 reg_base
= REGNO (XEXP (base
, 0));
3746 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3748 if (reg_base
!= REG_Y
)
3749 fatal_insn ("incorrect insn:",insn
);
3751 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3752 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3753 "std Y+61,%A1" CR_TAB
3754 "std Y+62,%B1" CR_TAB
3755 "std Y+63,%C1" CR_TAB
3756 "sbiw r28,%o0-60", op
, plen
, -5);
3758 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3759 "sbci r29,hi8(-%o0)" CR_TAB
3761 "std Y+1,%B1" CR_TAB
3762 "std Y+2,%C1" CR_TAB
3763 "subi r28,lo8(%o0)" CR_TAB
3764 "sbci r29,hi8(%o0)", op
, plen
, -7);
3766 if (reg_base
== REG_X
)
3769 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
3771 avr_asm_len ("adiw r26,%o0" CR_TAB
3774 "st X,%C1", op
, plen
, -4);
3776 if (!reg_unused_after (insn
, XEXP (base
, 0)))
3777 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
3782 return avr_asm_len ("std %A0,%A1" CR_TAB
3783 "std %B0,%B1" CR_TAB
3784 "std %C0,%C1", op
, plen
, -3);
3786 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3787 return avr_asm_len ("st %0,%C1" CR_TAB
3789 "st %0,%A1", op
, plen
, -3);
3790 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3791 return avr_asm_len ("st %0,%A1" CR_TAB
3793 "st %0,%C1", op
, plen
, -3);
3795 fatal_insn ("unknown move insn:",insn
);
3800 /* Move around 24-bit stuff. */
3803 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
3808 if (avr_mem_flash_p (src
)
3809 || avr_mem_flash_p (dest
))
3811 return avr_out_lpm (insn
, op
, plen
);
3814 if (register_operand (dest
, VOIDmode
))
3816 if (register_operand (src
, VOIDmode
)) /* mov r,r */
3818 if (true_regnum (dest
) > true_regnum (src
))
3820 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
3823 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
3825 return avr_asm_len ("mov %B0,%B1" CR_TAB
3826 "mov %A0,%A1", op
, plen
, 2);
3831 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
3833 avr_asm_len ("mov %A0,%A1" CR_TAB
3834 "mov %B0,%B1", op
, plen
, -2);
3836 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
3839 else if (CONSTANT_P (src
))
3841 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
3843 else if (MEM_P (src
))
3844 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
3846 else if (MEM_P (dest
))
3851 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3853 return avr_out_store_psi (insn
, xop
, plen
);
3856 fatal_insn ("invalid insn:", insn
);
3862 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
3866 rtx x
= XEXP (dest
, 0);
3868 if (CONSTANT_ADDRESS_P (x
))
3870 return optimize
> 0 && io_address_operand (x
, QImode
)
3871 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
3872 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
3874 else if (GET_CODE (x
) == PLUS
3875 && REG_P (XEXP (x
, 0))
3876 && CONST_INT_P (XEXP (x
, 1)))
3878 /* memory access by reg+disp */
3880 int disp
= INTVAL (XEXP (x
, 1));
3882 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
3884 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3885 fatal_insn ("incorrect insn:",insn
);
3887 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3888 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3889 "std Y+63,%1" CR_TAB
3890 "sbiw r28,%o0-63", op
, plen
, -3);
3892 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3893 "sbci r29,hi8(-%o0)" CR_TAB
3895 "subi r28,lo8(%o0)" CR_TAB
3896 "sbci r29,hi8(%o0)", op
, plen
, -5);
3898 else if (REGNO (XEXP (x
,0)) == REG_X
)
3900 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
3902 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3903 "adiw r26,%o0" CR_TAB
3904 "st X,__tmp_reg__", op
, plen
, -3);
3908 avr_asm_len ("adiw r26,%o0" CR_TAB
3909 "st X,%1", op
, plen
, -2);
3912 if (!reg_unused_after (insn
, XEXP (x
,0)))
3913 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
3918 return avr_asm_len ("std %0,%1", op
, plen
, -1);
3921 return avr_asm_len ("st %0,%1", op
, plen
, -1);
3925 /* Helper for the next function for XMEGA. It does the same
3926 but with low byte first. */
3929 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
3933 rtx base
= XEXP (dest
, 0);
3934 int reg_base
= true_regnum (base
);
3935 int reg_src
= true_regnum (src
);
3937 /* "volatile" forces writing low byte first, even if less efficient,
3938 for correct operation with 16-bit I/O registers like SP. */
3939 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
3941 if (CONSTANT_ADDRESS_P (base
))
3942 return optimize
> 0 && io_address_operand (base
, HImode
)
3943 ? avr_asm_len ("out %i0,%A1" CR_TAB
3944 "out %i0+1,%B1", op
, plen
, -2)
3946 : avr_asm_len ("sts %m0,%A1" CR_TAB
3947 "sts %m0+1,%B1", op
, plen
, -4);
3951 if (reg_base
!= REG_X
)
3952 return avr_asm_len ("st %0,%A1" CR_TAB
3953 "std %0+1,%B1", op
, plen
, -2);
3955 if (reg_src
== REG_X
)
3956 /* "st X+,r26" and "st -X,r26" are undefined. */
3957 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3960 "st X,__tmp_reg__", op
, plen
, -4);
3962 avr_asm_len ("st X+,%A1" CR_TAB
3963 "st X,%B1", op
, plen
, -2);
3965 return reg_unused_after (insn
, base
)
3967 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3969 else if (GET_CODE (base
) == PLUS
)
3971 int disp
= INTVAL (XEXP (base
, 1));
3972 reg_base
= REGNO (XEXP (base
, 0));
3973 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3975 if (reg_base
!= REG_Y
)
3976 fatal_insn ("incorrect insn:",insn
);
3978 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
3979 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3980 "std Y+62,%A1" CR_TAB
3981 "std Y+63,%B1" CR_TAB
3982 "sbiw r28,%o0-62", op
, plen
, -4)
3984 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3985 "sbci r29,hi8(-%o0)" CR_TAB
3987 "std Y+1,%B1" CR_TAB
3988 "subi r28,lo8(%o0)" CR_TAB
3989 "sbci r29,hi8(%o0)", op
, plen
, -6);
3992 if (reg_base
!= REG_X
)
3993 return avr_asm_len ("std %A0,%A1" CR_TAB
3994 "std %B0,%B1", op
, plen
, -2);
3996 return reg_src
== REG_X
3997 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3998 "mov __zero_reg__,r27" CR_TAB
3999 "adiw r26,%o0" CR_TAB
4000 "st X+,__tmp_reg__" CR_TAB
4001 "st X,__zero_reg__" CR_TAB
4002 "clr __zero_reg__" CR_TAB
4003 "sbiw r26,%o0+1", op
, plen
, -7)
4005 : avr_asm_len ("adiw r26,%o0" CR_TAB
4008 "sbiw r26,%o0+1", op
, plen
, -4);
4010 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4012 if (!mem_volatile_p
)
4013 return avr_asm_len ("st %0,%B1" CR_TAB
4014 "st %0,%A1", op
, plen
, -2);
4016 return REGNO (XEXP (base
, 0)) == REG_X
4017 ? avr_asm_len ("sbiw r26,2" CR_TAB
4020 "sbiw r26,1", op
, plen
, -4)
4022 : avr_asm_len ("sbiw %r0,2" CR_TAB
4024 "std %p0+1,%B1", op
, plen
, -3);
4026 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4028 return avr_asm_len ("st %0,%A1" CR_TAB
4029 "st %0,%B1", op
, plen
, -2);
4032 fatal_insn ("unknown move insn:",insn
);
4038 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
4042 rtx base
= XEXP (dest
, 0);
4043 int reg_base
= true_regnum (base
);
4044 int reg_src
= true_regnum (src
);
4047 /* "volatile" forces writing high-byte first (no-xmega) resp.
4048 low-byte first (xmega) even if less efficient, for correct
4049 operation with 16-bit I/O registers like. */
4052 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4054 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4056 if (CONSTANT_ADDRESS_P (base
))
4057 return optimize
> 0 && io_address_operand (base
, HImode
)
4058 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4059 "out %i0,%A1", op
, plen
, -2)
4061 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4062 "sts %m0,%A1", op
, plen
, -4);
4066 if (reg_base
!= REG_X
)
4067 return avr_asm_len ("std %0+1,%B1" CR_TAB
4068 "st %0,%A1", op
, plen
, -2);
4070 if (reg_src
== REG_X
)
4071 /* "st X+,r26" and "st -X,r26" are undefined. */
4072 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4073 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4076 "st X,__tmp_reg__", op
, plen
, -4)
4078 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4080 "st X,__tmp_reg__" CR_TAB
4082 "st X,r26", op
, plen
, -5);
4084 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4085 ? avr_asm_len ("st X+,%A1" CR_TAB
4086 "st X,%B1", op
, plen
, -2)
4087 : avr_asm_len ("adiw r26,1" CR_TAB
4089 "st -X,%A1", op
, plen
, -3);
4091 else if (GET_CODE (base
) == PLUS
)
4093 int disp
= INTVAL (XEXP (base
, 1));
4094 reg_base
= REGNO (XEXP (base
, 0));
4095 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4097 if (reg_base
!= REG_Y
)
4098 fatal_insn ("incorrect insn:",insn
);
4100 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4101 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4102 "std Y+63,%B1" CR_TAB
4103 "std Y+62,%A1" CR_TAB
4104 "sbiw r28,%o0-62", op
, plen
, -4)
4106 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4107 "sbci r29,hi8(-%o0)" CR_TAB
4108 "std Y+1,%B1" CR_TAB
4110 "subi r28,lo8(%o0)" CR_TAB
4111 "sbci r29,hi8(%o0)", op
, plen
, -6);
4114 if (reg_base
!= REG_X
)
4115 return avr_asm_len ("std %B0,%B1" CR_TAB
4116 "std %A0,%A1", op
, plen
, -2);
4118 return reg_src
== REG_X
4119 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4120 "mov __zero_reg__,r27" CR_TAB
4121 "adiw r26,%o0+1" CR_TAB
4122 "st X,__zero_reg__" CR_TAB
4123 "st -X,__tmp_reg__" CR_TAB
4124 "clr __zero_reg__" CR_TAB
4125 "sbiw r26,%o0", op
, plen
, -7)
4127 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4130 "sbiw r26,%o0", op
, plen
, -4);
4132 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4134 return avr_asm_len ("st %0,%B1" CR_TAB
4135 "st %0,%A1", op
, plen
, -2);
4137 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4139 if (!mem_volatile_p
)
4140 return avr_asm_len ("st %0,%A1" CR_TAB
4141 "st %0,%B1", op
, plen
, -2);
4143 return REGNO (XEXP (base
, 0)) == REG_X
4144 ? avr_asm_len ("adiw r26,1" CR_TAB
4147 "adiw r26,2", op
, plen
, -4)
4149 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4151 "adiw %r0,2", op
, plen
, -3);
4153 fatal_insn ("unknown move insn:",insn
);
4157 /* Return 1 if frame pointer for current function required. */
4160 avr_frame_pointer_required_p (void)
4162 return (cfun
->calls_alloca
4163 || cfun
->calls_setjmp
4164 || cfun
->has_nonlocal_label
4165 || crtl
->args
.info
.nregs
== 0
4166 || get_frame_size () > 0);
4169 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4172 compare_condition (rtx insn
)
4174 rtx next
= next_real_insn (insn
);
4176 if (next
&& JUMP_P (next
))
4178 rtx pat
= PATTERN (next
);
4179 rtx src
= SET_SRC (pat
);
4181 if (IF_THEN_ELSE
== GET_CODE (src
))
4182 return GET_CODE (XEXP (src
, 0));
4189 /* Returns true iff INSN is a tst insn that only tests the sign. */
4192 compare_sign_p (rtx insn
)
4194 RTX_CODE cond
= compare_condition (insn
);
4195 return (cond
== GE
|| cond
== LT
);
4199 /* Returns true iff the next insn is a JUMP_INSN with a condition
4200 that needs to be swapped (GT, GTU, LE, LEU). */
4203 compare_diff_p (rtx insn
)
4205 RTX_CODE cond
= compare_condition (insn
);
4206 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4209 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4212 compare_eq_p (rtx insn
)
4214 RTX_CODE cond
= compare_condition (insn
);
4215 return (cond
== EQ
|| cond
== NE
);
4219 /* Output compare instruction
4221 compare (XOP[0], XOP[1])
4223 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4224 XOP[2] is an 8-bit scratch register as needed.
4226 PLEN == NULL: Output instructions.
4227 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4228 Don't output anything. */
4231 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4233 /* Register to compare and value to compare against. */
4237 /* MODE of the comparison. */
4238 enum machine_mode mode
;
4240 /* Number of bytes to operate on. */
4241 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
4243 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4244 int clobber_val
= -1;
4246 /* Map fixed mode operands to integer operands with the same binary
4247 representation. They are easier to handle in the remainder. */
4249 if (CONST_FIXED
== GET_CODE (xval
))
4251 xreg
= avr_to_int_mode (xop
[0]);
4252 xval
= avr_to_int_mode (xop
[1]);
4255 mode
= GET_MODE (xreg
);
4257 gcc_assert (REG_P (xreg
));
4258 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4259 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4264 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4265 against 0 by ORing the bytes. This is one instruction shorter.
4266 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4267 and therefore don't use this. */
4269 if (!test_hard_reg_class (LD_REGS
, xreg
)
4270 && compare_eq_p (insn
)
4271 && reg_unused_after (insn
, xreg
))
4273 if (xval
== const1_rtx
)
4275 avr_asm_len ("dec %A0" CR_TAB
4276 "or %A0,%B0", xop
, plen
, 2);
4279 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4282 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4286 else if (xval
== constm1_rtx
)
4289 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4292 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4294 return avr_asm_len ("and %A0,%B0" CR_TAB
4295 "com %A0", xop
, plen
, 2);
4299 for (i
= 0; i
< n_bytes
; i
++)
4301 /* We compare byte-wise. */
4302 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4303 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4305 /* 8-bit value to compare with this byte. */
4306 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4308 /* Registers R16..R31 can operate with immediate. */
4309 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4312 xop
[1] = gen_int_mode (val8
, QImode
);
4314 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4317 && test_hard_reg_class (ADDW_REGS
, reg8
))
4319 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4321 if (IN_RANGE (val16
, 0, 63)
4323 || reg_unused_after (insn
, xreg
)))
4325 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4331 && IN_RANGE (val16
, -63, -1)
4332 && compare_eq_p (insn
)
4333 && reg_unused_after (insn
, xreg
))
4335 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4339 /* Comparing against 0 is easy. */
4344 ? "cp %0,__zero_reg__"
4345 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4349 /* Upper registers can compare and subtract-with-carry immediates.
4350 Notice that compare instructions do the same as respective subtract
4351 instruction; the only difference is that comparisons don't write
4352 the result back to the target register. */
4358 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4361 else if (reg_unused_after (insn
, xreg
))
4363 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4368 /* Must load the value into the scratch register. */
4370 gcc_assert (REG_P (xop
[2]));
4372 if (clobber_val
!= (int) val8
)
4373 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4374 clobber_val
= (int) val8
;
4378 : "cpc %0,%2", xop
, plen
, 1);
4385 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4388 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4392 xop
[0] = gen_rtx_REG (DImode
, 18);
4396 return avr_out_compare (insn
, xop
, plen
);
4399 /* Output test instruction for HImode. */
4402 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4404 if (compare_sign_p (insn
))
4406 avr_asm_len ("tst %B0", op
, plen
, -1);
4408 else if (reg_unused_after (insn
, op
[0])
4409 && compare_eq_p (insn
))
4411 /* Faster than sbiw if we can clobber the operand. */
4412 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4416 avr_out_compare (insn
, op
, plen
);
4423 /* Output test instruction for PSImode. */
4426 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4428 if (compare_sign_p (insn
))
4430 avr_asm_len ("tst %C0", op
, plen
, -1);
4432 else if (reg_unused_after (insn
, op
[0])
4433 && compare_eq_p (insn
))
4435 /* Faster than sbiw if we can clobber the operand. */
4436 avr_asm_len ("or %A0,%B0" CR_TAB
4437 "or %A0,%C0", op
, plen
, -2);
4441 avr_out_compare (insn
, op
, plen
);
4448 /* Output test instruction for SImode. */
4451 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4453 if (compare_sign_p (insn
))
4455 avr_asm_len ("tst %D0", op
, plen
, -1);
4457 else if (reg_unused_after (insn
, op
[0])
4458 && compare_eq_p (insn
))
4460 /* Faster than sbiw if we can clobber the operand. */
4461 avr_asm_len ("or %A0,%B0" CR_TAB
4463 "or %A0,%D0", op
, plen
, -3);
4467 avr_out_compare (insn
, op
, plen
);
4474 /* Generate asm equivalent for various shifts. This only handles cases
4475 that are not already carefully hand-optimized in ?sh??i3_out.
4477 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4478 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4479 OPERANDS[3] is a QImode scratch register from LD regs if
4480 available and SCRATCH, otherwise (no scratch available)
4482 TEMPL is an assembler template that shifts by one position.
4483 T_LEN is the length of this template. */
4486 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4487 int *plen
, int t_len
)
4489 bool second_label
= true;
4490 bool saved_in_tmp
= false;
4491 bool use_zero_reg
= false;
4494 op
[0] = operands
[0];
4495 op
[1] = operands
[1];
4496 op
[2] = operands
[2];
4497 op
[3] = operands
[3];
4502 if (CONST_INT_P (operands
[2]))
4504 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4505 && REG_P (operands
[3]));
4506 int count
= INTVAL (operands
[2]);
4507 int max_len
= 10; /* If larger than this, always use a loop. */
4512 if (count
< 8 && !scratch
)
4513 use_zero_reg
= true;
4516 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4518 if (t_len
* count
<= max_len
)
4520 /* Output shifts inline with no loop - faster. */
4523 avr_asm_len (templ
, op
, plen
, t_len
);
4530 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4532 else if (use_zero_reg
)
4534 /* Hack to save one word: use __zero_reg__ as loop counter.
4535 Set one bit, then shift in a loop until it is 0 again. */
4537 op
[3] = zero_reg_rtx
;
4539 avr_asm_len ("set" CR_TAB
4540 "bld %3,%2-1", op
, plen
, 2);
4544 /* No scratch register available, use one from LD_REGS (saved in
4545 __tmp_reg__) that doesn't overlap with registers to shift. */
4547 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4548 op
[4] = tmp_reg_rtx
;
4549 saved_in_tmp
= true;
4551 avr_asm_len ("mov %4,%3" CR_TAB
4552 "ldi %3,%2", op
, plen
, 2);
4555 second_label
= false;
4557 else if (MEM_P (op
[2]))
4561 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4564 out_movqi_r_mr (insn
, op_mov
, plen
);
4566 else if (register_operand (op
[2], QImode
))
4570 if (!reg_unused_after (insn
, op
[2])
4571 || reg_overlap_mentioned_p (op
[0], op
[2]))
4573 op
[3] = tmp_reg_rtx
;
4574 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4578 fatal_insn ("bad shift insn:", insn
);
4581 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4583 avr_asm_len ("1:", op
, plen
, 0);
4584 avr_asm_len (templ
, op
, plen
, t_len
);
4587 avr_asm_len ("2:", op
, plen
, 0);
4589 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4590 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4593 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4597 /* 8bit shift left ((char)x << i) */
4600 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4602 if (GET_CODE (operands
[2]) == CONST_INT
)
4609 switch (INTVAL (operands
[2]))
4612 if (INTVAL (operands
[2]) < 8)
4624 return ("lsl %0" CR_TAB
4629 return ("lsl %0" CR_TAB
4634 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4637 return ("swap %0" CR_TAB
4641 return ("lsl %0" CR_TAB
4647 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4650 return ("swap %0" CR_TAB
4655 return ("lsl %0" CR_TAB
4662 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4665 return ("swap %0" CR_TAB
4671 return ("lsl %0" CR_TAB
4680 return ("ror %0" CR_TAB
4685 else if (CONSTANT_P (operands
[2]))
4686 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4688 out_shift_with_cnt ("lsl %0",
4689 insn
, operands
, len
, 1);
4694 /* 16bit shift left ((short)x << i) */
4697 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4699 if (GET_CODE (operands
[2]) == CONST_INT
)
4701 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4702 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4709 switch (INTVAL (operands
[2]))
4712 if (INTVAL (operands
[2]) < 16)
4716 return ("clr %B0" CR_TAB
4720 if (optimize_size
&& scratch
)
4725 return ("swap %A0" CR_TAB
4727 "andi %B0,0xf0" CR_TAB
4728 "eor %B0,%A0" CR_TAB
4729 "andi %A0,0xf0" CR_TAB
4735 return ("swap %A0" CR_TAB
4737 "ldi %3,0xf0" CR_TAB
4739 "eor %B0,%A0" CR_TAB
4743 break; /* optimize_size ? 6 : 8 */
4747 break; /* scratch ? 5 : 6 */
4751 return ("lsl %A0" CR_TAB
4755 "andi %B0,0xf0" CR_TAB
4756 "eor %B0,%A0" CR_TAB
4757 "andi %A0,0xf0" CR_TAB
4763 return ("lsl %A0" CR_TAB
4767 "ldi %3,0xf0" CR_TAB
4769 "eor %B0,%A0" CR_TAB
4777 break; /* scratch ? 5 : 6 */
4779 return ("clr __tmp_reg__" CR_TAB
4782 "ror __tmp_reg__" CR_TAB
4785 "ror __tmp_reg__" CR_TAB
4786 "mov %B0,%A0" CR_TAB
4787 "mov %A0,__tmp_reg__");
4791 return ("lsr %B0" CR_TAB
4792 "mov %B0,%A0" CR_TAB
4798 return *len
= 2, ("mov %B0,%A1" CR_TAB
4803 return ("mov %B0,%A0" CR_TAB
4809 return ("mov %B0,%A0" CR_TAB
4816 return ("mov %B0,%A0" CR_TAB
4826 return ("mov %B0,%A0" CR_TAB
4834 return ("mov %B0,%A0" CR_TAB
4837 "ldi %3,0xf0" CR_TAB
4841 return ("mov %B0,%A0" CR_TAB
4852 return ("mov %B0,%A0" CR_TAB
4858 if (AVR_HAVE_MUL
&& scratch
)
4861 return ("ldi %3,0x20" CR_TAB
4865 "clr __zero_reg__");
4867 if (optimize_size
&& scratch
)
4872 return ("mov %B0,%A0" CR_TAB
4876 "ldi %3,0xe0" CR_TAB
4882 return ("set" CR_TAB
4887 "clr __zero_reg__");
4890 return ("mov %B0,%A0" CR_TAB
4899 if (AVR_HAVE_MUL
&& ldi_ok
)
4902 return ("ldi %B0,0x40" CR_TAB
4903 "mul %A0,%B0" CR_TAB
4906 "clr __zero_reg__");
4908 if (AVR_HAVE_MUL
&& scratch
)
4911 return ("ldi %3,0x40" CR_TAB
4915 "clr __zero_reg__");
4917 if (optimize_size
&& ldi_ok
)
4920 return ("mov %B0,%A0" CR_TAB
4921 "ldi %A0,6" "\n1:\t"
4926 if (optimize_size
&& scratch
)
4929 return ("clr %B0" CR_TAB
4938 return ("clr %B0" CR_TAB
4945 out_shift_with_cnt ("lsl %A0" CR_TAB
4946 "rol %B0", insn
, operands
, len
, 2);
4951 /* 24-bit shift left */
4954 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
4959 if (CONST_INT_P (op
[2]))
4961 switch (INTVAL (op
[2]))
4964 if (INTVAL (op
[2]) < 24)
4967 return avr_asm_len ("clr %A0" CR_TAB
4969 "clr %C0", op
, plen
, 3);
4973 int reg0
= REGNO (op
[0]);
4974 int reg1
= REGNO (op
[1]);
4977 return avr_asm_len ("mov %C0,%B1" CR_TAB
4978 "mov %B0,%A1" CR_TAB
4979 "clr %A0", op
, plen
, 3);
4981 return avr_asm_len ("clr %A0" CR_TAB
4982 "mov %B0,%A1" CR_TAB
4983 "mov %C0,%B1", op
, plen
, 3);
4988 int reg0
= REGNO (op
[0]);
4989 int reg1
= REGNO (op
[1]);
4991 if (reg0
+ 2 != reg1
)
4992 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
4994 return avr_asm_len ("clr %B0" CR_TAB
4995 "clr %A0", op
, plen
, 2);
4999 return avr_asm_len ("clr %C0" CR_TAB
5003 "clr %A0", op
, plen
, 5);
5007 out_shift_with_cnt ("lsl %A0" CR_TAB
5009 "rol %C0", insn
, op
, plen
, 3);
5014 /* 32bit shift left ((long)x << i) */
5017 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
5019 if (GET_CODE (operands
[2]) == CONST_INT
)
5027 switch (INTVAL (operands
[2]))
5030 if (INTVAL (operands
[2]) < 32)
5034 return *len
= 3, ("clr %D0" CR_TAB
5038 return ("clr %D0" CR_TAB
5045 int reg0
= true_regnum (operands
[0]);
5046 int reg1
= true_regnum (operands
[1]);
5049 return ("mov %D0,%C1" CR_TAB
5050 "mov %C0,%B1" CR_TAB
5051 "mov %B0,%A1" CR_TAB
5054 return ("clr %A0" CR_TAB
5055 "mov %B0,%A1" CR_TAB
5056 "mov %C0,%B1" CR_TAB
5062 int reg0
= true_regnum (operands
[0]);
5063 int reg1
= true_regnum (operands
[1]);
5064 if (reg0
+ 2 == reg1
)
5065 return *len
= 2, ("clr %B0" CR_TAB
5068 return *len
= 3, ("movw %C0,%A1" CR_TAB
5072 return *len
= 4, ("mov %C0,%A1" CR_TAB
5073 "mov %D0,%B1" CR_TAB
5080 return ("mov %D0,%A1" CR_TAB
5087 return ("clr %D0" CR_TAB
5096 out_shift_with_cnt ("lsl %A0" CR_TAB
5099 "rol %D0", insn
, operands
, len
, 4);
5103 /* 8bit arithmetic shift right ((signed char)x >> i) */
5106 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
5108 if (GET_CODE (operands
[2]) == CONST_INT
)
5115 switch (INTVAL (operands
[2]))
5123 return ("asr %0" CR_TAB
5128 return ("asr %0" CR_TAB
5134 return ("asr %0" CR_TAB
5141 return ("asr %0" CR_TAB
5149 return ("bst %0,6" CR_TAB
5155 if (INTVAL (operands
[2]) < 8)
5162 return ("lsl %0" CR_TAB
5166 else if (CONSTANT_P (operands
[2]))
5167 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5169 out_shift_with_cnt ("asr %0",
5170 insn
, operands
, len
, 1);
5175 /* 16bit arithmetic shift right ((signed short)x >> i) */
5178 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
5180 if (GET_CODE (operands
[2]) == CONST_INT
)
5182 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5183 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5190 switch (INTVAL (operands
[2]))
5194 /* XXX try to optimize this too? */
5199 break; /* scratch ? 5 : 6 */
5201 return ("mov __tmp_reg__,%A0" CR_TAB
5202 "mov %A0,%B0" CR_TAB
5203 "lsl __tmp_reg__" CR_TAB
5205 "sbc %B0,%B0" CR_TAB
5206 "lsl __tmp_reg__" CR_TAB
5212 return ("lsl %A0" CR_TAB
5213 "mov %A0,%B0" CR_TAB
5219 int reg0
= true_regnum (operands
[0]);
5220 int reg1
= true_regnum (operands
[1]);
5223 return *len
= 3, ("mov %A0,%B0" CR_TAB
5227 return *len
= 4, ("mov %A0,%B1" CR_TAB
5235 return ("mov %A0,%B0" CR_TAB
5237 "sbc %B0,%B0" CR_TAB
5242 return ("mov %A0,%B0" CR_TAB
5244 "sbc %B0,%B0" CR_TAB
5249 if (AVR_HAVE_MUL
&& ldi_ok
)
5252 return ("ldi %A0,0x20" CR_TAB
5253 "muls %B0,%A0" CR_TAB
5255 "sbc %B0,%B0" CR_TAB
5256 "clr __zero_reg__");
5258 if (optimize_size
&& scratch
)
5261 return ("mov %A0,%B0" CR_TAB
5263 "sbc %B0,%B0" CR_TAB
5269 if (AVR_HAVE_MUL
&& ldi_ok
)
5272 return ("ldi %A0,0x10" CR_TAB
5273 "muls %B0,%A0" CR_TAB
5275 "sbc %B0,%B0" CR_TAB
5276 "clr __zero_reg__");
5278 if (optimize_size
&& scratch
)
5281 return ("mov %A0,%B0" CR_TAB
5283 "sbc %B0,%B0" CR_TAB
5290 if (AVR_HAVE_MUL
&& ldi_ok
)
5293 return ("ldi %A0,0x08" CR_TAB
5294 "muls %B0,%A0" CR_TAB
5296 "sbc %B0,%B0" CR_TAB
5297 "clr __zero_reg__");
5300 break; /* scratch ? 5 : 7 */
5302 return ("mov %A0,%B0" CR_TAB
5304 "sbc %B0,%B0" CR_TAB
5313 return ("lsl %B0" CR_TAB
5314 "sbc %A0,%A0" CR_TAB
5316 "mov %B0,%A0" CR_TAB
5320 if (INTVAL (operands
[2]) < 16)
5326 return *len
= 3, ("lsl %B0" CR_TAB
5327 "sbc %A0,%A0" CR_TAB
5332 out_shift_with_cnt ("asr %B0" CR_TAB
5333 "ror %A0", insn
, operands
, len
, 2);
5338 /* 24-bit arithmetic shift right */
5341 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5343 int dest
= REGNO (op
[0]);
5344 int src
= REGNO (op
[1]);
5346 if (CONST_INT_P (op
[2]))
5351 switch (INTVAL (op
[2]))
5355 return avr_asm_len ("mov %A0,%B1" CR_TAB
5356 "mov %B0,%C1" CR_TAB
5359 "dec %C0", op
, plen
, 5);
5361 return avr_asm_len ("clr %C0" CR_TAB
5364 "mov %B0,%C1" CR_TAB
5365 "mov %A0,%B1", op
, plen
, 5);
5368 if (dest
!= src
+ 2)
5369 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5371 return avr_asm_len ("clr %B0" CR_TAB
5374 "mov %C0,%B0", op
, plen
, 4);
5377 if (INTVAL (op
[2]) < 24)
5383 return avr_asm_len ("lsl %C0" CR_TAB
5384 "sbc %A0,%A0" CR_TAB
5385 "mov %B0,%A0" CR_TAB
5386 "mov %C0,%A0", op
, plen
, 4);
5390 out_shift_with_cnt ("asr %C0" CR_TAB
5392 "ror %A0", insn
, op
, plen
, 3);
5397 /* 32bit arithmetic shift right ((signed long)x >> i) */
5400 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5402 if (GET_CODE (operands
[2]) == CONST_INT
)
5410 switch (INTVAL (operands
[2]))
5414 int reg0
= true_regnum (operands
[0]);
5415 int reg1
= true_regnum (operands
[1]);
5418 return ("mov %A0,%B1" CR_TAB
5419 "mov %B0,%C1" CR_TAB
5420 "mov %C0,%D1" CR_TAB
5425 return ("clr %D0" CR_TAB
5428 "mov %C0,%D1" CR_TAB
5429 "mov %B0,%C1" CR_TAB
5435 int reg0
= true_regnum (operands
[0]);
5436 int reg1
= true_regnum (operands
[1]);
5438 if (reg0
== reg1
+ 2)
5439 return *len
= 4, ("clr %D0" CR_TAB
5444 return *len
= 5, ("movw %A0,%C1" CR_TAB
5450 return *len
= 6, ("mov %B0,%D1" CR_TAB
5451 "mov %A0,%C1" CR_TAB
5459 return *len
= 6, ("mov %A0,%D1" CR_TAB
5463 "mov %B0,%D0" CR_TAB
5467 if (INTVAL (operands
[2]) < 32)
5474 return *len
= 4, ("lsl %D0" CR_TAB
5475 "sbc %A0,%A0" CR_TAB
5476 "mov %B0,%A0" CR_TAB
5479 return *len
= 5, ("lsl %D0" CR_TAB
5480 "sbc %A0,%A0" CR_TAB
5481 "mov %B0,%A0" CR_TAB
5482 "mov %C0,%A0" CR_TAB
5487 out_shift_with_cnt ("asr %D0" CR_TAB
5490 "ror %A0", insn
, operands
, len
, 4);
5494 /* 8bit logic shift right ((unsigned char)x >> i) */
5497 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5499 if (GET_CODE (operands
[2]) == CONST_INT
)
5506 switch (INTVAL (operands
[2]))
5509 if (INTVAL (operands
[2]) < 8)
5521 return ("lsr %0" CR_TAB
5525 return ("lsr %0" CR_TAB
5530 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5533 return ("swap %0" CR_TAB
5537 return ("lsr %0" CR_TAB
5543 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5546 return ("swap %0" CR_TAB
5551 return ("lsr %0" CR_TAB
5558 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5561 return ("swap %0" CR_TAB
5567 return ("lsr %0" CR_TAB
5576 return ("rol %0" CR_TAB
5581 else if (CONSTANT_P (operands
[2]))
5582 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5584 out_shift_with_cnt ("lsr %0",
5585 insn
, operands
, len
, 1);
5589 /* 16bit logic shift right ((unsigned short)x >> i) */
5592 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5594 if (GET_CODE (operands
[2]) == CONST_INT
)
5596 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5597 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5604 switch (INTVAL (operands
[2]))
5607 if (INTVAL (operands
[2]) < 16)
5611 return ("clr %B0" CR_TAB
5615 if (optimize_size
&& scratch
)
5620 return ("swap %B0" CR_TAB
5622 "andi %A0,0x0f" CR_TAB
5623 "eor %A0,%B0" CR_TAB
5624 "andi %B0,0x0f" CR_TAB
5630 return ("swap %B0" CR_TAB
5632 "ldi %3,0x0f" CR_TAB
5634 "eor %A0,%B0" CR_TAB
5638 break; /* optimize_size ? 6 : 8 */
5642 break; /* scratch ? 5 : 6 */
5646 return ("lsr %B0" CR_TAB
5650 "andi %A0,0x0f" CR_TAB
5651 "eor %A0,%B0" CR_TAB
5652 "andi %B0,0x0f" CR_TAB
5658 return ("lsr %B0" CR_TAB
5662 "ldi %3,0x0f" CR_TAB
5664 "eor %A0,%B0" CR_TAB
5672 break; /* scratch ? 5 : 6 */
5674 return ("clr __tmp_reg__" CR_TAB
5677 "rol __tmp_reg__" CR_TAB
5680 "rol __tmp_reg__" CR_TAB
5681 "mov %A0,%B0" CR_TAB
5682 "mov %B0,__tmp_reg__");
5686 return ("lsl %A0" CR_TAB
5687 "mov %A0,%B0" CR_TAB
5689 "sbc %B0,%B0" CR_TAB
5693 return *len
= 2, ("mov %A0,%B1" CR_TAB
5698 return ("mov %A0,%B0" CR_TAB
5704 return ("mov %A0,%B0" CR_TAB
5711 return ("mov %A0,%B0" CR_TAB
5721 return ("mov %A0,%B0" CR_TAB
5729 return ("mov %A0,%B0" CR_TAB
5732 "ldi %3,0x0f" CR_TAB
5736 return ("mov %A0,%B0" CR_TAB
5747 return ("mov %A0,%B0" CR_TAB
5753 if (AVR_HAVE_MUL
&& scratch
)
5756 return ("ldi %3,0x08" CR_TAB
5760 "clr __zero_reg__");
5762 if (optimize_size
&& scratch
)
5767 return ("mov %A0,%B0" CR_TAB
5771 "ldi %3,0x07" CR_TAB
5777 return ("set" CR_TAB
5782 "clr __zero_reg__");
5785 return ("mov %A0,%B0" CR_TAB
5794 if (AVR_HAVE_MUL
&& ldi_ok
)
5797 return ("ldi %A0,0x04" CR_TAB
5798 "mul %B0,%A0" CR_TAB
5801 "clr __zero_reg__");
5803 if (AVR_HAVE_MUL
&& scratch
)
5806 return ("ldi %3,0x04" CR_TAB
5810 "clr __zero_reg__");
5812 if (optimize_size
&& ldi_ok
)
5815 return ("mov %A0,%B0" CR_TAB
5816 "ldi %B0,6" "\n1:\t"
5821 if (optimize_size
&& scratch
)
5824 return ("clr %A0" CR_TAB
5833 return ("clr %A0" CR_TAB
5840 out_shift_with_cnt ("lsr %B0" CR_TAB
5841 "ror %A0", insn
, operands
, len
, 2);
5846 /* 24-bit logic shift right */
5849 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5851 int dest
= REGNO (op
[0]);
5852 int src
= REGNO (op
[1]);
5854 if (CONST_INT_P (op
[2]))
5859 switch (INTVAL (op
[2]))
5863 return avr_asm_len ("mov %A0,%B1" CR_TAB
5864 "mov %B0,%C1" CR_TAB
5865 "clr %C0", op
, plen
, 3);
5867 return avr_asm_len ("clr %C0" CR_TAB
5868 "mov %B0,%C1" CR_TAB
5869 "mov %A0,%B1", op
, plen
, 3);
5872 if (dest
!= src
+ 2)
5873 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5875 return avr_asm_len ("clr %B0" CR_TAB
5876 "clr %C0", op
, plen
, 2);
5879 if (INTVAL (op
[2]) < 24)
5885 return avr_asm_len ("clr %A0" CR_TAB
5889 "clr %C0", op
, plen
, 5);
5893 out_shift_with_cnt ("lsr %C0" CR_TAB
5895 "ror %A0", insn
, op
, plen
, 3);
5900 /* 32bit logic shift right ((unsigned int)x >> i) */
5903 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
5905 if (GET_CODE (operands
[2]) == CONST_INT
)
5913 switch (INTVAL (operands
[2]))
5916 if (INTVAL (operands
[2]) < 32)
5920 return *len
= 3, ("clr %D0" CR_TAB
5924 return ("clr %D0" CR_TAB
5931 int reg0
= true_regnum (operands
[0]);
5932 int reg1
= true_regnum (operands
[1]);
5935 return ("mov %A0,%B1" CR_TAB
5936 "mov %B0,%C1" CR_TAB
5937 "mov %C0,%D1" CR_TAB
5940 return ("clr %D0" CR_TAB
5941 "mov %C0,%D1" CR_TAB
5942 "mov %B0,%C1" CR_TAB
5948 int reg0
= true_regnum (operands
[0]);
5949 int reg1
= true_regnum (operands
[1]);
5951 if (reg0
== reg1
+ 2)
5952 return *len
= 2, ("clr %C0" CR_TAB
5955 return *len
= 3, ("movw %A0,%C1" CR_TAB
5959 return *len
= 4, ("mov %B0,%D1" CR_TAB
5960 "mov %A0,%C1" CR_TAB
5966 return *len
= 4, ("mov %A0,%D1" CR_TAB
5973 return ("clr %A0" CR_TAB
5982 out_shift_with_cnt ("lsr %D0" CR_TAB
5985 "ror %A0", insn
, operands
, len
, 4);
5990 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5992 XOP[0] = XOP[0] + XOP[2]
5994 and return "". If PLEN == NULL, print assembler instructions to perform the
5995 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5996 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5997 CODE == PLUS: perform addition by using ADD instructions.
5998 CODE == MINUS: perform addition by using SUB instructions.
5999 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
6002 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
)
6004 /* MODE of the operation. */
6005 enum machine_mode mode
= GET_MODE (xop
[0]);
6007 /* INT_MODE of the same size. */
6008 enum machine_mode imode
= int_mode_for_mode (mode
);
6010 /* Number of bytes to operate on. */
6011 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6013 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6014 int clobber_val
= -1;
6016 /* op[0]: 8-bit destination register
6017 op[1]: 8-bit const int
6018 op[2]: 8-bit scratch register */
6021 /* Started the operation? Before starting the operation we may skip
6022 adding 0. This is no more true after the operation started because
6023 carry must be taken into account. */
6024 bool started
= false;
6026 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6029 /* Except in the case of ADIW with 16-bit register (see below)
6030 addition does not set cc0 in a usable way. */
6032 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6034 if (CONST_FIXED_P (xval
))
6035 xval
= avr_to_int_mode (xval
);
6038 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
6045 for (i
= 0; i
< n_bytes
; i
++)
6047 /* We operate byte-wise on the destination. */
6048 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6049 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
6051 /* 8-bit value to operate with this byte. */
6052 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6054 /* Registers R16..R31 can operate with immediate. */
6055 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6058 op
[1] = gen_int_mode (val8
, QImode
);
6060 /* To get usable cc0 no low-bytes must have been skipped. */
6068 && test_hard_reg_class (ADDW_REGS
, reg8
))
6070 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
6071 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6073 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6074 i.e. operate word-wise. */
6081 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6084 if (n_bytes
== 2 && PLUS
== code
)
6096 avr_asm_len (code
== PLUS
6097 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6101 else if ((val8
== 1 || val8
== 0xff)
6103 && i
== n_bytes
- 1)
6105 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6114 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6116 if (clobber_val
!= (int) val8
)
6117 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6118 clobber_val
= (int) val8
;
6120 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6127 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6130 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6132 if (clobber_val
!= (int) val8
)
6133 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6134 clobber_val
= (int) val8
;
6136 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6148 } /* for all sub-bytes */
6150 /* No output doesn't change cc0. */
6152 if (plen
&& *plen
== 0)
6157 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6159 XOP[0] = XOP[0] + XOP[2]
6161 and return "". If PLEN == NULL, print assembler instructions to perform the
6162 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6163 words) printed with PLEN == NULL.
6164 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6165 condition code (with respect to XOP[0]). */
6168 avr_out_plus (rtx
*xop
, int *plen
, int *pcc
)
6170 int len_plus
, len_minus
;
6171 int cc_plus
, cc_minus
, cc_dummy
;
6176 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6178 avr_out_plus_1 (xop
, &len_plus
, PLUS
, &cc_plus
);
6179 avr_out_plus_1 (xop
, &len_minus
, MINUS
, &cc_minus
);
6181 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6185 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6186 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6188 else if (len_minus
<= len_plus
)
6189 avr_out_plus_1 (xop
, NULL
, MINUS
, pcc
);
6191 avr_out_plus_1 (xop
, NULL
, PLUS
, pcc
);
6197 /* Same as above but XOP has just 3 entries.
6198 Supply a dummy 4th operand. */
6201 avr_out_plus_noclobber (rtx
*xop
, int *plen
, int *pcc
)
6210 return avr_out_plus (op
, plen
, pcc
);
6214 /* Output subtraction of register XOP[0] and compile time constant XOP[2]:
6216 XOP[0] = XOP[0] - XOP[2]
6218 This is basically the same as `avr_out_plus' except that we subtract.
6219 It's needed because (minus x const) is not mapped to (plus x -const)
6220 for the fixed point modes. */
6223 avr_out_minus (rtx
*xop
, int *plen
, int *pcc
)
6228 *pcc
= (int) CC_SET_CZN
;
6231 return avr_asm_len ("sub %A0,%A2" CR_TAB
6232 "sbc %B0,%B2", xop
, plen
, -2);
6234 if (!CONST_INT_P (xop
[2])
6235 && !CONST_FIXED_P (xop
[2]))
6236 return avr_asm_len ("subi %A0,lo8(%2)" CR_TAB
6237 "sbci %B0,hi8(%2)", xop
, plen
, -2);
6239 op
[0] = avr_to_int_mode (xop
[0]);
6240 op
[1] = avr_to_int_mode (xop
[1]);
6241 op
[2] = gen_int_mode (-INTVAL (avr_to_int_mode (xop
[2])),
6245 return avr_out_plus (op
, plen
, pcc
);
6249 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6252 avr_out_plus64 (rtx addend
, int *plen
)
6257 op
[0] = gen_rtx_REG (DImode
, 18);
6262 avr_out_plus_1 (op
, plen
, MINUS
, &cc_dummy
);
6268 /* Prepare operands of subdi3_const_insn to be used with avr_out_plus64. */
6271 avr_out_minus64 (rtx subtrahend
, int *plen
)
6273 rtx xneg
= avr_to_int_mode (subtrahend
);
6274 xneg
= simplify_unary_operation (NEG
, DImode
, xneg
, DImode
);
6276 return avr_out_plus64 (xneg
, plen
);
6280 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6281 time constant XOP[2]:
6283 XOP[0] = XOP[0] <op> XOP[2]
6285 and return "". If PLEN == NULL, print assembler instructions to perform the
6286 operation; otherwise, set *PLEN to the length of the instruction sequence
6287 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6288 register or SCRATCH if no clobber register is needed for the operation. */
6291 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6293 /* CODE and MODE of the operation. */
6294 enum rtx_code code
= GET_CODE (SET_SRC (single_set (insn
)));
6295 enum machine_mode mode
= GET_MODE (xop
[0]);
6297 /* Number of bytes to operate on. */
6298 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6300 /* Value of T-flag (0 or 1) or -1 if unknow. */
6303 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6304 int clobber_val
= -1;
6306 /* op[0]: 8-bit destination register
6307 op[1]: 8-bit const int
6308 op[2]: 8-bit clobber register or SCRATCH
6309 op[3]: 8-bit register containing 0xff or NULL_RTX */
6318 for (i
= 0; i
< n_bytes
; i
++)
6320 /* We operate byte-wise on the destination. */
6321 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6322 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6324 /* 8-bit value to operate with this byte. */
6325 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6327 /* Number of bits set in the current byte of the constant. */
6328 int pop8
= avr_popcount (val8
);
6330 /* Registers R16..R31 can operate with immediate. */
6331 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6334 op
[1] = GEN_INT (val8
);
6343 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6347 avr_asm_len ("set", op
, plen
, 1);
6350 op
[1] = GEN_INT (exact_log2 (val8
));
6351 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6355 if (op
[3] != NULL_RTX
)
6356 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6358 avr_asm_len ("clr %0" CR_TAB
6359 "dec %0", op
, plen
, 2);
6365 if (clobber_val
!= (int) val8
)
6366 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6367 clobber_val
= (int) val8
;
6369 avr_asm_len ("or %0,%2", op
, plen
, 1);
6379 avr_asm_len ("clr %0", op
, plen
, 1);
6381 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6385 avr_asm_len ("clt", op
, plen
, 1);
6388 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6389 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6393 if (clobber_val
!= (int) val8
)
6394 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6395 clobber_val
= (int) val8
;
6397 avr_asm_len ("and %0,%2", op
, plen
, 1);
6407 avr_asm_len ("com %0", op
, plen
, 1);
6408 else if (ld_reg_p
&& val8
== (1 << 7))
6409 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6412 if (clobber_val
!= (int) val8
)
6413 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6414 clobber_val
= (int) val8
;
6416 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6422 /* Unknown rtx_code */
6425 } /* for all sub-bytes */
6431 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6432 PLEN != NULL: Set *PLEN to the length of that sequence.
6436 avr_out_addto_sp (rtx
*op
, int *plen
)
6438 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6439 int addend
= INTVAL (op
[0]);
6446 if (flag_verbose_asm
|| flag_print_asm_name
)
6447 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
6449 while (addend
<= -pc_len
)
6452 avr_asm_len ("rcall .", op
, plen
, 1);
6455 while (addend
++ < 0)
6456 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
6458 else if (addend
> 0)
6460 if (flag_verbose_asm
|| flag_print_asm_name
)
6461 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
6463 while (addend
-- > 0)
6464 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
6471 /* Create RTL split patterns for byte sized rotate expressions. This
6472 produces a series of move instructions and considers overlap situations.
6473 Overlapping non-HImode operands need a scratch register. */
6476 avr_rotate_bytes (rtx operands
[])
6479 enum machine_mode mode
= GET_MODE (operands
[0]);
6480 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
6481 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
6482 int num
= INTVAL (operands
[2]);
6483 rtx scratch
= operands
[3];
6484 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6485 Word move if no scratch is needed, otherwise use size of scratch. */
6486 enum machine_mode move_mode
= QImode
;
6487 int move_size
, offset
, size
;
6491 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
6494 move_mode
= GET_MODE (scratch
);
6496 /* Force DI rotate to use QI moves since other DI moves are currently split
6497 into QI moves so forward propagation works better. */
6500 /* Make scratch smaller if needed. */
6501 if (SCRATCH
!= GET_CODE (scratch
)
6502 && HImode
== GET_MODE (scratch
)
6503 && QImode
== move_mode
)
6504 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
6506 move_size
= GET_MODE_SIZE (move_mode
);
6507 /* Number of bytes/words to rotate. */
6508 offset
= (num
>> 3) / move_size
;
6509 /* Number of moves needed. */
6510 size
= GET_MODE_SIZE (mode
) / move_size
;
6511 /* Himode byte swap is special case to avoid a scratch register. */
6512 if (mode
== HImode
&& same_reg
)
6514 /* HImode byte swap, using xor. This is as quick as using scratch. */
6516 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
6517 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
6518 if (!rtx_equal_p (dst
, src
))
6520 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6521 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
6522 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
6527 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6528 /* Create linked list of moves to determine move order. */
6532 } move
[MAX_SIZE
+ 8];
6535 gcc_assert (size
<= MAX_SIZE
);
6536 /* Generate list of subreg moves. */
6537 for (i
= 0; i
< size
; i
++)
6540 int to
= (from
+ offset
) % size
;
6541 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
6542 mode
, from
* move_size
);
6543 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
6544 mode
, to
* move_size
);
6547 /* Mark dependence where a dst of one move is the src of another move.
6548 The first move is a conflict as it must wait until second is
6549 performed. We ignore moves to self - we catch this later. */
6551 for (i
= 0; i
< size
; i
++)
6552 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
6553 for (j
= 0; j
< size
; j
++)
6554 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
6556 /* The dst of move i is the src of move j. */
6563 /* Go through move list and perform non-conflicting moves. As each
6564 non-overlapping move is made, it may remove other conflicts
6565 so the process is repeated until no conflicts remain. */
6570 /* Emit move where dst is not also a src or we have used that
6572 for (i
= 0; i
< size
; i
++)
6573 if (move
[i
].src
!= NULL_RTX
)
6575 if (move
[i
].links
== -1
6576 || move
[move
[i
].links
].src
== NULL_RTX
)
6579 /* Ignore NOP moves to self. */
6580 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
6581 emit_move_insn (move
[i
].dst
, move
[i
].src
);
6583 /* Remove conflict from list. */
6584 move
[i
].src
= NULL_RTX
;
6590 /* Check for deadlock. This is when no moves occurred and we have
6591 at least one blocked move. */
6592 if (moves
== 0 && blocked
!= -1)
6594 /* Need to use scratch register to break deadlock.
6595 Add move to put dst of blocked move into scratch.
6596 When this move occurs, it will break chain deadlock.
6597 The scratch register is substituted for real move. */
6599 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
6601 move
[size
].src
= move
[blocked
].dst
;
6602 move
[size
].dst
= scratch
;
6603 /* Scratch move is never blocked. */
6604 move
[size
].links
= -1;
6605 /* Make sure we have valid link. */
6606 gcc_assert (move
[blocked
].links
!= -1);
6607 /* Replace src of blocking move with scratch reg. */
6608 move
[move
[blocked
].links
].src
= scratch
;
6609 /* Make dependent on scratch move occuring. */
6610 move
[blocked
].links
= size
;
6614 while (blocked
!= -1);
6620 /* Outputs instructions needed for fixed point type conversion.
6621 This includes converting between any fixed point type, as well
6622 as converting to any integer type. Conversion between integer
6623 types is not supported.
6625 The number of instructions generated depends on the types
6626 being converted and the registers assigned to them.
6628 The number of instructions required to complete the conversion
6629 is least if the registers for source and destination are overlapping
6630 and are aligned at the decimal place as actual movement of data is
6631 completely avoided. In some cases, the conversion may already be
6632 complete without any instructions needed.
6634 When converting to signed types from signed types, sign extension
6637 Converting signed fractional types requires a bit shift if converting
6638 to or from any unsigned fractional type because the decimal place is
6639 shifted by 1 bit. When the destination is a signed fractional, the sign
6640 is stored in either the carry or T bit. */
6643 avr_out_fract (rtx insn
, rtx operands
[], bool intsigned
, int *plen
)
6647 /* ilen: Length of integral part (in bytes)
6648 flen: Length of fractional part (in bytes)
6649 tlen: Length of operand (in bytes)
6650 blen: Length of operand (in bits) */
6651 int ilen
[2], flen
[2], tlen
[2], blen
[2];
6652 int rdest
, rsource
, offset
;
6653 int start
, end
, dir
;
6654 bool sign_in_T
= false, sign_in_Carry
= false, sign_done
= false;
6655 bool widening_sign_extend
= false;
6656 int clrword
= -1, lastclr
= 0, clr
= 0;
6662 xop
[dest
] = operands
[dest
];
6663 xop
[src
] = operands
[src
];
6668 /* Determine format (integer and fractional parts)
6669 of types needing conversion. */
6671 for (i
= 0; i
< 2; i
++)
6673 enum machine_mode mode
= GET_MODE (xop
[i
]);
6675 tlen
[i
] = GET_MODE_SIZE (mode
);
6676 blen
[i
] = GET_MODE_BITSIZE (mode
);
6678 if (SCALAR_INT_MODE_P (mode
))
6680 sbit
[i
] = intsigned
;
6681 ilen
[i
] = GET_MODE_SIZE (mode
);
6684 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
6686 sbit
[i
] = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
6687 ilen
[i
] = (GET_MODE_IBIT (mode
) + 1) / 8;
6688 flen
[i
] = (GET_MODE_FBIT (mode
) + 1) / 8;
6691 fatal_insn ("unsupported fixed-point conversion", insn
);
6694 /* Perform sign extension if source and dest are both signed,
6695 and there are more integer parts in dest than in source. */
6697 widening_sign_extend
= sbit
[dest
] && sbit
[src
] && ilen
[dest
] > ilen
[src
];
6699 rdest
= REGNO (xop
[dest
]);
6700 rsource
= REGNO (xop
[src
]);
6701 offset
= flen
[src
] - flen
[dest
];
6703 /* Position of MSB resp. sign bit. */
6705 xop
[2] = GEN_INT (blen
[dest
] - 1);
6706 xop
[3] = GEN_INT (blen
[src
] - 1);
6708 /* Store the sign bit if the destination is a signed fract and the source
6709 has a sign in the integer part. */
6711 if (sbit
[dest
] && ilen
[dest
] == 0 && sbit
[src
] && ilen
[src
] > 0)
6713 /* To avoid using BST and BLD if the source and destination registers
6714 overlap or the source is unused after, we can use LSL to store the
6715 sign bit in carry since we don't need the integral part of the source.
6716 Restoring the sign from carry saves one BLD instruction below. */
6718 if (reg_unused_after (insn
, xop
[src
])
6719 || (rdest
< rsource
+ tlen
[src
]
6720 && rdest
+ tlen
[dest
] > rsource
))
6722 avr_asm_len ("lsl %T1%t3", xop
, plen
, 1);
6723 sign_in_Carry
= true;
6727 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
6732 /* Pick the correct direction to shift bytes. */
6734 if (rdest
< rsource
+ offset
)
6743 start
= tlen
[dest
] - 1;
6747 /* Perform conversion by moving registers into place, clearing
6748 destination registers that do not overlap with any source. */
6750 for (i
= start
; i
!= end
; i
+= dir
)
6752 int destloc
= rdest
+ i
;
6753 int sourceloc
= rsource
+ i
+ offset
;
6755 /* Source register location is outside range of source register,
6756 so clear this byte in the dest. */
6758 if (sourceloc
< rsource
6759 || sourceloc
>= rsource
+ tlen
[src
])
6763 && (sourceloc
+ dir
< rsource
6764 || sourceloc
+ dir
>= rsource
+ tlen
[src
])
6765 && ((dir
== 1 && !(destloc
% 2) && !(sourceloc
% 2))
6766 || (dir
== -1 && (destloc
% 2) && (sourceloc
% 2)))
6769 /* Use already cleared word to clear two bytes at a time. */
6771 int even_i
= i
& ~1;
6772 int even_clrword
= clrword
& ~1;
6774 xop
[4] = GEN_INT (8 * even_i
);
6775 xop
[5] = GEN_INT (8 * even_clrword
);
6776 avr_asm_len ("movw %T0%t4,%T0%t5", xop
, plen
, 1);
6781 if (i
== tlen
[dest
] - 1
6782 && widening_sign_extend
6783 && blen
[src
] - 1 - 8 * offset
< 0)
6785 /* The SBRC below that sign-extends would come
6786 up with a negative bit number because the sign
6787 bit is out of reach. ALso avoid some early-clobber
6788 situations because of premature CLR. */
6790 if (reg_unused_after (insn
, xop
[src
]))
6791 avr_asm_len ("lsl %T1%t3" CR_TAB
6792 "sbc %T0%t2,%T0%t2", xop
, plen
, 2);
6794 avr_asm_len ("mov __tmp_reg__,%T1%t3" CR_TAB
6795 "lsl __tmp_reg__" CR_TAB
6796 "sbc %T0%t2,%T0%t2", xop
, plen
, 3);
6802 /* Do not clear the register if it is going to get
6803 sign extended with a MOV later. */
6805 if (sbit
[dest
] && sbit
[src
]
6806 && i
!= tlen
[dest
] - 1
6812 xop
[4] = GEN_INT (8 * i
);
6813 avr_asm_len ("clr %T0%t4", xop
, plen
, 1);
6815 /* If the last byte was cleared too, we have a cleared
6816 word we can MOVW to clear two bytes at a time. */
6824 else if (destloc
== sourceloc
)
6826 /* Source byte is already in destination: Nothing needed. */
6832 /* Registers do not line up and source register location
6833 is within range: Perform move, shifting with MOV or MOVW. */
6837 && sourceloc
+ dir
>= rsource
6838 && sourceloc
+ dir
< rsource
+ tlen
[src
]
6839 && ((dir
== 1 && !(destloc
% 2) && !(sourceloc
% 2))
6840 || (dir
== -1 && (destloc
% 2) && (sourceloc
% 2))))
6842 int even_i
= i
& ~1;
6843 int even_i_plus_offset
= (i
+ offset
) & ~1;
6845 xop
[4] = GEN_INT (8 * even_i
);
6846 xop
[5] = GEN_INT (8 * even_i_plus_offset
);
6847 avr_asm_len ("movw %T0%t4,%T1%t5", xop
, plen
, 1);
6852 xop
[4] = GEN_INT (8 * i
);
6853 xop
[5] = GEN_INT (8 * (i
+ offset
));
6854 avr_asm_len ("mov %T0%t4,%T1%t5", xop
, plen
, 1);
6862 /* Perform sign extension if source and dest are both signed,
6863 and there are more integer parts in dest than in source. */
6865 if (widening_sign_extend
)
6869 xop
[4] = GEN_INT (blen
[src
] - 1 - 8 * offset
);
6871 /* Register was cleared above, so can become 0xff and extended.
6872 Note: Instead of the CLR/SBRC/COM the sign extension could
6873 be performed after the LSL below by means of a SBC if only
6874 one byte has to be shifted left. */
6876 avr_asm_len ("sbrc %T0%T4" CR_TAB
6877 "com %T0%t2", xop
, plen
, 2);
6880 /* Sign extend additional bytes by MOV and MOVW. */
6882 start
= tlen
[dest
] - 2;
6883 end
= flen
[dest
] + ilen
[src
] - 1;
6885 for (i
= start
; i
!= end
; i
--)
6887 if (AVR_HAVE_MOVW
&& i
!= start
&& i
-1 != end
)
6890 xop
[4] = GEN_INT (8 * i
);
6891 xop
[5] = GEN_INT (8 * (tlen
[dest
] - 2));
6892 avr_asm_len ("movw %T0%t4,%T0%t5", xop
, plen
, 1);
6896 xop
[4] = GEN_INT (8 * i
);
6897 xop
[5] = GEN_INT (8 * (tlen
[dest
] - 1));
6898 avr_asm_len ("mov %T0%t4,%T0%t5", xop
, plen
, 1);
6903 /* If destination is a signed fract, and the source was not, a shift
6904 by 1 bit is needed. Also restore sign from carry or T. */
6906 if (sbit
[dest
] && !ilen
[dest
] && (!sbit
[src
] || ilen
[src
]))
6908 /* We have flen[src] non-zero fractional bytes to shift.
6909 Because of the right shift, handle one byte more so that the
6910 LSB won't be lost. */
6912 int nonzero
= flen
[src
] + 1;
6914 /* If the LSB is in the T flag and there are no fractional
6915 bits, the high byte is zero and no shift needed. */
6917 if (flen
[src
] == 0 && sign_in_T
)
6920 start
= flen
[dest
] - 1;
6921 end
= start
- nonzero
;
6923 for (i
= start
; i
> end
&& i
>= 0; i
--)
6925 xop
[4] = GEN_INT (8 * i
);
6926 if (i
== start
&& !sign_in_Carry
)
6927 avr_asm_len ("lsr %T0%t4", xop
, plen
, 1);
6929 avr_asm_len ("ror %T0%t4", xop
, plen
, 1);
6934 avr_asm_len ("bld %T0%T2", xop
, plen
, 1);
6937 else if (sbit
[src
] && !ilen
[src
] && (!sbit
[dest
] || ilen
[dest
]))
6939 /* If source was a signed fract and dest was not, shift 1 bit
6942 start
= flen
[dest
] - flen
[src
];
6947 for (i
= start
; i
< flen
[dest
]; i
++)
6949 xop
[4] = GEN_INT (8 * i
);
6952 avr_asm_len ("lsl %T0%t4", xop
, plen
, 1);
6954 avr_asm_len ("rol %T0%t4", xop
, plen
, 1);
6962 /* Modifies the length assigned to instruction INSN
6963 LEN is the initially computed length of the insn. */
6966 adjust_insn_length (rtx insn
, int len
)
6968 rtx
*op
= recog_data
.operand
;
6969 enum attr_adjust_len adjust_len
;
6971 /* Some complex insns don't need length adjustment and therefore
6972 the length need not/must not be adjusted for these insns.
6973 It is easier to state this in an insn attribute "adjust_len" than
6974 to clutter up code here... */
6976 if (-1 == recog_memoized (insn
))
6981 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6983 adjust_len
= get_attr_adjust_len (insn
);
6985 if (adjust_len
== ADJUST_LEN_NO
)
6987 /* Nothing to adjust: The length from attribute "length" is fine.
6988 This is the default. */
6993 /* Extract insn's operands. */
6995 extract_constrain_insn_cached (insn
);
6997 /* Dispatch to right function. */
7001 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
7002 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
7003 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
7005 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
7007 case ADJUST_LEN_OUT_PLUS
: avr_out_plus (op
, &len
, NULL
); break;
7008 case ADJUST_LEN_PLUS64
: avr_out_plus64 (op
[0], &len
); break;
7009 case ADJUST_LEN_MINUS
: avr_out_minus (op
, &len
, NULL
); break;
7010 case ADJUST_LEN_MINUS64
: avr_out_minus64 (op
[0], &len
); break;
7011 case ADJUST_LEN_OUT_PLUS_NOCLOBBER
:
7012 avr_out_plus_noclobber (op
, &len
, NULL
); break;
7014 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
7016 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
7017 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
7018 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
7019 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
7020 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
7021 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
7022 case ADJUST_LEN_LOAD_LPM
: avr_load_lpm (insn
, op
, &len
); break;
7024 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
7025 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
7027 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
7028 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
7029 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
7030 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
7031 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
7033 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
7034 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
7035 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
7037 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
7038 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
7039 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
7041 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
7042 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
7043 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
7045 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
7046 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
7047 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
7049 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
7051 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
7060 /* Return nonzero if register REG dead after INSN. */
7063 reg_unused_after (rtx insn
, rtx reg
)
7065 return (dead_or_set_p (insn
, reg
)
7066 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
7069 /* Return nonzero if REG is not used after INSN.
7070 We assume REG is a reload reg, and therefore does
7071 not live past labels. It may live past calls or jumps though. */
7074 _reg_unused_after (rtx insn
, rtx reg
)
7079 /* If the reg is set by this instruction, then it is safe for our
7080 case. Disregard the case where this is a store to memory, since
7081 we are checking a register used in the store address. */
7082 set
= single_set (insn
);
7083 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
7084 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7087 while ((insn
= NEXT_INSN (insn
)))
7090 code
= GET_CODE (insn
);
7093 /* If this is a label that existed before reload, then the register
7094 if dead here. However, if this is a label added by reorg, then
7095 the register may still be live here. We can't tell the difference,
7096 so we just ignore labels completely. */
7097 if (code
== CODE_LABEL
)
7105 if (code
== JUMP_INSN
)
7108 /* If this is a sequence, we must handle them all at once.
7109 We could have for instance a call that sets the target register,
7110 and an insn in a delay slot that uses the register. In this case,
7111 we must return 0. */
7112 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
7117 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
7119 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
7120 rtx set
= single_set (this_insn
);
7122 if (GET_CODE (this_insn
) == CALL_INSN
)
7124 else if (GET_CODE (this_insn
) == JUMP_INSN
)
7126 if (INSN_ANNULLED_BRANCH_P (this_insn
))
7131 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7133 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7135 if (GET_CODE (SET_DEST (set
)) != MEM
)
7141 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
7146 else if (code
== JUMP_INSN
)
7150 if (code
== CALL_INSN
)
7153 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
7154 if (GET_CODE (XEXP (tem
, 0)) == USE
7155 && REG_P (XEXP (XEXP (tem
, 0), 0))
7156 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
7158 if (call_used_regs
[REGNO (reg
)])
7162 set
= single_set (insn
);
7164 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7166 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7167 return GET_CODE (SET_DEST (set
)) != MEM
;
7168 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
7175 /* Target hook for assembling integer objects. The AVR version needs
7176 special handling for references to certain labels. */
7179 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
7181 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
7182 && text_segment_operand (x
, VOIDmode
))
7184 fputs ("\t.word\tgs(", asm_out_file
);
7185 output_addr_const (asm_out_file
, x
);
7186 fputs (")\n", asm_out_file
);
7190 else if (GET_MODE (x
) == PSImode
)
7192 /* This needs binutils 2.23+, see PR binutils/13503 */
7194 fputs ("\t.byte\tlo8(", asm_out_file
);
7195 output_addr_const (asm_out_file
, x
);
7196 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7198 fputs ("\t.byte\thi8(", asm_out_file
);
7199 output_addr_const (asm_out_file
, x
);
7200 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7202 fputs ("\t.byte\thh8(", asm_out_file
);
7203 output_addr_const (asm_out_file
, x
);
7204 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7208 else if (CONST_FIXED_P (x
))
7212 /* varasm fails to handle big fixed modes that don't fit in hwi. */
7214 for (n
= 0; n
< size
; n
++)
7216 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
7217 default_assemble_integer (xn
, 1, aligned_p
);
7223 return default_assemble_integer (x
, size
, aligned_p
);
7227 /* Return value is nonzero if pseudos that have been
7228 assigned to registers of class CLASS would likely be spilled
7229 because registers of CLASS are needed for spill registers. */
7232 avr_class_likely_spilled_p (reg_class_t c
)
7234 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
7237 /* Valid attributes:
7238 progmem - put data to program memory;
7239 signal - make a function to be hardware interrupt. After function
7240 prologue interrupts are disabled;
7241 interrupt - make a function to be hardware interrupt. After function
7242 prologue interrupts are enabled;
7243 naked - don't generate function prologue/epilogue and `ret' command.
7245 Only `progmem' attribute valid for type. */
7247 /* Handle a "progmem" attribute; arguments as in
7248 struct attribute_spec.handler. */
7250 avr_handle_progmem_attribute (tree
*node
, tree name
,
7251 tree args ATTRIBUTE_UNUSED
,
7252 int flags ATTRIBUTE_UNUSED
,
7257 if (TREE_CODE (*node
) == TYPE_DECL
)
7259 /* This is really a decl attribute, not a type attribute,
7260 but try to handle it for GCC 3.0 backwards compatibility. */
7262 tree type
= TREE_TYPE (*node
);
7263 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
7264 tree newtype
= build_type_attribute_variant (type
, attr
);
7266 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
7267 TREE_TYPE (*node
) = newtype
;
7268 *no_add_attrs
= true;
7270 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
7272 *no_add_attrs
= false;
7276 warning (OPT_Wattributes
, "%qE attribute ignored",
7278 *no_add_attrs
= true;
7285 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
7286 struct attribute_spec.handler. */
7289 avr_handle_fndecl_attribute (tree
*node
, tree name
,
7290 tree args ATTRIBUTE_UNUSED
,
7291 int flags ATTRIBUTE_UNUSED
,
7294 if (TREE_CODE (*node
) != FUNCTION_DECL
)
7296 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7298 *no_add_attrs
= true;
7305 avr_handle_fntype_attribute (tree
*node
, tree name
,
7306 tree args ATTRIBUTE_UNUSED
,
7307 int flags ATTRIBUTE_UNUSED
,
7310 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
7312 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
7314 *no_add_attrs
= true;
7321 /* AVR attributes. */
7322 static const struct attribute_spec
7323 avr_attribute_table
[] =
7325 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7326 affects_type_identity } */
7327 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
7329 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7331 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
7333 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7335 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7337 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
7339 { NULL
, 0, 0, false, false, false, NULL
, false }
7343 /* Look if DECL shall be placed in program memory space by
7344 means of attribute `progmem' or some address-space qualifier.
7345 Return non-zero if DECL is data that must end up in Flash and
7346 zero if the data lives in RAM (.bss, .data, .rodata, ...).
7348 Return 2 if DECL is located in 24-bit flash address-space
7349 Return 1 if DECL is located in 16-bit flash address-space
7350 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
7351 Return 0 otherwise */
7354 avr_progmem_p (tree decl
, tree attributes
)
7358 if (TREE_CODE (decl
) != VAR_DECL
)
7361 if (avr_decl_memx_p (decl
))
7364 if (avr_decl_flash_p (decl
))
7368 != lookup_attribute ("progmem", attributes
))
7375 while (TREE_CODE (a
) == ARRAY_TYPE
);
7377 if (a
== error_mark_node
)
7380 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
7387 /* Scan type TYP for pointer references to address space ASn.
7388 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
7389 the AS are also declared to be CONST.
7390 Otherwise, return the respective address space, i.e. a value != 0. */
7393 avr_nonconst_pointer_addrspace (tree typ
)
7395 while (ARRAY_TYPE
== TREE_CODE (typ
))
7396 typ
= TREE_TYPE (typ
);
7398 if (POINTER_TYPE_P (typ
))
7401 tree target
= TREE_TYPE (typ
);
7403 /* Pointer to function: Test the function's return type. */
7405 if (FUNCTION_TYPE
== TREE_CODE (target
))
7406 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
7408 /* "Ordinary" pointers... */
7410 while (TREE_CODE (target
) == ARRAY_TYPE
)
7411 target
= TREE_TYPE (target
);
7413 /* Pointers to non-generic address space must be const.
7414 Refuse address spaces outside the device's flash. */
7416 as
= TYPE_ADDR_SPACE (target
);
7418 if (!ADDR_SPACE_GENERIC_P (as
)
7419 && (!TYPE_READONLY (target
)
7420 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
7425 /* Scan pointer's target type. */
7427 return avr_nonconst_pointer_addrspace (target
);
7430 return ADDR_SPACE_GENERIC
;
7434 /* Sanity check NODE so that all pointers targeting non-generic address spaces
7435 go along with CONST qualifier. Writing to these address spaces should
7436 be detected and complained about as early as possible. */
7439 avr_pgm_check_var_decl (tree node
)
7441 const char *reason
= NULL
;
7443 addr_space_t as
= ADDR_SPACE_GENERIC
;
7445 gcc_assert (as
== 0);
7447 if (avr_log
.progmem
)
7448 avr_edump ("%?: %t\n", node
);
7450 switch (TREE_CODE (node
))
7456 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7457 reason
= "variable";
7461 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7462 reason
= "function parameter";
7466 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
7467 reason
= "structure field";
7471 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
7473 reason
= "return type of function";
7477 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
7484 avr_edump ("%?: %s, %d, %d\n",
7485 avr_addrspace
[as
].name
,
7486 avr_addrspace
[as
].segment
, avr_current_device
->n_flash
);
7487 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7490 error ("%qT uses address space %qs beyond flash of %qs",
7491 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7493 error ("%s %q+D uses address space %qs beyond flash of %qs",
7494 reason
, node
, avr_addrspace
[as
].name
,
7495 avr_current_device
->name
);
7500 error ("pointer targeting address space %qs must be const in %qT",
7501 avr_addrspace
[as
].name
, node
);
7503 error ("pointer targeting address space %qs must be const"
7505 avr_addrspace
[as
].name
, reason
, node
);
7509 return reason
== NULL
;
7513 /* Add the section attribute if the variable is in progmem. */
7516 avr_insert_attributes (tree node
, tree
*attributes
)
7518 avr_pgm_check_var_decl (node
);
7520 if (TREE_CODE (node
) == VAR_DECL
7521 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
7522 && avr_progmem_p (node
, *attributes
))
7527 /* For C++, we have to peel arrays in order to get correct
7528 determination of readonlyness. */
7531 node0
= TREE_TYPE (node0
);
7532 while (TREE_CODE (node0
) == ARRAY_TYPE
);
7534 if (error_mark_node
== node0
)
7537 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
7539 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
7541 error ("variable %q+D located in address space %qs"
7542 " beyond flash of %qs",
7543 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
7546 if (!TYPE_READONLY (node0
)
7547 && !TREE_READONLY (node
))
7549 const char *reason
= "__attribute__((progmem))";
7551 if (!ADDR_SPACE_GENERIC_P (as
))
7552 reason
= avr_addrspace
[as
].name
;
7554 if (avr_log
.progmem
)
7555 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
7557 error ("variable %q+D must be const in order to be put into"
7558 " read-only section by means of %qs", node
, reason
);
7564 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7565 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7566 /* Track need of __do_clear_bss. */
7569 avr_asm_output_aligned_decl_common (FILE * stream
,
7570 const_tree decl ATTRIBUTE_UNUSED
,
7572 unsigned HOST_WIDE_INT size
,
7573 unsigned int align
, bool local_p
)
7575 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7576 There is no need to trigger __do_clear_bss code for them. */
7578 if (!STR_PREFIX_P (name
, "__gnu_lto"))
7579 avr_need_clear_bss_p
= true;
7582 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
7584 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
7588 /* Unnamed section callback for data_section
7589 to track need of __do_copy_data. */
7592 avr_output_data_section_asm_op (const void *data
)
7594 avr_need_copy_data_p
= true;
7596 /* Dispatch to default. */
7597 output_section_asm_op (data
);
7601 /* Unnamed section callback for bss_section
7602 to track need of __do_clear_bss. */
7605 avr_output_bss_section_asm_op (const void *data
)
7607 avr_need_clear_bss_p
= true;
7609 /* Dispatch to default. */
7610 output_section_asm_op (data
);
7614 /* Unnamed section callback for progmem*.data sections. */
7617 avr_output_progmem_section_asm_op (const void *data
)
7619 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
7620 (const char*) data
);
7624 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7627 avr_asm_init_sections (void)
7631 /* Set up a section for jump tables. Alignment is handled by
7632 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7634 if (AVR_HAVE_JMP_CALL
)
7636 progmem_swtable_section
7637 = get_unnamed_section (0, output_section_asm_op
,
7638 "\t.section\t.progmem.gcc_sw_table"
7639 ",\"a\",@progbits");
7643 progmem_swtable_section
7644 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7645 "\t.section\t.progmem.gcc_sw_table"
7646 ",\"ax\",@progbits");
7649 for (n
= 0; n
< sizeof (progmem_section
) / sizeof (*progmem_section
); n
++)
7652 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
7653 progmem_section_prefix
[n
]);
7656 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7657 resp. `avr_need_copy_data_p'. */
7659 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7660 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
7661 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
7665 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7668 avr_asm_function_rodata_section (tree decl
)
7670 /* If a function is unused and optimized out by -ffunction-sections
7671 and --gc-sections, ensure that the same will happen for its jump
7672 tables by putting them into individual sections. */
7677 /* Get the frodata section from the default function in varasm.c
7678 but treat function-associated data-like jump tables as code
7679 rather than as user defined data. AVR has no constant pools. */
7681 int fdata
= flag_data_sections
;
7683 flag_data_sections
= flag_function_sections
;
7684 frodata
= default_function_rodata_section (decl
);
7685 flag_data_sections
= fdata
;
7686 flags
= frodata
->common
.flags
;
7689 if (frodata
!= readonly_data_section
7690 && flags
& SECTION_NAMED
)
7692 /* Adjust section flags and replace section name prefix. */
7696 static const char* const prefix
[] =
7698 ".rodata", ".progmem.gcc_sw_table",
7699 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7702 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
7704 const char * old_prefix
= prefix
[i
];
7705 const char * new_prefix
= prefix
[i
+1];
7706 const char * name
= frodata
->named
.name
;
7708 if (STR_PREFIX_P (name
, old_prefix
))
7710 const char *rname
= ACONCAT ((new_prefix
,
7711 name
+ strlen (old_prefix
), NULL
));
7712 flags
&= ~SECTION_CODE
;
7713 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
7715 return get_section (rname
, flags
, frodata
->named
.decl
);
7720 return progmem_swtable_section
;
7724 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7725 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7728 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
7730 if (flags
& AVR_SECTION_PROGMEM
)
7732 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
7733 int segment
= avr_addrspace
[as
].segment
;
7734 const char *old_prefix
= ".rodata";
7735 const char *new_prefix
= progmem_section_prefix
[segment
];
7737 if (STR_PREFIX_P (name
, old_prefix
))
7739 const char *sname
= ACONCAT ((new_prefix
,
7740 name
+ strlen (old_prefix
), NULL
));
7741 default_elf_asm_named_section (sname
, flags
, decl
);
7745 default_elf_asm_named_section (new_prefix
, flags
, decl
);
7749 if (!avr_need_copy_data_p
)
7750 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
7751 || STR_PREFIX_P (name
, ".rodata")
7752 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
7754 if (!avr_need_clear_bss_p
)
7755 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
7757 default_elf_asm_named_section (name
, flags
, decl
);
7761 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
7763 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
7765 if (STR_PREFIX_P (name
, ".noinit"))
7767 if (decl
&& TREE_CODE (decl
) == VAR_DECL
7768 && DECL_INITIAL (decl
) == NULL_TREE
)
7769 flags
|= SECTION_BSS
; /* @nobits */
7771 warning (0, "only uninitialized variables can be placed in the "
7775 if (decl
&& DECL_P (decl
)
7776 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7778 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7780 /* Attribute progmem puts data in generic address space.
7781 Set section flags as if it was in __flash to get the right
7782 section prefix in the remainder. */
7784 if (ADDR_SPACE_GENERIC_P (as
))
7785 as
= ADDR_SPACE_FLASH
;
7787 flags
|= as
* SECTION_MACH_DEP
;
7788 flags
&= ~SECTION_WRITE
;
7789 flags
&= ~SECTION_BSS
;
7796 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7799 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
7801 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7802 readily available, see PR34734. So we postpone the warning
7803 about uninitialized data in program memory section until here. */
7806 && decl
&& DECL_P (decl
)
7807 && NULL_TREE
== DECL_INITIAL (decl
)
7808 && !DECL_EXTERNAL (decl
)
7809 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7811 warning (OPT_Wuninitialized
,
7812 "uninitialized variable %q+D put into "
7813 "program memory area", decl
);
7816 default_encode_section_info (decl
, rtl
, new_decl_p
);
7818 if (decl
&& DECL_P (decl
)
7819 && TREE_CODE (decl
) != FUNCTION_DECL
7821 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
7823 rtx sym
= XEXP (rtl
, 0);
7824 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7826 /* PSTR strings are in generic space but located in flash:
7827 patch address space. */
7829 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7830 as
= ADDR_SPACE_FLASH
;
7832 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
/* NOTE(review): text damaged in extraction — logical lines split across
   physical lines; the function header, braces and final fall-through return
   are missing.  Tokens kept byte-for-byte; only comments added.  */
7837 /* Implement `TARGET_ASM_SELECT_SECTION' */
7840 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
7842 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
/* For progmem data: redirect .rodata* sections to the matching progmem
   section prefix for the decl's flash segment.  */
7844 if (decl
&& DECL_P (decl
)
7845 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
7847 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
7848 int segment
= avr_addrspace
[as
].segment
;
7850 if (sect
->common
.flags
& SECTION_NAMED
)
7852 const char * name
= sect
->named
.name
;
7853 const char * old_prefix
= ".rodata";
7854 const char * new_prefix
= progmem_section_prefix
[segment
];
7856 if (STR_PREFIX_P (name
, old_prefix
))
7858 const char *sname
= ACONCAT ((new_prefix
,
7859 name
+ strlen (old_prefix
), NULL
));
7860 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
7864 return progmem_section
[segment
];
/* NOTE(review): text damaged in extraction.  The original numbering gaps
   suggest conditional guards around some of the fprintf calls (presumably
   testing SFR availability per device) are missing — confirm against
   upstream avr.c.  Tokens kept byte-for-byte; only comments added.  */
7870 /* Implement `TARGET_ASM_FILE_START'. */
7871 /* Outputs some text at the start of each assembler file. */
7874 avr_file_start (void)
7876 int sfr_offset
= avr_current_arch
->sfr_offset
;
7878 if (avr_current_arch
->asm_only
)
7879 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
7881 default_file_start ();
7883 /* Print I/O addresses of some SFRs used with IN and OUT. */
7886 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
7888 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
7889 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
7891 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
7893 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
7895 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
7897 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
7899 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
7900 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
7901 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
/* NOTE(review): text damaged in extraction — the function header line is
   missing.  Tokens kept byte-for-byte; only comments added.  */
7905 /* Implement `TARGET_ASM_FILE_END'. */
7906 /* Outputs to the stdio stream FILE some
7907 appropriate text to go at the end of an assembler file. */
7912 /* Output these only if there is anything in the
7913 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7914 input section(s) - some code size can be saved by not
7915 linking in the initialization code from libgcc if resp.
7916 sections are empty. */
7918 if (avr_need_copy_data_p
)
7919 fputs (".global __do_copy_data\n", asm_out_file
);
7921 if (avr_need_clear_bss_p
)
7922 fputs (".global __do_clear_bss\n", asm_out_file
);
/* NOTE(review): text damaged in extraction — most rows of the three
   register-order tables are missing (only one row of each survives),
   as are the declaration of `i' and the tail of the conditional
   selecting `order'.  Tokens kept byte-for-byte; only comments added.  */
7925 /* Choose the order in which to allocate hard registers for
7926 pseudo-registers local to a basic block.
7928 Store the desired register order in the array `reg_alloc_order'.
7929 Element 0 should be the register to allocate first; element 1, the
7930 next register; and so on. */
7933 order_regs_for_local_alloc (void)
7936 static const int order_0
[] = {
7944 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7948 static const int order_1
[] = {
7956 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7960 static const int order_2
[] = {
7969 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick one of the three orders based on -morder1/-morder2; the default
   branch (order_0) is missing from this extraction.  */
7974 const int *order
= (TARGET_ORDER_1
? order_1
:
7975 TARGET_ORDER_2
? order_2
:
7977 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
7978 reg_alloc_order
[i
] = order
[i
];
7982 /* Implement `TARGET_REGISTER_MOVE_COST' */
7985 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
7986 reg_class_t from
, reg_class_t to
)
7988 return (from
== STACK_REG
? 6
7989 : to
== STACK_REG
? 12
7994 /* Implement `TARGET_MEMORY_MOVE_COST' */
7997 avr_memory_move_cost (enum machine_mode mode
,
7998 reg_class_t rclass ATTRIBUTE_UNUSED
,
7999 bool in ATTRIBUTE_UNUSED
)
8001 return (mode
== QImode
? 2
8002 : mode
== HImode
? 4
8003 : mode
== SImode
? 8
8004 : mode
== SFmode
? 8
/* NOTE(review): text damaged in extraction — the switch skeleton that
   dispatches on CODE (presumably REG/SUBREG and constant cases) and the
   declaration of `total' are missing.  Tokens kept byte-for-byte; only
   comments added.  */
8009 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8010 cost of an RTX operand given its context.  X is the rtx of the
8011 operand, MODE is its mode, and OUTER is the rtx_code of this
8012 operand's parent operator. */
8015 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
8016 int opno
, bool speed
)
8018 enum rtx_code code
= GET_CODE (x
);
8030 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
/* Fall back to the full cost computation for compound expressions.  */
8037 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
/* NOTE(review): this block's text was severely damaged in extraction.
   The entire `switch (code)' skeleton — every `case' label (presumably
   CONST_INT/MEM/NEG/ABS/SIGN_EXTEND/ZERO_EXTEND/PLUS/MINUS/AND/IOR/XOR/
   MULT/DIV/MOD/ROTATE/ASHIFT/ASHIFTRT/LSHIFTRT/COMPARE and the per-mode
   sub-switches), all `break'/`return' statements and braces — is missing;
   only the cost assignments survive.  Do NOT attempt to compile or infer
   control flow from this text; restore from upstream gcc/config/avr/avr.c.
   Tokens kept byte-for-byte; only comments added.  */
8041 /* Worker function for AVR backend's rtx_cost function.
8042 X is rtx expression whose cost is to be calculated.
8043 Return true if the complete cost has been computed.
8044 Return false if subexpressions should be scanned.
8045 In either case, *TOTAL contains the cost result. */
8048 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
8049 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
8051 enum rtx_code code
= (enum rtx_code
) codearg
;
8052 enum machine_mode mode
= GET_MODE (x
);
8063 /* Immediate constants are as cheap as registers. */
8068 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8076 *total
= COSTS_N_INSNS (1);
8082 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
8088 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8096 *total
= COSTS_N_INSNS (1);
8102 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8106 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8107 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
/* Extension costs: size difference between target and source modes.  */
8111 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
8112 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8113 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8117 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
8118 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8119 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
/* PLUS: multiply-add recognition (case labels missing).  */
8127 && MULT
== GET_CODE (XEXP (x
, 0))
8128 && register_operand (XEXP (x
, 1), QImode
))
8131 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8132 /* multiply-add with constant: will be split and load constant. */
8133 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8134 *total
= COSTS_N_INSNS (1) + *total
;
8137 *total
= COSTS_N_INSNS (1);
8138 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8139 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8144 && (MULT
== GET_CODE (XEXP (x
, 0))
8145 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
8146 && register_operand (XEXP (x
, 1), HImode
)
8147 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8148 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
8151 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8152 /* multiply-add with constant: will be split and load constant. */
8153 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8154 *total
= COSTS_N_INSNS (1) + *total
;
8157 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8159 *total
= COSTS_N_INSNS (2);
8160 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8163 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8164 *total
= COSTS_N_INSNS (1);
8166 *total
= COSTS_N_INSNS (2);
8170 if (!CONST_INT_P (XEXP (x
, 1)))
8172 *total
= COSTS_N_INSNS (3);
8173 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8176 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8177 *total
= COSTS_N_INSNS (2);
8179 *total
= COSTS_N_INSNS (3);
8183 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8185 *total
= COSTS_N_INSNS (4);
8186 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8189 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8190 *total
= COSTS_N_INSNS (1);
8192 *total
= COSTS_N_INSNS (4);
8198 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
/* MINUS: multiply-sub recognition (case labels missing).  */
8204 && register_operand (XEXP (x
, 0), QImode
)
8205 && MULT
== GET_CODE (XEXP (x
, 1)))
8208 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8209 /* multiply-sub with constant: will be split and load constant. */
8210 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
8211 *total
= COSTS_N_INSNS (1) + *total
;
8216 && register_operand (XEXP (x
, 0), HImode
)
8217 && (MULT
== GET_CODE (XEXP (x
, 1))
8218 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
8219 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
8220 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
8223 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8224 /* multiply-sub with constant: will be split and load constant. */
8225 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
8226 *total
= COSTS_N_INSNS (1) + *total
;
8232 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8233 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8234 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8235 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8239 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8240 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8241 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8249 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
8251 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
/* Presumably the MULT:HImode case — operand classification below.  */
8259 rtx op0
= XEXP (x
, 0);
8260 rtx op1
= XEXP (x
, 1);
8261 enum rtx_code code0
= GET_CODE (op0
);
8262 enum rtx_code code1
= GET_CODE (op1
);
8263 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
8264 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
8267 && (u8_operand (op1
, HImode
)
8268 || s8_operand (op1
, HImode
)))
8270 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
8274 && register_operand (op1
, HImode
))
8276 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8279 else if (ex0
|| ex1
)
8281 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
8284 else if (register_operand (op0
, HImode
)
8285 && (u8_operand (op1
, HImode
)
8286 || s8_operand (op1
, HImode
)))
8288 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
8292 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
8295 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8302 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
8312 /* Add some additional costs besides CALL like moves etc. */
8314 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8318 /* Just a rough estimate. Even with -O2 we don't want bulky
8319 code expanded inline. */
8321 *total
= COSTS_N_INSNS (25);
8327 *total
= COSTS_N_INSNS (300);
8329 /* Add some additional costs besides CALL like moves etc. */
8330 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
8338 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8339 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8347 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
/* Division/modulo: library call, cost scales with mode size.  */
8349 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
8350 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8351 /* For div/mod with const-int divisor we have at least the cost of
8352 loading the divisor. */
8353 if (CONST_INT_P (XEXP (x
, 1)))
8354 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8355 /* Add some overall penaly for clobbering and moving around registers */
8356 *total
+= COSTS_N_INSNS (2);
8363 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
8364 *total
= COSTS_N_INSNS (1);
8369 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
8370 *total
= COSTS_N_INSNS (3);
8375 if (CONST_INT_P (XEXP (x
, 1)))
8376 switch (INTVAL (XEXP (x
, 1)))
8380 *total
= COSTS_N_INSNS (5);
8383 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
8391 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
/* Shift-left costs, per mode (per-mode sub-switch labels missing).  */
8398 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8400 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8401 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8406 val
= INTVAL (XEXP (x
, 1));
8408 *total
= COSTS_N_INSNS (3);
8409 else if (val
>= 0 && val
<= 7)
8410 *total
= COSTS_N_INSNS (val
);
8412 *total
= COSTS_N_INSNS (1);
8419 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
8420 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
8421 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
8423 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
8428 if (const1_rtx
== (XEXP (x
, 1))
8429 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
8431 *total
= COSTS_N_INSNS (2);
8435 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8437 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8438 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8442 switch (INTVAL (XEXP (x
, 1)))
8449 *total
= COSTS_N_INSNS (2);
8452 *total
= COSTS_N_INSNS (3);
8458 *total
= COSTS_N_INSNS (4);
8463 *total
= COSTS_N_INSNS (5);
8466 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8469 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8472 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
8475 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8476 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8482 if (!CONST_INT_P (XEXP (x
, 1)))
8484 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8487 switch (INTVAL (XEXP (x
, 1)))
8495 *total
= COSTS_N_INSNS (3);
8498 *total
= COSTS_N_INSNS (5);
8501 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8507 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8509 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8510 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8514 switch (INTVAL (XEXP (x
, 1)))
8520 *total
= COSTS_N_INSNS (3);
8525 *total
= COSTS_N_INSNS (4);
8528 *total
= COSTS_N_INSNS (6);
8531 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8534 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8535 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8543 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
/* Arithmetic shift-right costs, per mode.  */
8550 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8552 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8553 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8558 val
= INTVAL (XEXP (x
, 1));
8560 *total
= COSTS_N_INSNS (4);
8562 *total
= COSTS_N_INSNS (2);
8563 else if (val
>= 0 && val
<= 7)
8564 *total
= COSTS_N_INSNS (val
);
8566 *total
= COSTS_N_INSNS (1);
8571 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8573 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8574 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8578 switch (INTVAL (XEXP (x
, 1)))
8584 *total
= COSTS_N_INSNS (2);
8587 *total
= COSTS_N_INSNS (3);
8593 *total
= COSTS_N_INSNS (4);
8597 *total
= COSTS_N_INSNS (5);
8600 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8603 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8607 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
8610 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8611 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8617 if (!CONST_INT_P (XEXP (x
, 1)))
8619 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8622 switch (INTVAL (XEXP (x
, 1)))
8628 *total
= COSTS_N_INSNS (3);
8632 *total
= COSTS_N_INSNS (5);
8635 *total
= COSTS_N_INSNS (4);
8638 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8644 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8646 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8647 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8651 switch (INTVAL (XEXP (x
, 1)))
8657 *total
= COSTS_N_INSNS (4);
8662 *total
= COSTS_N_INSNS (6);
8665 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8668 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
8671 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8672 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8680 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
/* Logical shift-right costs, per mode.  */
8687 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8689 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
8690 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8695 val
= INTVAL (XEXP (x
, 1));
8697 *total
= COSTS_N_INSNS (3);
8698 else if (val
>= 0 && val
<= 7)
8699 *total
= COSTS_N_INSNS (val
);
8701 *total
= COSTS_N_INSNS (1);
8706 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8708 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8709 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8713 switch (INTVAL (XEXP (x
, 1)))
8720 *total
= COSTS_N_INSNS (2);
8723 *total
= COSTS_N_INSNS (3);
8728 *total
= COSTS_N_INSNS (4);
8732 *total
= COSTS_N_INSNS (5);
8738 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
8741 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
8745 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
8748 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
8749 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8755 if (!CONST_INT_P (XEXP (x
, 1)))
8757 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
8760 switch (INTVAL (XEXP (x
, 1)))
8768 *total
= COSTS_N_INSNS (3);
8771 *total
= COSTS_N_INSNS (5);
8774 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
8780 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8782 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8783 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8787 switch (INTVAL (XEXP (x
, 1)))
8793 *total
= COSTS_N_INSNS (4);
8796 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
8801 *total
= COSTS_N_INSNS (4);
8804 *total
= COSTS_N_INSNS (6);
8807 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
8808 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8816 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
/* COMPARE: cost depends on the mode of the first operand.  */
8820 switch (GET_MODE (XEXP (x
, 0)))
8823 *total
= COSTS_N_INSNS (1);
8824 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8825 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8829 *total
= COSTS_N_INSNS (2);
8830 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8831 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8832 else if (INTVAL (XEXP (x
, 1)) != 0)
8833 *total
+= COSTS_N_INSNS (1);
8837 *total
= COSTS_N_INSNS (3);
8838 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
8839 *total
+= COSTS_N_INSNS (2);
8843 *total
= COSTS_N_INSNS (4);
8844 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8845 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8846 else if (INTVAL (XEXP (x
, 1)) != 0)
8847 *total
+= COSTS_N_INSNS (3);
8853 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
/* Truncation of a highpart multiply result (case label missing).  */
8858 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
8859 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8860 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8862 if (QImode
== mode
|| HImode
== mode
)
8864 *total
= COSTS_N_INSNS (2);
8877 /* Implement `TARGET_RTX_COSTS'. */
8880 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
8881 int opno
, int *total
, bool speed
)
8883 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
8884 opno
, total
, speed
);
8886 if (avr_log
.rtx_costs
)
8888 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8889 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
/* NOTE(review): text damaged in extraction — the declaration and default
   value of `cost', the assignments inside the branches, and the final
   return are missing.  Tokens kept byte-for-byte; only comments added.  */
8896 /* Implement `TARGET_ADDRESS_COST'. */
8899 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
/* Base+displacement addressing (reg + const) is checked first.  */
8903 if (GET_CODE (x
) == PLUS
8904 && CONST_INT_P (XEXP (x
, 1))
8905 && (REG_P (XEXP (x
, 0))
8906 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
8908 if (INTVAL (XEXP (x
, 1)) >= 61)
8911 else if (CONSTANT_ADDRESS_P (x
))
8914 && io_address_operand (x
, QImode
))
8918 if (avr_log
.address_cost
)
8919 avr_edump ("\n%?: %d = %r\n", cost
, x
);
/* NOTE(review): text damaged in extraction — the declaration of `ok',
   the enclosing MEM check and the final return are missing.  Tokens kept
   byte-for-byte; only comments added.  */
8924 /* Test for extra memory constraint 'Q'.
8925 It's a memory address based on Y or Z pointer with valid displacement. */
8928 extra_constraint_Q (rtx x
)
8932 if (GET_CODE (XEXP (x
,0)) == PLUS
8933 && REG_P (XEXP (XEXP (x
,0), 0))
8934 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
8935 && (INTVAL (XEXP (XEXP (x
,0), 1))
8936 <= MAX_LD_OFFSET (GET_MODE (x
))))
8938 rtx xx
= XEXP (XEXP (x
,0), 0);
8939 int regno
= REGNO (xx
);
8941 ok
= (/* allocate pseudos */
8942 regno
>= FIRST_PSEUDO_REGISTER
8943 /* strictly check */
8944 || regno
== REG_Z
|| regno
== REG_Y
8945 /* XXX frame & arg pointer checks */
8946 || xx
== frame_pointer_rtx
8947 || xx
== arg_pointer_rtx
);
8949 if (avr_log
.constraints
)
8950 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8951 ok
, reload_completed
, reload_in_progress
, x
);
/* NOTE(review): the entire body of this function (presumably a switch
   mapping GT->GE, GTU->GEU, LE->LT, LEU->LTU) is missing from the
   extraction; only the comment and signature line survive.  */
8957 /* Convert condition code CONDITION to the valid AVR condition code. */
8960 avr_normalize_condition (RTX_CODE condition
)
/* NOTE(review): text damaged in extraction — return type, braces and the
   return statements are missing.  Tokens kept byte-for-byte; only
   comments added.  */
8977 /* Helper function for `avr_reorg'. */
8980 avr_compare_pattern (rtx insn
)
8982 rtx pattern
= single_set (insn
);
/* Recognize a cc0-setting COMPARE insn.  */
8985 && NONJUMP_INSN_P (insn
)
8986 && SET_DEST (pattern
) == cc0_rtx
8987 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
8989 enum machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
8990 enum machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
8992 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
8993 They must not be swapped, thus skip them. */
8995 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
8996 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
/* NOTE(review): text damaged in extraction — return type, `false'/`true'
   returns, braces, the declaration of `code', and parts of the block
   comments are missing.  Tokens kept byte-for-byte; only comments added.  */
9003 /* Helper function for `avr_reorg'. */
9005 /* Expansion of switch/case decision trees leads to code like
9007 cc0 = compare (Reg, Num)
9011 cc0 = compare (Reg, Num)
9015 The second comparison is superfluous and can be deleted.
9016 The second jump condition can be transformed from a
9017 "difficult" one to a "simple" one because "cc0 > 0" and
9018 "cc0 >= 0" will have the same effect here.
9020 This function relies on the way switch/case is being expaned
9021 as binary decision tree.  For example code see PR 49903.
9023 Return TRUE if optimization performed.
9024 Return FALSE if nothing changed.
9026 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9028 We don't want to do this in text peephole because it is
9029 tedious to work out jump offsets there and the second comparison
9030 might have been transormed by `avr_reorg'.
9032 RTL peephole won't do because peephole2 does not scan across
9036 avr_reorg_remove_redundant_compare (rtx insn1
)
9038 rtx comp1
, ifelse1
, xcond1
, branch1
;
9039 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
9041 rtx jump
, target
, cond
;
9043 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9045 branch1
= next_nonnote_nondebug_insn (insn1
);
9046 if (!branch1
|| !JUMP_P (branch1
))
9049 insn2
= next_nonnote_nondebug_insn (branch1
);
9050 if (!insn2
|| !avr_compare_pattern (insn2
))
9053 branch2
= next_nonnote_nondebug_insn (insn2
);
9054 if (!branch2
|| !JUMP_P (branch2
))
9057 comp1
= avr_compare_pattern (insn1
);
9058 comp2
= avr_compare_pattern (insn2
);
9059 xcond1
= single_set (branch1
);
9060 xcond2
= single_set (branch2
);
/* Both jumps must be IF_THEN_ELSE sets of pc over identical compares.  */
9062 if (!comp1
|| !comp2
9063 || !rtx_equal_p (comp1
, comp2
)
9064 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
9065 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
9066 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
9067 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
9072 comp1
= SET_SRC (comp1
);
9073 ifelse1
= SET_SRC (xcond1
);
9074 ifelse2
= SET_SRC (xcond2
);
9076 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9078 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
9079 || !REG_P (XEXP (comp1
, 0))
9080 || !CONST_INT_P (XEXP (comp1
, 1))
9081 || XEXP (ifelse1
, 2) != pc_rtx
9082 || XEXP (ifelse2
, 2) != pc_rtx
9083 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
9084 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
9085 || !COMPARISON_P (XEXP (ifelse2
, 0))
9086 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
9087 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
9088 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
9089 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
9094 /* We filtered the insn sequence to look like
9100 (if_then_else (eq (cc0)
9109 (if_then_else (CODE (cc0)
9115 code
= GET_CODE (XEXP (ifelse2
, 0));
9117 /* Map GT/GTU to GE/GEU which is easier for AVR.
9118 The first two instructions compare/branch on EQ
9119 so we may replace the difficult
9121 if (x == VAL) goto L1;
9122 if (x > VAL) goto L2;
9126 if (x == VAL) goto L1;
9127 if (x >= VAL) goto L2;
9129 Similarly, replace LE/LEU by LT/LTU. */
9140 code
= avr_normalize_condition (code
);
9147 /* Wrap the branches into UNSPECs so they won't be changed or
9148 optimized in the remainder. */
9150 target
= XEXP (XEXP (ifelse1
, 1), 0);
9151 cond
= XEXP (ifelse1
, 0);
9152 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
9154 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
9156 target
= XEXP (XEXP (ifelse2
, 1), 0);
9157 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9158 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
9160 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
9162 /* The comparisons in insn1 and insn2 are exactly the same;
9163 insn2 is superfluous so delete it. */
9165 delete_insn (insn2
);
9166 delete_insn (branch1
);
9167 delete_insn (branch2
);
/* NOTE(review): text damaged in extraction — the function header
   (presumably `static void avr_reorg (void)'), braces, `continue'
   statements and several guard conditions are missing.  Tokens kept
   byte-for-byte; only comments added.  */
9173 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9174 /* Optimize conditional jumps. */
9179 rtx insn
= get_insns();
9181 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
9183 rtx pattern
= avr_compare_pattern (insn
);
9189 && avr_reorg_remove_redundant_compare (insn
))
9194 if (compare_diff_p (insn
))
9196 /* Now we work under compare insn with difficult branch. */
9198 rtx next
= next_real_insn (insn
);
9199 rtx pat
= PATTERN (next
);
9201 pattern
= SET_SRC (pattern
);
/* Case 1: both operands are registers — swap them and the condition.  */
9203 if (true_regnum (XEXP (pattern
, 0)) >= 0
9204 && true_regnum (XEXP (pattern
, 1)) >= 0)
9206 rtx x
= XEXP (pattern
, 0);
9207 rtx src
= SET_SRC (pat
);
9208 rtx t
= XEXP (src
,0);
9209 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
9210 XEXP (pattern
, 0) = XEXP (pattern
, 1);
9211 XEXP (pattern
, 1) = x
;
9212 INSN_CODE (next
) = -1;
9214 else if (true_regnum (XEXP (pattern
, 0)) >= 0
9215 && XEXP (pattern
, 1) == const0_rtx
)
9217 /* This is a tst insn, we can reverse it. */
9218 rtx src
= SET_SRC (pat
);
9219 rtx t
= XEXP (src
,0);
9221 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
9222 XEXP (pattern
, 1) = XEXP (pattern
, 0);
9223 XEXP (pattern
, 0) = const0_rtx
;
9224 INSN_CODE (next
) = -1;
9225 INSN_CODE (insn
) = -1;
/* Case 3: reg vs. constant — normalize the condition by bumping the
   constant when the comparison can be simplified.  */
9227 else if (true_regnum (XEXP (pattern
, 0)) >= 0
9228 && CONST_INT_P (XEXP (pattern
, 1)))
9230 rtx x
= XEXP (pattern
, 1);
9231 rtx src
= SET_SRC (pat
);
9232 rtx t
= XEXP (src
,0);
9233 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
9235 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
9237 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
9238 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
9239 INSN_CODE (next
) = -1;
9240 INSN_CODE (insn
) = -1;
/* NOTE(review): the body (a single return of the fixed return-value
   register number) is missing from the extraction.  */
9247 /* Returns register number for function return value.*/
9249 static inline unsigned int
9250 avr_ret_register (void)
9255 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
9258 avr_function_value_regno_p (const unsigned int regno
)
9260 return (regno
== avr_ret_register ());
/* NOTE(review): text damaged in extraction — return type, braces and the
   condition guarding the rounding of `offs' are missing.  Tokens kept
   byte-for-byte; only comments added.  */
9263 /* Create an RTX representing the place where a
9264 library function returns a value of mode MODE. */
9267 avr_libcall_value (enum machine_mode mode
,
9268 const_rtx func ATTRIBUTE_UNUSED
)
9270 int offs
= GET_MODE_SIZE (mode
);
/* Round the size up to an even number of bytes (guard line missing).  */
9273 offs
= (offs
+ 1) & ~1;
9275 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
/* NOTE(review): text damaged in extraction — return type, braces and the
   declaration of `offs' are missing.  Tokens kept byte-for-byte; only
   comments added.  */
9278 /* Create an RTX representing the place where a
9279 function returns a value of data type VALTYPE. */
9282 avr_function_value (const_tree type
,
9283 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
9284 bool outgoing ATTRIBUTE_UNUSED
)
9288 if (TYPE_MODE (type
) != BLKmode
)
9289 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
9291 offs
= int_size_in_bytes (type
);
/* Round BLKmode sizes up to the next power-of-two register group.  */
9294 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
9295 offs
= GET_MODE_SIZE (SImode
);
9296 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
9297 offs
= GET_MODE_SIZE (DImode
);
9299 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
/* NOTE(review): text damaged in extraction — return type, the negative-
   regno guard and the return statements are missing.  Tokens kept
   byte-for-byte; only comments added.  Appears to test whether the hard
   register underlying X belongs to class RCLASS.  */
9303 test_hard_reg_class (enum reg_class rclass
, rtx x
)
9305 int regno
= true_regnum (x
);
9309 if (TEST_HARD_REG_CLASS (rclass
, regno
))
/* NOTE(review): text damaged in extraction — return type, several guard
   conditions (e.g. the MEM checks around the LDS/STS test), default
   switch labels, braces and returns are missing.  Tokens kept
   byte-for-byte; only comments added.  */
9316 /* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
9317 and thus is suitable to be skipped by CPSE, SBRC, etc. */
9320 avr_2word_insn_p (rtx insn
)
9322 if (avr_current_device
->errata_skip
9324 || 2 != get_attr_length (insn
))
9329 switch (INSN_CODE (insn
))
9334 case CODE_FOR_movqi_insn
:
9335 case CODE_FOR_movuqq_insn
:
9336 case CODE_FOR_movqq_insn
:
9338 rtx set
= single_set (insn
);
9339 rtx src
= SET_SRC (set
);
9340 rtx dest
= SET_DEST (set
);
9342 /* Factor out LDS and STS from movqi_insn. */
9345 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
9347 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
9349 else if (REG_P (dest
)
9352 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
9358 case CODE_FOR_call_insn
:
9359 case CODE_FOR_call_value_insn
:
/* NOTE(review): text damaged in extraction — the return type and the
   second arm of the INSN_UID conditional are missing.  Tokens kept
   byte-for-byte; only comments added.  Tests whether the jump INSN skips
   exactly one following instruction (1 word, or 2 words when the skipped
   insn qualifies per avr_2word_insn_p).  */
9366 jump_over_one_insn_p (rtx insn
, rtx dest
)
9368 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
9371 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
9372 int dest_addr
= INSN_ADDRESSES (uid
);
9373 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
9375 return (jump_offset
== 1
9376 || (jump_offset
== 2
9377 && avr_2word_insn_p (next_active_insn (insn
))));
/* NOTE(review): text damaged in extraction — the return type, `return 1'
   statements and part of the size>=4 condition are missing.  Tokens kept
   byte-for-byte; only comments added.  */
9380 /* Returns 1 if a value of mode MODE can be stored starting with hard
9381 register number REGNO.  On the enhanced core, anything larger than
9382 1 byte must start in even numbered register for "movw" to work
9383 (this way we don't have to check for odd registers everywhere). */
9386 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
9388 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9389 Disallowing QI et al. in these regs might lead to code like
9390 (set (subreg:QI (reg:HI 28) n) ...)
9391 which will result in wrong code because reload does not
9392 handle SUBREGs of hard regsisters like this.
9393 This could be fixed in reload.  However, it appears
9394 that fixing reload is not wanted by reload people. */
9396 /* Any GENERAL_REGS register can hold 8-bit values. */
9398 if (GET_MODE_SIZE (mode
) == 1)
9401 /* FIXME: Ideally, the following test is not needed.
9402 However, it turned out that it can reduce the number
9403 of spill fails.  AVR and it's poor endowment with
9404 address registers is extreme stress test for reload. */
9406 if (GET_MODE_SIZE (mode
) >= 4
9410 /* All modes larger than 8 bits should start in an even register. */
9412 return !(regno
& 1);
9416 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
9419 avr_hard_regno_call_part_clobbered (unsigned regno
, enum machine_mode mode
)
9421 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
9422 represent valid hard registers like, e.g. HI:29. Returning TRUE
9423 for such registers can lead to performance degradation as mentioned
9424 in PR53595. Thus, report invalid hard registers as FALSE. */
9426 if (!avr_hard_regno_mode_ok (regno
, mode
))
9429 /* Return true if any of the following boundaries is crossed:
9430 17/18, 27/28 and 29/30. */
9432 return ((regno
< 18 && regno
+ GET_MODE_SIZE (mode
) > 18)
9433 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
9434 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
/* NOTE(review): text damaged in extraction — return type, braces and the
   condition that selects between the two trailing returns are missing.
   Tokens kept byte-for-byte; only comments added.  */
9438 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
9441 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
9442 addr_space_t as
, RTX_CODE outer_code
,
9443 RTX_CODE index_code ATTRIBUTE_UNUSED
)
/* Non-generic (flash) address spaces can only be addressed via Z.  */
9445 if (!ADDR_SPACE_GENERIC_P (as
))
9447 return POINTER_Z_REGS
;
9451 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
9453 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
/* NOTE(review): text damaged in extraction — the return type, the `ok'
   flag logic, braces and several surrounding conditions (register-class
   tests, strict/non-strict handling) are missing, so the surviving
   fragments below cannot be read as coherent control flow.  Tokens kept
   byte-for-byte; only comments added.  Restore from upstream avr.c.  */
9457 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9460 avr_regno_mode_code_ok_for_base_p (int regno
,
9461 enum machine_mode mode ATTRIBUTE_UNUSED
,
9462 addr_space_t as ATTRIBUTE_UNUSED
,
9463 RTX_CODE outer_code
,
9464 RTX_CODE index_code ATTRIBUTE_UNUSED
)
9468 if (!ADDR_SPACE_GENERIC_P (as
))
9470 if (regno
< FIRST_PSEUDO_REGISTER
9478 regno
= reg_renumber
[regno
];
9489 if (regno
< FIRST_PSEUDO_REGISTER
9493 || regno
== ARG_POINTER_REGNUM
))
9497 else if (reg_renumber
)
9499 regno
= reg_renumber
[regno
];
9504 || regno
== ARG_POINTER_REGNUM
)
9511 && PLUS
== outer_code
9521 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9522 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9523 CLOBBER_REG is a QI clobber register or NULL_RTX.
9524 LEN == NULL: output instructions.
9525 LEN != NULL: set *LEN to the length of the instruction sequence
9526 (in words) printed with LEN = NULL.
9527 If CLEAR_P is true, OP[0] had been cleard to Zero already.
9528 If CLEAR_P is false, nothing is known about OP[0].
9530 The effect on cc0 is as follows:
9532 Load 0 to any register except ZERO_REG : NONE
9533 Load ld register with any value : NONE
9534 Anything else: : CLOBBER */
9537 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
9543 int clobber_val
= 1234;
9544 bool cooked_clobber_p
= false;
9546 enum machine_mode mode
= GET_MODE (dest
);
9547 int n
, n_bytes
= GET_MODE_SIZE (mode
);
9549 gcc_assert (REG_P (dest
)
9550 && CONSTANT_P (src
));
9555 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9556 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9558 if (REGNO (dest
) < 16
9559 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
9561 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
9564 /* We might need a clobber reg but don't have one. Look at the value to
9565 be loaded more closely. A clobber is only needed if it is a symbol
9566 or contains a byte that is neither 0, -1 or a power of 2. */
9568 if (NULL_RTX
== clobber_reg
9569 && !test_hard_reg_class (LD_REGS
, dest
)
9570 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
9571 || !avr_popcount_each_byte (src
, n_bytes
,
9572 (1 << 0) | (1 << 1) | (1 << 8))))
9574 /* We have no clobber register but need one. Cook one up.
9575 That's cheaper than loading from constant pool. */
9577 cooked_clobber_p
= true;
9578 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
9579 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
9582 /* Now start filling DEST from LSB to MSB. */
9584 for (n
= 0; n
< n_bytes
; n
++)
9587 bool done_byte
= false;
9591 /* Crop the n-th destination byte. */
9593 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
9594 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
9596 if (!CONST_INT_P (src
)
9597 && !CONST_FIXED_P (src
)
9598 && !CONST_DOUBLE_P (src
))
9600 static const char* const asm_code
[][2] =
9602 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
9603 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
9604 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
9605 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
9610 xop
[2] = clobber_reg
;
9612 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
9617 /* Crop the n-th source byte. */
9619 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
9620 ival
[n
] = INTVAL (xval
);
9622 /* Look if we can reuse the low word by means of MOVW. */
9628 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
9629 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
9631 if (INTVAL (lo16
) == INTVAL (hi16
))
9633 if (0 != INTVAL (lo16
)
9636 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
9643 /* Don't use CLR so that cc0 is set as expected. */
9648 avr_asm_len (ldreg_p
? "ldi %0,0"
9649 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
9650 : "mov %0,__zero_reg__",
9655 if (clobber_val
== ival
[n
]
9656 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
9661 /* LD_REGS can use LDI to move a constant value */
9667 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
9671 /* Try to reuse value already loaded in some lower byte. */
9673 for (j
= 0; j
< n
; j
++)
9674 if (ival
[j
] == ival
[n
])
9679 avr_asm_len ("mov %0,%1", xop
, len
, 1);
9687 /* Need no clobber reg for -1: Use CLR/DEC */
9692 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9694 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
9697 else if (1 == ival
[n
])
9700 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
9702 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
9706 /* Use T flag or INC to manage powers of 2 if we have
9709 if (NULL_RTX
== clobber_reg
9710 && single_one_operand (xval
, QImode
))
9713 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
9715 gcc_assert (constm1_rtx
!= xop
[1]);
9720 avr_asm_len ("set", xop
, len
, 1);
9724 avr_asm_len ("clr %0", xop
, len
, 1);
9726 avr_asm_len ("bld %0,%1", xop
, len
, 1);
9730 /* We actually need the LD_REGS clobber reg. */
9732 gcc_assert (NULL_RTX
!= clobber_reg
);
9736 xop
[2] = clobber_reg
;
9737 clobber_val
= ival
[n
];
9739 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9740 "mov %0,%2", xop
, len
, 2);
9743 /* If we cooked up a clobber reg above, restore it. */
9745 if (cooked_clobber_p
)
9747 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
9752 /* Reload the constant OP[1] into the HI register OP[0].
9753 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9754 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9755 need a clobber reg or have to cook one up.
9757 PLEN == NULL: Output instructions.
9758 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9759 by the insns printed.
9764 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
9766 output_reload_in_const (op
, clobber_reg
, plen
, false);
9771 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9772 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9773 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9774 need a clobber reg or have to cook one up.
9776 LEN == NULL: Output instructions.
9778 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9779 by the insns printed.
9784 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
9787 && !test_hard_reg_class (LD_REGS
, op
[0])
9788 && (CONST_INT_P (op
[1])
9789 || CONST_FIXED_P (op
[1])
9790 || CONST_DOUBLE_P (op
[1])))
9792 int len_clr
, len_noclr
;
9794 /* In some cases it is better to clear the destination beforehand, e.g.
9796 CLR R2 CLR R3 MOVW R4,R2 INC R2
9800 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9802 We find it too tedious to work that out in the print function.
9803 Instead, we call the print function twice to get the lengths of
9804 both methods and use the shortest one. */
9806 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
9807 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
9809 if (len_noclr
- len_clr
== 4)
9811 /* Default needs 4 CLR instructions: clear register beforehand. */
9813 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9814 "mov %B0,__zero_reg__" CR_TAB
9815 "movw %C0,%A0", &op
[0], len
, 3);
9817 output_reload_in_const (op
, clobber_reg
, len
, true);
9826 /* Default: destination not pre-cleared. */
9828 output_reload_in_const (op
, clobber_reg
, len
, false);
9833 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
9835 output_reload_in_const (op
, clobber_reg
, len
, false);
9841 avr_output_addr_vec_elt (FILE *stream
, int value
)
9843 if (AVR_HAVE_JMP_CALL
)
9844 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
9846 fprintf (stream
, "\trjmp .L%d\n", value
);
9849 /* Returns true if SCRATCH are safe to be allocated as a scratch
9850 registers (for a define_peephole2) in the current function. */
9853 avr_hard_regno_scratch_ok (unsigned int regno
)
9855 /* Interrupt functions can only use registers that have already been saved
9856 by the prologue, even if they would normally be call-clobbered. */
9858 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9859 && !df_regs_ever_live_p (regno
))
9862 /* Don't allow hard registers that might be part of the frame pointer.
9863 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9864 and don't care for a frame pointer that spans more than one register. */
9866 if ((!reload_completed
|| frame_pointer_needed
)
9867 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
9875 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9878 avr_hard_regno_rename_ok (unsigned int old_reg
,
9879 unsigned int new_reg
)
9881 /* Interrupt functions can only use registers that have already been
9882 saved by the prologue, even if they would normally be
9885 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
9886 && !df_regs_ever_live_p (new_reg
))
9889 /* Don't allow hard registers that might be part of the frame pointer.
9890 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9891 and don't care for a frame pointer that spans more than one register. */
9893 if ((!reload_completed
|| frame_pointer_needed
)
9894 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
9895 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
9903 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9904 or memory location in the I/O space (QImode only).
9906 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9907 Operand 1: register operand to test, or CONST_INT memory address.
9908 Operand 2: bit number.
9909 Operand 3: label to jump to if the test is true. */
9912 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
9914 enum rtx_code comp
= GET_CODE (operands
[0]);
9915 bool long_jump
= get_attr_length (insn
) >= 4;
9916 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
9920 else if (comp
== LT
)
9924 comp
= reverse_condition (comp
);
9926 switch (GET_CODE (operands
[1]))
9933 if (low_io_address_operand (operands
[1], QImode
))
9936 output_asm_insn ("sbis %i1,%2", operands
);
9938 output_asm_insn ("sbic %i1,%2", operands
);
9942 output_asm_insn ("in __tmp_reg__,%i1", operands
);
9944 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
9946 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
9949 break; /* CONST_INT */
9954 output_asm_insn ("sbrs %T1%T2", operands
);
9956 output_asm_insn ("sbrc %T1%T2", operands
);
9962 return ("rjmp .+4" CR_TAB
9971 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9974 avr_asm_out_ctor (rtx symbol
, int priority
)
9976 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
9977 default_ctor_section_asm_out_constructor (symbol
, priority
);
9980 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9983 avr_asm_out_dtor (rtx symbol
, int priority
)
9985 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
9986 default_dtor_section_asm_out_destructor (symbol
, priority
);
9989 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9992 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
9994 if (TYPE_MODE (type
) == BLKmode
)
9996 HOST_WIDE_INT size
= int_size_in_bytes (type
);
9997 return (size
== -1 || size
> 8);
10004 /* Implement `CASE_VALUES_THRESHOLD'. */
10005 /* Supply the default for --param case-values-threshold=0 */
10007 static unsigned int
10008 avr_case_values_threshold (void)
10010 /* The exact break-even point between a jump table and an if-else tree
10011 depends on several factors not available here like, e.g. if 8-bit
10012 comparisons can be used in the if-else tree or not, on the
10013 range of the case values, if the case value can be reused, on the
10014 register allocation, etc. '7' appears to be a good choice. */
10020 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10022 static enum machine_mode
10023 avr_addr_space_address_mode (addr_space_t as
)
10025 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
10029 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10031 static enum machine_mode
10032 avr_addr_space_pointer_mode (addr_space_t as
)
10034 return avr_addr_space_address_mode (as
);
10038 /* Helper for following function. */
10041 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
10048 return REGNO (reg
) == REG_Z
;
10051 /* Avoid combine to propagate hard regs. */
10053 if (can_create_pseudo_p()
10054 && REGNO (reg
) < REG_Z
)
10063 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10066 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
10067 bool strict
, addr_space_t as
)
10076 case ADDR_SPACE_GENERIC
:
10077 return avr_legitimate_address_p (mode
, x
, strict
);
10079 case ADDR_SPACE_FLASH
:
10080 case ADDR_SPACE_FLASH1
:
10081 case ADDR_SPACE_FLASH2
:
10082 case ADDR_SPACE_FLASH3
:
10083 case ADDR_SPACE_FLASH4
:
10084 case ADDR_SPACE_FLASH5
:
10086 switch (GET_CODE (x
))
10089 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
10093 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
10102 case ADDR_SPACE_MEMX
:
10105 && can_create_pseudo_p());
10107 if (LO_SUM
== GET_CODE (x
))
10109 rtx hi
= XEXP (x
, 0);
10110 rtx lo
= XEXP (x
, 1);
10113 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
10115 && REGNO (lo
) == REG_Z
);
10121 if (avr_log
.legitimate_address_p
)
10123 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10124 "reload_completed=%d reload_in_progress=%d %s:",
10125 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
10126 reg_renumber
? "(reg_renumber)" : "");
10128 if (GET_CODE (x
) == PLUS
10129 && REG_P (XEXP (x
, 0))
10130 && CONST_INT_P (XEXP (x
, 1))
10131 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
10134 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
10135 true_regnum (XEXP (x
, 0)));
10138 avr_edump ("\n%r\n", x
);
10145 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10148 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
10149 enum machine_mode mode
, addr_space_t as
)
10151 if (ADDR_SPACE_GENERIC_P (as
))
10152 return avr_legitimize_address (x
, old_x
, mode
);
10154 if (avr_log
.legitimize_address
)
10156 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
10163 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10166 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
10168 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
10169 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
10171 if (avr_log
.progmem
)
10172 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
10173 src
, type_from
, type_to
);
10175 /* Up-casting from 16-bit to 24-bit pointer. */
10177 if (as_from
!= ADDR_SPACE_MEMX
10178 && as_to
== ADDR_SPACE_MEMX
)
10182 rtx reg
= gen_reg_rtx (PSImode
);
10184 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
10185 sym
= XEXP (sym
, 0);
10187 /* Look at symbol flags: avr_encode_section_info set the flags
10188 also if attribute progmem was seen so that we get the right
10189 promotion for, e.g. PSTR-like strings that reside in generic space
10190 but are located in flash. In that case we patch the incoming
10193 if (SYMBOL_REF
== GET_CODE (sym
)
10194 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
10196 as_from
= ADDR_SPACE_FLASH
;
10199 /* Linearize memory: RAM has bit 23 set. */
10201 msb
= ADDR_SPACE_GENERIC_P (as_from
)
10203 : avr_addrspace
[as_from
].segment
;
10205 src
= force_reg (Pmode
, src
);
10207 emit_insn (msb
== 0
10208 ? gen_zero_extendhipsi2 (reg
, src
)
10209 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
10214 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
10216 if (as_from
== ADDR_SPACE_MEMX
10217 && as_to
!= ADDR_SPACE_MEMX
)
10219 rtx new_src
= gen_reg_rtx (Pmode
);
10221 src
= force_reg (PSImode
, src
);
10223 emit_move_insn (new_src
,
10224 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
10232 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
10235 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
10236 addr_space_t superset ATTRIBUTE_UNUSED
)
10238 /* Allow any kind of pointer mess. */
10244 /* Worker function for movmemhi expander.
10245 XOP[0] Destination as MEM:BLK
10247 XOP[2] # Bytes to copy
10249 Return TRUE if the expansion is accomplished.
10250 Return FALSE if the operand compination is not supported. */
10253 avr_emit_movmemhi (rtx
*xop
)
10255 HOST_WIDE_INT count
;
10256 enum machine_mode loop_mode
;
10257 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
10258 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
10259 rtx a_hi8
= NULL_RTX
;
10261 if (avr_mem_flash_p (xop
[0]))
10264 if (!CONST_INT_P (xop
[2]))
10267 count
= INTVAL (xop
[2]);
10271 a_src
= XEXP (xop
[1], 0);
10272 a_dest
= XEXP (xop
[0], 0);
10274 if (PSImode
== GET_MODE (a_src
))
10276 gcc_assert (as
== ADDR_SPACE_MEMX
);
10278 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
10279 loop_reg
= gen_rtx_REG (loop_mode
, 24);
10280 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
10282 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
10283 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
10287 int segment
= avr_addrspace
[as
].segment
;
10290 && avr_current_device
->n_flash
> 1)
10292 a_hi8
= GEN_INT (segment
);
10293 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
10295 else if (!ADDR_SPACE_GENERIC_P (as
))
10297 as
= ADDR_SPACE_FLASH
;
10302 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
10303 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
10306 xas
= GEN_INT (as
);
10308 /* FIXME: Register allocator might come up with spill fails if it is left
10309 on its own. Thus, we allocate the pointer registers by hand:
10311 X = destination address */
10313 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
10314 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
10316 /* FIXME: Register allocator does a bad job and might spill address
10317 register(s) inside the loop leading to additional move instruction
10318 to/from stack which could clobber tmp_reg. Thus, do *not* emit
10319 load and store as separate insns. Instead, we perform the copy
10320 by means of one monolithic insn. */
10322 gcc_assert (TMP_REGNO
== LPM_REGNO
);
10324 if (as
!= ADDR_SPACE_MEMX
)
10326 /* Load instruction ([E]LPM or LD) is known at compile time:
10327 Do the copy-loop inline. */
10329 rtx (*fun
) (rtx
, rtx
, rtx
)
10330 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
10332 insn
= fun (xas
, loop_reg
, loop_reg
);
10336 rtx (*fun
) (rtx
, rtx
)
10337 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
10339 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
10341 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
10344 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
10351 /* Print assembler for movmem_qi, movmem_hi insns...
10353 $1, $2 : Loop register
10355 X : Destination address
10359 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
10361 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
10362 enum machine_mode loop_mode
= GET_MODE (op
[1]);
10363 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
10371 xop
[2] = tmp_reg_rtx
;
10375 avr_asm_len ("0:", xop
, plen
, 0);
10377 /* Load with post-increment */
10384 case ADDR_SPACE_GENERIC
:
10386 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
10389 case ADDR_SPACE_FLASH
:
10392 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
10394 avr_asm_len ("lpm" CR_TAB
10395 "adiw r30,1", xop
, plen
, 2);
10398 case ADDR_SPACE_FLASH1
:
10399 case ADDR_SPACE_FLASH2
:
10400 case ADDR_SPACE_FLASH3
:
10401 case ADDR_SPACE_FLASH4
:
10402 case ADDR_SPACE_FLASH5
:
10404 if (AVR_HAVE_ELPMX
)
10405 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
10407 avr_asm_len ("elpm" CR_TAB
10408 "adiw r30,1", xop
, plen
, 2);
10412 /* Store with post-increment */
10414 avr_asm_len ("st X+,%2", xop
, plen
, 1);
10416 /* Decrement loop-counter and set Z-flag */
10418 if (QImode
== loop_mode
)
10420 avr_asm_len ("dec %1", xop
, plen
, 1);
10424 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
10428 avr_asm_len ("subi %A1,1" CR_TAB
10429 "sbci %B1,0", xop
, plen
, 2);
10432 /* Loop until zero */
10434 return avr_asm_len ("brne 0b", xop
, plen
, 1);
10439 /* Helper for __builtin_avr_delay_cycles */
10442 avr_mem_clobber (void)
10444 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
10445 MEM_VOLATILE_P (mem
) = 1;
10450 avr_expand_delay_cycles (rtx operands0
)
10452 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
10453 unsigned HOST_WIDE_INT cycles_used
;
10454 unsigned HOST_WIDE_INT loop_count
;
10456 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
10458 loop_count
= ((cycles
- 9) / 6) + 1;
10459 cycles_used
= ((loop_count
- 1) * 6) + 9;
10460 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
10461 avr_mem_clobber()));
10462 cycles
-= cycles_used
;
10465 if (IN_RANGE (cycles
, 262145, 83886081))
10467 loop_count
= ((cycles
- 7) / 5) + 1;
10468 if (loop_count
> 0xFFFFFF)
10469 loop_count
= 0xFFFFFF;
10470 cycles_used
= ((loop_count
- 1) * 5) + 7;
10471 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
10472 avr_mem_clobber()));
10473 cycles
-= cycles_used
;
10476 if (IN_RANGE (cycles
, 768, 262144))
10478 loop_count
= ((cycles
- 5) / 4) + 1;
10479 if (loop_count
> 0xFFFF)
10480 loop_count
= 0xFFFF;
10481 cycles_used
= ((loop_count
- 1) * 4) + 5;
10482 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
10483 avr_mem_clobber()));
10484 cycles
-= cycles_used
;
10487 if (IN_RANGE (cycles
, 6, 767))
10489 loop_count
= cycles
/ 3;
10490 if (loop_count
> 255)
10492 cycles_used
= loop_count
* 3;
10493 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
10494 avr_mem_clobber()));
10495 cycles
-= cycles_used
;
10498 while (cycles
>= 2)
10500 emit_insn (gen_nopv (GEN_INT(2)));
10506 emit_insn (gen_nopv (GEN_INT(1)));
10512 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10515 avr_double_int_push_digit (double_int val
, int base
,
10516 unsigned HOST_WIDE_INT digit
)
10519 ? double_int_lshift (val
, 32, 64, false)
10520 : double_int_mul (val
, uhwi_to_double_int (base
));
10522 return double_int_add (val
, uhwi_to_double_int (digit
));
10526 /* Compute the image of x under f, i.e. perform x --> f(x) */
10529 avr_map (double_int f
, int x
)
10531 return 0xf & double_int_to_uhwi (double_int_rshift (f
, 4*x
, 64, false));
10535 /* Return some metrics of map A. */
10539 /* Number of fixed points in { 0 ... 7 } */
10542 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10545 /* Mask representing the fixed points in { 0 ... 7 } */
10546 MAP_MASK_FIXED_0_7
,
10548 /* Size of the preimage of { 0 ... 7 } */
10551 /* Mask that represents the preimage of { f } */
10552 MAP_MASK_PREIMAGE_F
10556 avr_map_metric (double_int a
, int mode
)
10558 unsigned i
, metric
= 0;
10560 for (i
= 0; i
< 8; i
++)
10562 unsigned ai
= avr_map (a
, i
);
10564 if (mode
== MAP_FIXED_0_7
)
10566 else if (mode
== MAP_NONFIXED_0_7
)
10567 metric
+= ai
< 8 && ai
!= i
;
10568 else if (mode
== MAP_MASK_FIXED_0_7
)
10569 metric
|= ((unsigned) (ai
== i
)) << i
;
10570 else if (mode
== MAP_PREIMAGE_0_7
)
10572 else if (mode
== MAP_MASK_PREIMAGE_F
)
10573 metric
|= ((unsigned) (ai
== 0xf)) << i
;
10582 /* Return true if IVAL has a 0xf in its hexadecimal representation
10583 and false, otherwise. Only nibbles 0..7 are taken into account.
10584 Used as constraint helper for C0f and Cxf. */
10587 avr_has_nibble_0xf (rtx ival
)
10589 return 0 != avr_map_metric (rtx_to_double_int (ival
), MAP_MASK_PREIMAGE_F
);
10593 /* We have a set of bits that are mapped by a function F.
10594 Try to decompose F by means of a second function G so that
10600 cost (F o G^-1) + cost (G) < cost (F)
10602 Example: Suppose builtin insert_bits supplies us with the map
10603 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10604 nibble of the result, we can just as well rotate the bits before inserting
10605 them and use the map 0x7654ffff which is cheaper than the original map.
10606 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10610 /* tree code of binary function G */
10611 enum tree_code code
;
10613 /* The constant second argument of G */
10616 /* G^-1, the inverse of G (*, arg) */
10619 /* The cost of appplying G (*, arg) */
10622 /* The composition F o G^-1 (*, arg) for some function F */
10625 /* For debug purpose only */
10629 static const avr_map_op_t avr_map_op
[] =
10631 { LROTATE_EXPR
, 0, 0x76543210, 0, { 0, 0 }, "id" },
10632 { LROTATE_EXPR
, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10633 { LROTATE_EXPR
, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10634 { LROTATE_EXPR
, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10635 { LROTATE_EXPR
, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10636 { LROTATE_EXPR
, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10637 { LROTATE_EXPR
, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10638 { LROTATE_EXPR
, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10639 { RSHIFT_EXPR
, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10640 { RSHIFT_EXPR
, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10641 { RSHIFT_EXPR
, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10642 { RSHIFT_EXPR
, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10643 { RSHIFT_EXPR
, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10644 { LSHIFT_EXPR
, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10645 { LSHIFT_EXPR
, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10649 /* Try to decompose F as F = (F o G^-1) o G as described above.
10650 The result is a struct representing F o G^-1 and G.
10651 If result.cost < 0 then such a decomposition does not exist. */
10653 static avr_map_op_t
10654 avr_map_decompose (double_int f
, const avr_map_op_t
*g
, bool val_const_p
)
10657 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
10658 avr_map_op_t f_ginv
= *g
;
10659 double_int ginv
= uhwi_to_double_int (g
->ginv
);
10663 /* Step 1: Computing F o G^-1 */
10665 for (i
= 7; i
>= 0; i
--)
10667 int x
= avr_map (f
, i
);
10671 x
= avr_map (ginv
, x
);
10673 /* The bit is no element of the image of G: no avail (cost = -1) */
10679 f_ginv
.map
= avr_double_int_push_digit (f_ginv
.map
, 16, x
);
10682 /* Step 2: Compute the cost of the operations.
10683 The overall cost of doing an operation prior to the insertion is
10684 the cost of the insertion plus the cost of the operation. */
10686 /* Step 2a: Compute cost of F o G^-1 */
10688 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
10690 /* The mapping consists only of fixed points and can be folded
10691 to AND/OR logic in the remainder. Reasonable cost is 3. */
10693 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
10699 /* Get the cost of the insn by calling the output worker with some
10700 fake values. Mimic effect of reloading xop[3]: Unused operands
10701 are mapped to 0 and used operands are reloaded to xop[0]. */
10703 xop
[0] = all_regs_rtx
[24];
10704 xop
[1] = gen_int_mode (double_int_to_uhwi (f_ginv
.map
), SImode
);
10705 xop
[2] = all_regs_rtx
[25];
10706 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
10708 avr_out_insert_bits (xop
, &f_ginv
.cost
);
10710 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
10713 /* Step 2b: Add cost of G */
10715 f_ginv
.cost
+= g
->cost
;
10717 if (avr_log
.builtin
)
10718 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
10724 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10725 XOP[0] and XOP[1] don't overlap.
10726 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10727 If FIXP_P = false: Just move the bit if its position in the destination
10728 is different to its source position. */
10731 avr_move_bits (rtx
*xop
, double_int map
, bool fixp_p
, int *plen
)
10735 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10736 int t_bit_src
= -1;
10738 /* We order the operations according to the requested source bit b. */
10740 for (b
= 0; b
< 8; b
++)
10741 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
10743 int bit_src
= avr_map (map
, bit_dest
);
10747 /* Same position: No need to copy as requested by FIXP_P. */
10748 || (bit_dest
== bit_src
&& !fixp_p
))
10751 if (t_bit_src
!= bit_src
)
10753 /* Source bit is not yet in T: Store it to T. */
10755 t_bit_src
= bit_src
;
10757 xop
[3] = GEN_INT (bit_src
);
10758 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
10761 /* Load destination bit with T. */
10763 xop
[3] = GEN_INT (bit_dest
);
10764 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
10769 /* PLEN == 0: Print assembler code for `insert_bits'.
10770 PLEN != 0: Compute code length in bytes.
10773 OP[1]: The mapping composed of nibbles. If nibble no. N is
10774 0: Bit N of result is copied from bit OP[2].0
10776 7: Bit N of result is copied from bit OP[2].7
10777 0xf: Bit N of result is copied from bit OP[3].N
10778 OP[2]: Bits to be inserted
10779 OP[3]: Target value */
10782 avr_out_insert_bits (rtx
*op
, int *plen
)
10784 double_int map
= rtx_to_double_int (op
[1]);
10785 unsigned mask_fixed
;
10786 bool fixp_p
= true;
10793 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
10797 else if (flag_print_asm_name
)
10798 fprintf (asm_out_file
,
10799 ASM_COMMENT_START
"map = 0x%08" HOST_LONG_FORMAT
"x\n",
10800 double_int_to_uhwi (map
) & GET_MODE_MASK (SImode
));
10802 /* If MAP has fixed points it might be better to initialize the result
10803 with the bits to be inserted instead of moving all bits by hand. */
10805 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
10807 if (REGNO (xop
[0]) == REGNO (xop
[1]))
10809 /* Avoid early-clobber conflicts */
10811 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
10812 xop
[1] = tmp_reg_rtx
;
10816 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
10818 /* XOP[2] is used and reloaded to XOP[0] already */
10820 int n_fix
= 0, n_nofix
= 0;
10822 gcc_assert (REG_P (xop
[2]));
10824 /* Get the code size of the bit insertions; once with all bits
10825 moved and once with fixed points omitted. */
10827 avr_move_bits (xop
, map
, true, &n_fix
);
10828 avr_move_bits (xop
, map
, false, &n_nofix
);
10830 if (fixp_p
&& n_fix
- n_nofix
> 3)
10832 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
10834 avr_asm_len ("eor %0,%1" CR_TAB
10835 "andi %0,%3" CR_TAB
10836 "eor %0,%1", xop
, plen
, 3);
10842 /* XOP[2] is unused */
10844 if (fixp_p
&& mask_fixed
)
10846 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
10851 /* Move/insert remaining bits. */
10853 avr_move_bits (xop
, map
, fixp_p
, plen
);
10859 /* IDs for all the AVR builtins. */
10861 enum avr_builtin_id
10864 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10865 #include "builtins.def"
10871 struct GTY(()) avr_builtin_description
10873 enum insn_code icode
;
10880 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
10881 that a built-in's ID can be used to access the built-in by means of
10884 static GTY(()) struct avr_builtin_description
10885 avr_bdesc
[AVR_BUILTIN_COUNT
] =
10888 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
10889 { (enum insn_code) ICODE, NAME, N_ARGS, NULL_TREE },
10890 #include "builtins.def"
10895 /* Implement `TARGET_BUILTIN_DECL'. */
10898 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
10900 if (id
< AVR_BUILTIN_COUNT
)
10901 return avr_bdesc
[id
].fndecl
;
10903 return error_mark_node
;
10908 avr_init_builtin_int24 (void)
10910 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
10911 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
10913 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
10914 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
10918 /* Implement `TARGET_INIT_BUILTINS' */
10919 /* Set up all builtin functions for this target. */
10922 avr_init_builtins (void)
10924 tree void_ftype_void
10925 = build_function_type_list (void_type_node
, NULL_TREE
);
10926 tree uchar_ftype_uchar
10927 = build_function_type_list (unsigned_char_type_node
,
10928 unsigned_char_type_node
,
10930 tree uint_ftype_uchar_uchar
10931 = build_function_type_list (unsigned_type_node
,
10932 unsigned_char_type_node
,
10933 unsigned_char_type_node
,
10935 tree int_ftype_char_char
10936 = build_function_type_list (integer_type_node
,
10940 tree int_ftype_char_uchar
10941 = build_function_type_list (integer_type_node
,
10943 unsigned_char_type_node
,
10945 tree void_ftype_ulong
10946 = build_function_type_list (void_type_node
,
10947 long_unsigned_type_node
,
10950 tree uchar_ftype_ulong_uchar_uchar
10951 = build_function_type_list (unsigned_char_type_node
,
10952 long_unsigned_type_node
,
10953 unsigned_char_type_node
,
10954 unsigned_char_type_node
,
10957 tree const_memx_void_node
10958 = build_qualified_type (void_type_node
,
10960 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
10962 tree const_memx_ptr_type_node
10963 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
10965 tree char_ftype_const_memx_ptr
10966 = build_function_type_list (char_type_node
,
10967 const_memx_ptr_type_node
,
10970 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
10971 gcc_assert (ID < AVR_BUILTIN_COUNT); \
10972 avr_bdesc[ID].fndecl \
10973 = add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10974 #include "builtins.def"
10977 avr_init_builtin_int24 ();
10981 /* Subroutine of avr_expand_builtin to expand vanilla builtins
10982 with non-void result and 1 ... 3 arguments. */
10985 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
10988 int n
, n_args
= call_expr_nargs (exp
);
10989 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
10991 gcc_assert (n_args
>= 1 && n_args
<= 3);
10993 if (target
== NULL_RTX
10994 || GET_MODE (target
) != tmode
10995 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
10997 target
= gen_reg_rtx (tmode
);
11000 for (n
= 0; n
< n_args
; n
++)
11002 tree arg
= CALL_EXPR_ARG (exp
, n
);
11003 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11004 enum machine_mode opmode
= GET_MODE (op
);
11005 enum machine_mode mode
= insn_data
[icode
].operand
[n
+1].mode
;
11007 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
11010 op
= gen_lowpart (HImode
, op
);
11013 /* In case the insn wants input operands in modes different from
11014 the result, abort. */
11016 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
11018 if (!insn_data
[icode
].operand
[n
+1].predicate (op
, mode
))
11019 op
= copy_to_mode_reg (mode
, op
);
11026 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
11027 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
11028 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
11034 if (pat
== NULL_RTX
)
11043 /* Implement `TARGET_EXPAND_BUILTIN'. */
11044 /* Expand an expression EXP that calls a built-in function,
11045 with result going to TARGET if that's convenient
11046 (and in mode MODE if that's convenient).
11047 SUBTARGET may be used as the target for computing one of EXP's operands.
11048 IGNORE is nonzero if the value is to be ignored. */
11051 avr_expand_builtin (tree exp
, rtx target
,
11052 rtx subtarget ATTRIBUTE_UNUSED
,
11053 enum machine_mode mode ATTRIBUTE_UNUSED
,
11054 int ignore ATTRIBUTE_UNUSED
)
11056 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
11057 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
11058 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
11059 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
11063 gcc_assert (id
< AVR_BUILTIN_COUNT
);
11067 case AVR_BUILTIN_NOP
:
11068 emit_insn (gen_nopv (GEN_INT(1)));
11071 case AVR_BUILTIN_DELAY_CYCLES
:
11073 arg0
= CALL_EXPR_ARG (exp
, 0);
11074 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11076 if (!CONST_INT_P (op0
))
11077 error ("%s expects a compile time integer constant", bname
);
11079 avr_expand_delay_cycles (op0
);
11084 case AVR_BUILTIN_INSERT_BITS
:
11086 arg0
= CALL_EXPR_ARG (exp
, 0);
11087 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11089 if (!CONST_INT_P (op0
))
11091 error ("%s expects a compile time long integer constant"
11092 " as first argument", bname
);
11098 /* No special treatment needed: vanilla expand. */
11100 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
11102 if (d
->n_args
== 0)
11104 emit_insn ((GEN_FCN (d
->icode
)) (target
));
11108 return avr_default_expand_builtin (d
->icode
, exp
, target
);
11112 /* Implement `TARGET_FOLD_BUILTIN'. */
11115 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
11116 bool ignore ATTRIBUTE_UNUSED
)
11118 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
11119 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
11129 case AVR_BUILTIN_SWAP
:
11131 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
11132 build_int_cst (val_type
, 4));
11135 case AVR_BUILTIN_INSERT_BITS
:
11137 tree tbits
= arg
[1];
11138 tree tval
= arg
[2];
11140 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
11142 bool changed
= false;
11144 avr_map_op_t best_g
;
11146 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
11148 /* No constant as first argument: Don't fold this and run into
11149 error in avr_expand_builtin. */
11154 map
= tree_to_double_int (arg
[0]);
11155 tmap
= double_int_to_tree (map_type
, map
);
11157 if (TREE_CODE (tval
) != INTEGER_CST
11158 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
11160 /* There are no F in the map, i.e. 3rd operand is unused.
11161 Replace that argument with some constant to render
11162 respective input unused. */
11164 tval
= build_int_cst (val_type
, 0);
11168 if (TREE_CODE (tbits
) != INTEGER_CST
11169 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
11171 /* Similar for the bits to be inserted. If they are unused,
11172 we can just as well pass 0. */
11174 tbits
= build_int_cst (val_type
, 0);
11177 if (TREE_CODE (tbits
) == INTEGER_CST
)
11179 /* Inserting bits known at compile time is easy and can be
11180 performed by AND and OR with appropriate masks. */
11182 int bits
= TREE_INT_CST_LOW (tbits
);
11183 int mask_ior
= 0, mask_and
= 0xff;
11185 for (i
= 0; i
< 8; i
++)
11187 int mi
= avr_map (map
, i
);
11191 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
11192 else mask_and
&= ~(1 << i
);
11196 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
11197 build_int_cst (val_type
, mask_ior
));
11198 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
11199 build_int_cst (val_type
, mask_and
));
11203 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
11205 /* If bits don't change their position we can use vanilla logic
11206 to merge the two arguments. */
11208 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
11210 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
11211 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
11213 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
11214 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
11215 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
11218 /* Try to decomposing map to reduce overall cost. */
11220 if (avr_log
.builtin
)
11221 avr_edump ("\n%?: %X\n%?: ROL cost: ", map
);
11223 best_g
= avr_map_op
[0];
11224 best_g
.cost
= 1000;
11226 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
11229 = avr_map_decompose (map
, avr_map_op
+ i
,
11230 TREE_CODE (tval
) == INTEGER_CST
);
11232 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
11236 if (avr_log
.builtin
)
11239 if (best_g
.arg
== 0)
11240 /* No optimization found */
11243 /* Apply operation G to the 2nd argument. */
11245 if (avr_log
.builtin
)
11246 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
11247 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
11249 /* Do right-shifts arithmetically: They copy the MSB instead of
11250 shifting in a non-usable value (0) as with logic right-shift. */
11252 tbits
= fold_convert (signed_char_type_node
, tbits
);
11253 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
11254 build_int_cst (val_type
, best_g
.arg
));
11255 tbits
= fold_convert (val_type
, tbits
);
11257 /* Use map o G^-1 instead of original map to undo the effect of G. */
11259 tmap
= double_int_to_tree (map_type
, best_g
.map
);
11261 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
11262 } /* AVR_BUILTIN_INSERT_BITS */
11270 /* Initialize the GCC target structure. */
11272 #undef TARGET_ASM_ALIGNED_HI_OP
11273 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
11274 #undef TARGET_ASM_ALIGNED_SI_OP
11275 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
11276 #undef TARGET_ASM_UNALIGNED_HI_OP
11277 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
11278 #undef TARGET_ASM_UNALIGNED_SI_OP
11279 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
11280 #undef TARGET_ASM_INTEGER
11281 #define TARGET_ASM_INTEGER avr_assemble_integer
11282 #undef TARGET_ASM_FILE_START
11283 #define TARGET_ASM_FILE_START avr_file_start
11284 #undef TARGET_ASM_FILE_END
11285 #define TARGET_ASM_FILE_END avr_file_end
11287 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
11288 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
11289 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
11290 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
11292 #undef TARGET_FUNCTION_VALUE
11293 #define TARGET_FUNCTION_VALUE avr_function_value
11294 #undef TARGET_LIBCALL_VALUE
11295 #define TARGET_LIBCALL_VALUE avr_libcall_value
11296 #undef TARGET_FUNCTION_VALUE_REGNO_P
11297 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
11299 #undef TARGET_ATTRIBUTE_TABLE
11300 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
11301 #undef TARGET_INSERT_ATTRIBUTES
11302 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
11303 #undef TARGET_SECTION_TYPE_FLAGS
11304 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
11306 #undef TARGET_ASM_NAMED_SECTION
11307 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
11308 #undef TARGET_ASM_INIT_SECTIONS
11309 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
11310 #undef TARGET_ENCODE_SECTION_INFO
11311 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
11312 #undef TARGET_ASM_SELECT_SECTION
11313 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
11315 #undef TARGET_REGISTER_MOVE_COST
11316 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
11317 #undef TARGET_MEMORY_MOVE_COST
11318 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
11319 #undef TARGET_RTX_COSTS
11320 #define TARGET_RTX_COSTS avr_rtx_costs
11321 #undef TARGET_ADDRESS_COST
11322 #define TARGET_ADDRESS_COST avr_address_cost
11323 #undef TARGET_MACHINE_DEPENDENT_REORG
11324 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
11325 #undef TARGET_FUNCTION_ARG
11326 #define TARGET_FUNCTION_ARG avr_function_arg
11327 #undef TARGET_FUNCTION_ARG_ADVANCE
11328 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
11330 #undef TARGET_SET_CURRENT_FUNCTION
11331 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
11333 #undef TARGET_RETURN_IN_MEMORY
11334 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
11336 #undef TARGET_STRICT_ARGUMENT_NAMING
11337 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
11339 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
11340 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
11342 #undef TARGET_HARD_REGNO_SCRATCH_OK
11343 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
11344 #undef TARGET_CASE_VALUES_THRESHOLD
11345 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
11347 #undef TARGET_FRAME_POINTER_REQUIRED
11348 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
11349 #undef TARGET_CAN_ELIMINATE
11350 #define TARGET_CAN_ELIMINATE avr_can_eliminate
11352 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
11353 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
11355 #undef TARGET_WARN_FUNC_RETURN
11356 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
11358 #undef TARGET_CLASS_LIKELY_SPILLED_P
11359 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
11361 #undef TARGET_OPTION_OVERRIDE
11362 #define TARGET_OPTION_OVERRIDE avr_option_override
11364 #undef TARGET_CANNOT_MODIFY_JUMPS_P
11365 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
11367 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
11368 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
11370 #undef TARGET_INIT_BUILTINS
11371 #define TARGET_INIT_BUILTINS avr_init_builtins
11373 #undef TARGET_BUILTIN_DECL
11374 #define TARGET_BUILTIN_DECL avr_builtin_decl
11376 #undef TARGET_EXPAND_BUILTIN
11377 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
11379 #undef TARGET_FOLD_BUILTIN
11380 #define TARGET_FOLD_BUILTIN avr_fold_builtin
11382 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
11383 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
11385 #undef TARGET_SCALAR_MODE_SUPPORTED_P
11386 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
11388 #undef TARGET_BUILD_BUILTIN_VA_LIST
11389 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
11391 #undef TARGET_FIXED_POINT_SUPPORTED_P
11392 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
11394 #undef TARGET_ADDR_SPACE_SUBSET_P
11395 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
11397 #undef TARGET_ADDR_SPACE_CONVERT
11398 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
11400 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
11401 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
11403 #undef TARGET_ADDR_SPACE_POINTER_MODE
11404 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
11406 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
11407 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
11408 avr_addr_space_legitimate_address_p
11410 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
11411 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
11413 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
11414 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
11416 #undef TARGET_PRINT_OPERAND
11417 #define TARGET_PRINT_OPERAND avr_print_operand
11418 #undef TARGET_PRINT_OPERAND_ADDRESS
11419 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
11420 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
11421 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
11423 struct gcc_target targetm
= TARGET_INITIALIZER
;
11426 #include "gt-avr.h"