1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2014 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
36 #include "print-tree.h"
38 #include "stor-layout.h"
39 #include "stringpool.h"
42 #include "c-family/c-common.h"
43 #include "diagnostic-core.h"
54 #include "langhooks.h"
57 #include "target-def.h"
62 /* Maximal allowed offset for an address in the LD command */
63 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
65 /* Return true if STR starts with PREFIX and false, otherwise. */
/* NOTE(review): STR_PREFIX_P evaluates PREFIX twice (once in strncmp,
   once in strlen) -- safe only for argument expressions without side
   effects.  */
66 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
68 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
69 address space where data is to be located.
70 As the only non-generic address spaces are all located in flash,
71 this can be used to test if data shall go into some .progmem* section.
72 This must be the rightmost field of machine dependent section flags. */
73 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
75 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
76 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   The body must use the macro parameter SYM, not a caller-scope
   identifier `sym`, so the macro works for any argument expression.
   Wrapped in do/while(0) so it behaves as a single statement.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)
/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Uses the macro parameter SYM (not a caller-scope `sym`) so the macro
   works for any argument expression.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
91 /* Known address spaces. The order must be the same as in the respective
92 enum from avr.h (or designated initialized must be used). */
93 const avr_addrspace_t avr_addrspace
[ADDR_SPACE_COUNT
] =
95 { ADDR_SPACE_RAM
, 0, 2, "", 0, NULL
},
96 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0, ".progmem.data" },
97 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1, ".progmem1.data" },
98 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2, ".progmem2.data" },
99 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3, ".progmem3.data" },
100 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4, ".progmem4.data" },
101 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5, ".progmem5.data" },
102 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0, ".progmemx.data" },
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.
   The struct fields are grounded by the assignments in
   avr_option_override (sreg, rampz, rampy, rampx, rampd, ccp,
   sp_l, sp_h).  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

static avr_addr_t avr_addr;
131 /* Prototypes for local helper functions. */
/* out_mov{qi,hi,si}_{r_mr,mr_r} produce assembler text for QI/HI/SI
   moves between registers and memory; the trailing int* is presumably
   a length out-parameter -- TODO confirm against the definitions later
   in this file.  */
133 static const char* out_movqi_r_mr (rtx_insn
*, rtx
[], int*);
134 static const char* out_movhi_r_mr (rtx_insn
*, rtx
[], int*);
135 static const char* out_movsi_r_mr (rtx_insn
*, rtx
[], int*);
136 static const char* out_movqi_mr_r (rtx_insn
*, rtx
[], int*);
137 static const char* out_movhi_mr_r (rtx_insn
*, rtx
[], int*);
138 static const char* out_movsi_mr_r (rtx_insn
*, rtx
[], int*);
140 static int get_sequence_length (rtx_insn
*insns
);
141 static int sequent_regs_live (void);
142 static const char *ptrreg_to_str (int);
143 static const char *cond_string (enum rtx_code
);
144 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
/* NOTE(review): the parameter list of avr_operand_rtx_cost is cut off
   in this extraction (original line 146 is missing).  */
145 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
147 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
148 static struct machine_function
* avr_init_machine_status (void);
151 /* Prototypes for hook implementors if needed before their implementation. */
153 static bool avr_rtx_costs (rtx
, int, int, int, int*, bool);
156 /* Allocate registers from r25 to r8 for parameters for function calls. */
157 #define FIRST_CUM_REG 26
/* The GTY(()) annotations below mark these rtx variables as roots for
   GCC's garbage collector.  NOTE(review): for some of them the
   companion definition line is missing from this extraction (only the
   extern declaration is visible).  */
159 /* Implicit target register of LPM instruction (R0) */
160 extern GTY(()) rtx lpm_reg_rtx
;
163 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
164 extern GTY(()) rtx lpm_addr_reg_rtx
;
165 rtx lpm_addr_reg_rtx
;
167 /* Temporary register RTX (reg:QI TMP_REGNO) */
168 extern GTY(()) rtx tmp_reg_rtx
;
171 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
172 extern GTY(()) rtx zero_reg_rtx
;
175 /* RTXs for all general purpose registers as QImode */
176 extern GTY(()) rtx all_regs_rtx
[32];
177 rtx all_regs_rtx
[32];
179 /* SREG, the processor status */
180 extern GTY(()) rtx sreg_rtx
;
183 /* RAMP* special function registers */
184 extern GTY(()) rtx rampd_rtx
;
185 extern GTY(()) rtx rampx_rtx
;
186 extern GTY(()) rtx rampy_rtx
;
187 extern GTY(()) rtx rampz_rtx
;
193 /* RTX containing the strings "" and "e", respectively */
194 static GTY(()) rtx xstring_empty
;
195 static GTY(()) rtx xstring_e
;
197 /* Current architecture. */
198 const avr_arch_t
*avr_current_arch
;
200 /* Current device. */
201 const avr_mcu_t
*avr_current_device
;
203 /* Section to put switch tables in. */
204 static GTY(()) section
*progmem_swtable_section
;
206 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
207 or to address space __flash* or __memx. Only used as singletons inside
208 avr_asm_select_section, but it must not be local there because of GTY. */
209 static GTY(()) section
*progmem_section
[ADDR_SPACE_COUNT
];
211 /* Condition for insns/expanders from avr-dimode.md. */
212 bool avr_have_dimode
= true;
214 /* To track if code will use .bss and/or .data. */
215 bool avr_need_clear_bss_p
= false;
216 bool avr_need_copy_data_p
= false;
/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO.  Return LO.  */

char*
avr_tolower (char *lo, const char *up)
{
  char *lo0 = lo;

  /* TOLOWER is GCC's locale-independent ctype macro (safe-ctype.h,
     included via system.h).  */
  for (; *up; up++, lo++)
    *lo = TOLOWER (*up);

  *lo = '\0';

  return lo0;
}
/* Custom function to count number of set bits in VAL.
   Uses Kernighan's trick: val &= val-1 clears the lowest set bit,
   so the loop runs once per set bit.  */

static int
avr_popcount (unsigned int val)
{
  int pop = 0;

  while (val)
    {
      val &= val-1;
      pop++;
    }

  return pop;
}
253 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
254 Return true if the least significant N_BYTES bytes of XVAL all have a
255 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
256 of integers which contains an integer N iff bit N of POP_MASK is set. */
259 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
263 enum machine_mode mode
= GET_MODE (xval
);
265 if (VOIDmode
== mode
)
268 for (i
= 0; i
< n_bytes
; i
++)
270 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
271 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
273 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
281 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
282 the bit representation of X by "casting" it to CONST_INT. */
285 avr_to_int_mode (rtx x
)
287 enum machine_mode mode
= GET_MODE (x
);
289 return VOIDmode
== mode
291 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
295 /* Implement `TARGET_OPTION_OVERRIDE'. */
/* Per-invocation option fixups: disable options unsuitable for AVR,
   pick the current device/architecture and compute SFR RAM addresses.
   NOTE(review): this extraction drops several original lines (function
   signature, braces, and the guards around the PIC/PIE warnings); the
   code tokens below are preserved verbatim.  */
298 avr_option_override (void)
300 /* Disable -fdelete-null-pointer-checks option for AVR target.
301 This option compiler assumes that dereferencing of a null pointer
302 would halt the program. For AVR this assumption is not true and
303 programs can safely dereference null pointers. Changes made by this
304 option may not work properly for AVR. So disable this option. */
306 flag_delete_null_pointer_checks
= 0;
308 /* caller-save.c looks for call-clobbered hard registers that are assigned
309 to pseudos that cross calls and tries so save-restore them around calls
310 in order to reduce the number of stack slots needed.
312 This might lead to situations where reload is no more able to cope
313 with the challenge of AVR's very few address registers and fails to
314 perform the requested spills. */
317 flag_caller_saves
= 0;
319 /* Unwind tables currently require a frame pointer for correctness,
320 see toplev.c:process_options(). */
322 if ((flag_unwind_tables
323 || flag_non_call_exceptions
324 || flag_asynchronous_unwind_tables
)
325 && !ACCUMULATE_OUTGOING_ARGS
)
327 flag_omit_frame_pointer
= 0;
/* PIC/PIE are unsupported on AVR; the enclosing conditions for these
   warnings (presumably checks of flag_pic/flag_pie) are missing from
   this extraction -- TODO confirm against upstream.  */
331 warning (OPT_fpic
, "-fpic is not supported");
333 warning (OPT_fPIC
, "-fPIC is not supported");
335 warning (OPT_fpie
, "-fpie is not supported");
337 warning (OPT_fPIE
, "-fPIE is not supported");
339 /* Search for mcu arch.
340 ??? We should probably just put the architecture-default device
341 settings in the architecture struct and remove any notion of a current
344 for (avr_current_device
= avr_mcu_types
; ; avr_current_device
++)
346 if (!avr_current_device
->name
)
347 fatal_error ("mcu not found");
348 if (!avr_current_device
->macro
349 && avr_current_device
->arch
== avr_arch_index
)
353 avr_current_arch
= &avr_arch_types
[avr_arch_index
];
355 avr_n_flash
= avr_current_device
->n_flash
;
357 /* RAM addresses of some SFRs common to all devices in respective arch. */
359 /* SREG: Status Register containing flags like I (global IRQ) */
360 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
362 /* RAMPZ: Address' high part when loading via ELPM */
363 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
365 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
366 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
367 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
368 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
370 /* SP: Stack Pointer (SP_H:SP_L) */
371 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
372 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
/* Install the per-function machine_function allocator.  */
374 init_machine_status
= avr_init_machine_status
;
376 avr_log_set_avr_log();
379 /* Function to set up the backend function structure. */
381 static struct machine_function
*
382 avr_init_machine_status (void)
384 return ggc_cleared_alloc
<machine_function
> ();
388 /* Implement `INIT_EXPANDERS'. */
389 /* The function works like a singleton. */
392 avr_init_expanders (void)
396 for (regno
= 0; regno
< 32; regno
++)
397 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
399 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
400 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
401 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
403 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
405 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
406 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
407 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
408 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
409 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
411 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
412 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
416 /* Implement `REGNO_REG_CLASS'. */
417 /* Return register class for register R. */
420 avr_regno_reg_class (int r
)
422 static const enum reg_class reg_class_tab
[] =
426 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
427 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
428 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
429 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
431 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
432 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
434 ADDW_REGS
, ADDW_REGS
,
436 POINTER_X_REGS
, POINTER_X_REGS
,
438 POINTER_Y_REGS
, POINTER_Y_REGS
,
440 POINTER_Z_REGS
, POINTER_Z_REGS
,
446 return reg_class_tab
[r
];
452 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
455 avr_scalar_mode_supported_p (enum machine_mode mode
)
457 if (ALL_FIXED_POINT_MODE_P (mode
))
463 return default_scalar_mode_supported_p (mode
);
467 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
470 avr_decl_flash_p (tree decl
)
472 if (TREE_CODE (decl
) != VAR_DECL
473 || TREE_TYPE (decl
) == error_mark_node
)
478 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
482 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
483 address space and FALSE, otherwise. */
486 avr_decl_memx_p (tree decl
)
488 if (TREE_CODE (decl
) != VAR_DECL
489 || TREE_TYPE (decl
) == error_mark_node
)
494 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
498 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
501 avr_mem_flash_p (rtx x
)
504 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
508 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
509 address space and FALSE, otherwise. */
512 avr_mem_memx_p (rtx x
)
515 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
519 /* A helper for the subsequent function attribute used to dig for
520 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
523 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
525 if (FUNCTION_DECL
== TREE_CODE (func
))
527 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
532 func
= TREE_TYPE (func
);
535 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
536 || TREE_CODE (func
) == METHOD_TYPE
);
538 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
541 /* Return nonzero if FUNC is a naked function. */
544 avr_naked_function_p (tree func
)
546 return avr_lookup_function_attribute1 (func
, "naked");
549 /* Return nonzero if FUNC is an interrupt function as specified
550 by the "interrupt" attribute. */
553 avr_interrupt_function_p (tree func
)
555 return avr_lookup_function_attribute1 (func
, "interrupt");
558 /* Return nonzero if FUNC is a signal function as specified
559 by the "signal" attribute. */
562 avr_signal_function_p (tree func
)
564 return avr_lookup_function_attribute1 (func
, "signal");
567 /* Return nonzero if FUNC is an OS_task function. */
570 avr_OS_task_function_p (tree func
)
572 return avr_lookup_function_attribute1 (func
, "OS_task");
575 /* Return nonzero if FUNC is an OS_main function. */
578 avr_OS_main_function_p (tree func
)
580 return avr_lookup_function_attribute1 (func
, "OS_main");
584 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
585 /* Sanity cheching for above function attributes. */
/* Caches the function attributes (naked/signal/interrupt/OS_task/
   OS_main) in cfun->machine and diagnoses conflicting or misspelled
   uses, once per function (guarded by attributes_checked_p).
   NOTE(review): this extraction is missing the signature line, the
   declarations of `loc`, `isr` and `name`, braces and the early
   return -- the code tokens below are preserved verbatim.  */
588 avr_set_current_function (tree decl
)
593 if (decl
== NULL_TREE
594 || current_function_decl
== NULL_TREE
595 || current_function_decl
== error_mark_node
597 || cfun
->machine
->attributes_checked_p
)
600 loc
= DECL_SOURCE_LOCATION (decl
);
602 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
603 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
604 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
605 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
606 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
608 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
610 /* Too much attributes make no sense as they request conflicting features. */
612 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
613 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
614 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
615 " exclusive", "OS_task", "OS_main", isr
);
617 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
619 if (cfun
->machine
->is_naked
620 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
621 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
622 " no effect on %qs function", "OS_task", "OS_main", "naked");
624 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
626 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
627 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
630 name
= DECL_ASSEMBLER_NAME_SET_P (decl
)
631 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))
632 : IDENTIFIER_POINTER (DECL_NAME (decl
));
634 /* Skip a leading '*' that might still prefix the assembler name,
635 e.g. in non-LTO runs. */
637 name
= default_strip_name_encoding (name
);
639 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
640 using this when it switched from SIGNAL and INTERRUPT to ISR. */
642 if (cfun
->machine
->is_interrupt
)
643 cfun
->machine
->is_signal
= 0;
645 /* Interrupt handlers must be void __vector (void) functions. */
647 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
648 error_at (loc
, "%qs function cannot have arguments", isr
);
650 if (TREE_CODE (ret
) != VOID_TYPE
)
651 error_at (loc
, "%qs function cannot return a value", isr
);
653 /* If the function has the 'signal' or 'interrupt' attribute, ensure
654 that the name of the function is "__vector_NN" so as to catch
655 when the user misspells the vector name. */
/* NOTE(review): the trailing arguments of this warning_at call
   (original lines 659-660) are missing from this extraction.  */
657 if (!STR_PREFIX_P (name
, "__vector"))
658 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
662 /* Don't print the above diagnostics more than once. */
664 cfun
->machine
->attributes_checked_p
= 1;
668 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
671 avr_accumulate_outgoing_args (void)
674 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
676 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
677 what offset is correct. In some cases it is relative to
678 virtual_outgoing_args_rtx and in others it is relative to
679 virtual_stack_vars_rtx. For example code see
680 gcc.c-torture/execute/built-in-setjmp.c
681 gcc.c-torture/execute/builtins/sprintf-chk.c */
683 return (TARGET_ACCUMULATE_OUTGOING_ARGS
684 && !(cfun
->calls_setjmp
685 || cfun
->has_nonlocal_label
));
689 /* Report contribution of accumulated outgoing arguments to stack size. */
692 avr_outgoing_args_size (void)
694 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
698 /* Implement `STARTING_FRAME_OFFSET'. */
699 /* This is the offset from the frame pointer register to the first stack slot
700 that contains a variable living in the frame. */
703 avr_starting_frame_offset (void)
705 return 1 + avr_outgoing_args_size ();
709 /* Return the number of hard registers to push/pop in the prologue/epilogue
710 of the current function, and optionally store these registers in SET. */
713 avr_regs_to_save (HARD_REG_SET
*set
)
716 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
719 CLEAR_HARD_REG_SET (*set
);
722 /* No need to save any registers if the function never returns or
723 has the "OS_task" or "OS_main" attribute. */
725 if (TREE_THIS_VOLATILE (current_function_decl
)
726 || cfun
->machine
->is_OS_task
727 || cfun
->machine
->is_OS_main
)
730 for (reg
= 0; reg
< 32; reg
++)
732 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
733 any global register variables. */
738 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
739 || (df_regs_ever_live_p (reg
)
740 && (int_or_sig_p
|| !call_used_regs
[reg
])
741 /* Don't record frame pointer registers here. They are treated
742 indivitually in prologue. */
743 && !(frame_pointer_needed
744 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
747 SET_HARD_REG_BIT (*set
, reg
);
755 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
758 avr_allocate_stack_slots_for_args (void)
760 return !cfun
->machine
->is_naked
;
764 /* Return true if register FROM can be eliminated via register TO. */
767 avr_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
769 return ((frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
770 || !frame_pointer_needed
);
774 /* Implement `TARGET_WARN_FUNC_RETURN'. */
777 avr_warn_func_return (tree decl
)
779 /* Naked functions are implemented entirely in assembly, including the
780 return sequence, so suppress warnings about this. */
782 return !avr_naked_function_p (decl
);
785 /* Compute offset between arg_pointer and frame_pointer. */
788 avr_initial_elimination_offset (int from
, int to
)
790 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
794 int offset
= frame_pointer_needed
? 2 : 0;
795 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
797 offset
+= avr_regs_to_save (NULL
);
798 return (get_frame_size () + avr_outgoing_args_size()
799 + avr_pc_size
+ 1 + offset
);
804 /* Helper for the function below. */
807 avr_adjust_type_node (tree
*node
, enum machine_mode mode
, int sat_p
)
809 *node
= make_node (FIXED_POINT_TYPE
);
810 TYPE_SATURATING (*node
) = sat_p
;
811 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
812 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
813 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
814 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
815 TYPE_ALIGN (*node
) = 8;
816 SET_TYPE_MODE (*node
, mode
);
822 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
825 avr_build_builtin_va_list (void)
827 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
828 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
829 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
830 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
831 to the long long accum modes instead of the desired [U]TAmode.
833 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
834 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
835 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
836 libgcc to detect IBIT and FBIT. */
838 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
839 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
840 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
841 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
843 unsigned_long_long_accum_type_node
= uta_type_node
;
844 long_long_accum_type_node
= ta_type_node
;
845 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
846 sat_long_long_accum_type_node
= sat_ta_type_node
;
848 /* Dispatch to the default handler. */
850 return std_build_builtin_va_list ();
854 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
855 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
856 frame pointer by +STARTING_FRAME_OFFSET.
857 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
858 avoids creating add/sub of offset in nonlocal goto and setjmp. */
861 avr_builtin_setjmp_frame_value (void)
863 rtx xval
= gen_reg_rtx (Pmode
);
864 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
865 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
870 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
871 This is return address of function. */
874 avr_return_addr_rtx (int count
, rtx tem
)
878 /* Can only return this function's return address. Others not supported. */
884 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
885 warning (0, "%<builtin_return_address%> contains only 2 bytes"
889 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
891 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
892 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
893 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
897 /* Return 1 if the function epilogue is just a single "ret". */
900 avr_simple_epilogue (void)
902 return (! frame_pointer_needed
903 && get_frame_size () == 0
904 && avr_outgoing_args_size() == 0
905 && avr_regs_to_save (NULL
) == 0
906 && ! cfun
->machine
->is_interrupt
907 && ! cfun
->machine
->is_signal
908 && ! cfun
->machine
->is_naked
909 && ! TREE_THIS_VOLATILE (current_function_decl
));
912 /* This function checks sequence of live registers. */
/* Returns live_seq when the live registers form one contiguous
   sequence (cur_seq == live_seq, see the return below) and 0
   otherwise; used to decide whether the compact call-prologue-saves
   path is applicable.  NOTE(review): most of the body (declarations of
   reg/live_seq/cur_seq, the counting statements and braces) is missing
   from this extraction; code tokens below are preserved verbatim.  */
915 sequent_regs_live (void)
921 for (reg
= 0; reg
< 18; ++reg
)
925 /* Don't recognize sequences that contain global register
934 if (!call_used_regs
[reg
])
936 if (df_regs_ever_live_p (reg
))
946 if (!frame_pointer_needed
)
948 if (df_regs_ever_live_p (REG_Y
))
956 if (df_regs_ever_live_p (REG_Y
+1))
969 return (cur_seq
== live_seq
) ? live_seq
: 0;
972 /* Obtain the length sequence of insns. */
975 get_sequence_length (rtx_insn
*insns
)
980 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
981 length
+= get_attr_length (insn
);
987 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
990 avr_incoming_return_addr_rtx (void)
992 /* The return address is at the top of the stack. Note that the push
993 was via post-decrement, which means the actual address is off by one. */
994 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
997 /* Helper for expand_prologue. Emit a push of a byte register. */
1000 emit_push_byte (unsigned regno
, bool frame_related_p
)
1005 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
1006 mem
= gen_frame_mem (QImode
, mem
);
1007 reg
= gen_rtx_REG (QImode
, regno
);
1009 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
1010 if (frame_related_p
)
1011 RTX_FRAME_RELATED_P (insn
) = 1;
1013 cfun
->machine
->stack_usage
++;
1017 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
1018 SFR is a MEM representing the memory location of the SFR.
1019 If CLR_P then clear the SFR after the push using zero_reg. */
1022 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
1026 gcc_assert (MEM_P (sfr
));
1028 /* IN __tmp_reg__, IO(SFR) */
1029 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
1030 if (frame_related_p
)
1031 RTX_FRAME_RELATED_P (insn
) = 1;
1033 /* PUSH __tmp_reg__ */
1034 emit_push_byte (TMP_REGNO
, frame_related_p
);
1038 /* OUT IO(SFR), __zero_reg__ */
1039 insn
= emit_move_insn (sfr
, const0_rtx
);
1040 if (frame_related_p
)
1041 RTX_FRAME_RELATED_P (insn
) = 1;
/* Helper for avr_expand_prologue:  Save the live registers and set up
   the stack frame of SIZE bytes, either via the compact
   call-prologue-saves sequence (`minimize` path) or by explicit pushes
   plus frame-pointer / stack-pointer adjustment, whichever is shorter.
   NOTE(review): this region is heavily truncated in this extraction
   (return type, many declarations, braces and several expressions are
   missing); all code tokens below are preserved verbatim.  */
1046 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
1049 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1050 int live_seq
= sequent_regs_live ();
1052 HOST_WIDE_INT size_max
1053 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1055 bool minimize
= (TARGET_CALL_PROLOGUES
1059 && !cfun
->machine
->is_OS_task
1060 && !cfun
->machine
->is_OS_main
);
/* NOTE(review): the following condition fragment belongs to an `if`
   whose head is missing from this extraction.  */
1063 && (frame_pointer_needed
1064 || avr_outgoing_args_size() > 8
1065 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1069 int first_reg
, reg
, offset
;
1071 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1072 gen_int_mode (size
, HImode
));
1074 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1075 gen_int_mode (live_seq
+size
, HImode
));
1076 insn
= emit_insn (pattern
);
1077 RTX_FRAME_RELATED_P (insn
) = 1;
1079 /* Describe the effect of the unspec_volatile call to prologue_saves.
1080 Note that this formulation assumes that add_reg_note pushes the
1081 notes to the front. Thus we build them in the reverse order of
1082 how we want dwarf2out to process them. */
1084 /* The function does always set frame_pointer_rtx, but whether that
1085 is going to be permanent in the function is frame_pointer_needed. */
1087 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1088 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
1090 : stack_pointer_rtx
),
1091 plus_constant (Pmode
, stack_pointer_rtx
,
1092 -(size
+ live_seq
))));
1094 /* Note that live_seq always contains r28+r29, but the other
1095 registers to be saved are all below 18. */
1097 first_reg
= 18 - (live_seq
- 2);
1099 for (reg
= 29, offset
= -live_seq
+ 1;
1101 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1105 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1107 r
= gen_rtx_REG (QImode
, reg
);
1108 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1111 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1113 else /* !minimize */
1117 for (reg
= 0; reg
< 32; ++reg
)
1118 if (TEST_HARD_REG_BIT (set
, reg
))
1119 emit_push_byte (reg
, true);
1121 if (frame_pointer_needed
1122 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1124 /* Push frame pointer. Always be consistent about the
1125 ordering of pushes -- epilogue_restores expects the
1126 register pair to be pushed low byte first. */
1128 emit_push_byte (REG_Y
, true);
1129 emit_push_byte (REG_Y
+ 1, true);
1132 if (frame_pointer_needed
1135 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1136 RTX_FRAME_RELATED_P (insn
) = 1;
1141 /* Creating a frame can be done by direct manipulation of the
1142 stack or via the frame pointer. These two methods are:
1149 the optimum method depends on function type, stack and
1150 frame size. To avoid a complex logic, both methods are
1151 tested and shortest is selected.
1153 There is also the case where SIZE != 0 and no frame pointer is
1154 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1155 In that case, insn (*) is not needed in that case.
1156 We use the X register as scratch. This is save because in X
1158 In an interrupt routine, the case of SIZE != 0 together with
1159 !frame_pointer_needed can only occur if the function is not a
1160 leaf function and thus X has already been saved. */
1163 HOST_WIDE_INT size_cfa
= size
, neg_size
;
1164 rtx_insn
*fp_plus_insns
;
1167 gcc_assert (frame_pointer_needed
1171 fp
= my_fp
= (frame_pointer_needed
1173 : gen_rtx_REG (Pmode
, REG_X
));
1175 if (AVR_HAVE_8BIT_SP
)
1177 /* The high byte (r29) does not change:
1178 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1180 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1183 /* Cut down size and avoid size = 0 so that we don't run
1184 into ICE like PR52488 in the remainder. */
1186 if (size
> size_max
)
1188 /* Don't error so that insane code from newlib still compiles
1189 and does not break building newlib. As PR51345 is implemented
1190 now, there are multilib variants with -msp8.
1192 If user wants sanity checks he can use -Wstack-usage=
1195 For CFA we emit the original, non-saturated size so that
1196 the generic machinery is aware of the real stack usage and
1197 will print the above diagnostic as expected. */
1202 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1203 neg_size
= trunc_int_for_mode (-size
, GET_MODE (my_fp
));
1205 /************ Method 1: Adjust frame pointer ************/
1209 /* Normally, the dwarf2out frame-related-expr interpreter does
1210 not expect to have the CFA change once the frame pointer is
1211 set up. Thus, we avoid marking the move insn below and
1212 instead indicate that the entire operation is complete after
1213 the frame pointer subtraction is done. */
1215 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1216 if (frame_pointer_needed
)
1218 RTX_FRAME_RELATED_P (insn
) = 1;
1219 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1220 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1223 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1226 if (frame_pointer_needed
)
1228 RTX_FRAME_RELATED_P (insn
) = 1;
1229 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1230 gen_rtx_SET (VOIDmode
, fp
,
1231 plus_constant (Pmode
, fp
,
1235 /* Copy to stack pointer. Note that since we've already
1236 changed the CFA to the frame pointer this operation
1237 need not be annotated if frame pointer is needed.
1238 Always move through unspec, see PR50063.
1239 For meaning of irq_state see movhi_sp_r insn. */
1241 if (cfun
->machine
->is_interrupt
)
1244 if (TARGET_NO_INTERRUPTS
1245 || cfun
->machine
->is_signal
1246 || cfun
->machine
->is_OS_main
)
1249 if (AVR_HAVE_8BIT_SP
)
1252 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1253 fp
, GEN_INT (irq_state
)));
1254 if (!frame_pointer_needed
)
1256 RTX_FRAME_RELATED_P (insn
) = 1;
1257 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1258 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1259 plus_constant (Pmode
,
1264 fp_plus_insns
= get_insns ();
1267 /************ Method 2: Adjust Stack pointer ************/
1269 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1270 can only handle specific offsets. */
1272 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1274 rtx_insn
*sp_plus_insns
;
1278 insn
= emit_move_insn (stack_pointer_rtx
,
1279 plus_constant (Pmode
, stack_pointer_rtx
,
1281 RTX_FRAME_RELATED_P (insn
) = 1;
1282 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1283 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1284 plus_constant (Pmode
,
1287 if (frame_pointer_needed
)
1289 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1290 RTX_FRAME_RELATED_P (insn
) = 1;
1293 sp_plus_insns
= get_insns ();
1296 /************ Use shortest method ************/
1298 emit_insn (get_sequence_length (sp_plus_insns
)
1299 < get_sequence_length (fp_plus_insns
)
1305 emit_insn (fp_plus_insns
);
1308 cfun
->machine
->stack_usage
+= size_cfa
;
1309 } /* !minimize && size != 0 */
1314 /* Output function prologue. */
1317 avr_expand_prologue (void)
1322 size
= get_frame_size() + avr_outgoing_args_size();
1324 cfun
->machine
->stack_usage
= 0;
1326 /* Prologue: naked. */
1327 if (cfun
->machine
->is_naked
)
1332 avr_regs_to_save (&set
);
1334 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1336 /* Enable interrupts. */
1337 if (cfun
->machine
->is_interrupt
)
1338 emit_insn (gen_enable_interrupt ());
1340 /* Push zero reg. */
1341 emit_push_byte (ZERO_REGNO
, true);
1344 emit_push_byte (TMP_REGNO
, true);
1347 /* ??? There's no dwarf2 column reserved for SREG. */
1348 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1350 /* Clear zero reg. */
1351 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1353 /* Prevent any attempt to delete the setting of ZERO_REG! */
1354 emit_use (zero_reg_rtx
);
1356 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1357 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1360 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1363 && TEST_HARD_REG_BIT (set
, REG_X
)
1364 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1366 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1370 && (frame_pointer_needed
1371 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1372 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1374 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1378 && TEST_HARD_REG_BIT (set
, REG_Z
)
1379 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1381 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1383 } /* is_interrupt is_signal */
1385 avr_prologue_setup_frame (size
, set
);
1387 if (flag_stack_usage_info
)
1388 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1392 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1393 /* Output summary at end of function prologue. */
1396 avr_asm_function_end_prologue (FILE *file
)
1398 if (cfun
->machine
->is_naked
)
1400 fputs ("/* prologue: naked */\n", file
);
1404 if (cfun
->machine
->is_interrupt
)
1406 fputs ("/* prologue: Interrupt */\n", file
);
1408 else if (cfun
->machine
->is_signal
)
1410 fputs ("/* prologue: Signal */\n", file
);
1413 fputs ("/* prologue: function */\n", file
);
1416 if (ACCUMULATE_OUTGOING_ARGS
)
1417 fprintf (file
, "/* outgoing args size = %d */\n",
1418 avr_outgoing_args_size());
1420 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1422 fprintf (file
, "/* stack size = %d */\n",
1423 cfun
->machine
->stack_usage
);
1424 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1425 usage for offset so that SP + .L__stack_offset = return address. */
1426 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1430 /* Implement `EPILOGUE_USES'. */
1433 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1435 if (reload_completed
1437 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1442 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1445 emit_pop_byte (unsigned regno
)
1449 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1450 mem
= gen_frame_mem (QImode
, mem
);
1451 reg
= gen_rtx_REG (QImode
, regno
);
1453 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1456 /* Output RTL epilogue. */
1459 avr_expand_epilogue (bool sibcall_p
)
1466 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1468 size
= get_frame_size() + avr_outgoing_args_size();
1470 /* epilogue: naked */
1471 if (cfun
->machine
->is_naked
)
1473 gcc_assert (!sibcall_p
);
1475 emit_jump_insn (gen_return ());
1479 avr_regs_to_save (&set
);
1480 live_seq
= sequent_regs_live ();
1482 minimize
= (TARGET_CALL_PROLOGUES
1485 && !cfun
->machine
->is_OS_task
1486 && !cfun
->machine
->is_OS_main
);
1490 || frame_pointer_needed
1493 /* Get rid of frame. */
1495 if (!frame_pointer_needed
)
1497 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1502 emit_move_insn (frame_pointer_rtx
,
1503 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1506 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1512 /* Try two methods to adjust stack and select shortest. */
1516 rtx_insn
*fp_plus_insns
;
1517 HOST_WIDE_INT size_max
;
1519 gcc_assert (frame_pointer_needed
1523 fp
= my_fp
= (frame_pointer_needed
1525 : gen_rtx_REG (Pmode
, REG_X
));
1527 if (AVR_HAVE_8BIT_SP
)
1529 /* The high byte (r29) does not change:
1530 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1532 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1535 /* For rationale see comment in prologue generation. */
1537 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1538 if (size
> size_max
)
1540 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1542 /********** Method 1: Adjust fp register **********/
1546 if (!frame_pointer_needed
)
1547 emit_move_insn (fp
, stack_pointer_rtx
);
1549 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1551 /* Copy to stack pointer. */
1553 if (TARGET_NO_INTERRUPTS
)
1556 if (AVR_HAVE_8BIT_SP
)
1559 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1560 GEN_INT (irq_state
)));
1562 fp_plus_insns
= get_insns ();
1565 /********** Method 2: Adjust Stack pointer **********/
1567 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1569 rtx_insn
*sp_plus_insns
;
1573 emit_move_insn (stack_pointer_rtx
,
1574 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1576 sp_plus_insns
= get_insns ();
1579 /************ Use shortest method ************/
1581 emit_insn (get_sequence_length (sp_plus_insns
)
1582 < get_sequence_length (fp_plus_insns
)
1587 emit_insn (fp_plus_insns
);
1590 if (frame_pointer_needed
1591 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1593 /* Restore previous frame_pointer. See avr_expand_prologue for
1594 rationale for not using pophi. */
1596 emit_pop_byte (REG_Y
+ 1);
1597 emit_pop_byte (REG_Y
);
1600 /* Restore used registers. */
1602 for (reg
= 31; reg
>= 0; --reg
)
1603 if (TEST_HARD_REG_BIT (set
, reg
))
1604 emit_pop_byte (reg
);
1608 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1609 The conditions to restore them must be the same as in prologue. */
1612 && TEST_HARD_REG_BIT (set
, REG_Z
)
1613 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1615 emit_pop_byte (TMP_REGNO
);
1616 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1620 && (frame_pointer_needed
1621 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1622 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1624 emit_pop_byte (TMP_REGNO
);
1625 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1629 && TEST_HARD_REG_BIT (set
, REG_X
)
1630 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1632 emit_pop_byte (TMP_REGNO
);
1633 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1638 emit_pop_byte (TMP_REGNO
);
1639 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1642 /* Restore SREG using tmp_reg as scratch. */
1644 emit_pop_byte (TMP_REGNO
);
1645 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1647 /* Restore tmp REG. */
1648 emit_pop_byte (TMP_REGNO
);
1650 /* Restore zero REG. */
1651 emit_pop_byte (ZERO_REGNO
);
1655 emit_jump_insn (gen_return ());
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
/* Mark the start of the epilogue in the assembler output.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1668 /* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'. */
1671 avr_cannot_modify_jumps_p (void)
1674 /* Naked Functions must not have any instructions after
1675 their epilogue, see PR42240 */
1677 if (reload_completed
1679 && cfun
->machine
->is_naked
)
1688 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1691 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
, addr_space_t as
)
1693 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1694 This hook just serves to hack around PR rtl-optimization/52543 by
1695 claiming that non-generic addresses were mode-dependent so that
1696 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1697 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1698 generic address space which is not true. */
1700 return !ADDR_SPACE_GENERIC_P (as
);
1704 /* Helper function for `avr_legitimate_address_p'. */
1707 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1708 RTX_CODE outer_code
, bool strict
)
1711 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1712 as
, outer_code
, UNKNOWN
)
1714 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1718 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1719 machine for a memory operand of mode MODE. */
1722 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1724 bool ok
= CONSTANT_ADDRESS_P (x
);
1726 switch (GET_CODE (x
))
1729 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1733 && GET_MODE_SIZE (mode
) > 4
1734 && REG_X
== REGNO (x
))
1742 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1743 GET_CODE (x
), strict
);
1748 rtx reg
= XEXP (x
, 0);
1749 rtx op1
= XEXP (x
, 1);
1752 && CONST_INT_P (op1
)
1753 && INTVAL (op1
) >= 0)
1755 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1760 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1763 if (reg
== frame_pointer_rtx
1764 || reg
== arg_pointer_rtx
)
1769 else if (frame_pointer_needed
1770 && reg
== frame_pointer_rtx
)
1782 if (avr_log
.legitimate_address_p
)
1784 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1785 "reload_completed=%d reload_in_progress=%d %s:",
1786 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1787 reg_renumber
? "(reg_renumber)" : "");
1789 if (GET_CODE (x
) == PLUS
1790 && REG_P (XEXP (x
, 0))
1791 && CONST_INT_P (XEXP (x
, 1))
1792 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1795 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1796 true_regnum (XEXP (x
, 0)));
1799 avr_edump ("\n%r\n", x
);
1806 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1807 now only a helper for avr_addr_space_legitimize_address. */
1808 /* Attempts to replace X with a valid
1809 memory address for an operand of mode MODE */
1812 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1814 bool big_offset_p
= false;
1818 if (GET_CODE (oldx
) == PLUS
1819 && REG_P (XEXP (oldx
, 0)))
1821 if (REG_P (XEXP (oldx
, 1)))
1822 x
= force_reg (GET_MODE (oldx
), oldx
);
1823 else if (CONST_INT_P (XEXP (oldx
, 1)))
1825 int offs
= INTVAL (XEXP (oldx
, 1));
1826 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1827 && offs
> MAX_LD_OFFSET (mode
))
1829 big_offset_p
= true;
1830 x
= force_reg (GET_MODE (oldx
), oldx
);
1835 if (avr_log
.legitimize_address
)
1837 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1840 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1847 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1848 /* This will allow register R26/27 to be used where it is no worse than normal
1849 base pointers R28/29 or R30/31. For example, if base offset is greater
1850 than 63 bytes or for R++ or --R addressing. */
1853 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1854 int opnum
, int type
, int addr_type
,
1855 int ind_levels ATTRIBUTE_UNUSED
,
1856 rtx (*mk_memloc
)(rtx
,int))
1860 if (avr_log
.legitimize_reload_address
)
1861 avr_edump ("\n%?:%m %r\n", mode
, x
);
1863 if (1 && (GET_CODE (x
) == POST_INC
1864 || GET_CODE (x
) == PRE_DEC
))
1866 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1867 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1868 opnum
, RELOAD_OTHER
);
1870 if (avr_log
.legitimize_reload_address
)
1871 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1872 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1877 if (GET_CODE (x
) == PLUS
1878 && REG_P (XEXP (x
, 0))
1879 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1880 && CONST_INT_P (XEXP (x
, 1))
1881 && INTVAL (XEXP (x
, 1)) >= 1)
1883 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1887 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1889 int regno
= REGNO (XEXP (x
, 0));
1890 rtx mem
= mk_memloc (x
, regno
);
1892 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1893 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1894 1, (enum reload_type
) addr_type
);
1896 if (avr_log
.legitimize_reload_address
)
1897 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1898 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1900 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1901 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1902 opnum
, (enum reload_type
) type
);
1904 if (avr_log
.legitimize_reload_address
)
1905 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1906 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1911 else if (! (frame_pointer_needed
1912 && XEXP (x
, 0) == frame_pointer_rtx
))
1914 push_reload (x
, NULL_RTX
, px
, NULL
,
1915 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1916 opnum
, (enum reload_type
) type
);
1918 if (avr_log
.legitimize_reload_address
)
1919 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1920 POINTER_REGS
, x
, NULL_RTX
);
1930 /* Implement `TARGET_SECONDARY_RELOAD' */
1933 avr_secondary_reload (bool in_p
, rtx x
,
1934 reg_class_t reload_class ATTRIBUTE_UNUSED
,
1935 enum machine_mode mode
, secondary_reload_info
*sri
)
1939 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1940 && ADDR_SPACE_MEMX
!= MEM_ADDR_SPACE (x
))
1942 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1949 case QImode
: sri
->icode
= CODE_FOR_reload_inqi
; break;
1950 case QQmode
: sri
->icode
= CODE_FOR_reload_inqq
; break;
1951 case UQQmode
: sri
->icode
= CODE_FOR_reload_inuqq
; break;
1953 case HImode
: sri
->icode
= CODE_FOR_reload_inhi
; break;
1954 case HQmode
: sri
->icode
= CODE_FOR_reload_inhq
; break;
1955 case HAmode
: sri
->icode
= CODE_FOR_reload_inha
; break;
1956 case UHQmode
: sri
->icode
= CODE_FOR_reload_inuhq
; break;
1957 case UHAmode
: sri
->icode
= CODE_FOR_reload_inuha
; break;
1959 case PSImode
: sri
->icode
= CODE_FOR_reload_inpsi
; break;
1961 case SImode
: sri
->icode
= CODE_FOR_reload_insi
; break;
1962 case SFmode
: sri
->icode
= CODE_FOR_reload_insf
; break;
1963 case SQmode
: sri
->icode
= CODE_FOR_reload_insq
; break;
1964 case SAmode
: sri
->icode
= CODE_FOR_reload_insa
; break;
1965 case USQmode
: sri
->icode
= CODE_FOR_reload_inusq
; break;
1966 case USAmode
: sri
->icode
= CODE_FOR_reload_inusa
; break;
1974 /* Helper function to print assembler resp. track instruction
1975 sequence lengths. Always return "".
1978 Output assembler code from template TPL with operands supplied
1979 by OPERANDS. This is just forwarding to output_asm_insn.
1982 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1983 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1984 Don't output anything.
1988 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1992 output_asm_insn (tpl
, operands
);
2006 /* Return a pointer register name as a string. */
2009 ptrreg_to_str (int regno
)
2013 case REG_X
: return "X";
2014 case REG_Y
: return "Y";
2015 case REG_Z
: return "Z";
2017 output_operand_lossage ("address operand requires constraint for"
2018 " X, Y, or Z register");
2023 /* Return the condition name as a string.
2024 Used in conditional jump constructing */
2027 cond_string (enum rtx_code code
)
2036 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2041 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2057 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2058 /* Output ADDR to FILE as address. */
2061 avr_print_operand_address (FILE *file
, rtx addr
)
2063 switch (GET_CODE (addr
))
2066 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
2070 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2074 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2078 if (CONSTANT_ADDRESS_P (addr
)
2079 && text_segment_operand (addr
, VOIDmode
))
2082 if (GET_CODE (x
) == CONST
)
2084 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
2086 /* Assembler gs() will implant word address. Make offset
2087 a byte offset inside gs() for assembler. This is
2088 needed because the more logical (constant+gs(sym)) is not
2089 accepted by gas. For 128K and smaller devices this is ok.
2090 For large devices it will create a trampoline to offset
2091 from symbol which may not be what the user really wanted. */
2093 fprintf (file
, "gs(");
2094 output_addr_const (file
, XEXP (x
,0));
2095 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
2096 2 * INTVAL (XEXP (x
, 1)));
2098 if (warning (0, "pointer offset from symbol maybe incorrect"))
2100 output_addr_const (stderr
, addr
);
2101 fprintf(stderr
,"\n");
2106 fprintf (file
, "gs(");
2107 output_addr_const (file
, addr
);
2108 fprintf (file
, ")");
2112 output_addr_const (file
, addr
);
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* Only '~' and '!' are valid punctuation codes for operand printing.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  return code == '~' || code == '!';
}
2126 /* Implement `TARGET_PRINT_OPERAND'. */
2127 /* Output X as assembler operand to file FILE.
2128 For a description of supported %-codes, see top of avr.md. */
2131 avr_print_operand (FILE *file
, rtx x
, int code
)
2135 if (code
>= 'A' && code
<= 'D')
2140 if (!AVR_HAVE_JMP_CALL
)
2143 else if (code
== '!')
2145 if (AVR_HAVE_EIJMP_EICALL
)
2148 else if (code
== 't'
2151 static int t_regno
= -1;
2152 static int t_nbits
= -1;
2154 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2156 t_regno
= REGNO (x
);
2157 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2159 else if (CONST_INT_P (x
) && t_regno
>= 0
2160 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2162 int bpos
= INTVAL (x
);
2164 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2166 fprintf (file
, ",%d", bpos
% 8);
2171 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2175 if (x
== zero_reg_rtx
)
2176 fprintf (file
, "__zero_reg__");
2177 else if (code
== 'r' && REGNO (x
) < 32)
2178 fprintf (file
, "%d", (int) REGNO (x
));
2180 fprintf (file
, reg_names
[REGNO (x
) + abcd
]);
2182 else if (CONST_INT_P (x
))
2184 HOST_WIDE_INT ival
= INTVAL (x
);
2187 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2188 else if (low_io_address_operand (x
, VOIDmode
)
2189 || high_io_address_operand (x
, VOIDmode
))
2191 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2192 fprintf (file
, "__RAMPZ__");
2193 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2194 fprintf (file
, "__RAMPY__");
2195 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2196 fprintf (file
, "__RAMPX__");
2197 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2198 fprintf (file
, "__RAMPD__");
2199 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2200 fprintf (file
, "__CCP__");
2201 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2202 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2203 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2206 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2207 ival
- avr_current_arch
->sfr_offset
);
2211 fatal_insn ("bad address, not an I/O address:", x
);
2215 rtx addr
= XEXP (x
, 0);
2219 if (!CONSTANT_P (addr
))
2220 fatal_insn ("bad address, not a constant:", addr
);
2221 /* Assembler template with m-code is data - not progmem section */
2222 if (text_segment_operand (addr
, VOIDmode
))
2223 if (warning (0, "accessing data memory with"
2224 " program memory address"))
2226 output_addr_const (stderr
, addr
);
2227 fprintf(stderr
,"\n");
2229 output_addr_const (file
, addr
);
2231 else if (code
== 'i')
2233 avr_print_operand (file
, addr
, 'i');
2235 else if (code
== 'o')
2237 if (GET_CODE (addr
) != PLUS
)
2238 fatal_insn ("bad address, not (reg+disp):", addr
);
2240 avr_print_operand (file
, XEXP (addr
, 1), 0);
2242 else if (code
== 'p' || code
== 'r')
2244 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2245 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2248 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2250 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2252 else if (GET_CODE (addr
) == PLUS
)
2254 avr_print_operand_address (file
, XEXP (addr
,0));
2255 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2256 fatal_insn ("internal compiler error. Bad address:"
2259 avr_print_operand (file
, XEXP (addr
,1), code
);
2262 avr_print_operand_address (file
, addr
);
2264 else if (code
== 'i')
2266 if (GET_CODE (x
) == SYMBOL_REF
&& (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_IO
))
2267 avr_print_operand_address
2268 (file
, plus_constant (HImode
, x
, -avr_current_arch
->sfr_offset
));
2270 fatal_insn ("bad address, not an I/O address:", x
);
2272 else if (code
== 'x')
2274 /* Constant progmem address - like used in jmp or call */
2275 if (0 == text_segment_operand (x
, VOIDmode
))
2276 if (warning (0, "accessing program memory"
2277 " with data memory address"))
2279 output_addr_const (stderr
, x
);
2280 fprintf(stderr
,"\n");
2282 /* Use normal symbol for direct address no linker trampoline needed */
2283 output_addr_const (file
, x
);
2285 else if (CONST_FIXED_P (x
))
2287 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2289 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2291 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2293 else if (GET_CODE (x
) == CONST_DOUBLE
)
2297 if (GET_MODE (x
) != SFmode
)
2298 fatal_insn ("internal compiler error. Unknown mode:", x
);
2299 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2300 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2301 fprintf (file
, "0x%lx", val
);
2303 else if (GET_CODE (x
) == CONST_STRING
)
2304 fputs (XSTR (x
, 0), file
);
2305 else if (code
== 'j')
2306 fputs (cond_string (GET_CODE (x
)), file
);
2307 else if (code
== 'k')
2308 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2310 avr_print_operand_address (file
, x
);
2314 /* Worker function for `NOTICE_UPDATE_CC'. */
2315 /* Update the condition code in the INSN. */
2318 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx_insn
*insn
)
2321 enum attr_cc cc
= get_attr_cc (insn
);
2331 rtx
*op
= recog_data
.operand
;
2334 /* Extract insn's operands. */
2335 extract_constrain_insn_cached (insn
);
2343 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2344 cc
= (enum attr_cc
) icc
;
2349 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2350 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2351 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2353 /* Any other "r,rL" combination does not alter cc0. */
2357 } /* inner switch */
2361 } /* outer switch */
2366 /* Special values like CC_OUT_PLUS from above have been
2367 mapped to "standard" CC_* values so we never come here. */
2373 /* Insn does not affect CC at all. */
2381 set
= single_set (insn
);
2385 cc_status
.flags
|= CC_NO_OVERFLOW
;
2386 cc_status
.value1
= SET_DEST (set
);
2391 /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
2392 of this combination, cf. also PR61055. */
2397 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2398 The V flag may or may not be known but that's ok because
2399 alter_cond will change tests to use EQ/NE. */
2400 set
= single_set (insn
);
2404 cc_status
.value1
= SET_DEST (set
);
2405 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2410 set
= single_set (insn
);
2413 cc_status
.value1
= SET_SRC (set
);
2417 /* Insn doesn't leave CC in a usable state. */
2423 /* Choose mode for jump insn:
2424 1 - relative jump in range -63 <= x <= 62 ;
2425 2 - relative jump in range -2046 <= x <= 2045 ;
2426 3 - absolute jump (only for ATmega[16]03). */
2429 avr_jump_mode (rtx x
, rtx_insn
*insn
)
2431 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2432 ? XEXP (x
, 0) : x
));
2433 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2434 int jump_distance
= cur_addr
- dest_addr
;
2436 if (-63 <= jump_distance
&& jump_distance
<= 62)
2438 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2440 else if (AVR_HAVE_JMP_CALL
)
2446 /* Return an AVR condition jump commands.
2447 X is a comparison RTX.
2448 LEN is a number returned by avr_jump_mode function.
2449 If REVERSE nonzero then condition code in X must be reversed. */
2452 ret_cond_branch (rtx x
, int len
, int reverse
)
2454 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2459 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2460 return (len
== 1 ? ("breq .+2" CR_TAB
2462 len
== 2 ? ("breq .+4" CR_TAB
2470 return (len
== 1 ? ("breq .+2" CR_TAB
2472 len
== 2 ? ("breq .+4" CR_TAB
2479 return (len
== 1 ? ("breq .+2" CR_TAB
2481 len
== 2 ? ("breq .+4" CR_TAB
2488 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2489 return (len
== 1 ? ("breq %0" CR_TAB
2491 len
== 2 ? ("breq .+2" CR_TAB
2498 return (len
== 1 ? ("breq %0" CR_TAB
2500 len
== 2 ? ("breq .+2" CR_TAB
2507 return (len
== 1 ? ("breq %0" CR_TAB
2509 len
== 2 ? ("breq .+2" CR_TAB
2523 return ("br%j1 .+2" CR_TAB
2526 return ("br%j1 .+4" CR_TAB
2537 return ("br%k1 .+2" CR_TAB
2540 return ("br%k1 .+4" CR_TAB
2549 /* Worker function for `FINAL_PRESCAN_INSN'. */
2550 /* Output insn cost for next insn. */
2553 avr_final_prescan_insn (rtx_insn
*insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2554 int num_operands ATTRIBUTE_UNUSED
)
2556 if (avr_log
.rtx_costs
)
2558 rtx set
= single_set (insn
);
2561 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2562 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2564 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2565 rtx_cost (PATTERN (insn
), INSN
, 0,
2566 optimize_insn_for_speed_p()));
2570 /* Return 0 if undefined, 1 if always true or always false. */
2573 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2575 unsigned int max
= (mode
== QImode
? 0xff :
2576 mode
== HImode
? 0xffff :
2577 mode
== PSImode
? 0xffffff :
2578 mode
== SImode
? 0xffffffff : 0);
2579 if (max
&& op
&& CONST_INT_P (x
))
2581 if (unsigned_condition (op
) != op
)
2584 if (max
!= (INTVAL (x
) & max
)
2585 && INTVAL (x
) != 0xff)
/* Worker function for `FUNCTION_ARG_REGNO_P'.  */
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   The AVR ABI passes arguments in r8 ... r25.  */

int
avr_function_arg_regno_p(int r)
{
  return (r >= 8 && r <= 25);
}
2603 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2604 /* Initializing the variable cum for the state at the beginning
2605 of the argument list. */
2608 avr_init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2609 tree fndecl ATTRIBUTE_UNUSED
)
2612 cum
->regno
= FIRST_CUM_REG
;
2613 if (!libname
&& stdarg_p (fntype
))
2616 /* Assume the calle may be tail called */
2618 cfun
->machine
->sibcall_fails
= 0;
2621 /* Returns the number of registers to allocate for a function argument. */
2624 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2628 if (mode
== BLKmode
)
2629 size
= int_size_in_bytes (type
);
2631 size
= GET_MODE_SIZE (mode
);
2633 /* Align all function arguments to start in even-numbered registers.
2634 Odd-sized arguments leave holes above them. */
2636 return (size
+ 1) & ~1;
2640 /* Implement `TARGET_FUNCTION_ARG'. */
2641 /* Controls whether a function argument is passed
2642 in a register, and which register. */
2645 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2646 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2648 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2649 int bytes
= avr_num_arg_regs (mode
, type
);
2651 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2652 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2658 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2659 /* Update the summarizer variable CUM to advance past an argument
2660 in the argument list. */
2663 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2664 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2666 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2667 int bytes
= avr_num_arg_regs (mode
, type
);
2669 cum
->nregs
-= bytes
;
2670 cum
->regno
-= bytes
;
2672 /* A parameter is being passed in a call-saved register. As the original
2673 contents of these regs has to be restored before leaving the function,
2674 a function must not pass arguments in call-saved regs in order to get
2679 && !call_used_regs
[cum
->regno
])
2681 /* FIXME: We ship info on failing tail-call in struct machine_function.
2682 This uses internals of calls.c:expand_call() and the way args_so_far
2683 is used. targetm.function_ok_for_sibcall() needs to be extended to
2684 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2685 dependent so that such an extension is not wanted. */
2687 cfun
->machine
->sibcall_fails
= 1;
2690 /* Test if all registers needed by the ABI are actually available. If the
2691 user has fixed a GPR needed to pass an argument, an (implicit) function
2692 call will clobber that fixed register. See PR45099 for an example. */
2699 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2700 if (fixed_regs
[regno
])
2701 warning (0, "fixed register %s used to pass parameter to function",
2705 if (cum
->nregs
<= 0)
2708 cum
->regno
= FIRST_CUM_REG
;
2712 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2713 /* Decide whether we can make a sibling call to a function. DECL is the
2714 declaration of the function being targeted by the call and EXP is the
2715 CALL_EXPR representing the call. */
2718 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2722 /* Tail-calling must fail if callee-saved regs are used to pass
2723 function args. We must not tail-call when `epilogue_restores'
2724 is used. Unfortunately, we cannot tell at this point if that
2725 actually will happen or not, and we cannot step back from
2726 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2728 if (cfun
->machine
->sibcall_fails
2729 || TARGET_CALL_PROLOGUES
)
2734 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2738 decl_callee
= TREE_TYPE (decl_callee
);
2742 decl_callee
= fntype_callee
;
2744 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2745 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2747 decl_callee
= TREE_TYPE (decl_callee
);
2751 /* Ensure that caller and callee have compatible epilogues */
2753 if (cfun
->machine
->is_interrupt
2754 || cfun
->machine
->is_signal
2755 || cfun
->machine
->is_naked
2756 || avr_naked_function_p (decl_callee
)
2757 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2758 || (avr_OS_task_function_p (decl_callee
)
2759 != cfun
->machine
->is_OS_task
)
2760 || (avr_OS_main_function_p (decl_callee
)
2761 != cfun
->machine
->is_OS_main
))
2769 /***********************************************************************
2770 Functions for outputting various mov's for a various modes
2771 ************************************************************************/
2773 /* Return true if a value of mode MODE is read from flash by
2774 __load_* function from libgcc. */
2777 avr_load_libgcc_p (rtx op
)
2779 enum machine_mode mode
= GET_MODE (op
);
2780 int n_bytes
= GET_MODE_SIZE (mode
);
2784 && avr_mem_flash_p (op
));
2787 /* Return true if a value of mode MODE is read by __xload_* function. */
2790 avr_xload_libgcc_p (enum machine_mode mode
)
2792 int n_bytes
= GET_MODE_SIZE (mode
);
2795 || avr_n_flash
> 1);
2799 /* Fixme: This is a hack because secondary reloads don't work as expected.
2801 Find an unused d-register to be used as scratch in INSN.
2802 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2803 is a register, skip all possible return values that overlap EXCLUDE.
2804 The policy for the returned register is similar to that of
2805 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2808 Return a QImode d-register or NULL_RTX if nothing found. */
2811 avr_find_unused_d_reg (rtx_insn
*insn
, rtx exclude
)
2814 bool isr_p
= (avr_interrupt_function_p (current_function_decl
)
2815 || avr_signal_function_p (current_function_decl
));
2817 for (regno
= 16; regno
< 32; regno
++)
2819 rtx reg
= all_regs_rtx
[regno
];
2822 && reg_overlap_mentioned_p (exclude
, reg
))
2823 || fixed_regs
[regno
])
2828 /* Try non-live register */
2830 if (!df_regs_ever_live_p (regno
)
2831 && (TREE_THIS_VOLATILE (current_function_decl
)
2832 || cfun
->machine
->is_OS_task
2833 || cfun
->machine
->is_OS_main
2834 || (!isr_p
&& call_used_regs
[regno
])))
2839 /* Any live register can be used if it is unused after.
2840 Prologue/epilogue will care for it as needed. */
2842 if (df_regs_ever_live_p (regno
)
2843 && reg_unused_after (insn
, reg
))
2853 /* Helper function for the next function in the case where only restricted
2854 version of LPM instruction is available.
   Emits flash reads via plain LPM (implicit target r0, implicit address Z),
   copying into the real destination with "mov" where needed.  */
2857 avr_out_lpm_no_lpmx (rtx_insn
*insn
, rtx
*xop
, int *plen
)
2861 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
))
;
2864 regno_dest
= REGNO (dest
);
2866 /* The implicit target register of LPM. */
2867 xop
[3] = lpm_reg_rtx
;
2869 switch (GET_CODE (addr
))
/* Plain LPM only reads through Z; anything else is a bug.  */
2876 gcc_assert (REG_Z
== REGNO (addr
));
2884 avr_asm_len ("%4lpm", xop
, plen
, 1);
/* Copy from the implicit LPM register (xop[3]) unless the destination
   already is that register.  */
2886 if (regno_dest
!= LPM_REGNO
)
2887 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
/* If the destination overlaps Z itself, go through push/pop (sequence
   partly elided in this fragment).  */
2892 if (REGNO (dest
) == REG_Z
)
2893 return avr_asm_len ("%4lpm" CR_TAB
2898 "pop %A0", xop
, plen
, 6);
2900 avr_asm_len ("%4lpm" CR_TAB
2904 "mov %B0,%3", xop
, plen
, 5);
/* Restore Z if its value is still needed after this insn.  */
2906 if (!reg_unused_after (insn
, addr
))
2907 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2916 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
/* POST_INC case: emit one LPM + "adiw Z,1" per byte.  */
2919 if (regno_dest
== LPM_REGNO
)
2920 avr_asm_len ("%4lpm" CR_TAB
2921 "adiw %2,1", xop
, plen
, 2);
2923 avr_asm_len ("%4lpm" CR_TAB
2925 "adiw %2,1", xop
, plen
, 3);
2928 avr_asm_len ("%4lpm" CR_TAB
2930 "adiw %2,1", xop
, plen
, 3);
2933 avr_asm_len ("%4lpm" CR_TAB
2935 "adiw %2,1", xop
, plen
, 3);
2938 avr_asm_len ("%4lpm" CR_TAB
2940 "adiw %2,1", xop
, plen
, 3);
2942 break; /* POST_INC */
2944 } /* switch CODE (addr) */
2950 /* If PLEN == NULL: Output instructions to load a value from a memory location
2951 OP[1] in AS1 to register OP[0].
2952 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
   Central worker for reading from flash address spaces via LPM/ELPM.  */
2956 avr_out_lpm (rtx_insn
*insn
, rtx
*op
, int *plen
)
2960 rtx src
= SET_SRC (single_set (insn
));
2962 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2965 addr_space_t as
= MEM_ADDR_SPACE (src
);
/* Flash is read-only from the program's point of view: stores to a
   flash address space are diagnosed, not emitted.  */
2972 warning (0, "writing to address space %qs not supported",
2973 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2978 addr
= XEXP (src
, 0);
2979 code
= GET_CODE (addr
);
2981 gcc_assert (REG_P (dest
));
/* Only plain register and post-increment addressing are supported.  */
2982 gcc_assert (REG
== code
|| POST_INC
== code
);
/* Operand slots: %2 = Z (LPM address reg), %4 = "e"/"" prefix or the
   RAMPZ segment value, %5 = tmp reg, %6 = RAMPZ address.  */
2986 xop
[2] = lpm_addr_reg_rtx
;
2987 xop
[4] = xstring_empty
;
2988 xop
[5] = tmp_reg_rtx
;
2989 xop
[6] = XEXP (rampz_rtx
, 0);
2991 segment
= avr_addrspace
[as
].segment
;
2993 /* Set RAMPZ as needed. */
2997 xop
[4] = GEN_INT (segment
);
/* Best case: a free d-register can hold the segment number for "ldi".  */
2998 xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
);
3000 if (xop
[3] != NULL_RTX
)
3002 avr_asm_len ("ldi %3,%4" CR_TAB
3003 "out %i6,%3", xop
, plen
, 2);
/* Segment 1 can be synthesized without a d-register (clr/inc elided).  */
3005 else if (segment
== 1)
3007 avr_asm_len ("clr %5" CR_TAB
3009 "out %i6,%5", xop
, plen
, 3);
/* Fallback: shuffle through Z-low and the tmp register.  */
3013 avr_asm_len ("mov %5,%2" CR_TAB
3016 "mov %2,%5", xop
, plen
, 4);
/* Without [E]LPMX, delegate to the restricted-LPM helper.  */
3021 if (!AVR_HAVE_ELPMX
)
3022 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3024 else if (!AVR_HAVE_LPMX
)
3026 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3029 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
3031 switch (GET_CODE (addr
))
3038 gcc_assert (REG_Z
== REGNO (addr
));
3046 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
/* 2-byte read into Z itself: bounce the low byte through %5 (tmp).  */
3049 if (REGNO (dest
) == REG_Z
)
3050 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3051 "%4lpm %B0,%a2" CR_TAB
3052 "mov %A0,%5", xop
, plen
, 3);
3055 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3056 "%4lpm %B0,%a2", xop
, plen
, 2);
/* Undo the post-increments if Z is still live after this insn.  */
3058 if (!reg_unused_after (insn
, addr
))
3059 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3066 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3067 "%4lpm %B0,%a2+" CR_TAB
3068 "%4lpm %C0,%a2", xop
, plen
, 3);
3070 if (!reg_unused_after (insn
, addr
))
3071 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
3077 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3078 "%4lpm %B0,%a2+", xop
, plen
, 2);
/* 4-byte read whose top half lands in Z (dest == REG_Z - 2): bounce
   byte C through %5 so Z is not clobbered before the last read.  */
3080 if (REGNO (dest
) == REG_Z
- 2)
3081 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3082 "%4lpm %C0,%a2" CR_TAB
3083 "mov %D0,%5", xop
, plen
, 3);
3086 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3087 "%4lpm %D0,%a2", xop
, plen
, 2);
3089 if (!reg_unused_after (insn
, addr
))
3090 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
3100 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
/* POST_INC: emit one post-incrementing [E]LPM per byte, up to 4.  */
3103 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
3104 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
3105 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
3106 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
3108 break; /* POST_INC */
3110 } /* switch CODE (addr) */
3112 if (xop
[4] == xstring_e
&& AVR_HAVE_RAMPD
)
3114 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3116 xop
[0] = zero_reg_rtx
;
3117 avr_asm_len ("out %i6,%0", xop
, plen
, 1);
3124 /* Worker function for xload_8 insn.
   Reads one byte from a 24-bit (__memx) address: LPM for flash,
   "ld" for RAM, selected at run time by testing bit 7 of %1.  */
3127 avr_out_xload (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
3133 xop
[2] = lpm_addr_reg_rtx
;
/* With LPMX we can load straight into the destination; otherwise the
   implicit LPM register r0 is used and copied afterwards.  */
3134 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
3136 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, -1);
/* If bit 7 of the high address byte is set, the location is in RAM:
   skip-if-bit-clear then overwrite with an "ld" from Z.  */
3138 avr_asm_len ("sbrc %1,7" CR_TAB
3139 "ld %3,%a2", xop
, plen
, 2);
3141 if (REGNO (xop
[0]) != REGNO (xop
[3]))
3142 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
/* Output code for a QImode (1-byte) move insn.  Dispatches on operand
   kinds: flash operands go through avr_out_lpm, reg<-mem through
   out_movqi_r_mr, mem<-reg through out_movqi_mr_r, constants through
   output_reload_in_const.  */
3149 output_movqi (rtx_insn
*insn
, rtx operands
[], int *plen
)
3151 rtx dest
= operands
[0];
3152 rtx src
= operands
[1];
3154 if (avr_mem_flash_p (src
)
3155 || avr_mem_flash_p (dest
))
3157 return avr_out_lpm (insn
, operands
, plen
);
3160 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
3164 if (REG_P (src
)) /* mov r,r */
/* Moves to/from the stack pointer use I/O instructions.  */
3166 if (test_hard_reg_class (STACK_REG
, dest
))
3167 return avr_asm_len ("out %0,%1", operands
, plen
, -1);
3168 else if (test_hard_reg_class (STACK_REG
, src
))
3169 return avr_asm_len ("in %0,%1", operands
, plen
, -1);
3171 return avr_asm_len ("mov %0,%1", operands
, plen
, -1);
3173 else if (CONSTANT_P (src
))
3175 output_reload_in_const (operands
, NULL_RTX
, plen
, false);
3178 else if (MEM_P (src
))
3179 return out_movqi_r_mr (insn
, operands
, plen
); /* mov r,m */
3181 else if (MEM_P (dest
))
/* Storing constant 0 uses __zero_reg__ instead of a reload.  */
3186 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3188 return out_movqi_mr_r (insn
, xop
, plen
);
/* Output code for a HImode (2-byte) move insn.  Dispatches like
   output_movqi but with 16-bit sequences; stack-pointer accesses get
   special interrupt-safe handling.  */
3196 output_movhi (rtx_insn
*insn
, rtx xop
[], int *plen
)
3201 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
3203 if (avr_mem_flash_p (src
)
3204 || avr_mem_flash_p (dest
))
3206 return avr_out_lpm (insn
, xop
, plen
);
3209 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
3213 if (REG_P (src
)) /* mov r,r */
3215 if (test_hard_reg_class (STACK_REG
, dest
))
/* Devices with an 8-bit SP only need the low byte written.  */
3217 if (AVR_HAVE_8BIT_SP
)
3218 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
3221 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3222 "out __SP_H__,%B1", xop
, plen
, -2);
3224 /* Use simple load of SP if no interrupts are used. */
/* Otherwise SREG is saved/restored around the SP write so an interrupt
   cannot observe a half-updated stack pointer.  */
3226 return TARGET_NO_INTERRUPTS
3227 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3228 "out __SP_L__,%A1", xop
, plen
, -2)
3229 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3231 "out __SP_H__,%B1" CR_TAB
3232 "out __SREG__,__tmp_reg__" CR_TAB
3233 "out __SP_L__,%A1", xop
, plen
, -5);
3235 else if (test_hard_reg_class (STACK_REG
, src
))
/* Reading SP: high byte is zero on devices without SPH.  */
3237 return !AVR_HAVE_SPH
3238 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3239 "clr %B0", xop
, plen
, -2)
3241 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3242 "in %B0,__SP_H__", xop
, plen
, -2);
3245 return AVR_HAVE_MOVW
3246 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
3248 : avr_asm_len ("mov %A0,%A1" CR_TAB
3249 "mov %B0,%B1", xop
, plen
, -2);
3251 else if (CONSTANT_P (src
))
3253 return output_reload_inhi (xop
, NULL
, plen
);
3255 else if (MEM_P (src
))
3257 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
3260 else if (MEM_P (dest
))
/* Storing constant 0 uses __zero_reg__.  */
3265 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3267 return out_movhi_mr_r (insn
, xop
, plen
);
3270 fatal_insn ("invalid insn:", insn
);
/* Output a 1-byte load register <- memory.  Handles constant addresses
   (in/lds), reg+displacement via Y (with adiw/sbiw or subi/sbci frame
   adjustment for large offsets), the paranoid X+disp case, and plain
   register-indirect.  */
3276 out_movqi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
3280 rtx x
= XEXP (src
, 0);
3282 if (CONSTANT_ADDRESS_P (x
))
/* Prefer 1-word "in" for I/O addresses when optimizing.  */
3284 return optimize
> 0 && io_address_operand (x
, QImode
)
3285 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3286 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3288 else if (GET_CODE (x
) == PLUS
3289 && REG_P (XEXP (x
, 0))
3290 && CONST_INT_P (XEXP (x
, 1)))
3292 /* memory access by reg+disp */
3294 int disp
= INTVAL (XEXP (x
, 1));
/* Displacement exceeds the 0..63 "ldd" range: adjust Y temporarily.  */
3296 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3298 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3299 fatal_insn ("incorrect insn:",insn
);
3301 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3302 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3303 "ldd %0,Y+63" CR_TAB
3304 "sbiw r28,%o1-63", op
, plen
, -3);
3306 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3307 "sbci r29,hi8(-%o1)" CR_TAB
3309 "subi r28,lo8(%o1)" CR_TAB
3310 "sbci r29,hi8(%o1)", op
, plen
, -5);
3312 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3314 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3315 it but I have this situation with extremal optimizing options. */
3317 avr_asm_len ("adiw r26,%o1" CR_TAB
3318 "ld %0,X", op
, plen
, -2);
/* Restore X unless the destination clobbered it or X is dead.  */
3320 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3321 && !reg_unused_after (insn
, XEXP (x
,0)))
3323 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3329 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3332 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
/* Output a 2-byte load register <- memory, covering register-indirect,
   reg+disp, pre-decrement, post-increment and constant addresses.  */
3336 out_movhi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
3340 rtx base
= XEXP (src
, 0);
3341 int reg_dest
= true_regnum (dest
);
3342 int reg_base
= true_regnum (base
);
3343 /* "volatile" forces reading low byte first, even if less efficient,
3344 for correct operation with 16-bit I/O registers. */
3345 int mem_volatile_p
= MEM_VOLATILE_P (src
);
/* Destination overlaps the base register: bounce low byte via tmp.  */
3349 if (reg_dest
== reg_base
) /* R = (R) */
3350 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3352 "mov %A0,__tmp_reg__", op
, plen
, -3);
3354 if (reg_base
!= REG_X
)
3355 return avr_asm_len ("ld %A0,%1" CR_TAB
3356 "ldd %B0,%1+1", op
, plen
, -2);
/* X has no "ldd X+d": use post-increment and restore if X is live.  */
3358 avr_asm_len ("ld %A0,X+" CR_TAB
3359 "ld %B0,X", op
, plen
, -2);
3361 if (!reg_unused_after (insn
, base
))
3362 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3366 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3368 int disp
= INTVAL (XEXP (base
, 1));
3369 int reg_base
= true_regnum (XEXP (base
, 0));
/* Displacement out of the 0..62 "ldd" range: only Y may carry it.  */
3371 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3373 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3374 fatal_insn ("incorrect insn:",insn
);
3376 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3377 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3378 "ldd %A0,Y+62" CR_TAB
3379 "ldd %B0,Y+63" CR_TAB
3380 "sbiw r28,%o1-62", op
, plen
, -4)
3382 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3383 "sbci r29,hi8(-%o1)" CR_TAB
3385 "ldd %B0,Y+1" CR_TAB
3386 "subi r28,lo8(%o1)" CR_TAB
3387 "sbci r29,hi8(%o1)", op
, plen
, -6);
3390 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3391 it but I have this situation with extremal
3392 optimization options. */
3394 if (reg_base
== REG_X
)
3395 return reg_base
== reg_dest
3396 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3397 "ld __tmp_reg__,X+" CR_TAB
3399 "mov %A0,__tmp_reg__", op
, plen
, -4)
3401 : avr_asm_len ("adiw r26,%o1" CR_TAB
3404 "sbiw r26,%o1+1", op
, plen
, -4);
/* Plain Y/Z + disp: tmp-bounce when destination overlaps the base.  */
3406 return reg_base
== reg_dest
3407 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3408 "ldd %B0,%B1" CR_TAB
3409 "mov %A0,__tmp_reg__", op
, plen
, -3)
3411 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3412 "ldd %B0,%B1", op
, plen
, -2);
3414 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3416 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3417 fatal_insn ("incorrect insn:", insn
);
/* Non-volatile: read high then low via two pre-decrements.  */
3419 if (!mem_volatile_p
)
3420 return avr_asm_len ("ld %B0,%1" CR_TAB
3421 "ld %A0,%1", op
, plen
, -2);
3423 return REGNO (XEXP (base
, 0)) == REG_X
3424 ? avr_asm_len ("sbiw r26,2" CR_TAB
3427 "sbiw r26,1", op
, plen
, -4)
3429 : avr_asm_len ("sbiw %r1,2" CR_TAB
3431 "ldd %B0,%p1+1", op
, plen
, -3);
3433 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3435 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3436 fatal_insn ("incorrect insn:", insn
);
3438 return avr_asm_len ("ld %A0,%1" CR_TAB
3439 "ld %B0,%1", op
, plen
, -2);
3441 else if (CONSTANT_ADDRESS_P (base
))
3443 return optimize
> 0 && io_address_operand (base
, HImode
)
3444 ? avr_asm_len ("in %A0,%i1" CR_TAB
3445 "in %B0,%i1+1", op
, plen
, -2)
3447 : avr_asm_len ("lds %A0,%m1" CR_TAB
3448 "lds %B0,%m1+1", op
, plen
, -4);
3451 fatal_insn ("unknown move insn:",insn
);
/* Output a 4-byte load register <- memory.  Older-style worker: returns
   the template string and reports length through *l with "*l=n, (...)".  */
3456 out_movsi_r_mr (rtx_insn
*insn
, rtx op
[], int *l
)
3460 rtx base
= XEXP (src
, 0);
3461 int reg_dest
= true_regnum (dest
);
3462 int reg_base
= true_regnum (base
);
3470 if (reg_base
== REG_X
) /* (R26) */
3472 if (reg_dest
== REG_X
)
3473 /* "ld r26,-X" is undefined */
3474 return *l
=7, ("adiw r26,3" CR_TAB
3477 "ld __tmp_reg__,-X" CR_TAB
3480 "mov r27,__tmp_reg__");
/* Destination is r24..r27 (high half overlaps X): bounce byte C.  */
3481 else if (reg_dest
== REG_X
- 2)
3482 return *l
=5, ("ld %A0,X+" CR_TAB
3484 "ld __tmp_reg__,X+" CR_TAB
3486 "mov %C0,__tmp_reg__");
3487 else if (reg_unused_after (insn
, base
))
3488 return *l
=4, ("ld %A0,X+" CR_TAB
3493 return *l
=5, ("ld %A0,X+" CR_TAB
/* Base is Y or Z: use "ldd" with small displacements, ordering the
   reads so an overlapping destination is not clobbered too early.  */
3501 if (reg_dest
== reg_base
)
3502 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3503 "ldd %C0,%1+2" CR_TAB
3504 "ldd __tmp_reg__,%1+1" CR_TAB
3506 "mov %B0,__tmp_reg__");
3507 else if (reg_base
== reg_dest
+ 2)
3508 return *l
=5, ("ld %A0,%1" CR_TAB
3509 "ldd %B0,%1+1" CR_TAB
3510 "ldd __tmp_reg__,%1+2" CR_TAB
3511 "ldd %D0,%1+3" CR_TAB
3512 "mov %C0,__tmp_reg__");
3514 return *l
=4, ("ld %A0,%1" CR_TAB
3515 "ldd %B0,%1+1" CR_TAB
3516 "ldd %C0,%1+2" CR_TAB
3520 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3522 int disp
= INTVAL (XEXP (base
, 1));
/* Large displacement: only Y may carry it; adjust and restore.  */
3524 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3526 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3527 fatal_insn ("incorrect insn:",insn
);
3529 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3530 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3531 "ldd %A0,Y+60" CR_TAB
3532 "ldd %B0,Y+61" CR_TAB
3533 "ldd %C0,Y+62" CR_TAB
3534 "ldd %D0,Y+63" CR_TAB
3537 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3538 "sbci r29,hi8(-%o1)" CR_TAB
3540 "ldd %B0,Y+1" CR_TAB
3541 "ldd %C0,Y+2" CR_TAB
3542 "ldd %D0,Y+3" CR_TAB
3543 "subi r28,lo8(%o1)" CR_TAB
3544 "sbci r29,hi8(%o1)");
3547 reg_base
= true_regnum (XEXP (base
, 0));
3548 if (reg_base
== REG_X
)
3551 if (reg_dest
== REG_X
)
3554 /* "ld r26,-X" is undefined */
3555 return ("adiw r26,%o1+3" CR_TAB
3558 "ld __tmp_reg__,-X" CR_TAB
3561 "mov r27,__tmp_reg__");
3564 if (reg_dest
== REG_X
- 2)
3565 return ("adiw r26,%o1" CR_TAB
3568 "ld __tmp_reg__,X+" CR_TAB
3570 "mov r26,__tmp_reg__");
3572 return ("adiw r26,%o1" CR_TAB
/* Y/Z + disp with "ldd": again order the reads around overlaps.  */
3579 if (reg_dest
== reg_base
)
3580 return *l
=5, ("ldd %D0,%D1" CR_TAB
3581 "ldd %C0,%C1" CR_TAB
3582 "ldd __tmp_reg__,%B1" CR_TAB
3583 "ldd %A0,%A1" CR_TAB
3584 "mov %B0,__tmp_reg__");
3585 else if (reg_dest
== reg_base
- 2)
3586 return *l
=5, ("ldd %A0,%A1" CR_TAB
3587 "ldd %B0,%B1" CR_TAB
3588 "ldd __tmp_reg__,%C1" CR_TAB
3589 "ldd %D0,%D1" CR_TAB
3590 "mov %C0,__tmp_reg__");
3591 return *l
=4, ("ldd %A0,%A1" CR_TAB
3592 "ldd %B0,%B1" CR_TAB
3593 "ldd %C0,%C1" CR_TAB
3596 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3597 return *l
=4, ("ld %D0,%1" CR_TAB
3601 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3602 return *l
=4, ("ld %A0,%1" CR_TAB
3606 else if (CONSTANT_ADDRESS_P (base
))
3607 return *l
=8, ("lds %A0,%m1" CR_TAB
3608 "lds %B0,%m1+1" CR_TAB
3609 "lds %C0,%m1+2" CR_TAB
3612 fatal_insn ("unknown move insn:",insn
);
/* Output a 4-byte store memory <- register (template-string style,
   length via *l).  Mirrors out_movsi_r_mr for the store direction.  */
3617 out_movsi_mr_r (rtx_insn
*insn
, rtx op
[], int *l
)
3621 rtx base
= XEXP (dest
, 0);
3622 int reg_base
= true_regnum (base
);
3623 int reg_src
= true_regnum (src
);
3629 if (CONSTANT_ADDRESS_P (base
))
3630 return *l
=8,("sts %m0,%A1" CR_TAB
3631 "sts %m0+1,%B1" CR_TAB
3632 "sts %m0+2,%C1" CR_TAB
3634 if (reg_base
> 0) /* (r) */
3636 if (reg_base
== REG_X
) /* (R26) */
3638 if (reg_src
== REG_X
)
3640 /* "st X+,r26" is undefined */
3641 if (reg_unused_after (insn
, base
))
3642 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3645 "st X+,__tmp_reg__" CR_TAB
3649 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3652 "st X+,__tmp_reg__" CR_TAB
/* Source's top half overlaps X (src == r24..r27): save bytes C/D in
   __zero_reg__/__tmp_reg__ first, and restore __zero_reg__ with clr.  */
3657 else if (reg_base
== reg_src
+ 2)
3659 if (reg_unused_after (insn
, base
))
3660 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3661 "mov __tmp_reg__,%D1" CR_TAB
3664 "st %0+,__zero_reg__" CR_TAB
3665 "st %0,__tmp_reg__" CR_TAB
3666 "clr __zero_reg__");
3668 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3669 "mov __tmp_reg__,%D1" CR_TAB
3672 "st %0+,__zero_reg__" CR_TAB
3673 "st %0,__tmp_reg__" CR_TAB
3674 "clr __zero_reg__" CR_TAB
3677 return *l
=5, ("st %0+,%A1" CR_TAB
3684 return *l
=4, ("st %0,%A1" CR_TAB
3685 "std %0+1,%B1" CR_TAB
3686 "std %0+2,%C1" CR_TAB
3689 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3691 int disp
= INTVAL (XEXP (base
, 1));
3692 reg_base
= REGNO (XEXP (base
, 0));
/* Large displacement: only Y may carry it; adjust and restore.  */
3693 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3695 if (reg_base
!= REG_Y
)
3696 fatal_insn ("incorrect insn:",insn
);
3698 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3699 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3700 "std Y+60,%A1" CR_TAB
3701 "std Y+61,%B1" CR_TAB
3702 "std Y+62,%C1" CR_TAB
3703 "std Y+63,%D1" CR_TAB
3706 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3707 "sbci r29,hi8(-%o0)" CR_TAB
3709 "std Y+1,%B1" CR_TAB
3710 "std Y+2,%C1" CR_TAB
3711 "std Y+3,%D1" CR_TAB
3712 "subi r28,lo8(%o0)" CR_TAB
3713 "sbci r29,hi8(%o0)");
3715 if (reg_base
== REG_X
)
3718 if (reg_src
== REG_X
)
3721 return ("mov __tmp_reg__,r26" CR_TAB
3722 "mov __zero_reg__,r27" CR_TAB
3723 "adiw r26,%o0" CR_TAB
3724 "st X+,__tmp_reg__" CR_TAB
3725 "st X+,__zero_reg__" CR_TAB
3728 "clr __zero_reg__" CR_TAB
3731 else if (reg_src
== REG_X
- 2)
3734 return ("mov __tmp_reg__,r26" CR_TAB
3735 "mov __zero_reg__,r27" CR_TAB
3736 "adiw r26,%o0" CR_TAB
3739 "st X+,__tmp_reg__" CR_TAB
3740 "st X,__zero_reg__" CR_TAB
3741 "clr __zero_reg__" CR_TAB
3745 return ("adiw r26,%o0" CR_TAB
3752 return *l
=4, ("std %A0,%A1" CR_TAB
3753 "std %B0,%B1" CR_TAB
3754 "std %C0,%C1" CR_TAB
3757 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3758 return *l
=4, ("st %0,%D1" CR_TAB
3762 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3763 return *l
=4, ("st %0,%A1" CR_TAB
3767 fatal_insn ("unknown move insn:",insn
);
/* Output code for a 4-byte (SImode/SFmode) move insn.  Dispatches to
   out_movsi_r_mr / out_movsi_mr_r / output_reload_insisf; flash
   operands go through avr_out_lpm.  */
3772 output_movsisf (rtx_insn
*insn
, rtx operands
[], int *l
)
3775 rtx dest
= operands
[0];
3776 rtx src
= operands
[1];
3779 if (avr_mem_flash_p (src
)
3780 || avr_mem_flash_p (dest
))
3782 return avr_out_lpm (insn
, operands
, real_l
);
3788 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
3791 if (REG_P (src
)) /* mov r,r */
/* Copy high-to-low when dest regno > src regno so overlapping register
   pairs are not clobbered before they are read.  */
3793 if (true_regnum (dest
) > true_regnum (src
))
3798 return ("movw %C0,%C1" CR_TAB
3802 return ("mov %D0,%D1" CR_TAB
3803 "mov %C0,%C1" CR_TAB
3804 "mov %B0,%B1" CR_TAB
3812 return ("movw %A0,%A1" CR_TAB
3816 return ("mov %A0,%A1" CR_TAB
3817 "mov %B0,%B1" CR_TAB
3818 "mov %C0,%C1" CR_TAB
3822 else if (CONSTANT_P (src
))
3824 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3826 else if (MEM_P (src
))
3827 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3829 else if (MEM_P (dest
))
/* Storing constant 0 uses __zero_reg__.  */
3833 if (src
== CONST0_RTX (GET_MODE (dest
)))
3834 operands
[1] = zero_reg_rtx
;
3836 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3839 output_asm_insn (templ
, operands
);
3844 fatal_insn ("invalid insn:", insn
);
3849 /* Handle loads of 24-bit types from memory to register. */
3852 avr_out_load_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
3856 rtx base
= XEXP (src
, 0);
3857 int reg_dest
= true_regnum (dest
);
3858 int reg_base
= true_regnum (base
);
3862 if (reg_base
== REG_X
) /* (R26) */
3864 if (reg_dest
== REG_X
)
3865 /* "ld r26,-X" is undefined */
3866 return avr_asm_len ("adiw r26,2" CR_TAB
3868 "ld __tmp_reg__,-X" CR_TAB
3871 "mov r27,__tmp_reg__", op
, plen
, -6);
3874 avr_asm_len ("ld %A0,X+" CR_TAB
3876 "ld %C0,X", op
, plen
, -3);
/* Restore X unless the destination clobbered it (dest == r24) or X
   is dead after this insn.  */
3878 if (reg_dest
!= REG_X
- 2
3879 && !reg_unused_after (insn
, base
))
3881 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3887 else /* reg_base != REG_X */
/* Overlapping destination: read top-down and bounce byte B via tmp.  */
3889 if (reg_dest
== reg_base
)
3890 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3891 "ldd __tmp_reg__,%1+1" CR_TAB
3893 "mov %B0,__tmp_reg__", op
, plen
, -4);
3895 return avr_asm_len ("ld %A0,%1" CR_TAB
3896 "ldd %B0,%1+1" CR_TAB
3897 "ldd %C0,%1+2", op
, plen
, -3);
3900 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3902 int disp
= INTVAL (XEXP (base
, 1));
/* Large displacement: only Y may carry it; adjust and restore.  */
3904 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3906 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3907 fatal_insn ("incorrect insn:",insn
);
3909 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3910 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3911 "ldd %A0,Y+61" CR_TAB
3912 "ldd %B0,Y+62" CR_TAB
3913 "ldd %C0,Y+63" CR_TAB
3914 "sbiw r28,%o1-61", op
, plen
, -5);
3916 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3917 "sbci r29,hi8(-%o1)" CR_TAB
3919 "ldd %B0,Y+1" CR_TAB
3920 "ldd %C0,Y+2" CR_TAB
3921 "subi r28,lo8(%o1)" CR_TAB
3922 "sbci r29,hi8(%o1)", op
, plen
, -7);
3925 reg_base
= true_regnum (XEXP (base
, 0));
3926 if (reg_base
== REG_X
)
3929 if (reg_dest
== REG_X
)
3931 /* "ld r26,-X" is undefined */
3932 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3934 "ld __tmp_reg__,-X" CR_TAB
3937 "mov r27,__tmp_reg__", op
, plen
, -6);
3940 avr_asm_len ("adiw r26,%o1" CR_TAB
3943 "ld %C0,X", op
, plen
, -4);
3945 if (reg_dest
!= REG_W
3946 && !reg_unused_after (insn
, XEXP (base
, 0)))
3947 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3952 if (reg_dest
== reg_base
)
3953 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3954 "ldd __tmp_reg__,%B1" CR_TAB
3955 "ldd %A0,%A1" CR_TAB
3956 "mov %B0,__tmp_reg__", op
, plen
, -4);
3958 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3959 "ldd %B0,%B1" CR_TAB
3960 "ldd %C0,%C1", op
, plen
, -3);
3962 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3963 return avr_asm_len ("ld %C0,%1" CR_TAB
3965 "ld %A0,%1", op
, plen
, -3);
3966 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3967 return avr_asm_len ("ld %A0,%1" CR_TAB
3969 "ld %C0,%1", op
, plen
, -3);
3971 else if (CONSTANT_ADDRESS_P (base
))
3972 return avr_asm_len ("lds %A0,%m1" CR_TAB
3973 "lds %B0,%m1+1" CR_TAB
3974 "lds %C0,%m1+2", op
, plen
, -6);
3976 fatal_insn ("unknown move insn:",insn
);
3980 /* Handle store of 24-bit type from register or zero to memory. */
3983 avr_out_store_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
3987 rtx base
= XEXP (dest
, 0);
3988 int reg_base
= true_regnum (base
);
3990 if (CONSTANT_ADDRESS_P (base
))
3991 return avr_asm_len ("sts %m0,%A1" CR_TAB
3992 "sts %m0+1,%B1" CR_TAB
3993 "sts %m0+2,%C1", op
, plen
, -6);
3995 if (reg_base
> 0) /* (r) */
3997 if (reg_base
== REG_X
) /* (R26) */
/* A 24-bit source cannot overlap X here (guaranteed by constraints).  */
3999 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
4001 avr_asm_len ("st %0+,%A1" CR_TAB
4003 "st %0,%C1", op
, plen
, -3);
/* Restore X if it is still live after this insn.  */
4005 if (!reg_unused_after (insn
, base
))
4006 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
4011 return avr_asm_len ("st %0,%A1" CR_TAB
4012 "std %0+1,%B1" CR_TAB
4013 "std %0+2,%C1", op
, plen
, -3);
4015 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4017 int disp
= INTVAL (XEXP (base
, 1));
4018 reg_base
= REGNO (XEXP (base
, 0));
/* Large displacement: only Y may carry it; adjust and restore.  */
4020 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4022 if (reg_base
!= REG_Y
)
4023 fatal_insn ("incorrect insn:",insn
);
4025 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4026 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
4027 "std Y+61,%A1" CR_TAB
4028 "std Y+62,%B1" CR_TAB
4029 "std Y+63,%C1" CR_TAB
4030 "sbiw r28,%o0-61", op
, plen
, -5);
4032 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4033 "sbci r29,hi8(-%o0)" CR_TAB
4035 "std Y+1,%B1" CR_TAB
4036 "std Y+2,%C1" CR_TAB
4037 "subi r28,lo8(%o0)" CR_TAB
4038 "sbci r29,hi8(%o0)", op
, plen
, -7);
4040 if (reg_base
== REG_X
)
4043 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
4045 avr_asm_len ("adiw r26,%o0" CR_TAB
4048 "st X,%C1", op
, plen
, -4);
4050 if (!reg_unused_after (insn
, XEXP (base
, 0)))
4051 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
4056 return avr_asm_len ("std %A0,%A1" CR_TAB
4057 "std %B0,%B1" CR_TAB
4058 "std %C0,%C1", op
, plen
, -3);
4060 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4061 return avr_asm_len ("st %0,%C1" CR_TAB
4063 "st %0,%A1", op
, plen
, -3);
4064 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4065 return avr_asm_len ("st %0,%A1" CR_TAB
4067 "st %0,%C1", op
, plen
, -3);
4069 fatal_insn ("unknown move insn:",insn
);
4074 /* Move around 24-bit stuff.
   Dispatcher for PSImode moves: flash via avr_out_lpm, reg<-mem via
   avr_out_load_psi, mem<-reg via avr_out_store_psi, constants via
   avr_out_reload_inpsi, reg<-reg inline.  */
4077 avr_out_movpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4082 if (avr_mem_flash_p (src
)
4083 || avr_mem_flash_p (dest
))
4085 return avr_out_lpm (insn
, op
, plen
);
4088 if (register_operand (dest
, VOIDmode
))
4090 if (register_operand (src
, VOIDmode
)) /* mov r,r */
/* Copy in the direction that does not clobber overlapping pairs:
   high-to-low when dest regno > src regno, low-to-high otherwise.  */
4092 if (true_regnum (dest
) > true_regnum (src
))
4094 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
4097 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
4099 return avr_asm_len ("mov %B0,%B1" CR_TAB
4100 "mov %A0,%A1", op
, plen
, 2);
4105 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
4107 avr_asm_len ("mov %A0,%A1" CR_TAB
4108 "mov %B0,%B1", op
, plen
, -2);
4110 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
4113 else if (CONSTANT_P (src
))
4115 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
4117 else if (MEM_P (src
))
4118 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
4120 else if (MEM_P (dest
))
/* Storing constant 0 uses __zero_reg__.  */
4125 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
4127 return avr_out_store_psi (insn
, xop
, plen
);
4130 fatal_insn ("invalid insn:", insn
);
/* Output a 1-byte store memory <- register.  Handles constant addresses
   (out/sts), reg+displacement via Y, the paranoid X+disp case, and
   plain register-indirect.  */
4136 out_movqi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
4140 rtx x
= XEXP (dest
, 0);
4142 if (CONSTANT_ADDRESS_P (x
))
/* Prefer 1-word "out" for I/O addresses when optimizing.  */
4144 return optimize
> 0 && io_address_operand (x
, QImode
)
4145 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
4146 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
4148 else if (GET_CODE (x
) == PLUS
4149 && REG_P (XEXP (x
, 0))
4150 && CONST_INT_P (XEXP (x
, 1)))
4152 /* memory access by reg+disp */
4154 int disp
= INTVAL (XEXP (x
, 1));
/* Displacement exceeds the "std" range: adjust Y and restore it.  */
4156 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
4158 if (REGNO (XEXP (x
, 0)) != REG_Y
)
4159 fatal_insn ("incorrect insn:",insn
);
4161 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4162 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4163 "std Y+63,%1" CR_TAB
4164 "sbiw r28,%o0-63", op
, plen
, -3);
4166 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4167 "sbci r29,hi8(-%o0)" CR_TAB
4169 "subi r28,lo8(%o0)" CR_TAB
4170 "sbci r29,hi8(%o0)", op
, plen
, -5);
4172 else if (REGNO (XEXP (x
,0)) == REG_X
)
/* If the source overlaps X, save it in tmp before adjusting X.  */
4174 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
4176 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4177 "adiw r26,%o0" CR_TAB
4178 "st X,__tmp_reg__", op
, plen
, -3);
4182 avr_asm_len ("adiw r26,%o0" CR_TAB
4183 "st X,%1", op
, plen
, -2);
4186 if (!reg_unused_after (insn
, XEXP (x
,0)))
4187 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
4192 return avr_asm_len ("std %0,%1", op
, plen
, -1);
4195 return avr_asm_len ("st %0,%1", op
, plen
, -1);
4199 /* Helper for the next function for XMEGA. It does the same
4200 but with low byte first. */
4203 avr_out_movhi_mr_r_xmega (rtx_insn
*insn
, rtx op
[], int *plen
)
4207 rtx base
= XEXP (dest
, 0);
4208 int reg_base
= true_regnum (base
);
4209 int reg_src
= true_regnum (src
);
4211 /* "volatile" forces writing low byte first, even if less efficient,
4212 for correct operation with 16-bit I/O registers like SP. */
4213 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
4215 if (CONSTANT_ADDRESS_P (base
))
/* XMEGA ordering: low byte first, also for in-range I/O addresses.  */
4216 return optimize
> 0 && io_address_operand (base
, HImode
)
4217 ? avr_asm_len ("out %i0,%A1" CR_TAB
4218 "out %i0+1,%B1", op
, plen
, -2)
4220 : avr_asm_len ("sts %m0,%A1" CR_TAB
4221 "sts %m0+1,%B1", op
, plen
, -4);
4225 if (reg_base
!= REG_X
)
4226 return avr_asm_len ("st %0,%A1" CR_TAB
4227 "std %0+1,%B1", op
, plen
, -2);
4229 if (reg_src
== REG_X
)
4230 /* "st X+,r26" and "st -X,r26" are undefined. */
4231 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4234 "st X,__tmp_reg__", op
, plen
, -4);
4236 avr_asm_len ("st X+,%A1" CR_TAB
4237 "st X,%B1", op
, plen
, -2);
/* Restore X if it is still live after this insn.  */
4239 return reg_unused_after (insn
, base
)
4241 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4243 else if (GET_CODE (base
) == PLUS
)
4245 int disp
= INTVAL (XEXP (base
, 1));
4246 reg_base
= REGNO (XEXP (base
, 0));
/* Large displacement: only Y may carry it; adjust and restore.  */
4247 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4249 if (reg_base
!= REG_Y
)
4250 fatal_insn ("incorrect insn:",insn
);
4252 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4253 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4254 "std Y+62,%A1" CR_TAB
4255 "std Y+63,%B1" CR_TAB
4256 "sbiw r28,%o0-62", op
, plen
, -4)
4258 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4259 "sbci r29,hi8(-%o0)" CR_TAB
4261 "std Y+1,%B1" CR_TAB
4262 "subi r28,lo8(%o0)" CR_TAB
4263 "sbci r29,hi8(%o0)", op
, plen
, -6);
4266 if (reg_base
!= REG_X
)
4267 return avr_asm_len ("std %A0,%A1" CR_TAB
4268 "std %B0,%B1", op
, plen
, -2);
/* X + disp with a source that may be X itself: save X in
   tmp/__zero_reg__ first, then restore __zero_reg__ and X.  */
4270 return reg_src
== REG_X
4271 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4272 "mov __zero_reg__,r27" CR_TAB
4273 "adiw r26,%o0" CR_TAB
4274 "st X+,__tmp_reg__" CR_TAB
4275 "st X,__zero_reg__" CR_TAB
4276 "clr __zero_reg__" CR_TAB
4277 "sbiw r26,%o0+1", op
, plen
, -7)
4279 : avr_asm_len ("adiw r26,%o0" CR_TAB
4282 "sbiw r26,%o0+1", op
, plen
, -4);
4284 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
/* Volatile stores must still happen low byte first, so the plain
   double pre-decrement (high first) is only used when non-volatile.  */
4286 if (!mem_volatile_p
)
4287 return avr_asm_len ("st %0,%B1" CR_TAB
4288 "st %0,%A1", op
, plen
, -2);
4290 return REGNO (XEXP (base
, 0)) == REG_X
4291 ? avr_asm_len ("sbiw r26,2" CR_TAB
4294 "sbiw r26,1", op
, plen
, -4)
4296 : avr_asm_len ("sbiw %r0,2" CR_TAB
4298 "std %p0+1,%B1", op
, plen
, -3);
4300 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4302 return avr_asm_len ("st %0,%A1" CR_TAB
4303 "st %0,%B1", op
, plen
, -2);
4306 fatal_insn ("unknown move insn:",insn
);
/* Output a 2-byte store memory <- register.  Non-XMEGA ordering is
   high byte first; XMEGA devices delegate to the low-byte-first
   helper above.  */
4312 out_movhi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
4316 rtx base
= XEXP (dest
, 0);
4317 int reg_base
= true_regnum (base
);
4318 int reg_src
= true_regnum (src
);
4321 /* "volatile" forces writing high-byte first (no-xmega) resp.
4322 low-byte first (xmega) even if less efficient, for correct
4323 operation with 16-bit I/O registers like. */
4326 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4328 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4330 if (CONSTANT_ADDRESS_P (base
))
/* High byte first on non-XMEGA, matching 16-bit I/O semantics.  */
4331 return optimize
> 0 && io_address_operand (base
, HImode
)
4332 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4333 "out %i0,%A1", op
, plen
, -2)
4335 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4336 "sts %m0,%A1", op
, plen
, -4);
4340 if (reg_base
!= REG_X
)
4341 return avr_asm_len ("std %0+1,%B1" CR_TAB
4342 "st %0,%A1", op
, plen
, -2);
4344 if (reg_src
== REG_X
)
4345 /* "st X+,r26" and "st -X,r26" are undefined. */
4346 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4347 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4350 "st X,__tmp_reg__", op
, plen
, -4)
4352 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4354 "st X,__tmp_reg__" CR_TAB
4356 "st X,r26", op
, plen
, -5);
/* For live or volatile X, go adiw / st high / st -X low so the high
   byte is still written first.  */
4358 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4359 ? avr_asm_len ("st X+,%A1" CR_TAB
4360 "st X,%B1", op
, plen
, -2)
4361 : avr_asm_len ("adiw r26,1" CR_TAB
4363 "st -X,%A1", op
, plen
, -3);
4365 else if (GET_CODE (base
) == PLUS
)
4367 int disp
= INTVAL (XEXP (base
, 1));
4368 reg_base
= REGNO (XEXP (base
, 0));
/* Large displacement: only Y may carry it; adjust and restore.  */
4369 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4371 if (reg_base
!= REG_Y
)
4372 fatal_insn ("incorrect insn:",insn
);
4374 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4375 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4376 "std Y+63,%B1" CR_TAB
4377 "std Y+62,%A1" CR_TAB
4378 "sbiw r28,%o0-62", op
, plen
, -4)
4380 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4381 "sbci r29,hi8(-%o0)" CR_TAB
4382 "std Y+1,%B1" CR_TAB
4384 "subi r28,lo8(%o0)" CR_TAB
4385 "sbci r29,hi8(%o0)", op
, plen
, -6);
4388 if (reg_base
!= REG_X
)
4389 return avr_asm_len ("std %B0,%B1" CR_TAB
4390 "std %A0,%A1", op
, plen
, -2);
/* X + disp, possibly with src == X: save X into tmp/__zero_reg__,
   write high then low via "st -X", restore __zero_reg__ and X.  */
4392 return reg_src
== REG_X
4393 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4394 "mov __zero_reg__,r27" CR_TAB
4395 "adiw r26,%o0+1" CR_TAB
4396 "st X,__zero_reg__" CR_TAB
4397 "st -X,__tmp_reg__" CR_TAB
4398 "clr __zero_reg__" CR_TAB
4399 "sbiw r26,%o0", op
, plen
, -7)
4401 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4404 "sbiw r26,%o0", op
, plen
, -4);
4406 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4408 return avr_asm_len ("st %0,%B1" CR_TAB
4409 "st %0,%A1", op
, plen
, -2);
4411 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
/* Volatile post-increment must still write the high byte first, so
   the straightforward low-then-high form is non-volatile only.  */
4413 if (!mem_volatile_p
)
4414 return avr_asm_len ("st %0,%A1" CR_TAB
4415 "st %0,%B1", op
, plen
, -2);
4417 return REGNO (XEXP (base
, 0)) == REG_X
4418 ? avr_asm_len ("adiw r26,1" CR_TAB
4421 "adiw r26,2", op
, plen
, -4)
4423 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4425 "adiw %r0,2", op
, plen
, -3);
4427 fatal_insn ("unknown move insn:",insn
);
4431 /* Return 1 if frame pointer for current function required. */
4434 avr_frame_pointer_required_p (void)
4436 return (cfun
->calls_alloca
4437 || cfun
->calls_setjmp
4438 || cfun
->has_nonlocal_label
4439 || crtl
->args
.info
.nregs
== 0
4440 || get_frame_size () > 0);
4443 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4446 compare_condition (rtx_insn
*insn
)
4448 rtx_insn
*next
= next_real_insn (insn
);
4450 if (next
&& JUMP_P (next
))
4452 rtx pat
= PATTERN (next
);
4453 rtx src
= SET_SRC (pat
);
4455 if (IF_THEN_ELSE
== GET_CODE (src
))
4456 return GET_CODE (XEXP (src
, 0));
4463 /* Returns true iff INSN is a tst insn that only tests the sign. */
4466 compare_sign_p (rtx_insn
*insn
)
4468 RTX_CODE cond
= compare_condition (insn
);
4469 return (cond
== GE
|| cond
== LT
);
4473 /* Returns true iff the next insn is a JUMP_INSN with a condition
4474 that needs to be swapped (GT, GTU, LE, LEU). */
4477 compare_diff_p (rtx_insn
*insn
)
4479 RTX_CODE cond
= compare_condition (insn
);
4480 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4483 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4486 compare_eq_p (rtx_insn
*insn
)
4488 RTX_CODE cond
= compare_condition (insn
);
4489 return (cond
== EQ
|| cond
== NE
);
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.

   NOTE(review): this copy is lossy -- the XREG/XVAL declarations and
   several braces/conditions are missing from this extraction.  */
avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
  /* Register to compare and value to compare against. */
  /* NOTE(review): the xreg/xval declarations are missing here.  */

  /* MODE of the comparison. */
  enum machine_mode mode;

  /* Number of bytes to operate on. */
  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder. */
  if (CONST_FIXED_P (xval))
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this. */
  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
      if (xval == const1_rtx)
          /* DEC then OR all bytes: result is 0 iff value was exactly 1.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);
          /* NOTE(review): the n_bytes guards for these are missing.  */
          avr_asm_len ("or %A0,%C0", xop, plen, 1);
          avr_asm_len ("or %A0,%D0", xop, plen, 1);
      else if (xval == constm1_rtx)
          avr_asm_len ("and %A0,%D0", xop, plen, 1);
          avr_asm_len ("and %A0,%C0", xop, plen, 1);
          /* AND all bytes then COM: result is 0 iff all bytes were 0xff.  */
          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);

  for (i = 0; i < n_bytes; i++)
      /* We compare byte-wise. */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate. */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
      /* NOTE(review): the leading part of this condition is missing.  */
          && test_hard_reg_class (ADDW_REGS, reg8))
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              || reg_unused_after (insn, xreg)))
              avr_asm_len ("sbiw %0,%1", xop, plen, 1);

          /* Small negative constants can use ADIW when the compare is
             EQ/NE and the register dies here.  */
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            return avr_asm_len ("adiw %0,%n1", xop, plen, 1);

      /* Comparing against 0 is easy. */
              ? "cp %0,__zero_reg__"
              : "cpc %0,__zero_reg__", xop, plen, 1);

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register. */
          avr_asm_len ("cpi %0,%1", xop, plen, 1);
      else if (reg_unused_after (insn, xreg))
          /* Register dies here, so clobbering it with SBCI is fine.  */
          avr_asm_len ("sbci %0,%1", xop, plen, 1);

      /* Must load the value into the scratch register. */
      gcc_assert (REG_P (xop[2]));

      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      /* NOTE(review): the CP/CPC first-byte selection is missing here.  */
        : "cpc %0,%2", xop, plen, 1);
4659 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4662 avr_out_compare64 (rtx_insn
*insn
, rtx
*op
, int *plen
)
4666 xop
[0] = gen_rtx_REG (DImode
, 18);
4670 return avr_out_compare (insn
, xop
, plen
);
4673 /* Output test instruction for HImode. */
4676 avr_out_tsthi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4678 if (compare_sign_p (insn
))
4680 avr_asm_len ("tst %B0", op
, plen
, -1);
4682 else if (reg_unused_after (insn
, op
[0])
4683 && compare_eq_p (insn
))
4685 /* Faster than sbiw if we can clobber the operand. */
4686 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4690 avr_out_compare (insn
, op
, plen
);
4697 /* Output test instruction for PSImode. */
4700 avr_out_tstpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4702 if (compare_sign_p (insn
))
4704 avr_asm_len ("tst %C0", op
, plen
, -1);
4706 else if (reg_unused_after (insn
, op
[0])
4707 && compare_eq_p (insn
))
4709 /* Faster than sbiw if we can clobber the operand. */
4710 avr_asm_len ("or %A0,%B0" CR_TAB
4711 "or %A0,%C0", op
, plen
, -2);
4715 avr_out_compare (insn
, op
, plen
);
4722 /* Output test instruction for SImode. */
4725 avr_out_tstsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4727 if (compare_sign_p (insn
))
4729 avr_asm_len ("tst %D0", op
, plen
, -1);
4731 else if (reg_unused_after (insn
, op
[0])
4732 && compare_eq_p (insn
))
4734 /* Faster than sbiw if we can clobber the operand. */
4735 avr_asm_len ("or %A0,%B0" CR_TAB
4737 "or %A0,%D0", op
, plen
, -3);
4741 avr_out_compare (insn
, op
, plen
);
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.

   NOTE(review): lossy extraction -- the declarations of `op'/`op_mov'
   and several braces/conditions are missing below; verify against the
   upstream sources before editing.  */
out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
                    int *plen, int t_len)
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (CONST_INT_P (operands[2]))
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10; /* If larger than this, always use a loop. */

      if (count < 8 && !scratch)
        use_zero_reg = true;

      max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
          /* Output shifts inline with no loop - faster. */
          avr_asm_len (templ, op, plen, t_len);

      /* NOTE(review): the branch head selecting the scratch reg as loop
         counter is missing here.  */
          avr_asm_len ("ldi %3,%2", op, plen, 1);

      else if (use_zero_reg)
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again. */
          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);

          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift. */
          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);

      second_label = false;

  else if (MEM_P (op[2]))
      /* Shift count comes from memory: fetch it into __tmp_reg__.  */
      op_mov[0] = op[3] = tmp_reg_rtx;

      out_movqi_r_mr (insn, op_mov, plen);

  else if (register_operand (op[2], QImode))
      /* Copy the count if it is still live afterwards or overlaps the
         register being shifted.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);

    fatal_insn ("bad shift insn:", insn);

  /* Loop with the test at the top: jump to the count check first.  */
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

    avr_asm_len ("2:", op, plen, 0);

  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

    avr_asm_len ("mov %3,%4", op, plen, 1);
/* 8bit shift left ((char)x << i)

   NOTE(review): lossy extraction -- case labels, most asm continuation
   lines and the *len bookkeeping are missing below; only the visible
   fragments are kept.  */
ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      switch (INTVAL (operands[2]))
          /* Out-of-range counts (>= 8) clear the register entirely.  */
          if (INTVAL (operands[2]) < 8)

          return ("lsl %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))
              /* SWAP-based variant: only LD regs can ANDI the result.  */
              return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))
              return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))
              return ("swap %0" CR_TAB

          return ("lsl %0" CR_TAB

          /* Shift by 7: route bit 0 to the MSB through the carry.  */
          return ("ror %0" CR_TAB

  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Variable count: emit a one-bit-shift loop.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
/* 16bit shift left ((short)x << i)

   NOTE(review): lossy extraction -- case labels, many asm continuation
   lines and the *len bookkeeping are missing; only visible fragments
   are kept.  */
ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      /* A PARALLEL pattern means the insn carries a scratch operand %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))
          if (INTVAL (operands[2]) < 16)

          return ("clr %B0" CR_TAB

          if (optimize_size && scratch)
          return ("swap %A0" CR_TAB
                  "andi %B0,0xf0" CR_TAB
                  "eor %B0,%A0" CR_TAB
                  "andi %A0,0xf0" CR_TAB

          return ("swap %A0" CR_TAB
                  "ldi %3,0xf0" CR_TAB
                  "eor %B0,%A0" CR_TAB

          break; /* optimize_size ? 6 : 8 */

          break; /* scratch ? 5 : 6 */

          return ("lsl %A0" CR_TAB
                  "andi %B0,0xf0" CR_TAB
                  "eor %B0,%A0" CR_TAB
                  "andi %A0,0xf0" CR_TAB

          return ("lsl %A0" CR_TAB
                  "ldi %3,0xf0" CR_TAB
                  "eor %B0,%A0" CR_TAB

          break; /* scratch ? 5 : 6 */

          return ("clr __tmp_reg__" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB

          /* Shift by 8 is a plain byte move.  */
          return *len = 2, ("mov %B0,%A1" CR_TAB

          return ("mov %B0,%A0" CR_TAB
          return ("mov %B0,%A0" CR_TAB
          return ("mov %B0,%A0" CR_TAB
          return ("mov %B0,%A0" CR_TAB
          return ("mov %B0,%A0" CR_TAB
                  "ldi %3,0xf0" CR_TAB
          return ("mov %B0,%A0" CR_TAB
          return ("mov %B0,%A0" CR_TAB

          /* Large shifts: multiply by a power of two when MUL exists.  */
          if (AVR_HAVE_MUL && scratch)
              return ("ldi %3,0x20" CR_TAB
                      "clr __zero_reg__");
          if (optimize_size && scratch)
          return ("mov %B0,%A0" CR_TAB
                  "ldi %3,0xe0" CR_TAB
          return ("set" CR_TAB
                  "clr __zero_reg__");
          return ("mov %B0,%A0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "clr __zero_reg__");
          if (AVR_HAVE_MUL && scratch)
              return ("ldi %3,0x40" CR_TAB
                      "clr __zero_reg__");
          if (optimize_size && ldi_ok)
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
          if (optimize_size && scratch)
          return ("clr %B0" CR_TAB
          return ("clr %B0" CR_TAB

  /* Variable count: one-bit shift loop over both bytes.  */
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
/* 24-bit shift left

   NOTE(review): lossy extraction -- case labels and some asm lines are
   missing; only visible fragments are kept.  */
avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
  if (CONST_INT_P (op[2]))
      switch (INTVAL (op[2]))
          if (INTVAL (op[2]) < 24)

          /* Out-of-range counts: result is all zero.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %C0", op, plen, 3);

          /* Shift by 8: byte moves, ordered by source/destination
             register overlap.  */
          int reg0 = REGNO (op[0]);
          int reg1 = REGNO (op[1]);

          return avr_asm_len ("mov %C0,%B1" CR_TAB
                              "mov %B0,%A1" CR_TAB
                              "clr %A0", op, plen, 3);
          return avr_asm_len ("clr %A0" CR_TAB
                              "mov %B0,%A1" CR_TAB
                              "mov %C0,%B1", op, plen, 3);

          int reg0 = REGNO (op[0]);
          int reg1 = REGNO (op[1]);

          if (reg0 + 2 != reg1)
            /* NOTE(review): upstream sources read "mov %C0,%A1" here;
               this copy shows %A0 -- verify before relying on it.  */
            avr_asm_len ("mov %C0,%A0", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "clr %A0", op, plen, 2);

          return avr_asm_len ("clr %C0" CR_TAB
                              "clr %A0", op, plen, 5);

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
/* 32bit shift left ((long)x << i)

   NOTE(review): lossy extraction -- case labels, *len bookkeeping and
   many asm continuation lines are missing; only visible fragments are
   kept.  */
ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      switch (INTVAL (operands[2]))
          if (INTVAL (operands[2]) < 32)

          return *len = 3, ("clr %D0" CR_TAB
          return ("clr %D0" CR_TAB

          /* Shift by 8: byte moves, ordered by overlap direction.  */
          int reg0 = true_regnum (operands[0]);
          int reg1 = true_regnum (operands[1]);

          return ("mov %D0,%C1" CR_TAB
                  "mov %C0,%B1" CR_TAB
                  "mov %B0,%A1" CR_TAB
          return ("clr %A0" CR_TAB
                  "mov %B0,%A1" CR_TAB
                  "mov %C0,%B1" CR_TAB

          /* Shift by 16: word move, MOVW when available.  */
          int reg0 = true_regnum (operands[0]);
          int reg1 = true_regnum (operands[1]);

          if (reg0 + 2 == reg1)
            return *len = 2, ("clr %B0" CR_TAB
          return *len = 3, ("movw %C0,%A1" CR_TAB
          return *len = 4, ("mov %C0,%A1" CR_TAB
                            "mov %D0,%B1" CR_TAB

          return ("mov %D0,%A1" CR_TAB

          return ("clr %D0" CR_TAB

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
/* 8bit arithmetic shift right ((signed char)x >> i)

   NOTE(review): lossy extraction -- case labels and most asm
   continuation lines are missing; only visible fragments are kept.  */
ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      switch (INTVAL (operands[2]))
          return ("asr %0" CR_TAB
          return ("asr %0" CR_TAB
          return ("asr %0" CR_TAB
          return ("asr %0" CR_TAB

          /* Shift by 6: capture bit 6 in T, then reconstruct.  */
          return ("bst %0,6" CR_TAB

          if (INTVAL (operands[2]) < 8)

          /* Large counts: replicate the sign bit into every position.  */
          return ("lsl %0" CR_TAB

  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
/* 16bit arithmetic shift right ((signed short)x >> i)

   NOTE(review): lossy extraction -- case labels, *len bookkeeping and
   many asm continuation lines are missing; only visible fragments are
   kept.  */
ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      /* A PARALLEL pattern means the insn carries a scratch operand %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))
          /* XXX try to optimize this too? */

          break; /* scratch ? 5 : 6 */

          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB

          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB

          /* Shift by 8: move the high byte down and sign-extend.  */
          int reg0 = true_regnum (operands[0]);
          int reg1 = true_regnum (operands[1]);

          return *len = 3, ("mov %A0,%B0" CR_TAB
          return *len = 4, ("mov %A0,%B1" CR_TAB

          return ("mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
          return ("mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          /* Larger shifts: MULS by a power of two when MUL exists.  */
          if (AVR_HAVE_MUL && ldi_ok)
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
          if (optimize_size && scratch)
          return ("mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
          if (optimize_size && scratch)
          return ("mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");

          break; /* scratch ? 5 : 7 */

          return ("mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          /* Shift by 14: route the sign through the carry.  */
          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "mov %B0,%A0" CR_TAB

          if (INTVAL (operands[2]) < 16)

          /* Large counts: replicate the sign bit into both bytes.  */
          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB

  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
/* 24-bit arithmetic shift right

   NOTE(review): lossy extraction -- case labels and a few asm lines are
   missing; only visible fragments are kept.  */
avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
      switch (INTVAL (op[2]))
          /* Shift by 8: move bytes down and sign-extend into %C0
             (the sbrc/clr lines are among the missing ones).  */
          return avr_asm_len ("mov %A0,%B1" CR_TAB
                              "mov %B0,%C1" CR_TAB
                              "dec %C0", op, plen, 5);
          return avr_asm_len ("clr %C0" CR_TAB
                              "mov %B0,%C1" CR_TAB
                              "mov %A0,%B1", op, plen, 5);

          /* Shift by 16.  */
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "mov %C0,%B0", op, plen, 4);

          if (INTVAL (op[2]) < 24)

          /* Large counts: smear the sign across all three bytes.  */
          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
/* 32-bit arithmetic shift right ((signed long)x >> i)

   NOTE(review): lossy extraction -- case labels and many asm
   continuation lines are missing; only visible fragments are kept.  */
ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      switch (INTVAL (operands[2]))
          /* Shift by 8: byte moves down, sign-extension into %D0.  */
          int reg0 = true_regnum (operands[0]);
          int reg1 = true_regnum (operands[1]);

          return ("mov %A0,%B1" CR_TAB
                  "mov %B0,%C1" CR_TAB
                  "mov %C0,%D1" CR_TAB
          return ("clr %D0" CR_TAB
                  "mov %C0,%D1" CR_TAB
                  "mov %B0,%C1" CR_TAB

          /* Shift by 16: word move, MOVW when available.  */
          int reg0 = true_regnum (operands[0]);
          int reg1 = true_regnum (operands[1]);

          if (reg0 == reg1 + 2)
            return *len = 4, ("clr %D0" CR_TAB
          return *len = 5, ("movw %A0,%C1" CR_TAB
          return *len = 6, ("mov %B0,%D1" CR_TAB
                            "mov %A0,%C1" CR_TAB

          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "mov %B0,%D0" CR_TAB

          if (INTVAL (operands[2]) < 32)

          /* Large counts: smear the sign bit across all four bytes.  */
          return *len = 4, ("lsl %D0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0" CR_TAB
          return *len = 5, ("lsl %D0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0" CR_TAB
                            "mov %C0,%A0" CR_TAB

  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
/* 8-bit logic shift right ((unsigned char)x >> i)

   NOTE(review): lossy extraction -- case labels and most asm
   continuation lines are missing; only visible fragments are kept.  */
lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      switch (INTVAL (operands[2]))
          if (INTVAL (operands[2]) < 8)

          return ("lsr %0" CR_TAB
          return ("lsr %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))
              /* SWAP-based variant: only LD regs can ANDI the result.  */
              return ("swap %0" CR_TAB
          return ("lsr %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))
              return ("swap %0" CR_TAB
          return ("lsr %0" CR_TAB

          if (test_hard_reg_class (LD_REGS, operands[0]))
              return ("swap %0" CR_TAB
          return ("lsr %0" CR_TAB

          /* Shift by 7: move the MSB to bit 0 through the carry.  */
          return ("rol %0" CR_TAB

  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
/* 16-bit logic shift right ((unsigned short)x >> i)

   NOTE(review): lossy extraction -- case labels, *len bookkeeping and
   many asm continuation lines are missing; only visible fragments are
   kept.  */
lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      /* A PARALLEL pattern means the insn carries a scratch operand %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);

      switch (INTVAL (operands[2]))
          if (INTVAL (operands[2]) < 16)

          return ("clr %B0" CR_TAB

          if (optimize_size && scratch)
          return ("swap %B0" CR_TAB
                  "andi %A0,0x0f" CR_TAB
                  "eor %A0,%B0" CR_TAB
                  "andi %B0,0x0f" CR_TAB

          return ("swap %B0" CR_TAB
                  "ldi %3,0x0f" CR_TAB
                  "eor %A0,%B0" CR_TAB

          break; /* optimize_size ? 6 : 8 */

          break; /* scratch ? 5 : 6 */

          return ("lsr %B0" CR_TAB
                  "andi %A0,0x0f" CR_TAB
                  "eor %A0,%B0" CR_TAB
                  "andi %B0,0x0f" CR_TAB

          return ("lsr %B0" CR_TAB
                  "ldi %3,0x0f" CR_TAB
                  "eor %A0,%B0" CR_TAB

          break; /* scratch ? 5 : 6 */

          return ("clr __tmp_reg__" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "mov %B0,__tmp_reg__");

          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB

          /* Shift by 8 is a plain byte move.  */
          return *len = 2, ("mov %A0,%B1" CR_TAB

          return ("mov %A0,%B0" CR_TAB
          return ("mov %A0,%B0" CR_TAB
          return ("mov %A0,%B0" CR_TAB
          return ("mov %A0,%B0" CR_TAB
          return ("mov %A0,%B0" CR_TAB
                  "ldi %3,0x0f" CR_TAB
          return ("mov %A0,%B0" CR_TAB
          return ("mov %A0,%B0" CR_TAB

          /* Larger shifts: multiply by a power of two when MUL exists.  */
          if (AVR_HAVE_MUL && scratch)
              return ("ldi %3,0x08" CR_TAB
                      "clr __zero_reg__");
          if (optimize_size && scratch)
          return ("mov %A0,%B0" CR_TAB
                  "ldi %3,0x07" CR_TAB
          return ("set" CR_TAB
                  "clr __zero_reg__");
          return ("mov %A0,%B0" CR_TAB

          if (AVR_HAVE_MUL && ldi_ok)
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0" CR_TAB
                      "clr __zero_reg__");
          if (AVR_HAVE_MUL && scratch)
              return ("ldi %3,0x04" CR_TAB
                      "clr __zero_reg__");
          if (optimize_size && ldi_ok)
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
          if (optimize_size && scratch)
          return ("clr %A0" CR_TAB
          return ("clr %A0" CR_TAB

  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
/* 24-bit logic shift right

   NOTE(review): lossy extraction -- case labels and some asm lines are
   missing; only visible fragments are kept.  */
avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
      switch (INTVAL (op[2]))
          /* Shift by 8: move bytes down and zero the top byte; move
             order depends on register overlap.  */
          return avr_asm_len ("mov %A0,%B1" CR_TAB
                              "mov %B0,%C1" CR_TAB
                              "clr %C0", op, plen, 3);
          return avr_asm_len ("clr %C0" CR_TAB
                              "mov %B0,%C1" CR_TAB
                              "mov %A0,%B1", op, plen, 3);

          /* Shift by 16.  */
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "clr %C0", op, plen, 2);

          if (INTVAL (op[2]) < 24)

          /* Shift by 23: only the MSB survives, as bit 0.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %C0", op, plen, 5);

  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
/* 32-bit logic shift right ((unsigned int)x >> i)

   NOTE(review): lossy extraction -- case labels, *len bookkeeping and
   many asm continuation lines are missing; only visible fragments are
   kept.  */
lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
  if (GET_CODE (operands[2]) == CONST_INT)
      switch (INTVAL (operands[2]))
          if (INTVAL (operands[2]) < 32)

          return *len = 3, ("clr %D0" CR_TAB
          return ("clr %D0" CR_TAB

          /* Shift by 8: byte moves, ordered by overlap direction.  */
          int reg0 = true_regnum (operands[0]);
          int reg1 = true_regnum (operands[1]);

          return ("mov %A0,%B1" CR_TAB
                  "mov %B0,%C1" CR_TAB
                  "mov %C0,%D1" CR_TAB
          return ("clr %D0" CR_TAB
                  "mov %C0,%D1" CR_TAB
                  "mov %B0,%C1" CR_TAB

          /* Shift by 16: word move, MOVW when available.  */
          int reg0 = true_regnum (operands[0]);
          int reg1 = true_regnum (operands[1]);

          if (reg0 == reg1 + 2)
            return *len = 2, ("clr %C0" CR_TAB
          return *len = 3, ("movw %A0,%C1" CR_TAB
          return *len = 4, ("mov %B0,%D1" CR_TAB
                            "mov %A0,%C1" CR_TAB

          return *len = 4, ("mov %A0,%D1" CR_TAB

          /* Shift by 31: only the MSB survives, as bit 0.  */
          return ("clr %A0" CR_TAB

  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
6264 /* Output addition of register XOP[0] and compile time constant XOP[2].
6265 CODE == PLUS: perform addition by using ADD instructions or
6266 CODE == MINUS: perform addition by using SUB instructions:
6268 XOP[0] = XOP[0] + XOP[2]
6270 Or perform addition/subtraction with register XOP[2] depending on CODE:
6272 XOP[0] = XOP[0] +/- XOP[2]
6274 If PLEN == NULL, print assembler instructions to perform the operation;
6275 otherwise, set *PLEN to the length of the instruction sequence (in words)
6276 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6277 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
6279 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6280 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6281 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6282 the subtrahend in the original insn, provided it is a compile time constant.
6283 In all other cases, SIGN is 0.
6285 If OUT_LABEL is true, print the final 0: label which is needed for
6286 saturated addition / subtraction. The only case where OUT_LABEL = false
6287 is useful is for saturated addition / subtraction performed during
6288 fixed-point rounding, cf. `avr_out_round'. */
6291 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
6292 enum rtx_code code_sat
, int sign
, bool out_label
)
6294 /* MODE of the operation. */
6295 enum machine_mode mode
= GET_MODE (xop
[0]);
6297 /* INT_MODE of the same size. */
6298 enum machine_mode imode
= int_mode_for_mode (mode
);
6300 /* Number of bytes to operate on. */
6301 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6303 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6304 int clobber_val
= -1;
6306 /* op[0]: 8-bit destination register
6307 op[1]: 8-bit const int
6308 op[2]: 8-bit scratch register */
6311 /* Started the operation? Before starting the operation we may skip
6312 adding 0. This is no more true after the operation started because
6313 carry must be taken into account. */
6314 bool started
= false;
6316 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6319 /* Output a BRVC instruction. Only needed with saturation. */
6320 bool out_brvc
= true;
6327 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_CLOBBER
;
6329 for (i
= 0; i
< n_bytes
; i
++)
6331 /* We operate byte-wise on the destination. */
6332 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6333 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6336 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
6339 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
6343 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6345 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
6354 /* Except in the case of ADIW with 16-bit register (see below)
6355 addition does not set cc0 in a usable way. */
6357 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6359 if (CONST_FIXED_P (xval
))
6360 xval
= avr_to_int_mode (xval
);
6362 /* Adding/Subtracting zero is a no-op. */
6364 if (xval
== const0_rtx
)
6371 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
6375 if (SS_PLUS
== code_sat
&& MINUS
== code
6377 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
6378 & GET_MODE_MASK (QImode
)))
6380 /* We compute x + 0x80 by means of SUB instructions. We negated the
6381 constant subtrahend above and are left with x - (-128) so that we
6382 need something like SUBI r,128 which does not exist because SUBI sets
6383 V according to the sign of the subtrahend. Notice the only case
6384 where this must be done is when NEG overflowed in case [2s] because
6385 the V computation needs the right sign of the subtrahend. */
6387 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6389 avr_asm_len ("subi %0,128" CR_TAB
6390 "brmi 0f", &msb
, plen
, 2);
6396 for (i
= 0; i
< n_bytes
; i
++)
6398 /* We operate byte-wise on the destination. */
6399 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6400 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
6402 /* 8-bit value to operate with this byte. */
6403 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6405 /* Registers R16..R31 can operate with immediate. */
6406 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6409 op
[1] = gen_int_mode (val8
, QImode
);
6411 /* To get usable cc0 no low-bytes must have been skipped. */
6419 && test_hard_reg_class (ADDW_REGS
, reg8
))
6421 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
6422 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6424 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6425 i.e. operate word-wise. */
6432 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6435 if (n_bytes
== 2 && PLUS
== code
)
6447 avr_asm_len (code
== PLUS
6448 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6452 else if ((val8
== 1 || val8
== 0xff)
6453 && UNKNOWN
== code_sat
6455 && i
== n_bytes
- 1)
6457 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6467 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
6469 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
6471 /* This belongs to the x + 0x80 corner case. The code with
6472 ADD instruction is not smaller, thus make this case
6473 expensive so that the caller won't pick it. */
6479 if (clobber_val
!= (int) val8
)
6480 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6481 clobber_val
= (int) val8
;
6483 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6490 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6493 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6495 if (clobber_val
!= (int) val8
)
6496 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6497 clobber_val
= (int) val8
;
6499 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6511 } /* for all sub-bytes */
6515 if (UNKNOWN
== code_sat
)
6518 *pcc
= (int) CC_CLOBBER
;
6520 /* Vanilla addition/subtraction is done. We are left with saturation.
6522 We have to compute A = A <op> B where A is a register and
6523 B is a register or a non-zero compile time constant CONST.
6524 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6525 B stands for the original operand $2 in INSN. In the case of B = CONST,
6526 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6528 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6532 operation | code | sat if | b is | sat value | case
6533 -----------------+-------+----------+--------------+-----------+-------
6534 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6535 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6536 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6537 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6541 operation | code | sat if | b is | sat value | case
6542 -----------------+-------+----------+--------------+-----------+-------
6543 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6544 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6545 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6546 - as a + (-b) | add | V == 1 | const | s- | [4s]
6548 s+ = b < 0 ? -0x80 : 0x7f
6549 s- = b < 0 ? 0x7f : -0x80
6551 The cases a - b actually perform a - (-(-b)) if B is CONST.
6554 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6556 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
6559 bool need_copy
= true;
6560 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
6571 avr_asm_len ("brvc 0f", op
, plen
, 1);
6573 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6578 avr_asm_len ("ldi %0,0x7f" CR_TAB
6579 "adc %0,__zero_reg__", op
, plen
, 2);
6581 avr_asm_len ("ldi %0,0x7f" CR_TAB
6582 "ldi %1,0xff" CR_TAB
6583 "adc %1,__zero_reg__" CR_TAB
6584 "adc %0,__zero_reg__", op
, plen
, 4);
6586 else if (sign
== 0 && PLUS
== code
)
6590 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6593 avr_asm_len ("ldi %0,0x80" CR_TAB
6595 "dec %0", op
, plen
, 3);
6597 avr_asm_len ("ldi %0,0x80" CR_TAB
6600 "sbci %0,0", op
, plen
, 4);
6602 else if (sign
== 0 && MINUS
== code
)
6606 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6609 avr_asm_len ("ldi %0,0x7f" CR_TAB
6611 "inc %0", op
, plen
, 3);
6613 avr_asm_len ("ldi %0,0x7f" CR_TAB
6616 "sbci %0,-1", op
, plen
, 4);
6618 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
6620 /* [1s,const,B < 0] [2s,B < 0] */
6621 /* [3s,const,B > 0] [4s,B > 0] */
6625 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6629 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
6630 if (n_bytes
> 1 && need_copy
)
6631 avr_asm_len ("clr %1", op
, plen
, 1);
6633 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
6635 /* [1s,const,B > 0] [2s,B > 0] */
6636 /* [3s,const,B < 0] [4s,B < 0] */
6640 avr_asm_len ("sec" CR_TAB
6641 "%~call __sbc_8", op
, plen
, 1 + len_call
);
6645 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
6646 if (n_bytes
> 1 && need_copy
)
6647 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
6657 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
6662 avr_asm_len ("sec", op
, plen
, 1);
6663 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
6669 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
6670 avr_asm_len ("sec" CR_TAB
"sbc %0,%0", op
, plen
, 2);
6672 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
6675 break; /* US_PLUS */
6680 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
6684 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6688 avr_asm_len ("clr %0", op
, plen
, 1);
6693 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6694 Now copy the right value to the LSBs. */
6696 if (need_copy
&& n_bytes
> 1)
6698 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
6700 avr_asm_len ("mov %1,%0", op
, plen
, 1);
6706 avr_asm_len ("movw %0,%1", op
, plen
, 1);
6708 avr_asm_len ("mov %A0,%1" CR_TAB
6709 "mov %B0,%1", op
, plen
, 2);
6712 else if (n_bytes
> 2)
6715 avr_asm_len ("mov %A0,%1" CR_TAB
6716 "mov %B0,%1", op
, plen
, 2);
6720 if (need_copy
&& n_bytes
== 8)
6723 avr_asm_len ("movw %r0+2,%0" CR_TAB
6724 "movw %r0+4,%0", xop
, plen
, 2);
6726 avr_asm_len ("mov %r0+2,%0" CR_TAB
6727 "mov %r0+3,%0" CR_TAB
6728 "mov %r0+4,%0" CR_TAB
6729 "mov %r0+5,%0", xop
, plen
, 4);
6733 avr_asm_len ("0:", op
, plen
, 0);
6737 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6738 is ont a compile-time constant:
6740 XOP[0] = XOP[0] +/- XOP[2]
6742 This is a helper for the function below. The only insns that need this
6743 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6746 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
6748 enum machine_mode mode
= GET_MODE (xop
[0]);
6750 /* Only pointer modes want to add symbols. */
6752 gcc_assert (mode
== HImode
|| mode
== PSImode
);
6754 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6756 avr_asm_len (PLUS
== code
6757 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
6758 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
6761 if (PSImode
== mode
)
6762 avr_asm_len (PLUS
== code
6763 ? "sbci %C0,hlo8(-(%2))"
6764 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
6769 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6771 INSN is a single_set insn or an insn pattern with a binary operation as
6772 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6774 XOP are the operands of INSN. In the case of 64-bit operations with
6775 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6776 The non-saturating insns up to 32 bits may or may not supply a "d" class
6779 If PLEN == NULL output the instructions.
6780 If PLEN != NULL set *PLEN to the length of the sequence in words.
6782 PCC is a pointer to store the instructions' effect on cc0.
6785 PLEN and PCC default to NULL.
6787 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
6792 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
, bool out_label
)
6794 int cc_plus
, cc_minus
, cc_dummy
;
6795 int len_plus
, len_minus
;
6797 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
6798 rtx xdest
= SET_DEST (xpattern
);
6799 enum machine_mode mode
= GET_MODE (xdest
);
6800 enum machine_mode imode
= int_mode_for_mode (mode
);
6801 int n_bytes
= GET_MODE_SIZE (mode
);
6802 enum rtx_code code_sat
= GET_CODE (SET_SRC (xpattern
));
6804 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
6810 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6812 if (PLUS
== code_sat
|| MINUS
== code_sat
)
6815 if (n_bytes
<= 4 && REG_P (xop
[2]))
6817 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
, 0, out_label
);
6823 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
6824 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
6825 op
[2] = avr_to_int_mode (xop
[0]);
6830 && !CONST_INT_P (xop
[2])
6831 && !CONST_FIXED_P (xop
[2]))
6833 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
6836 op
[0] = avr_to_int_mode (xop
[0]);
6837 op
[1] = avr_to_int_mode (xop
[1]);
6838 op
[2] = avr_to_int_mode (xop
[2]);
6841 /* Saturations and 64-bit operations don't have a clobber operand.
6842 For the other cases, the caller will provide a proper XOP[3]. */
6844 xpattern
= INSN_P (insn
) ? PATTERN (insn
) : insn
;
6845 op
[3] = PARALLEL
== GET_CODE (xpattern
) ? xop
[3] : NULL_RTX
;
6847 /* Saturation will need the sign of the original operand. */
6849 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
6850 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
6852 /* If we subtract and the subtrahend is a constant, then negate it
6853 so that avr_out_plus_1 can be used. */
6856 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
6858 /* Work out the shortest sequence. */
6860 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_minus
, code_sat
, sign
, out_label
);
6861 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_plus
, code_sat
, sign
, out_label
);
6865 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6866 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6868 else if (len_minus
<= len_plus
)
6869 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
, out_label
);
6871 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
, out_label
);
6877 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6878 time constant XOP[2]:
6880 XOP[0] = XOP[0] <op> XOP[2]
6882 and return "". If PLEN == NULL, print assembler instructions to perform the
6883 operation; otherwise, set *PLEN to the length of the instruction sequence
6884 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6885 register or SCRATCH if no clobber register is needed for the operation.
6886 INSN is an INSN_P or a pattern of an insn. */
6889 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6891 /* CODE and MODE of the operation. */
6892 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
6893 enum rtx_code code
= GET_CODE (SET_SRC (xpattern
));
6894 enum machine_mode mode
= GET_MODE (xop
[0]);
6896 /* Number of bytes to operate on. */
6897 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6899 /* Value of T-flag (0 or 1) or -1 if unknown.
6902 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6903 int clobber_val
= -1;
6905 /* op[0]: 8-bit destination register
6906 op[1]: 8-bit const int
6907 op[2]: 8-bit clobber register or SCRATCH
6908 op[3]: 8-bit register containing 0xff or NULL_RTX */
6917 for (i
= 0; i
< n_bytes
; i
++)
6919 /* We operate byte-wise on the destination. */
6920 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6921 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6923 /* 8-bit value to operate with this byte. */
6924 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6926 /* Number of bits set in the current byte of the constant. */
6927 int pop8
= avr_popcount (val8
);
6929 /* Registers R16..R31 can operate with immediate. */
6930 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6933 op
[1] = GEN_INT (val8
);
6942 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6946 avr_asm_len ("set", op
, plen
, 1);
6949 op
[1] = GEN_INT (exact_log2 (val8
));
6950 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6954 if (op
[3] != NULL_RTX
)
6955 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6957 avr_asm_len ("clr %0" CR_TAB
6958 "dec %0", op
, plen
, 2);
6964 if (clobber_val
!= (int) val8
)
6965 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6966 clobber_val
= (int) val8
;
6968 avr_asm_len ("or %0,%2", op
, plen
, 1);
6978 avr_asm_len ("clr %0", op
, plen
, 1);
6980 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6984 avr_asm_len ("clt", op
, plen
, 1);
6987 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6988 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6992 if (clobber_val
!= (int) val8
)
6993 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6994 clobber_val
= (int) val8
;
6996 avr_asm_len ("and %0,%2", op
, plen
, 1);
7006 avr_asm_len ("com %0", op
, plen
, 1);
7007 else if (ld_reg_p
&& val8
== (1 << 7))
7008 avr_asm_len ("subi %0,%1", op
, plen
, 1);
7011 if (clobber_val
!= (int) val8
)
7012 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7013 clobber_val
= (int) val8
;
7015 avr_asm_len ("eor %0,%2", op
, plen
, 1);
7021 /* Unknown rtx_code */
7024 } /* for all sub-bytes */
7030 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7031 PLEN != NULL: Set *PLEN to the length of that sequence.
7035 avr_out_addto_sp (rtx
*op
, int *plen
)
7037 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
7038 int addend
= INTVAL (op
[0]);
7045 if (flag_verbose_asm
|| flag_print_asm_name
)
7046 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
7048 while (addend
<= -pc_len
)
7051 avr_asm_len ("rcall .", op
, plen
, 1);
7054 while (addend
++ < 0)
7055 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
7057 else if (addend
> 0)
7059 if (flag_verbose_asm
|| flag_print_asm_name
)
7060 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
7062 while (addend
-- > 0)
7063 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
7070 /* Outputs instructions needed for fixed point type conversion.
7071 This includes converting between any fixed point type, as well
7072 as converting to any integer type. Conversion between integer
7073 types is not supported.
7075 Converting signed fractional types requires a bit shift if converting
7076 to or from any unsigned fractional type because the decimal place is
7077 shifted by 1 bit. When the destination is a signed fractional, the sign
7078 is stored in either the carry or T bit. */
7081 avr_out_fract (rtx_insn
*insn
, rtx operands
[], bool intsigned
, int *plen
)
7085 RTX_CODE shift
= UNKNOWN
;
7086 bool sign_in_carry
= false;
7087 bool msb_in_carry
= false;
7088 bool lsb_in_tmp_reg
= false;
7089 bool lsb_in_carry
= false;
7090 bool frac_rounded
= false;
7091 const char *code_ashift
= "lsl %0";
7094 #define MAY_CLOBBER(RR) \
7095 /* Shorthand used below. */ \
7097 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7098 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7099 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7100 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7104 /* bytes : Length of operand in bytes.
7105 ibyte : Length of integral part in bytes.
7106 fbyte, fbit : Length of fractional part in bytes, bits. */
7109 unsigned fbit
, bytes
, ibyte
, fbyte
;
7110 unsigned regno
, regno_msb
;
7111 } dest
, src
, *val
[2] = { &dest
, &src
};
7116 /* Step 0: Determine information on source and destination operand we
7117 ====== will need in the remainder. */
7119 for (i
= 0; i
< sizeof (val
) / sizeof (*val
); i
++)
7121 enum machine_mode mode
;
7123 xop
[i
] = operands
[i
];
7125 mode
= GET_MODE (xop
[i
]);
7127 val
[i
]->bytes
= GET_MODE_SIZE (mode
);
7128 val
[i
]->regno
= REGNO (xop
[i
]);
7129 val
[i
]->regno_msb
= REGNO (xop
[i
]) + val
[i
]->bytes
- 1;
7131 if (SCALAR_INT_MODE_P (mode
))
7133 val
[i
]->sbit
= intsigned
;
7136 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
7138 val
[i
]->sbit
= SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
7139 val
[i
]->fbit
= GET_MODE_FBIT (mode
);
7142 fatal_insn ("unsupported fixed-point conversion", insn
);
7144 val
[i
]->fbyte
= (1 + val
[i
]->fbit
) / BITS_PER_UNIT
;
7145 val
[i
]->ibyte
= val
[i
]->bytes
- val
[i
]->fbyte
;
7148 // Byte offset of the decimal point taking into account different place
7149 // of the decimal point in input and output and different register numbers
7150 // of input and output.
7151 int offset
= dest
.regno
- src
.regno
+ dest
.fbyte
- src
.fbyte
;
7153 // Number of destination bytes that will come from sign / zero extension.
7154 int sign_bytes
= (dest
.ibyte
- src
.ibyte
) * (dest
.ibyte
> src
.ibyte
);
7156 // Number of bytes at the low end to be filled with zeros.
7157 int zero_bytes
= (dest
.fbyte
- src
.fbyte
) * (dest
.fbyte
> src
.fbyte
);
7159 // Do we have a 16-Bit register that is cleared?
7160 rtx clrw
= NULL_RTX
;
7162 bool sign_extend
= src
.sbit
&& sign_bytes
;
7164 if (0 == dest
.fbit
% 8 && 7 == src
.fbit
% 8)
7166 else if (7 == dest
.fbit
% 8 && 0 == src
.fbit
% 8)
7168 else if (dest
.fbit
% 8 == src
.fbit
% 8)
7173 /* If we need to round the fraction part, we might need to save/round it
7174 before clobbering any of it in Step 1.  Also, we might want to do
7175 the rounding now to make use of LD_REGS. */
7176 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7177 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7178 && !TARGET_FRACT_CONV_TRUNC
)
7182 (offset
? dest
.regno_msb
- sign_bytes
: dest
.regno
+ zero_bytes
- 1)
7183 && dest
.regno
- offset
-1 >= dest
.regno
);
7184 unsigned s0
= dest
.regno
- offset
-1;
7185 bool use_src
= true;
7187 unsigned copied_msb
= src
.regno_msb
;
7188 bool have_carry
= false;
7190 if (src
.ibyte
> dest
.ibyte
)
7191 copied_msb
-= src
.ibyte
- dest
.ibyte
;
7193 for (sn
= s0
; sn
<= copied_msb
; sn
++)
7194 if (!IN_RANGE (sn
, dest
.regno
, dest
.regno_msb
)
7195 && !reg_unused_after (insn
, all_regs_rtx
[sn
]))
7197 if (use_src
&& TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
))
7199 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7200 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7204 if (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], sn
))
7205 avr_asm_len ("cpi %0,1", &all_regs_rtx
[sn
], plen
, 1);
7207 avr_asm_len ("sec" CR_TAB
"cpc %0,__zero_reg__",
7208 &all_regs_rtx
[sn
], plen
, 2);
7212 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7213 avr_asm_len (have_carry
? "sbci %0,128" : "subi %0,129",
7214 &all_regs_rtx
[s0
], plen
, 1);
7215 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
7216 avr_asm_len ("sbci %0,255", &all_regs_rtx
[sn
], plen
, 1);
7217 avr_asm_len ("\n0:", NULL
, plen
, 0);
7218 frac_rounded
= true;
7220 else if (use_src
&& overlap
)
7222 avr_asm_len ("clr __tmp_reg__" CR_TAB
7223 "sbrc %1,0" CR_TAB
"dec __tmp_reg__", xop
, plen
, 1);
7227 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
7231 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
7233 avr_asm_len ("clt" CR_TAB
"bld __tmp_reg__,7" CR_TAB
7234 "adc %0,__tmp_reg__",
7235 &all_regs_rtx
[s0
], plen
, 1);
7237 avr_asm_len ("lsr __tmp_reg" CR_TAB
"add %0,__tmp_reg__",
7238 &all_regs_rtx
[s0
], plen
, 2);
7239 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
7240 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7241 frac_rounded
= true;
7246 = (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
)
7247 && (IN_RANGE (s0
, dest
.regno
, dest
.regno_msb
)
7248 || reg_unused_after (insn
, all_regs_rtx
[s0
])));
7249 xop
[2] = all_regs_rtx
[s0
];
7250 unsigned sn
= src
.regno
;
7251 if (!use_src
|| sn
== s0
)
7252 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
7253 /* We need to consider to-be-discarded bits
7254 if the value is negative. */
7257 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7258 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7259 /* Test to-be-discarded bytes for any nonzero bits.
7260 ??? Could use OR or SBIW to test two registers at once. */
7262 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7264 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7265 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7267 avr_asm_len ("breq 0f" CR_TAB
7268 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7271 avr_asm_len ("breq 0f" CR_TAB
7272 "set" CR_TAB
"bld __tmp_reg__,0\n0:",
7275 lsb_in_tmp_reg
= true;
7279 /* Step 1: Clear bytes at the low end and copy payload bits from source
7280 ====== to destination. */
7282 int step
= offset
< 0 ? 1 : -1;
7283 unsigned d0
= offset
< 0 ? dest
.regno
: dest
.regno_msb
;
7285 // We cleared at least that number of registers.
7288 for (; d0
>= dest
.regno
&& d0
<= dest
.regno_msb
; d0
+= step
)
7290 // Next regno of destination is needed for MOVW
7291 unsigned d1
= d0
+ step
;
7293 // Current and next regno of source
7294 signed s0
= d0
- offset
;
7295 signed s1
= s0
+ step
;
7297 // Must current resp. next regno be CLRed? This applies to the low
7298 // bytes of the destination that have no associated source bytes.
7299 bool clr0
= s0
< (signed) src
.regno
;
7300 bool clr1
= s1
< (signed) src
.regno
&& d1
>= dest
.regno
;
7302 // First gather what code to emit (if any) and additional step to
7303 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7304 // is the source rtx for the current loop iteration.
7305 const char *code
= NULL
;
7310 if (AVR_HAVE_MOVW
&& clr1
&& clrw
)
7312 xop
[2] = all_regs_rtx
[d0
& ~1];
7314 code
= "movw %2,%3";
7319 xop
[2] = all_regs_rtx
[d0
];
7324 && d0
% 2 == (step
> 0))
7326 clrw
= all_regs_rtx
[d0
& ~1];
7330 else if (offset
&& s0
<= (signed) src
.regno_msb
)
7332 int movw
= AVR_HAVE_MOVW
&& offset
% 2 == 0
7333 && d0
% 2 == (offset
> 0)
7334 && d1
<= dest
.regno_msb
&& d1
>= dest
.regno
7335 && s1
<= (signed) src
.regno_msb
&& s1
>= (signed) src
.regno
;
7337 xop
[2] = all_regs_rtx
[d0
& ~movw
];
7338 xop
[3] = all_regs_rtx
[s0
& ~movw
];
7339 code
= movw
? "movw %2,%3" : "mov %2,%3";
7340 stepw
= step
* movw
;
7345 if (sign_extend
&& shift
!= ASHIFT
&& !sign_in_carry
7346 && (d0
== src
.regno_msb
|| d0
+ stepw
== src
.regno_msb
))
7348 /* We are going to override the sign bit. If we sign-extend,
7349 store the sign in the Carry flag. This is not needed if
7350 the remaining operation on the destination is an ASHIFT, because
7351 the ASHIFT will set Carry without extra instruction. */
7353 avr_asm_len ("lsl %0", &all_regs_rtx
[src
.regno_msb
], plen
, 1);
7354 sign_in_carry
= true;
7357 unsigned src_msb
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
7359 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
7360 && src
.ibyte
> dest
.ibyte
7361 && (d0
== src_msb
|| d0
+ stepw
== src_msb
))
7363 /* We are going to override the MSB. If we shift right,
7364 store the MSB in the Carry flag. This is only needed if
7365 we don't sign-extend because with sign-extension the MSB
7366 (the sign) will be produced by the sign extension. */
7368 avr_asm_len ("lsr %0", &all_regs_rtx
[src_msb
], plen
, 1);
7369 msb_in_carry
= true;
7372 unsigned src_lsb
= dest
.regno
- offset
-1;
7374 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
7376 && (d0
== src_lsb
|| d0
+ stepw
== src_lsb
))
7378 /* We are going to override the new LSB; store it into carry. */
7380 avr_asm_len ("lsl %0", &all_regs_rtx
[src_lsb
], plen
, 1);
7381 code_ashift
= "rol %0";
7382 lsb_in_carry
= true;
7385 avr_asm_len (code
, xop
, plen
, 1);
7390 /* Step 2: Shift destination left by 1 bit position. This might be needed
7391 ====== for signed input and unsigned output. */
7393 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
)
7395 unsigned s0
= dest
.regno
- offset
-1;
7397 /* n1169 4.1.4 says:
7398 "Conversions from a fixed-point to an integer type round toward zero."
7399 Hence, converting a fract type to integer only gives a non-zero result
7401 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7402 && SCALAR_FRACT_MODE_P (GET_MODE (xop
[1]))
7403 && !TARGET_FRACT_CONV_TRUNC
)
7405 gcc_assert (s0
== src
.regno_msb
);
7406 /* Check if the input is -1. We do that by checking if negating
7407 the input causes an integer overflow. */
7408 unsigned sn
= src
.regno
;
7409 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
7411 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
7413 /* Overflow goes with set carry. Clear carry otherwise. */
7414 avr_asm_len ("brvs 0f" CR_TAB
"clc\n0:", NULL
, plen
, 2);
7416 /* Likewise, when converting from accumulator types to integer, we
7417 need to round up negative values. */
7418 else if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7419 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7420 && !TARGET_FRACT_CONV_TRUNC
7423 bool have_carry
= false;
7425 xop
[2] = all_regs_rtx
[s0
];
7426 if (!lsb_in_tmp_reg
&& !MAY_CLOBBER (s0
))
7427 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
7428 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7429 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7430 if (!lsb_in_tmp_reg
)
7432 unsigned sn
= src
.regno
;
7435 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
],
7440 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
], plen
, 1);
7441 lsb_in_tmp_reg
= !MAY_CLOBBER (s0
);
7443 /* Add in C and the rounding value 127. */
7444 /* If the destination msb is a sign byte, and in LD_REGS,
7445 grab it as a temporary. */
7447 && TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
],
7450 xop
[3] = all_regs_rtx
[dest
.regno_msb
];
7451 avr_asm_len ("ldi %3,127", xop
, plen
, 1);
7452 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
? "adc __tmp_reg__,%3"
7453 : have_carry
? "adc %2,%3"
7454 : lsb_in_tmp_reg
? "add __tmp_reg__,%3"
7460 /* Fall back to use __zero_reg__ as a temporary. */
7461 avr_asm_len ("dec __zero_reg__", NULL
, plen
, 1);
7463 avr_asm_len ("clt" CR_TAB
"bld __zero_reg__,7", NULL
, plen
, 2);
7465 avr_asm_len ("lsr __zero_reg__", NULL
, plen
, 1);
7466 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
7467 ? "adc __tmp_reg__,__zero_reg__"
7468 : have_carry
? "adc %2,__zero_reg__"
7469 : lsb_in_tmp_reg
? "add __tmp_reg__,__zero_reg__"
7470 : "add %2,__zero_reg__"),
7472 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL
, plen
, 1);
7474 for (d0
= dest
.regno
+ zero_bytes
;
7475 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
7476 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[d0
], plen
, 1);
7477 avr_asm_len (lsb_in_tmp_reg
7478 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7481 else if (MAY_CLOBBER (s0
))
7482 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
7484 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7485 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7487 code_ashift
= "rol %0";
7488 lsb_in_carry
= true;
7491 if (shift
== ASHIFT
)
7493 for (d0
= dest
.regno
+ zero_bytes
;
7494 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
7496 avr_asm_len (code_ashift
, &all_regs_rtx
[d0
], plen
, 1);
7497 code_ashift
= "rol %0";
7500 lsb_in_carry
= false;
7501 sign_in_carry
= true;
7504 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7505 ======= it in sign-extension below. */
7507 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
7508 && src
.ibyte
> dest
.ibyte
)
7510 unsigned s0
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
7512 if (MAY_CLOBBER (s0
))
7513 avr_asm_len ("lsr %0", &all_regs_rtx
[s0
], plen
, 1);
7515 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7516 "lsr __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7518 msb_in_carry
= true;
7521 /* Step 3: Sign-extend or zero-extend the destination as needed.
7524 if (sign_extend
&& !sign_in_carry
)
7526 unsigned s0
= src
.regno_msb
;
7528 if (MAY_CLOBBER (s0
))
7529 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
7531 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7532 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7534 sign_in_carry
= true;
7537 gcc_assert (sign_in_carry
+ msb_in_carry
+ lsb_in_carry
<= 1);
7539 unsigned copies
= 0;
7540 rtx movw
= sign_extend
? NULL_RTX
: clrw
;
7542 for (d0
= dest
.regno_msb
- sign_bytes
+ 1; d0
<= dest
.regno_msb
; d0
++)
7544 if (AVR_HAVE_MOVW
&& movw
7545 && d0
% 2 == 0 && d0
+ 1 <= dest
.regno_msb
)
7547 xop
[2] = all_regs_rtx
[d0
];
7549 avr_asm_len ("movw %2,%3", xop
, plen
, 1);
7554 avr_asm_len (sign_extend
? "sbc %0,%0" : "clr %0",
7555 &all_regs_rtx
[d0
], plen
, 1);
7557 if (++copies
>= 2 && !movw
&& d0
% 2 == 1)
7558 movw
= all_regs_rtx
[d0
-1];
7563 /* Step 4: Right shift the destination. This might be needed for
7564 ====== conversions from unsigned to signed. */
7566 if (shift
== ASHIFTRT
)
7568 const char *code_ashiftrt
= "lsr %0";
7570 if (sign_extend
|| msb_in_carry
)
7571 code_ashiftrt
= "ror %0";
7573 if (src
.sbit
&& src
.ibyte
== dest
.ibyte
)
7574 code_ashiftrt
= "asr %0";
7576 for (d0
= dest
.regno_msb
- sign_bytes
;
7577 d0
>= dest
.regno
+ zero_bytes
- 1 && d0
>= dest
.regno
; d0
--)
7579 avr_asm_len (code_ashiftrt
, &all_regs_rtx
[d0
], plen
, 1);
7580 code_ashiftrt
= "ror %0";
7590 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
7591 XOP[2] is the rounding point, a CONST_INT. The function prints the
7592 instruction sequence if PLEN = NULL and computes the length in words
7593 of the sequence if PLEN != NULL. Most of this function deals with
7594 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
7597 avr_out_round (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*xop
, int *plen
)
7599 enum machine_mode mode
= GET_MODE (xop
[0]);
7600 enum machine_mode imode
= int_mode_for_mode (mode
);
7601 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
7602 int fbit
= (int) GET_MODE_FBIT (mode
);
7603 double_int i_add
= double_int_zero
.set_bit (fbit
-1 - INTVAL (xop
[2]));
7604 wide_int wi_add
= wi::set_bit_in_zero (fbit
-1 - INTVAL (xop
[2]),
7605 GET_MODE_PRECISION (imode
));
7606 // Lengths of PLUS and AND parts.
7607 int len_add
= 0, *plen_add
= plen
? &len_add
: NULL
;
7608 int len_and
= 0, *plen_and
= plen
? &len_and
: NULL
;
7610 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
7611 // the saturated addition so that we can emit the "rjmp 1f" before the
7614 rtx xadd
= const_fixed_from_double_int (i_add
, mode
);
7615 rtx xpattern
, xsrc
, op
[4];
7617 xsrc
= SIGNED_FIXED_POINT_MODE_P (mode
)
7618 ? gen_rtx_SS_PLUS (mode
, xop
[1], xadd
)
7619 : gen_rtx_US_PLUS (mode
, xop
[1], xadd
);
7620 xpattern
= gen_rtx_SET (VOIDmode
, xop
[0], xsrc
);
7625 avr_out_plus (xpattern
, op
, plen_add
, NULL
, false /* Don't print "0:" */);
7627 avr_asm_len ("rjmp 1f" CR_TAB
7628 "0:", NULL
, plen_add
, 1);
7630 // Keep all bits from RP and higher: ... 2^(-RP)
7631 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
7632 // Rounding point ^^^^^^^
7633 // Added above ^^^^^^^^^
7634 rtx xreg
= simplify_gen_subreg (imode
, xop
[0], mode
, 0);
7635 rtx xmask
= immed_wide_int_const (-wi_add
- wi_add
, imode
);
7637 xpattern
= gen_rtx_SET (VOIDmode
, xreg
, gen_rtx_AND (imode
, xreg
, xmask
));
7642 op
[3] = gen_rtx_SCRATCH (QImode
);
7643 avr_out_bitop (xpattern
, op
, plen_and
);
7644 avr_asm_len ("1:", NULL
, plen
, 0);
7647 *plen
= len_add
+ len_and
;
7653 /* Create RTL split patterns for byte sized rotate expressions. This
7654 produces a series of move instructions and considers overlap situations.
7655 Overlapping non-HImode operands need a scratch register. */
7658 avr_rotate_bytes (rtx operands
[])
7661 enum machine_mode mode
= GET_MODE (operands
[0]);
7662 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
7663 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
7664 int num
= INTVAL (operands
[2]);
7665 rtx scratch
= operands
[3];
7666 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
7667 Word move if no scratch is needed, otherwise use size of scratch. */
7668 enum machine_mode move_mode
= QImode
;
7669 int move_size
, offset
, size
;
7673 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
7676 move_mode
= GET_MODE (scratch
);
7678 /* Force DI rotate to use QI moves since other DI moves are currently split
7679 into QI moves so forward propagation works better. */
7682 /* Make scratch smaller if needed. */
7683 if (SCRATCH
!= GET_CODE (scratch
)
7684 && HImode
== GET_MODE (scratch
)
7685 && QImode
== move_mode
)
7686 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
7688 move_size
= GET_MODE_SIZE (move_mode
);
7689 /* Number of bytes/words to rotate. */
7690 offset
= (num
>> 3) / move_size
;
7691 /* Number of moves needed. */
7692 size
= GET_MODE_SIZE (mode
) / move_size
;
7693 /* Himode byte swap is special case to avoid a scratch register. */
7694 if (mode
== HImode
&& same_reg
)
7696 /* HImode byte swap, using xor. This is as quick as using scratch. */
7698 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
7699 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
7700 if (!rtx_equal_p (dst
, src
))
7702 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
7703 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
7704 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
7709 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7710 /* Create linked list of moves to determine move order. */
7714 } move
[MAX_SIZE
+ 8];
7717 gcc_assert (size
<= MAX_SIZE
);
7718 /* Generate list of subreg moves. */
7719 for (i
= 0; i
< size
; i
++)
7722 int to
= (from
+ offset
) % size
;
7723 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
7724 mode
, from
* move_size
);
7725 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
7726 mode
, to
* move_size
);
7729 /* Mark dependence where a dst of one move is the src of another move.
7730 The first move is a conflict as it must wait until second is
7731 performed. We ignore moves to self - we catch this later. */
7733 for (i
= 0; i
< size
; i
++)
7734 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
7735 for (j
= 0; j
< size
; j
++)
7736 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
7738 /* The dst of move i is the src of move j. */
7745 /* Go through move list and perform non-conflicting moves. As each
7746 non-overlapping move is made, it may remove other conflicts
7747 so the process is repeated until no conflicts remain. */
7752 /* Emit move where dst is not also a src or we have used that
7754 for (i
= 0; i
< size
; i
++)
7755 if (move
[i
].src
!= NULL_RTX
)
7757 if (move
[i
].links
== -1
7758 || move
[move
[i
].links
].src
== NULL_RTX
)
7761 /* Ignore NOP moves to self. */
7762 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
7763 emit_move_insn (move
[i
].dst
, move
[i
].src
);
7765 /* Remove conflict from list. */
7766 move
[i
].src
= NULL_RTX
;
7772 /* Check for deadlock. This is when no moves occurred and we have
7773 at least one blocked move. */
7774 if (moves
== 0 && blocked
!= -1)
7776 /* Need to use scratch register to break deadlock.
7777 Add move to put dst of blocked move into scratch.
7778 When this move occurs, it will break chain deadlock.
7779 The scratch register is substituted for real move. */
7781 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
7783 move
[size
].src
= move
[blocked
].dst
;
7784 move
[size
].dst
= scratch
;
7785 /* Scratch move is never blocked. */
7786 move
[size
].links
= -1;
7787 /* Make sure we have valid link. */
7788 gcc_assert (move
[blocked
].links
!= -1);
7789 /* Replace src of blocking move with scratch reg. */
7790 move
[move
[blocked
].links
].src
= scratch
;
7791 /* Make dependent on scratch move occurring. */
7792 move
[blocked
].links
= size
;
7796 while (blocked
!= -1);
7802 /* Worker function for `ADJUST_INSN_LENGTH'. */
7803 /* Modifies the length assigned to instruction INSN
7804 LEN is the initially computed length of the insn. */
7807 avr_adjust_insn_length (rtx_insn
*insn
, int len
)
7809 rtx
*op
= recog_data
.operand
;
7810 enum attr_adjust_len adjust_len
;
7812 /* Some complex insns don't need length adjustment and therefore
7813 the length need not/must not be adjusted for these insns.
7814 It is easier to state this in an insn attribute "adjust_len" than
7815 to clutter up code here... */
7817 if (JUMP_TABLE_DATA_P (insn
) || recog_memoized (insn
) == -1)
7822 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7824 adjust_len
= get_attr_adjust_len (insn
);
7826 if (adjust_len
== ADJUST_LEN_NO
)
7828 /* Nothing to adjust: The length from attribute "length" is fine.
7829 This is the default. */
7834 /* Extract insn's operands. */
7836 extract_constrain_insn_cached (insn
);
7838 /* Dispatch to right function. */
7842 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
7843 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
7844 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
7846 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
7848 case ADJUST_LEN_PLUS
: avr_out_plus (insn
, op
, &len
); break;
7849 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
7851 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
7852 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
7853 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
7854 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
7855 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
7856 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
7857 case ADJUST_LEN_LPM
: avr_out_lpm (insn
, op
, &len
); break;
7859 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
7860 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
7861 case ADJUST_LEN_ROUND
: avr_out_round (insn
, op
, &len
); break;
7863 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
7864 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
7865 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
7866 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
7867 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
7869 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
7870 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
7871 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
7873 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
7874 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
7875 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
7877 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
7878 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
7879 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
7881 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
7882 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
7883 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
7885 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
7887 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
7896 /* Return nonzero if register REG dead after INSN. */
7899 reg_unused_after (rtx_insn
*insn
, rtx reg
)
7901 return (dead_or_set_p (insn
, reg
)
7902 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
7905 /* Return nonzero if REG is not used after INSN.
7906 We assume REG is a reload reg, and therefore does
7907 not live past labels. It may live past calls or jumps though. */
7910 _reg_unused_after (rtx_insn
*insn
, rtx reg
)
7915 /* If the reg is set by this instruction, then it is safe for our
7916 case. Disregard the case where this is a store to memory, since
7917 we are checking a register used in the store address. */
7918 set
= single_set (insn
);
7919 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
7920 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7923 while ((insn
= NEXT_INSN (insn
)))
7926 code
= GET_CODE (insn
);
7929 /* If this is a label that existed before reload, then the register
7930 if dead here. However, if this is a label added by reorg, then
7931 the register may still be live here. We can't tell the difference,
7932 so we just ignore labels completely. */
7933 if (code
== CODE_LABEL
)
7941 if (code
== JUMP_INSN
)
7944 /* If this is a sequence, we must handle them all at once.
7945 We could have for instance a call that sets the target register,
7946 and an insn in a delay slot that uses the register. In this case,
7947 we must return 0. */
7948 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
7950 rtx_sequence
*seq
= as_a
<rtx_sequence
*> (PATTERN (insn
));
7954 for (i
= 0; i
< seq
->len (); i
++)
7956 rtx_insn
*this_insn
= seq
->insn (i
);
7957 rtx set
= single_set (this_insn
);
7959 if (CALL_P (this_insn
))
7961 else if (JUMP_P (this_insn
))
7963 if (INSN_ANNULLED_BRANCH_P (this_insn
))
7968 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7970 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7972 if (GET_CODE (SET_DEST (set
)) != MEM
)
7978 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
7983 else if (code
== JUMP_INSN
)
7987 if (code
== CALL_INSN
)
7990 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
7991 if (GET_CODE (XEXP (tem
, 0)) == USE
7992 && REG_P (XEXP (XEXP (tem
, 0), 0))
7993 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
7995 if (call_used_regs
[REGNO (reg
)])
7999 set
= single_set (insn
);
8001 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
8003 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
8004 return GET_CODE (SET_DEST (set
)) != MEM
;
8005 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
8012 /* Implement `TARGET_ASM_INTEGER'. */
8013 /* Target hook for assembling integer objects. The AVR version needs
8014 special handling for references to certain labels. */
8017 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
8019 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
8020 && text_segment_operand (x
, VOIDmode
))
8022 fputs ("\t.word\tgs(", asm_out_file
);
8023 output_addr_const (asm_out_file
, x
);
8024 fputs (")\n", asm_out_file
);
8028 else if (GET_MODE (x
) == PSImode
)
8030 /* This needs binutils 2.23+, see PR binutils/13503 */
8032 fputs ("\t.byte\tlo8(", asm_out_file
);
8033 output_addr_const (asm_out_file
, x
);
8034 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8036 fputs ("\t.byte\thi8(", asm_out_file
);
8037 output_addr_const (asm_out_file
, x
);
8038 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8040 fputs ("\t.byte\thh8(", asm_out_file
);
8041 output_addr_const (asm_out_file
, x
);
8042 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8046 else if (CONST_FIXED_P (x
))
8050 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8052 for (n
= 0; n
< size
; n
++)
8054 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
8055 default_assemble_integer (xn
, 1, aligned_p
);
8061 return default_assemble_integer (x
, size
, aligned_p
);
8065 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8066 /* Return value is nonzero if pseudos that have been
8067 assigned to registers of class CLASS would likely be spilled
8068 because registers of CLASS are needed for spill registers. */
8071 avr_class_likely_spilled_p (reg_class_t c
)
8073 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
8077 /* Valid attributes:
8078 progmem - Put data to program memory.
8079 signal - Make a function to be hardware interrupt.
8080 After function prologue interrupts remain disabled.
8081 interrupt - Make a function to be hardware interrupt. Before function
8082 prologue interrupts are enabled by means of SEI.
8083 naked - Don't generate function prologue/epilogue and RET
8086 /* Handle a "progmem" attribute; arguments as in
8087 struct attribute_spec.handler. */
8090 avr_handle_progmem_attribute (tree
*node
, tree name
,
8091 tree args ATTRIBUTE_UNUSED
,
8092 int flags ATTRIBUTE_UNUSED
,
8097 if (TREE_CODE (*node
) == TYPE_DECL
)
8099 /* This is really a decl attribute, not a type attribute,
8100 but try to handle it for GCC 3.0 backwards compatibility. */
8102 tree type
= TREE_TYPE (*node
);
8103 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
8104 tree newtype
= build_type_attribute_variant (type
, attr
);
8106 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
8107 TREE_TYPE (*node
) = newtype
;
8108 *no_add_attrs
= true;
8110 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
8112 *no_add_attrs
= false;
8116 warning (OPT_Wattributes
, "%qE attribute ignored",
8118 *no_add_attrs
= true;
8125 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8126 struct attribute_spec.handler. */
8129 avr_handle_fndecl_attribute (tree
*node
, tree name
,
8130 tree args ATTRIBUTE_UNUSED
,
8131 int flags ATTRIBUTE_UNUSED
,
8134 if (TREE_CODE (*node
) != FUNCTION_DECL
)
8136 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8138 *no_add_attrs
= true;
8145 avr_handle_fntype_attribute (tree
*node
, tree name
,
8146 tree args ATTRIBUTE_UNUSED
,
8147 int flags ATTRIBUTE_UNUSED
,
8150 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
8152 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8154 *no_add_attrs
= true;
8161 avr_handle_addr_attribute (tree
*node
, tree name
, tree args
,
8162 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
8164 bool io_p
= (strncmp (IDENTIFIER_POINTER (name
), "io", 2) == 0);
8165 location_t loc
= DECL_SOURCE_LOCATION (*node
);
8167 if (TREE_CODE (*node
) != VAR_DECL
)
8169 warning_at (loc
, 0, "%qE attribute only applies to variables", name
);
8173 if (args
!= NULL_TREE
)
8175 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
8176 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
8177 tree arg
= TREE_VALUE (args
);
8178 if (TREE_CODE (arg
) != INTEGER_CST
)
8180 warning (0, "%qE attribute allows only an integer constant argument",
8185 && (!tree_fits_shwi_p (arg
)
8186 || !(strcmp (IDENTIFIER_POINTER (name
), "io_low") == 0
8187 ? low_io_address_operand
: io_address_operand
)
8188 (GEN_INT (TREE_INT_CST_LOW (arg
)), QImode
)))
8190 warning_at (loc
, 0, "%qE attribute address out of range", name
);
8195 tree attribs
= DECL_ATTRIBUTES (*node
);
8196 const char *names
[] = { "io", "io_low", "address", NULL
} ;
8197 for (const char **p
= names
; *p
; p
++)
8199 tree other
= lookup_attribute (*p
, attribs
);
8200 if (other
&& TREE_VALUE (other
))
8203 "both %s and %qE attribute provide address",
8212 if (*no_add
== false && io_p
&& !TREE_THIS_VOLATILE (*node
))
8213 warning_at (loc
, 0, "%qE attribute on non-volatile variable", name
);
8219 avr_eval_addr_attrib (rtx x
)
8221 if (GET_CODE (x
) == SYMBOL_REF
8222 && (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_ADDRESS
))
8224 tree decl
= SYMBOL_REF_DECL (x
);
8225 tree attr
= NULL_TREE
;
8227 if (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_IO
)
8229 attr
= lookup_attribute ("io", DECL_ATTRIBUTES (decl
));
8232 if (!attr
|| !TREE_VALUE (attr
))
8233 attr
= lookup_attribute ("address", DECL_ATTRIBUTES (decl
));
8234 gcc_assert (attr
&& TREE_VALUE (attr
) && TREE_VALUE (TREE_VALUE (attr
)));
8235 return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
))));
8241 /* AVR attributes. */
8242 static const struct attribute_spec
8243 avr_attribute_table
[] =
8245 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8246 affects_type_identity } */
8247 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
8249 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
8251 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
8253 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8255 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8257 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8259 { "io", 0, 1, false, false, false, avr_handle_addr_attribute
,
8261 { "io_low", 0, 1, false, false, false, avr_handle_addr_attribute
,
8263 { "address", 1, 1, false, false, false, avr_handle_addr_attribute
,
8265 { NULL
, 0, 0, false, false, false, NULL
, false }
8269 /* Look if DECL shall be placed in program memory space by
8270 means of attribute `progmem' or some address-space qualifier.
8271 Return non-zero if DECL is data that must end up in Flash and
8272 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8274 Return 2 if DECL is located in 24-bit flash address-space
8275 Return 1 if DECL is located in 16-bit flash address-space
8276 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8277 Return 0 otherwise */
8280 avr_progmem_p (tree decl
, tree attributes
)
8284 if (TREE_CODE (decl
) != VAR_DECL
)
8287 if (avr_decl_memx_p (decl
))
8290 if (avr_decl_flash_p (decl
))
8294 != lookup_attribute ("progmem", attributes
))
8301 while (TREE_CODE (a
) == ARRAY_TYPE
);
8303 if (a
== error_mark_node
)
8306 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
8313 /* Scan type TYP for pointer references to address space ASn.
8314 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8315 the AS are also declared to be CONST.
8316 Otherwise, return the respective address space, i.e. a value != 0. */
8319 avr_nonconst_pointer_addrspace (tree typ
)
8321 while (ARRAY_TYPE
== TREE_CODE (typ
))
8322 typ
= TREE_TYPE (typ
);
8324 if (POINTER_TYPE_P (typ
))
8327 tree target
= TREE_TYPE (typ
);
8329 /* Pointer to function: Test the function's return type. */
8331 if (FUNCTION_TYPE
== TREE_CODE (target
))
8332 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
8334 /* "Ordinary" pointers... */
8336 while (TREE_CODE (target
) == ARRAY_TYPE
)
8337 target
= TREE_TYPE (target
);
8339 /* Pointers to non-generic address space must be const.
8340 Refuse address spaces outside the device's flash. */
8342 as
= TYPE_ADDR_SPACE (target
);
8344 if (!ADDR_SPACE_GENERIC_P (as
)
8345 && (!TYPE_READONLY (target
)
8346 || avr_addrspace
[as
].segment
>= avr_n_flash
))
8351 /* Scan pointer's target type. */
8353 return avr_nonconst_pointer_addrspace (target
);
8356 return ADDR_SPACE_GENERIC
;
8360 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8361 go along with CONST qualifier. Writing to these address spaces should
8362 be detected and complained about as early as possible. */
8365 avr_pgm_check_var_decl (tree node
)
8367 const char *reason
= NULL
;
8369 addr_space_t as
= ADDR_SPACE_GENERIC
;
8371 gcc_assert (as
== 0);
8373 if (avr_log
.progmem
)
8374 avr_edump ("%?: %t\n", node
);
8376 switch (TREE_CODE (node
))
8382 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8383 reason
= "variable";
8387 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8388 reason
= "function parameter";
8392 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8393 reason
= "structure field";
8397 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
8399 reason
= "return type of function";
8403 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
8410 if (avr_addrspace
[as
].segment
>= avr_n_flash
)
8413 error ("%qT uses address space %qs beyond flash of %qs",
8414 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
8416 error ("%s %q+D uses address space %qs beyond flash of %qs",
8417 reason
, node
, avr_addrspace
[as
].name
,
8418 avr_current_device
->name
);
8423 error ("pointer targeting address space %qs must be const in %qT",
8424 avr_addrspace
[as
].name
, node
);
8426 error ("pointer targeting address space %qs must be const"
8428 avr_addrspace
[as
].name
, reason
, node
);
8432 return reason
== NULL
;
8436 /* Add the section attribute if the variable is in progmem. */
8439 avr_insert_attributes (tree node
, tree
*attributes
)
8441 avr_pgm_check_var_decl (node
);
8443 if (TREE_CODE (node
) == VAR_DECL
8444 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
8445 && avr_progmem_p (node
, *attributes
))
8450 /* For C++, we have to peel arrays in order to get correct
8451 determination of readonlyness. */
8454 node0
= TREE_TYPE (node0
);
8455 while (TREE_CODE (node0
) == ARRAY_TYPE
);
8457 if (error_mark_node
== node0
)
8460 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
8462 if (avr_addrspace
[as
].segment
>= avr_n_flash
)
8464 error ("variable %q+D located in address space %qs"
8465 " beyond flash of %qs",
8466 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
8469 if (!TYPE_READONLY (node0
)
8470 && !TREE_READONLY (node
))
8472 const char *reason
= "__attribute__((progmem))";
8474 if (!ADDR_SPACE_GENERIC_P (as
))
8475 reason
= avr_addrspace
[as
].name
;
8477 if (avr_log
.progmem
)
8478 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
8480 error ("variable %q+D must be const in order to be put into"
8481 " read-only section by means of %qs", node
, reason
);
8487 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8488 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8489 /* Track need of __do_clear_bss. */
8492 avr_asm_output_aligned_decl_common (FILE * stream
,
8495 unsigned HOST_WIDE_INT size
,
8496 unsigned int align
, bool local_p
)
8498 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
8501 if (mem
!= NULL_RTX
&& MEM_P (mem
)
8502 && GET_CODE ((symbol
= XEXP (mem
, 0))) == SYMBOL_REF
8503 && (SYMBOL_REF_FLAGS (symbol
) & (SYMBOL_FLAG_IO
| SYMBOL_FLAG_ADDRESS
)))
8508 fprintf (stream
, "\t.globl\t");
8509 assemble_name (stream
, name
);
8510 fprintf (stream
, "\n");
8512 if (SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_ADDRESS
)
8514 assemble_name (stream
, name
);
8515 fprintf (stream
, " = %ld\n",
8516 (long) INTVAL (avr_eval_addr_attrib (symbol
)));
8519 error_at (DECL_SOURCE_LOCATION (decl
),
8520 "static IO declaration for %q+D needs an address", decl
);
8524 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8525 There is no need to trigger __do_clear_bss code for them. */
8527 if (!STR_PREFIX_P (name
, "__gnu_lto"))
8528 avr_need_clear_bss_p
= true;
8531 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
8533 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
8537 avr_asm_asm_output_aligned_bss (FILE *file
, tree decl
, const char *name
,
8538 unsigned HOST_WIDE_INT size
, int align
,
8539 void (*default_func
)
8540 (FILE *, tree
, const char *,
8541 unsigned HOST_WIDE_INT
, int))
8543 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
8546 if (mem
!= NULL_RTX
&& MEM_P (mem
)
8547 && GET_CODE ((symbol
= XEXP (mem
, 0))) == SYMBOL_REF
8548 && (SYMBOL_REF_FLAGS (symbol
) & (SYMBOL_FLAG_IO
| SYMBOL_FLAG_ADDRESS
)))
8550 if (!(SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_ADDRESS
))
8551 error_at (DECL_SOURCE_LOCATION (decl
),
8552 "IO definition for %q+D needs an address", decl
);
8553 avr_asm_output_aligned_decl_common (file
, decl
, name
, size
, align
, false);
8556 default_func (file
, decl
, name
, size
, align
);
8560 /* Unnamed section callback for data_section
8561 to track need of __do_copy_data. */
8564 avr_output_data_section_asm_op (const void *data
)
8566 avr_need_copy_data_p
= true;
8568 /* Dispatch to default. */
8569 output_section_asm_op (data
);
8573 /* Unnamed section callback for bss_section
8574 to track need of __do_clear_bss. */
8577 avr_output_bss_section_asm_op (const void *data
)
8579 avr_need_clear_bss_p
= true;
8581 /* Dispatch to default. */
8582 output_section_asm_op (data
);
8586 /* Unnamed section callback for progmem*.data sections. */
8589 avr_output_progmem_section_asm_op (const void *data
)
8591 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
8592 (const char*) data
);
8596 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8599 avr_asm_init_sections (void)
8601 /* Set up a section for jump tables. Alignment is handled by
8602 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8604 if (AVR_HAVE_JMP_CALL
)
8606 progmem_swtable_section
8607 = get_unnamed_section (0, output_section_asm_op
,
8608 "\t.section\t.progmem.gcc_sw_table"
8609 ",\"a\",@progbits");
8613 progmem_swtable_section
8614 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
8615 "\t.section\t.progmem.gcc_sw_table"
8616 ",\"ax\",@progbits");
8619 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8620 resp. `avr_need_copy_data_p'. */
8622 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
8623 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
8624 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
8628 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8631 avr_asm_function_rodata_section (tree decl
)
8633 /* If a function is unused and optimized out by -ffunction-sections
8634 and --gc-sections, ensure that the same will happen for its jump
8635 tables by putting them into individual sections. */
8640 /* Get the frodata section from the default function in varasm.c
8641 but treat function-associated data-like jump tables as code
8642 rather than as user defined data. AVR has no constant pools. */
8644 int fdata
= flag_data_sections
;
8646 flag_data_sections
= flag_function_sections
;
8647 frodata
= default_function_rodata_section (decl
);
8648 flag_data_sections
= fdata
;
8649 flags
= frodata
->common
.flags
;
8652 if (frodata
!= readonly_data_section
8653 && flags
& SECTION_NAMED
)
8655 /* Adjust section flags and replace section name prefix. */
8659 static const char* const prefix
[] =
8661 ".rodata", ".progmem.gcc_sw_table",
8662 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8665 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
8667 const char * old_prefix
= prefix
[i
];
8668 const char * new_prefix
= prefix
[i
+1];
8669 const char * name
= frodata
->named
.name
;
8671 if (STR_PREFIX_P (name
, old_prefix
))
8673 const char *rname
= ACONCAT ((new_prefix
,
8674 name
+ strlen (old_prefix
), NULL
));
8675 flags
&= ~SECTION_CODE
;
8676 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
8678 return get_section (rname
, flags
, frodata
->named
.decl
);
8683 return progmem_swtable_section
;
8687 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8688 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8691 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
8693 if (flags
& AVR_SECTION_PROGMEM
)
8695 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
8696 const char *old_prefix
= ".rodata";
8697 const char *new_prefix
= avr_addrspace
[as
].section_name
;
8699 if (STR_PREFIX_P (name
, old_prefix
))
8701 const char *sname
= ACONCAT ((new_prefix
,
8702 name
+ strlen (old_prefix
), NULL
));
8703 default_elf_asm_named_section (sname
, flags
, decl
);
8707 default_elf_asm_named_section (new_prefix
, flags
, decl
);
8711 if (!avr_need_copy_data_p
)
8712 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
8713 || STR_PREFIX_P (name
, ".rodata")
8714 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
8716 if (!avr_need_clear_bss_p
)
8717 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
8719 default_elf_asm_named_section (name
, flags
, decl
);
8723 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8726 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
8728 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
8730 if (STR_PREFIX_P (name
, ".noinit"))
8732 if (decl
&& TREE_CODE (decl
) == VAR_DECL
8733 && DECL_INITIAL (decl
) == NULL_TREE
)
8734 flags
|= SECTION_BSS
; /* @nobits */
8736 warning (0, "only uninitialized variables can be placed in the "
8740 if (decl
&& DECL_P (decl
)
8741 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8743 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8745 /* Attribute progmem puts data in generic address space.
8746 Set section flags as if it was in __flash to get the right
8747 section prefix in the remainder. */
8749 if (ADDR_SPACE_GENERIC_P (as
))
8750 as
= ADDR_SPACE_FLASH
;
8752 flags
|= as
* SECTION_MACH_DEP
;
8753 flags
&= ~SECTION_WRITE
;
8754 flags
&= ~SECTION_BSS
;
8761 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8764 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
8766 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8767 readily available, see PR34734. So we postpone the warning
8768 about uninitialized data in program memory section until here. */
8771 && decl
&& DECL_P (decl
)
8772 && NULL_TREE
== DECL_INITIAL (decl
)
8773 && !DECL_EXTERNAL (decl
)
8774 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8776 warning (OPT_Wuninitialized
,
8777 "uninitialized variable %q+D put into "
8778 "program memory area", decl
);
8781 default_encode_section_info (decl
, rtl
, new_decl_p
);
8783 if (decl
&& DECL_P (decl
)
8784 && TREE_CODE (decl
) != FUNCTION_DECL
8786 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
8788 rtx sym
= XEXP (rtl
, 0);
8789 tree type
= TREE_TYPE (decl
);
8790 tree attr
= DECL_ATTRIBUTES (decl
);
8791 if (type
== error_mark_node
)
8794 addr_space_t as
= TYPE_ADDR_SPACE (type
);
8796 /* PSTR strings are in generic space but located in flash:
8797 patch address space. */
8799 if (-1 == avr_progmem_p (decl
, attr
))
8800 as
= ADDR_SPACE_FLASH
;
8802 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
8804 tree io_low_attr
= lookup_attribute ("io_low", attr
);
8805 tree io_attr
= lookup_attribute ("io", attr
);
8808 && TREE_VALUE (io_low_attr
) && TREE_VALUE (TREE_VALUE (io_low_attr
)))
8809 addr_attr
= io_attr
;
8811 && TREE_VALUE (io_attr
) && TREE_VALUE (TREE_VALUE (io_attr
)))
8812 addr_attr
= io_attr
;
8814 addr_attr
= lookup_attribute ("address", attr
);
8816 || (io_attr
&& addr_attr
&&
8817 low_io_address_operand (GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (addr_attr
)))), QImode
)))
8818 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_IO_LOW
;
8819 if (io_attr
|| io_low_attr
)
8820 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_IO
;
8821 /* If we have an (io) address attribute specification, but the variable
8822 is external, treat the address as only a tentative definition
8823 to be used to determine if an io port is in the lower range, but
8824 don't use the exact value for constant propagation. */
8825 if (addr_attr
&& !DECL_EXTERNAL (decl
))
8826 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_ADDRESS
;
8831 /* Implement `TARGET_ASM_SELECT_SECTION' */
8834 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
8836 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
8838 if (decl
&& DECL_P (decl
)
8839 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8841 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8843 /* __progmem__ goes in generic space but shall be allocated to
8846 if (ADDR_SPACE_GENERIC_P (as
))
8847 as
= ADDR_SPACE_FLASH
;
8849 if (sect
->common
.flags
& SECTION_NAMED
)
8851 const char * name
= sect
->named
.name
;
8852 const char * old_prefix
= ".rodata";
8853 const char * new_prefix
= avr_addrspace
[as
].section_name
;
8855 if (STR_PREFIX_P (name
, old_prefix
))
8857 const char *sname
= ACONCAT ((new_prefix
,
8858 name
+ strlen (old_prefix
), NULL
));
8859 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
8863 if (!progmem_section
[as
])
8866 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
8867 avr_addrspace
[as
].section_name
);
8870 return progmem_section
[as
];
8876 /* Implement `TARGET_ASM_FILE_START'. */
8877 /* Outputs some text at the start of each assembler file. */
8880 avr_file_start (void)
8882 int sfr_offset
= avr_current_arch
->sfr_offset
;
8884 if (avr_current_arch
->asm_only
)
8885 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
8887 default_file_start ();
8889 /* Print I/O addresses of some SFRs used with IN and OUT. */
8892 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
8894 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
8895 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
8897 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
8899 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
8901 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
8903 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
8905 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
8906 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
8907 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
8911 /* Implement `TARGET_ASM_FILE_END'. */
8912 /* Outputs to the stdio stream FILE some
8913 appropriate text to go at the end of an assembler file. */
8918 /* Output these only if there is anything in the
8919 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8920 input section(s) - some code size can be saved by not
8921 linking in the initialization code from libgcc if resp.
8922 sections are empty, see PR18145. */
8924 if (avr_need_copy_data_p
)
8925 fputs (".global __do_copy_data\n", asm_out_file
);
8927 if (avr_need_clear_bss_p
)
8928 fputs (".global __do_clear_bss\n", asm_out_file
);
8932 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8933 /* Choose the order in which to allocate hard registers for
8934 pseudo-registers local to a basic block.
8936 Store the desired register order in the array `reg_alloc_order'.
8937 Element 0 should be the register to allocate first; element 1, the
8938 next register; and so on. */
/* NOTE(review): this dump is line-sampled — the full contents of the
   order_0/order_1/order_2 arrays (and the closing expression of the
   conditional selecting among them, presumably `: order_0);`) are not
   visible here; do not reconstruct the arrays without the original file.  */
8941 avr_adjust_reg_alloc_order (void)
/* Three alternative allocation orders; which one is used depends on the
   -morder1 / -morder2 target options (TARGET_ORDER_1 / TARGET_ORDER_2).  */
8944 static const int order_0
[] =
8947 18, 19, 20, 21, 22, 23,
8950 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8954 static const int order_1
[] =
8956 18, 19, 20, 21, 22, 23, 24, 25,
8959 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8963 static const int order_2
[] =
8965 25, 24, 23, 22, 21, 20, 19, 18,
8968 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8973 const int *order
= (TARGET_ORDER_1
? order_1
:
8974 TARGET_ORDER_2
? order_2
:
/* Copy the chosen order into the global reg_alloc_order array.  */
8976 for (i
= 0; i
< ARRAY_SIZE (order_0
); ++i
)
8977 reg_alloc_order
[i
] = order
[i
];
8981 /* Implement `TARGET_REGISTER_MOVE_COST' */
8984 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
8985 reg_class_t from
, reg_class_t to
)
8987 return (from
== STACK_REG
? 6
8988 : to
== STACK_REG
? 12
8993 /* Implement `TARGET_MEMORY_MOVE_COST' */
8996 avr_memory_move_cost (enum machine_mode mode
,
8997 reg_class_t rclass ATTRIBUTE_UNUSED
,
8998 bool in ATTRIBUTE_UNUSED
)
9000 return (mode
== QImode
? 2
9001 : mode
== HImode
? 4
9002 : mode
== SImode
? 8
9003 : mode
== SFmode
? 8
9008 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
9009 cost of an RTX operand given its context. X is the rtx of the
9010 operand, MODE is its mode, and OUTER is the rtx_code of this
9011 operand's parent operator. */
/* NOTE(review): this dump is line-sampled — the switch over GET_CODE (x)
   (REG/SUBREG/constant arms) and the final return of the accumulated
   total are missing here; only the constant-cost return and the
   recursive avr_rtx_costs call survive.  Consult the original file
   before editing.  */
9014 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
9015 int opno
, bool speed
)
9017 enum rtx_code code
= GET_CODE (x
);
/* Constants cost one insn per byte of the mode.  */
9029 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
/* Anything else: recurse into the full cost computation.  */
9036 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
9040 /* Worker function for AVR backend's rtx_cost function.
9041 X is rtx expression whose cost is to be calculated.
9042 Return true if the complete cost has been computed.
9043 Return false if subexpressions should be scanned.
9044 In either case, *TOTAL contains the cost result. */
9047 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
9048 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
9050 enum rtx_code code
= (enum rtx_code
) codearg
;
9051 enum machine_mode mode
= GET_MODE (x
);
9062 /* Immediate constants are as cheap as registers. */
9067 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9075 *total
= COSTS_N_INSNS (1);
9081 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
9087 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9095 *total
= COSTS_N_INSNS (1);
9101 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9105 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9106 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9110 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
9111 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
9112 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9116 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
9117 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
9118 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9126 && MULT
== GET_CODE (XEXP (x
, 0))
9127 && register_operand (XEXP (x
, 1), QImode
))
9130 *total
= COSTS_N_INSNS (speed
? 4 : 3);
9131 /* multiply-add with constant: will be split and load constant. */
9132 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9133 *total
= COSTS_N_INSNS (1) + *total
;
9136 *total
= COSTS_N_INSNS (1);
9137 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9138 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9143 && (MULT
== GET_CODE (XEXP (x
, 0))
9144 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
9145 && register_operand (XEXP (x
, 1), HImode
)
9146 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
9147 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
9150 *total
= COSTS_N_INSNS (speed
? 5 : 4);
9151 /* multiply-add with constant: will be split and load constant. */
9152 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9153 *total
= COSTS_N_INSNS (1) + *total
;
9156 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9158 *total
= COSTS_N_INSNS (2);
9159 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9162 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
9163 *total
= COSTS_N_INSNS (1);
9165 *total
= COSTS_N_INSNS (2);
9169 if (!CONST_INT_P (XEXP (x
, 1)))
9171 *total
= COSTS_N_INSNS (3);
9172 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9175 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
9176 *total
= COSTS_N_INSNS (2);
9178 *total
= COSTS_N_INSNS (3);
9182 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9184 *total
= COSTS_N_INSNS (4);
9185 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9188 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
9189 *total
= COSTS_N_INSNS (1);
9191 *total
= COSTS_N_INSNS (4);
9197 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9203 && register_operand (XEXP (x
, 0), QImode
)
9204 && MULT
== GET_CODE (XEXP (x
, 1)))
9207 *total
= COSTS_N_INSNS (speed
? 4 : 3);
9208 /* multiply-sub with constant: will be split and load constant. */
9209 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
9210 *total
= COSTS_N_INSNS (1) + *total
;
9215 && register_operand (XEXP (x
, 0), HImode
)
9216 && (MULT
== GET_CODE (XEXP (x
, 1))
9217 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
9218 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
9219 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
9222 *total
= COSTS_N_INSNS (speed
? 5 : 4);
9223 /* multiply-sub with constant: will be split and load constant. */
9224 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
9225 *total
= COSTS_N_INSNS (1) + *total
;
9231 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9232 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9233 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9234 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9238 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9239 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9240 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9248 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
9250 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9258 rtx op0
= XEXP (x
, 0);
9259 rtx op1
= XEXP (x
, 1);
9260 enum rtx_code code0
= GET_CODE (op0
);
9261 enum rtx_code code1
= GET_CODE (op1
);
9262 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
9263 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
9266 && (u8_operand (op1
, HImode
)
9267 || s8_operand (op1
, HImode
)))
9269 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
9273 && register_operand (op1
, HImode
))
9275 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9278 else if (ex0
|| ex1
)
9280 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
9283 else if (register_operand (op0
, HImode
)
9284 && (u8_operand (op1
, HImode
)
9285 || s8_operand (op1
, HImode
)))
9287 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
9291 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
9294 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9301 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9311 /* Add some additional costs besides CALL like moves etc. */
9313 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
9317 /* Just a rough estimate. Even with -O2 we don't want bulky
9318 code expanded inline. */
9320 *total
= COSTS_N_INSNS (25);
9326 *total
= COSTS_N_INSNS (300);
9328 /* Add some additional costs besides CALL like moves etc. */
9329 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
9337 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9338 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9346 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9348 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
9349 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9350 /* For div/mod with const-int divisor we have at least the cost of
9351 loading the divisor. */
9352 if (CONST_INT_P (XEXP (x
, 1)))
9353 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9354 /* Add some overall penaly for clobbering and moving around registers */
9355 *total
+= COSTS_N_INSNS (2);
9362 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
9363 *total
= COSTS_N_INSNS (1);
9368 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
9369 *total
= COSTS_N_INSNS (3);
9374 if (CONST_INT_P (XEXP (x
, 1)))
9375 switch (INTVAL (XEXP (x
, 1)))
9379 *total
= COSTS_N_INSNS (5);
9382 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
9390 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9397 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9399 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9400 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9405 val
= INTVAL (XEXP (x
, 1));
9407 *total
= COSTS_N_INSNS (3);
9408 else if (val
>= 0 && val
<= 7)
9409 *total
= COSTS_N_INSNS (val
);
9411 *total
= COSTS_N_INSNS (1);
9418 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
9419 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
9420 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
9422 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
9427 if (const1_rtx
== (XEXP (x
, 1))
9428 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
9430 *total
= COSTS_N_INSNS (2);
9434 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9436 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9437 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9441 switch (INTVAL (XEXP (x
, 1)))
9448 *total
= COSTS_N_INSNS (2);
9451 *total
= COSTS_N_INSNS (3);
9457 *total
= COSTS_N_INSNS (4);
9462 *total
= COSTS_N_INSNS (5);
9465 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9468 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9471 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
9474 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9475 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9481 if (!CONST_INT_P (XEXP (x
, 1)))
9483 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9486 switch (INTVAL (XEXP (x
, 1)))
9494 *total
= COSTS_N_INSNS (3);
9497 *total
= COSTS_N_INSNS (5);
9500 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9506 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9508 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9509 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9513 switch (INTVAL (XEXP (x
, 1)))
9519 *total
= COSTS_N_INSNS (3);
9524 *total
= COSTS_N_INSNS (4);
9527 *total
= COSTS_N_INSNS (6);
9530 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9533 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9534 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9542 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9549 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9551 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9552 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9557 val
= INTVAL (XEXP (x
, 1));
9559 *total
= COSTS_N_INSNS (4);
9561 *total
= COSTS_N_INSNS (2);
9562 else if (val
>= 0 && val
<= 7)
9563 *total
= COSTS_N_INSNS (val
);
9565 *total
= COSTS_N_INSNS (1);
9570 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9572 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9573 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9577 switch (INTVAL (XEXP (x
, 1)))
9583 *total
= COSTS_N_INSNS (2);
9586 *total
= COSTS_N_INSNS (3);
9592 *total
= COSTS_N_INSNS (4);
9596 *total
= COSTS_N_INSNS (5);
9599 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9602 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9606 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9609 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9610 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9616 if (!CONST_INT_P (XEXP (x
, 1)))
9618 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9621 switch (INTVAL (XEXP (x
, 1)))
9627 *total
= COSTS_N_INSNS (3);
9631 *total
= COSTS_N_INSNS (5);
9634 *total
= COSTS_N_INSNS (4);
9637 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9643 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9645 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9646 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9650 switch (INTVAL (XEXP (x
, 1)))
9656 *total
= COSTS_N_INSNS (4);
9661 *total
= COSTS_N_INSNS (6);
9664 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9667 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
9670 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9671 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9679 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9686 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9688 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9689 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9694 val
= INTVAL (XEXP (x
, 1));
9696 *total
= COSTS_N_INSNS (3);
9697 else if (val
>= 0 && val
<= 7)
9698 *total
= COSTS_N_INSNS (val
);
9700 *total
= COSTS_N_INSNS (1);
9705 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9707 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9708 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9712 switch (INTVAL (XEXP (x
, 1)))
9719 *total
= COSTS_N_INSNS (2);
9722 *total
= COSTS_N_INSNS (3);
9727 *total
= COSTS_N_INSNS (4);
9731 *total
= COSTS_N_INSNS (5);
9737 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9740 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9744 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9747 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9748 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9754 if (!CONST_INT_P (XEXP (x
, 1)))
9756 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9759 switch (INTVAL (XEXP (x
, 1)))
9767 *total
= COSTS_N_INSNS (3);
9770 *total
= COSTS_N_INSNS (5);
9773 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9779 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9781 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9782 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9786 switch (INTVAL (XEXP (x
, 1)))
9792 *total
= COSTS_N_INSNS (4);
9795 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9800 *total
= COSTS_N_INSNS (4);
9803 *total
= COSTS_N_INSNS (6);
9806 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9807 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9815 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9819 switch (GET_MODE (XEXP (x
, 0)))
9822 *total
= COSTS_N_INSNS (1);
9823 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9824 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9828 *total
= COSTS_N_INSNS (2);
9829 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9830 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9831 else if (INTVAL (XEXP (x
, 1)) != 0)
9832 *total
+= COSTS_N_INSNS (1);
9836 *total
= COSTS_N_INSNS (3);
9837 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
9838 *total
+= COSTS_N_INSNS (2);
9842 *total
= COSTS_N_INSNS (4);
9843 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9844 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9845 else if (INTVAL (XEXP (x
, 1)) != 0)
9846 *total
+= COSTS_N_INSNS (3);
9852 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9857 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
9858 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
9859 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9861 if (QImode
== mode
|| HImode
== mode
)
9863 *total
= COSTS_N_INSNS (2);
9876 /* Implement `TARGET_RTX_COSTS'. */
9879 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
9880 int opno
, int *total
, bool speed
)
9882 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
9883 opno
, total
, speed
);
9885 if (avr_log
.rtx_costs
)
9887 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9888 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
9895 /* Implement `TARGET_ADDRESS_COST'. */
9898 avr_address_cost (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
,
9899 addr_space_t as ATTRIBUTE_UNUSED
,
9900 bool speed ATTRIBUTE_UNUSED
)
9904 if (GET_CODE (x
) == PLUS
9905 && CONST_INT_P (XEXP (x
, 1))
9906 && (REG_P (XEXP (x
, 0))
9907 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
9909 if (INTVAL (XEXP (x
, 1)) >= 61)
9912 else if (CONSTANT_ADDRESS_P (x
))
9915 && io_address_operand (x
, QImode
))
9919 if (avr_log
.address_cost
)
9920 avr_edump ("\n%?: %d = %r\n", cost
, x
);
9925 /* Test for extra memory constraint 'Q'.
9926 It's a memory address based on Y or Z pointer with valid displacement. */
9929 extra_constraint_Q (rtx x
)
9933 if (GET_CODE (XEXP (x
,0)) == PLUS
9934 && REG_P (XEXP (XEXP (x
,0), 0))
9935 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
9936 && (INTVAL (XEXP (XEXP (x
,0), 1))
9937 <= MAX_LD_OFFSET (GET_MODE (x
))))
9939 rtx xx
= XEXP (XEXP (x
,0), 0);
9940 int regno
= REGNO (xx
);
9942 ok
= (/* allocate pseudos */
9943 regno
>= FIRST_PSEUDO_REGISTER
9944 /* strictly check */
9945 || regno
== REG_Z
|| regno
== REG_Y
9946 /* XXX frame & arg pointer checks */
9947 || xx
== frame_pointer_rtx
9948 || xx
== arg_pointer_rtx
);
9950 if (avr_log
.constraints
)
9951 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9952 ok
, reload_completed
, reload_in_progress
, x
);
9958 /* Convert condition code CONDITION to the valid AVR condition code. */
9961 avr_normalize_condition (RTX_CODE condition
)
9978 /* Helper function for `avr_reorg'. */
9981 avr_compare_pattern (rtx_insn
*insn
)
9983 rtx pattern
= single_set (insn
);
9986 && NONJUMP_INSN_P (insn
)
9987 && SET_DEST (pattern
) == cc0_rtx
9988 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
9990 enum machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
9991 enum machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
9993 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9994 They must not be swapped, thus skip them. */
9996 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
9997 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
10004 /* Helper function for `avr_reorg'. */
10006 /* Expansion of switch/case decision trees leads to code like
10008 cc0 = compare (Reg, Num)
10012 cc0 = compare (Reg, Num)
10016 The second comparison is superfluous and can be deleted.
10017 The second jump condition can be transformed from a
10018 "difficult" one to a "simple" one because "cc0 > 0" and
10019 "cc0 >= 0" will have the same effect here.
10021 This function relies on the way switch/case is being expaned
10022 as binary decision tree. For example code see PR 49903.
10024 Return TRUE if optimization performed.
10025 Return FALSE if nothing changed.
10027 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
10029 We don't want to do this in text peephole because it is
10030 tedious to work out jump offsets there and the second comparison
10031 might have been transormed by `avr_reorg'.
10033 RTL peephole won't do because peephole2 does not scan across
10037 avr_reorg_remove_redundant_compare (rtx_insn
*insn1
)
10039 rtx comp1
, ifelse1
, xcond1
;
10041 rtx comp2
, ifelse2
, xcond2
;
10042 rtx_insn
*branch2
, *insn2
;
10043 enum rtx_code code
;
10047 /* Look out for: compare1 - branch1 - compare2 - branch2 */
10049 branch1
= next_nonnote_nondebug_insn (insn1
);
10050 if (!branch1
|| !JUMP_P (branch1
))
10053 insn2
= next_nonnote_nondebug_insn (branch1
);
10054 if (!insn2
|| !avr_compare_pattern (insn2
))
10057 branch2
= next_nonnote_nondebug_insn (insn2
);
10058 if (!branch2
|| !JUMP_P (branch2
))
10061 comp1
= avr_compare_pattern (insn1
);
10062 comp2
= avr_compare_pattern (insn2
);
10063 xcond1
= single_set (branch1
);
10064 xcond2
= single_set (branch2
);
10066 if (!comp1
|| !comp2
10067 || !rtx_equal_p (comp1
, comp2
)
10068 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
10069 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
10070 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
10071 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
10076 comp1
= SET_SRC (comp1
);
10077 ifelse1
= SET_SRC (xcond1
);
10078 ifelse2
= SET_SRC (xcond2
);
10080 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
10082 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
10083 || !REG_P (XEXP (comp1
, 0))
10084 || !CONST_INT_P (XEXP (comp1
, 1))
10085 || XEXP (ifelse1
, 2) != pc_rtx
10086 || XEXP (ifelse2
, 2) != pc_rtx
10087 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
10088 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
10089 || !COMPARISON_P (XEXP (ifelse2
, 0))
10090 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
10091 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
10092 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
10093 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
10098 /* We filtered the insn sequence to look like
10104 (if_then_else (eq (cc0)
10113 (if_then_else (CODE (cc0)
10119 code
= GET_CODE (XEXP (ifelse2
, 0));
10121 /* Map GT/GTU to GE/GEU which is easier for AVR.
10122 The first two instructions compare/branch on EQ
10123 so we may replace the difficult
10125 if (x == VAL) goto L1;
10126 if (x > VAL) goto L2;
10130 if (x == VAL) goto L1;
10131 if (x >= VAL) goto L2;
10133 Similarly, replace LE/LEU by LT/LTU. */
10144 code
= avr_normalize_condition (code
);
10151 /* Wrap the branches into UNSPECs so they won't be changed or
10152 optimized in the remainder. */
10154 target
= XEXP (XEXP (ifelse1
, 1), 0);
10155 cond
= XEXP (ifelse1
, 0);
10156 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
10158 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
10160 target
= XEXP (XEXP (ifelse2
, 1), 0);
10161 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
10162 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
10164 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
10166 /* The comparisons in insn1 and insn2 are exactly the same;
10167 insn2 is superfluous so delete it. */
10169 delete_insn (insn2
);
10170 delete_insn (branch1
);
10171 delete_insn (branch2
);
10177 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
10178 /* Optimize conditional jumps. */
10183 rtx_insn
*insn
= get_insns();
10185 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
10187 rtx pattern
= avr_compare_pattern (insn
);
10193 && avr_reorg_remove_redundant_compare (insn
))
10198 if (compare_diff_p (insn
))
10200 /* Now we work under compare insn with difficult branch. */
10202 rtx next
= next_real_insn (insn
);
10203 rtx pat
= PATTERN (next
);
10205 pattern
= SET_SRC (pattern
);
10207 if (true_regnum (XEXP (pattern
, 0)) >= 0
10208 && true_regnum (XEXP (pattern
, 1)) >= 0)
10210 rtx x
= XEXP (pattern
, 0);
10211 rtx src
= SET_SRC (pat
);
10212 rtx t
= XEXP (src
,0);
10213 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
10214 XEXP (pattern
, 0) = XEXP (pattern
, 1);
10215 XEXP (pattern
, 1) = x
;
10216 INSN_CODE (next
) = -1;
10218 else if (true_regnum (XEXP (pattern
, 0)) >= 0
10219 && XEXP (pattern
, 1) == const0_rtx
)
10221 /* This is a tst insn, we can reverse it. */
10222 rtx src
= SET_SRC (pat
);
10223 rtx t
= XEXP (src
,0);
10225 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
10226 XEXP (pattern
, 1) = XEXP (pattern
, 0);
10227 XEXP (pattern
, 0) = const0_rtx
;
10228 INSN_CODE (next
) = -1;
10229 INSN_CODE (insn
) = -1;
10231 else if (true_regnum (XEXP (pattern
, 0)) >= 0
10232 && CONST_INT_P (XEXP (pattern
, 1)))
10234 rtx x
= XEXP (pattern
, 1);
10235 rtx src
= SET_SRC (pat
);
10236 rtx t
= XEXP (src
,0);
10237 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
10239 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
10241 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
10242 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
10243 INSN_CODE (next
) = -1;
10244 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
10260 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10263 avr_function_value_regno_p (const unsigned int regno
)
10265 return (regno
== avr_ret_register ());
10269 /* Implement `TARGET_LIBCALL_VALUE'. */
10270 /* Create an RTX representing the place where a
10271 library function returns a value of mode MODE. */
10274 avr_libcall_value (enum machine_mode mode
,
10275 const_rtx func ATTRIBUTE_UNUSED
)
10277 int offs
= GET_MODE_SIZE (mode
);
10280 offs
= (offs
+ 1) & ~1;
10282 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
10286 /* Implement `TARGET_FUNCTION_VALUE'. */
10287 /* Create an RTX representing the place where a
10288 function returns a value of data type VALTYPE. */
10291 avr_function_value (const_tree type
,
10292 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
10293 bool outgoing ATTRIBUTE_UNUSED
)
10297 if (TYPE_MODE (type
) != BLKmode
)
10298 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
10300 offs
= int_size_in_bytes (type
);
10303 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
10304 offs
= GET_MODE_SIZE (SImode
);
10305 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
10306 offs
= GET_MODE_SIZE (DImode
);
10308 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
10312 test_hard_reg_class (enum reg_class rclass
, rtx x
)
10314 int regno
= true_regnum (x
);
10318 if (TEST_HARD_REG_CLASS (rclass
, regno
))
10325 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
10326 and thus is suitable to be skipped by CPSE, SBRC, etc. */
10329 avr_2word_insn_p (rtx_insn
*insn
)
10331 if (TARGET_SKIP_BUG
10333 || 2 != get_attr_length (insn
))
10338 switch (INSN_CODE (insn
))
10343 case CODE_FOR_movqi_insn
:
10344 case CODE_FOR_movuqq_insn
:
10345 case CODE_FOR_movqq_insn
:
10347 rtx set
= single_set (insn
);
10348 rtx src
= SET_SRC (set
);
10349 rtx dest
= SET_DEST (set
);
10351 /* Factor out LDS and STS from movqi_insn. */
10354 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
10356 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
10358 else if (REG_P (dest
)
10361 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
10367 case CODE_FOR_call_insn
:
10368 case CODE_FOR_call_value_insn
:
10375 jump_over_one_insn_p (rtx_insn
*insn
, rtx dest
)
10377 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
10380 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
10381 int dest_addr
= INSN_ADDRESSES (uid
);
10382 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
10384 return (jump_offset
== 1
10385 || (jump_offset
== 2
10386 && avr_2word_insn_p (next_active_insn (insn
))));
10390 /* Worker function for `HARD_REGNO_MODE_OK'. */
10391 /* Returns 1 if a value of mode MODE can be stored starting with hard
10392 register number REGNO. On the enhanced core, anything larger than
10393 1 byte must start in even numbered register for "movw" to work
10394 (this way we don't have to check for odd registers everywhere). */
10397 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
10399 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10400 Disallowing QI et al. in these regs might lead to code like
10401 (set (subreg:QI (reg:HI 28) n) ...)
10402 which will result in wrong code because reload does not
10403 handle SUBREGs of hard regsisters like this.
10404 This could be fixed in reload. However, it appears
10405 that fixing reload is not wanted by reload people. */
10407 /* Any GENERAL_REGS register can hold 8-bit values. */
10409 if (GET_MODE_SIZE (mode
) == 1)
10412 /* FIXME: Ideally, the following test is not needed.
10413 However, it turned out that it can reduce the number
10414 of spill fails. AVR and it's poor endowment with
10415 address registers is extreme stress test for reload. */
10417 if (GET_MODE_SIZE (mode
) >= 4
10421 /* All modes larger than 8 bits should start in an even register. */
10423 return !(regno
& 1);
10427 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10430 avr_hard_regno_call_part_clobbered (unsigned regno
, enum machine_mode mode
)
10432 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10433 represent valid hard registers like, e.g. HI:29. Returning TRUE
10434 for such registers can lead to performance degradation as mentioned
10435 in PR53595. Thus, report invalid hard registers as FALSE. */
10437 if (!avr_hard_regno_mode_ok (regno
, mode
))
10440 /* Return true if any of the following boundaries is crossed:
10441 17/18, 27/28 and 29/30. */
10443 return ((regno
< 18 && regno
+ GET_MODE_SIZE (mode
) > 18)
10444 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
10445 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
10449 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10452 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
10453 addr_space_t as
, RTX_CODE outer_code
,
10454 RTX_CODE index_code ATTRIBUTE_UNUSED
)
10456 if (!ADDR_SPACE_GENERIC_P (as
))
10458 return POINTER_Z_REGS
;
10462 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
10464 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
10468 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10471 avr_regno_mode_code_ok_for_base_p (int regno
,
10472 enum machine_mode mode ATTRIBUTE_UNUSED
,
10473 addr_space_t as ATTRIBUTE_UNUSED
,
10474 RTX_CODE outer_code
,
10475 RTX_CODE index_code ATTRIBUTE_UNUSED
)
10479 if (!ADDR_SPACE_GENERIC_P (as
))
10481 if (regno
< FIRST_PSEUDO_REGISTER
10489 regno
= reg_renumber
[regno
];
10491 if (regno
== REG_Z
)
10500 if (regno
< FIRST_PSEUDO_REGISTER
10504 || regno
== ARG_POINTER_REGNUM
))
10508 else if (reg_renumber
)
10510 regno
= reg_renumber
[regno
];
10515 || regno
== ARG_POINTER_REGNUM
)
10522 && PLUS
== outer_code
10532 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10533 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10534 CLOBBER_REG is a QI clobber register or NULL_RTX.
10535 LEN == NULL: output instructions.
10536 LEN != NULL: set *LEN to the length of the instruction sequence
10537 (in words) printed with LEN = NULL.
10538 If CLEAR_P is true, OP[0] had been cleard to Zero already.
10539 If CLEAR_P is false, nothing is known about OP[0].
10541 The effect on cc0 is as follows:
10543 Load 0 to any register except ZERO_REG : NONE
10544 Load ld register with any value : NONE
10545 Anything else: : CLOBBER */
10548 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
10552 rtx xval
, xdest
[4];
10554 int clobber_val
= 1234;
10555 bool cooked_clobber_p
= false;
10556 bool set_p
= false;
10557 enum machine_mode mode
= GET_MODE (dest
);
10558 int n
, n_bytes
= GET_MODE_SIZE (mode
);
10560 gcc_assert (REG_P (dest
)
10561 && CONSTANT_P (src
));
10566 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10567 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10569 if (REGNO (dest
) < 16
10570 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
10572 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
10575 /* We might need a clobber reg but don't have one. Look at the value to
10576 be loaded more closely. A clobber is only needed if it is a symbol
10577 or contains a byte that is neither 0, -1 or a power of 2. */
10579 if (NULL_RTX
== clobber_reg
10580 && !test_hard_reg_class (LD_REGS
, dest
)
10581 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
10582 || !avr_popcount_each_byte (src
, n_bytes
,
10583 (1 << 0) | (1 << 1) | (1 << 8))))
10585 /* We have no clobber register but need one. Cook one up.
10586 That's cheaper than loading from constant pool. */
10588 cooked_clobber_p
= true;
10589 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
10590 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
10593 /* Now start filling DEST from LSB to MSB. */
10595 for (n
= 0; n
< n_bytes
; n
++)
10598 bool done_byte
= false;
10602 /* Crop the n-th destination byte. */
10604 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
10605 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
10607 if (!CONST_INT_P (src
)
10608 && !CONST_FIXED_P (src
)
10609 && !CONST_DOUBLE_P (src
))
10611 static const char* const asm_code
[][2] =
10613 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
10614 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
10615 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
10616 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
10621 xop
[2] = clobber_reg
;
10623 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
10628 /* Crop the n-th source byte. */
10630 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
10631 ival
[n
] = INTVAL (xval
);
10633 /* Look if we can reuse the low word by means of MOVW. */
10639 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
10640 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
10642 if (INTVAL (lo16
) == INTVAL (hi16
))
10644 if (0 != INTVAL (lo16
)
10647 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
10654 /* Don't use CLR so that cc0 is set as expected. */
10659 avr_asm_len (ldreg_p
? "ldi %0,0"
10660 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
10661 : "mov %0,__zero_reg__",
10662 &xdest
[n
], len
, 1);
10666 if (clobber_val
== ival
[n
]
10667 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
10672 /* LD_REGS can use LDI to move a constant value */
10678 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
10682 /* Try to reuse value already loaded in some lower byte. */
10684 for (j
= 0; j
< n
; j
++)
10685 if (ival
[j
] == ival
[n
])
10690 avr_asm_len ("mov %0,%1", xop
, len
, 1);
10698 /* Need no clobber reg for -1: Use CLR/DEC */
10703 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10705 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
10708 else if (1 == ival
[n
])
10711 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10713 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
10717 /* Use T flag or INC to manage powers of 2 if we have
10720 if (NULL_RTX
== clobber_reg
10721 && single_one_operand (xval
, QImode
))
10724 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
10726 gcc_assert (constm1_rtx
!= xop
[1]);
10731 avr_asm_len ("set", xop
, len
, 1);
10735 avr_asm_len ("clr %0", xop
, len
, 1);
10737 avr_asm_len ("bld %0,%1", xop
, len
, 1);
10741 /* We actually need the LD_REGS clobber reg. */
10743 gcc_assert (NULL_RTX
!= clobber_reg
);
10747 xop
[2] = clobber_reg
;
10748 clobber_val
= ival
[n
];
10750 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10751 "mov %0,%2", xop
, len
, 2);
10754 /* If we cooked up a clobber reg above, restore it. */
10756 if (cooked_clobber_p
)
10758 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
10763 /* Reload the constant OP[1] into the HI register OP[0].
10764 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10765 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10766 need a clobber reg or have to cook one up.
10768 PLEN == NULL: Output instructions.
10769 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10770 by the insns printed.
10775 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
10777 output_reload_in_const (op
, clobber_reg
, plen
, false);
10782 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10783 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10784 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10785 need a clobber reg or have to cook one up.
10787 LEN == NULL: Output instructions.
10789 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10790 by the insns printed.
10795 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
10798 && !test_hard_reg_class (LD_REGS
, op
[0])
10799 && (CONST_INT_P (op
[1])
10800 || CONST_FIXED_P (op
[1])
10801 || CONST_DOUBLE_P (op
[1])))
10803 int len_clr
, len_noclr
;
10805 /* In some cases it is better to clear the destination beforehand, e.g.
10807 CLR R2 CLR R3 MOVW R4,R2 INC R2
10811 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10813 We find it too tedious to work that out in the print function.
10814 Instead, we call the print function twice to get the lengths of
10815 both methods and use the shortest one. */
10817 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
10818 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
10820 if (len_noclr
- len_clr
== 4)
10822 /* Default needs 4 CLR instructions: clear register beforehand. */
10824 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10825 "mov %B0,__zero_reg__" CR_TAB
10826 "movw %C0,%A0", &op
[0], len
, 3);
10828 output_reload_in_const (op
, clobber_reg
, len
, true);
10837 /* Default: destination not pre-cleared. */
10839 output_reload_in_const (op
, clobber_reg
, len
, false);
10844 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
10846 output_reload_in_const (op
, clobber_reg
, len
, false);
10851 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10854 avr_output_addr_vec_elt (FILE *stream
, int value
)
10856 if (AVR_HAVE_JMP_CALL
)
10857 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
10859 fprintf (stream
, "\trjmp .L%d\n", value
);
10863 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10864 /* Returns true if SCRATCH are safe to be allocated as a scratch
10865 registers (for a define_peephole2) in the current function. */
10868 avr_hard_regno_scratch_ok (unsigned int regno
)
10870 /* Interrupt functions can only use registers that have already been saved
10871 by the prologue, even if they would normally be call-clobbered. */
10873 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10874 && !df_regs_ever_live_p (regno
))
10877 /* Don't allow hard registers that might be part of the frame pointer.
10878 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10879 and don't care for a frame pointer that spans more than one register. */
10881 if ((!reload_completed
|| frame_pointer_needed
)
10882 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
10891 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10892 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10895 avr_hard_regno_rename_ok (unsigned int old_reg
,
10896 unsigned int new_reg
)
10898 /* Interrupt functions can only use registers that have already been
10899 saved by the prologue, even if they would normally be
10902 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10903 && !df_regs_ever_live_p (new_reg
))
10906 /* Don't allow hard registers that might be part of the frame pointer.
10907 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10908 and don't care for a frame pointer that spans more than one register. */
10910 if ((!reload_completed
|| frame_pointer_needed
)
10911 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
10912 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
10920 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10921 or memory location in the I/O space (QImode only).
10923 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10924 Operand 1: register operand to test, or CONST_INT memory address.
10925 Operand 2: bit number.
10926 Operand 3: label to jump to if the test is true. */
10929 avr_out_sbxx_branch (rtx_insn
*insn
, rtx operands
[])
10931 enum rtx_code comp
= GET_CODE (operands
[0]);
10932 bool long_jump
= get_attr_length (insn
) >= 4;
10933 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
10937 else if (comp
== LT
)
10941 comp
= reverse_condition (comp
);
10943 switch (GET_CODE (operands
[1]))
10952 if (low_io_address_operand (operands
[1], QImode
))
10955 output_asm_insn ("sbis %i1,%2", operands
);
10957 output_asm_insn ("sbic %i1,%2", operands
);
10961 gcc_assert (io_address_operand (operands
[1], QImode
));
10962 output_asm_insn ("in __tmp_reg__,%i1", operands
);
10964 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
10966 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
10969 break; /* CONST_INT */
10974 output_asm_insn ("sbrs %T1%T2", operands
);
10976 output_asm_insn ("sbrc %T1%T2", operands
);
10982 return ("rjmp .+4" CR_TAB
10991 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10994 avr_asm_out_ctor (rtx symbol
, int priority
)
10996 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
10997 default_ctor_section_asm_out_constructor (symbol
, priority
);
11001 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
11004 avr_asm_out_dtor (rtx symbol
, int priority
)
11006 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
11007 default_dtor_section_asm_out_destructor (symbol
, priority
);
11011 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
11014 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
11016 if (TYPE_MODE (type
) == BLKmode
)
11018 HOST_WIDE_INT size
= int_size_in_bytes (type
);
11019 return (size
== -1 || size
> 8);
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
11042 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
11044 static enum machine_mode
11045 avr_addr_space_address_mode (addr_space_t as
)
11047 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
11051 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
11053 static enum machine_mode
11054 avr_addr_space_pointer_mode (addr_space_t as
)
11056 return avr_addr_space_address_mode (as
);
11060 /* Helper for following function. */
11063 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
11065 gcc_assert (REG_P (reg
));
11069 return REGNO (reg
) == REG_Z
;
11072 /* Avoid combine to propagate hard regs. */
11074 if (can_create_pseudo_p()
11075 && REGNO (reg
) < REG_Z
)
11084 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
11087 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
11088 bool strict
, addr_space_t as
)
11097 case ADDR_SPACE_GENERIC
:
11098 return avr_legitimate_address_p (mode
, x
, strict
);
11100 case ADDR_SPACE_FLASH
:
11101 case ADDR_SPACE_FLASH1
:
11102 case ADDR_SPACE_FLASH2
:
11103 case ADDR_SPACE_FLASH3
:
11104 case ADDR_SPACE_FLASH4
:
11105 case ADDR_SPACE_FLASH5
:
11107 switch (GET_CODE (x
))
11110 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
11114 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
11123 case ADDR_SPACE_MEMX
:
11126 && can_create_pseudo_p());
11128 if (LO_SUM
== GET_CODE (x
))
11130 rtx hi
= XEXP (x
, 0);
11131 rtx lo
= XEXP (x
, 1);
11134 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
11136 && REGNO (lo
) == REG_Z
);
11142 if (avr_log
.legitimate_address_p
)
11144 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
11145 "reload_completed=%d reload_in_progress=%d %s:",
11146 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
11147 reg_renumber
? "(reg_renumber)" : "");
11149 if (GET_CODE (x
) == PLUS
11150 && REG_P (XEXP (x
, 0))
11151 && CONST_INT_P (XEXP (x
, 1))
11152 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
11155 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
11156 true_regnum (XEXP (x
, 0)));
11159 avr_edump ("\n%r\n", x
);
11166 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
11169 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
11170 enum machine_mode mode
, addr_space_t as
)
11172 if (ADDR_SPACE_GENERIC_P (as
))
11173 return avr_legitimize_address (x
, old_x
, mode
);
11175 if (avr_log
.legitimize_address
)
11177 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
11184 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
11187 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
11189 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
11190 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
11192 if (avr_log
.progmem
)
11193 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
11194 src
, type_from
, type_to
);
11196 /* Up-casting from 16-bit to 24-bit pointer. */
11198 if (as_from
!= ADDR_SPACE_MEMX
11199 && as_to
== ADDR_SPACE_MEMX
)
11203 rtx reg
= gen_reg_rtx (PSImode
);
11205 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
11206 sym
= XEXP (sym
, 0);
11208 /* Look at symbol flags: avr_encode_section_info set the flags
11209 also if attribute progmem was seen so that we get the right
11210 promotion for, e.g. PSTR-like strings that reside in generic space
11211 but are located in flash. In that case we patch the incoming
11214 if (SYMBOL_REF
== GET_CODE (sym
)
11215 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
11217 as_from
= ADDR_SPACE_FLASH
;
11220 /* Linearize memory: RAM has bit 23 set. */
11222 msb
= ADDR_SPACE_GENERIC_P (as_from
)
11224 : avr_addrspace
[as_from
].segment
;
11226 src
= force_reg (Pmode
, src
);
11228 emit_insn (msb
== 0
11229 ? gen_zero_extendhipsi2 (reg
, src
)
11230 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
11235 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
11237 if (as_from
== ADDR_SPACE_MEMX
11238 && as_to
!= ADDR_SPACE_MEMX
)
11240 rtx new_src
= gen_reg_rtx (Pmode
);
11242 src
= force_reg (PSImode
, src
);
11244 emit_move_insn (new_src
,
11245 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
11253 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11256 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
11257 addr_space_t superset ATTRIBUTE_UNUSED
)
11259 /* Allow any kind of pointer mess. */
11265 /* Implement `TARGET_CONVERT_TO_TYPE'. */
11268 avr_convert_to_type (tree type
, tree expr
)
11270 /* Print a diagnose for pointer conversion that changes the address
11271 space of the pointer target to a non-enclosing address space,
11272 provided -Waddr-space-convert is on.
11274 FIXME: Filter out cases where the target object is known to
11275 be located in the right memory, like in
11277 (const __flash*) PSTR ("text")
11279 Also try to distinguish between explicit casts requested by
11280 the user and implicit casts like
11282 void f (const __flash char*);
11284 void g (const char *p)
11286 f ((const __flash*) p);
11289 under the assumption that an explicit casts means that the user
11290 knows what he is doing, e.g. interface with PSTR or old style
11291 code with progmem and pgm_read_xxx.
11294 if (avr_warn_addr_space_convert
11295 && expr
!= error_mark_node
11296 && POINTER_TYPE_P (type
)
11297 && POINTER_TYPE_P (TREE_TYPE (expr
)))
11299 addr_space_t as_old
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr
)));
11300 addr_space_t as_new
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
11302 if (avr_log
.progmem
)
11303 avr_edump ("%?: type = %t\nexpr = %t\n\n", type
, expr
);
11305 if (as_new
!= ADDR_SPACE_MEMX
11306 && as_new
!= as_old
)
11308 location_t loc
= EXPR_LOCATION (expr
);
11309 const char *name_old
= avr_addrspace
[as_old
].name
;
11310 const char *name_new
= avr_addrspace
[as_new
].name
;
11312 warning (OPT_Waddr_space_convert
,
11313 "conversion from address space %qs to address space %qs",
11314 ADDR_SPACE_GENERIC_P (as_old
) ? "generic" : name_old
,
11315 ADDR_SPACE_GENERIC_P (as_new
) ? "generic" : name_new
);
11317 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, expr
);
11325 /* Worker function for movmemhi expander.
11326 XOP[0] Destination as MEM:BLK
11328 XOP[2] # Bytes to copy
11330 Return TRUE if the expansion is accomplished.
11331 Return FALSE if the operand compination is not supported. */
11334 avr_emit_movmemhi (rtx
*xop
)
11336 HOST_WIDE_INT count
;
11337 enum machine_mode loop_mode
;
11338 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
11339 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
11340 rtx a_hi8
= NULL_RTX
;
11342 if (avr_mem_flash_p (xop
[0]))
11345 if (!CONST_INT_P (xop
[2]))
11348 count
= INTVAL (xop
[2]);
11352 a_src
= XEXP (xop
[1], 0);
11353 a_dest
= XEXP (xop
[0], 0);
11355 if (PSImode
== GET_MODE (a_src
))
11357 gcc_assert (as
== ADDR_SPACE_MEMX
);
11359 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
11360 loop_reg
= gen_rtx_REG (loop_mode
, 24);
11361 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
11363 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
11364 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
11368 int segment
= avr_addrspace
[as
].segment
;
11371 && avr_n_flash
> 1)
11373 a_hi8
= GEN_INT (segment
);
11374 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
11376 else if (!ADDR_SPACE_GENERIC_P (as
))
11378 as
= ADDR_SPACE_FLASH
;
11383 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
11384 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
11387 xas
= GEN_INT (as
);
11389 /* FIXME: Register allocator might come up with spill fails if it is left
11390 on its own. Thus, we allocate the pointer registers by hand:
11392 X = destination address */
11394 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
11395 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
11397 /* FIXME: Register allocator does a bad job and might spill address
11398 register(s) inside the loop leading to additional move instruction
11399 to/from stack which could clobber tmp_reg. Thus, do *not* emit
11400 load and store as separate insns. Instead, we perform the copy
11401 by means of one monolithic insn. */
11403 gcc_assert (TMP_REGNO
== LPM_REGNO
);
11405 if (as
!= ADDR_SPACE_MEMX
)
11407 /* Load instruction ([E]LPM or LD) is known at compile time:
11408 Do the copy-loop inline. */
11410 rtx (*fun
) (rtx
, rtx
, rtx
)
11411 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
11413 insn
= fun (xas
, loop_reg
, loop_reg
);
11417 rtx (*fun
) (rtx
, rtx
)
11418 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
11420 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
11422 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
11425 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
11432 /* Print assembler for movmem_qi, movmem_hi insns...
11434 $1, $2 : Loop register
11436 X : Destination address
11440 avr_out_movmem (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
11442 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
11443 enum machine_mode loop_mode
= GET_MODE (op
[1]);
11444 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
11452 xop
[2] = tmp_reg_rtx
;
11456 avr_asm_len ("0:", xop
, plen
, 0);
11458 /* Load with post-increment */
11465 case ADDR_SPACE_GENERIC
:
11467 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
11470 case ADDR_SPACE_FLASH
:
11473 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
11475 avr_asm_len ("lpm" CR_TAB
11476 "adiw r30,1", xop
, plen
, 2);
11479 case ADDR_SPACE_FLASH1
:
11480 case ADDR_SPACE_FLASH2
:
11481 case ADDR_SPACE_FLASH3
:
11482 case ADDR_SPACE_FLASH4
:
11483 case ADDR_SPACE_FLASH5
:
11485 if (AVR_HAVE_ELPMX
)
11486 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
11488 avr_asm_len ("elpm" CR_TAB
11489 "adiw r30,1", xop
, plen
, 2);
11493 /* Store with post-increment */
11495 avr_asm_len ("st X+,%2", xop
, plen
, 1);
11497 /* Decrement loop-counter and set Z-flag */
11499 if (QImode
== loop_mode
)
11501 avr_asm_len ("dec %1", xop
, plen
, 1);
11505 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
11509 avr_asm_len ("subi %A1,1" CR_TAB
11510 "sbci %B1,0", xop
, plen
, 2);
11513 /* Loop until zero */
11515 return avr_asm_len ("brne 0b", xop
, plen
, 1);
11520 /* Helper for __builtin_avr_delay_cycles */
11523 avr_mem_clobber (void)
11525 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
11526 MEM_VOLATILE_P (mem
) = 1;
11531 avr_expand_delay_cycles (rtx operands0
)
11533 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
11534 unsigned HOST_WIDE_INT cycles_used
;
11535 unsigned HOST_WIDE_INT loop_count
;
11537 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
11539 loop_count
= ((cycles
- 9) / 6) + 1;
11540 cycles_used
= ((loop_count
- 1) * 6) + 9;
11541 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
11542 avr_mem_clobber()));
11543 cycles
-= cycles_used
;
11546 if (IN_RANGE (cycles
, 262145, 83886081))
11548 loop_count
= ((cycles
- 7) / 5) + 1;
11549 if (loop_count
> 0xFFFFFF)
11550 loop_count
= 0xFFFFFF;
11551 cycles_used
= ((loop_count
- 1) * 5) + 7;
11552 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
11553 avr_mem_clobber()));
11554 cycles
-= cycles_used
;
11557 if (IN_RANGE (cycles
, 768, 262144))
11559 loop_count
= ((cycles
- 5) / 4) + 1;
11560 if (loop_count
> 0xFFFF)
11561 loop_count
= 0xFFFF;
11562 cycles_used
= ((loop_count
- 1) * 4) + 5;
11563 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
11564 avr_mem_clobber()));
11565 cycles
-= cycles_used
;
11568 if (IN_RANGE (cycles
, 6, 767))
11570 loop_count
= cycles
/ 3;
11571 if (loop_count
> 255)
11573 cycles_used
= loop_count
* 3;
11574 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
11575 avr_mem_clobber()));
11576 cycles
-= cycles_used
;
11579 while (cycles
>= 2)
11581 emit_insn (gen_nopv (GEN_INT(2)));
11587 emit_insn (gen_nopv (GEN_INT(1)));
/* Compute the image of x under f, i.e. perform   x --> f(x)
   by picking the x-th nibble of F; out-of-range X maps to 0.  */

static unsigned
avr_map (unsigned int f, int x)
{
  return x < 8 ? (f >> (4 * x)) & 0xf : 0;
}
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      if (mode == MAP_FIXED_0_7)
        metric += ai == i;
      else if (mode == MAP_NONFIXED_0_7)
        metric += ai < 8 && ai != i;
      else if (mode == MAP_MASK_FIXED_0_7)
        metric |= ((unsigned) (ai == i)) << i;
      else if (mode == MAP_PREIMAGE_0_7)
        metric += ai < 8;
      else if (mode == MAP_MASK_PREIMAGE_F)
        metric |= ((unsigned) (ai == 0xf)) << i;
      else
        break;
    }

  return metric;
}
11649 /* Return true if IVAL has a 0xf in its hexadecimal representation
11650 and false, otherwise. Only nibbles 0..7 are taken into account.
11651 Used as constraint helper for C0f and Cxf. */
11654 avr_has_nibble_0xf (rtx ival
)
11656 unsigned int map
= UINTVAL (ival
) & GET_MODE_MASK (SImode
);
11657 return 0 != avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
11661 /* We have a set of bits that are mapped by a function F.
11662 Try to decompose F by means of a second function G so that
11668 cost (F o G^-1) + cost (G) < cost (F)
11670 Example: Suppose builtin insert_bits supplies us with the map
11671 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
11672 nibble of the result, we can just as well rotate the bits before inserting
11673 them and use the map 0x7654ffff which is cheaper than the original map.
11674 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
11678 /* tree code of binary function G */
11679 enum tree_code code
;
11681 /* The constant second argument of G */
11684 /* G^-1, the inverse of G (*, arg) */
11687 /* The cost of appplying G (*, arg) */
11690 /* The composition F o G^-1 (*, arg) for some function F */
11693 /* For debug purpose only */
11697 static const avr_map_op_t avr_map_op
[] =
11699 { LROTATE_EXPR
, 0, 0x76543210, 0, 0, "id" },
11700 { LROTATE_EXPR
, 1, 0x07654321, 2, 0, "<<<" },
11701 { LROTATE_EXPR
, 2, 0x10765432, 4, 0, "<<<" },
11702 { LROTATE_EXPR
, 3, 0x21076543, 4, 0, "<<<" },
11703 { LROTATE_EXPR
, 4, 0x32107654, 1, 0, "<<<" },
11704 { LROTATE_EXPR
, 5, 0x43210765, 3, 0, "<<<" },
11705 { LROTATE_EXPR
, 6, 0x54321076, 5, 0, "<<<" },
11706 { LROTATE_EXPR
, 7, 0x65432107, 3, 0, "<<<" },
11707 { RSHIFT_EXPR
, 1, 0x6543210c, 1, 0, ">>" },
11708 { RSHIFT_EXPR
, 1, 0x7543210c, 1, 0, ">>" },
11709 { RSHIFT_EXPR
, 2, 0x543210cc, 2, 0, ">>" },
11710 { RSHIFT_EXPR
, 2, 0x643210cc, 2, 0, ">>" },
11711 { RSHIFT_EXPR
, 2, 0x743210cc, 2, 0, ">>" },
11712 { LSHIFT_EXPR
, 1, 0xc7654321, 1, 0, "<<" },
11713 { LSHIFT_EXPR
, 2, 0xcc765432, 2, 0, "<<" }
11717 /* Try to decompose F as F = (F o G^-1) o G as described above.
11718 The result is a struct representing F o G^-1 and G.
11719 If result.cost < 0 then such a decomposition does not exist. */
11721 static avr_map_op_t
11722 avr_map_decompose (unsigned int f
, const avr_map_op_t
*g
, bool val_const_p
)
11725 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
11726 avr_map_op_t f_ginv
= *g
;
11727 unsigned int ginv
= g
->ginv
;
11731 /* Step 1: Computing F o G^-1 */
11733 for (i
= 7; i
>= 0; i
--)
11735 int x
= avr_map (f
, i
);
11739 x
= avr_map (ginv
, x
);
11741 /* The bit is no element of the image of G: no avail (cost = -1) */
11747 f_ginv
.map
= (f_ginv
.map
<< 4) + x
;
11750 /* Step 2: Compute the cost of the operations.
11751 The overall cost of doing an operation prior to the insertion is
11752 the cost of the insertion plus the cost of the operation. */
11754 /* Step 2a: Compute cost of F o G^-1 */
11756 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
11758 /* The mapping consists only of fixed points and can be folded
11759 to AND/OR logic in the remainder. Reasonable cost is 3. */
11761 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
11767 /* Get the cost of the insn by calling the output worker with some
11768 fake values. Mimic effect of reloading xop[3]: Unused operands
11769 are mapped to 0 and used operands are reloaded to xop[0]. */
11771 xop
[0] = all_regs_rtx
[24];
11772 xop
[1] = gen_int_mode (f_ginv
.map
, SImode
);
11773 xop
[2] = all_regs_rtx
[25];
11774 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
11776 avr_out_insert_bits (xop
, &f_ginv
.cost
);
11778 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
11781 /* Step 2b: Add cost of G */
11783 f_ginv
.cost
+= g
->cost
;
11785 if (avr_log
.builtin
)
11786 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
11792 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11793 XOP[0] and XOP[1] don't overlap.
11794 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11795 If FIXP_P = false: Just move the bit if its position in the destination
11796 is different to its source position. */
11799 avr_move_bits (rtx
*xop
, unsigned int map
, bool fixp_p
, int *plen
)
11803 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11804 int t_bit_src
= -1;
11806 /* We order the operations according to the requested source bit b. */
11808 for (b
= 0; b
< 8; b
++)
11809 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
11811 int bit_src
= avr_map (map
, bit_dest
);
11815 /* Same position: No need to copy as requested by FIXP_P. */
11816 || (bit_dest
== bit_src
&& !fixp_p
))
11819 if (t_bit_src
!= bit_src
)
11821 /* Source bit is not yet in T: Store it to T. */
11823 t_bit_src
= bit_src
;
11825 xop
[3] = GEN_INT (bit_src
);
11826 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
11829 /* Load destination bit with T. */
11831 xop
[3] = GEN_INT (bit_dest
);
11832 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
11837 /* PLEN == 0: Print assembler code for `insert_bits'.
11838 PLEN != 0: Compute code length in bytes.
11841 OP[1]: The mapping composed of nibbles. If nibble no. N is
11842 0: Bit N of result is copied from bit OP[2].0
11844 7: Bit N of result is copied from bit OP[2].7
11845 0xf: Bit N of result is copied from bit OP[3].N
11846 OP[2]: Bits to be inserted
11847 OP[3]: Target value */
11850 avr_out_insert_bits (rtx
*op
, int *plen
)
11852 unsigned int map
= UINTVAL (op
[1]) & GET_MODE_MASK (SImode
);
11853 unsigned mask_fixed
;
11854 bool fixp_p
= true;
11861 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
11865 else if (flag_print_asm_name
)
11866 fprintf (asm_out_file
, ASM_COMMENT_START
"map = 0x%08x\n", map
);
11868 /* If MAP has fixed points it might be better to initialize the result
11869 with the bits to be inserted instead of moving all bits by hand. */
11871 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
11873 if (REGNO (xop
[0]) == REGNO (xop
[1]))
11875 /* Avoid early-clobber conflicts */
11877 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
11878 xop
[1] = tmp_reg_rtx
;
11882 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
11884 /* XOP[2] is used and reloaded to XOP[0] already */
11886 int n_fix
= 0, n_nofix
= 0;
11888 gcc_assert (REG_P (xop
[2]));
11890 /* Get the code size of the bit insertions; once with all bits
11891 moved and once with fixed points omitted. */
11893 avr_move_bits (xop
, map
, true, &n_fix
);
11894 avr_move_bits (xop
, map
, false, &n_nofix
);
11896 if (fixp_p
&& n_fix
- n_nofix
> 3)
11898 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
11900 avr_asm_len ("eor %0,%1" CR_TAB
11901 "andi %0,%3" CR_TAB
11902 "eor %0,%1", xop
, plen
, 3);
11908 /* XOP[2] is unused */
11910 if (fixp_p
&& mask_fixed
)
11912 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
11917 /* Move/insert remaining bits. */
11919 avr_move_bits (xop
, map
, fixp_p
, plen
);
11925 /* IDs for all the AVR builtins. */
11927 enum avr_builtin_id
11929 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11930 AVR_BUILTIN_ ## NAME,
11931 #include "builtins.def"
11937 struct GTY(()) avr_builtin_description
11939 enum insn_code icode
;
11945 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11946 that a built-in's ID can be used to access the built-in by means of
11949 static GTY(()) struct avr_builtin_description
11950 avr_bdesc
[AVR_BUILTIN_COUNT
] =
11952 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
11953 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11954 #include "builtins.def"
11959 /* Implement `TARGET_BUILTIN_DECL'. */
11962 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
11964 if (id
< AVR_BUILTIN_COUNT
)
11965 return avr_bdesc
[id
].fndecl
;
11967 return error_mark_node
;
11972 avr_init_builtin_int24 (void)
11974 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
11975 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
11977 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
11978 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
11982 /* Implement `TARGET_INIT_BUILTINS' */
11983 /* Set up all builtin functions for this target. */
11986 avr_init_builtins (void)
11988 tree void_ftype_void
11989 = build_function_type_list (void_type_node
, NULL_TREE
);
11990 tree uchar_ftype_uchar
11991 = build_function_type_list (unsigned_char_type_node
,
11992 unsigned_char_type_node
,
11994 tree uint_ftype_uchar_uchar
11995 = build_function_type_list (unsigned_type_node
,
11996 unsigned_char_type_node
,
11997 unsigned_char_type_node
,
11999 tree int_ftype_char_char
12000 = build_function_type_list (integer_type_node
,
12004 tree int_ftype_char_uchar
12005 = build_function_type_list (integer_type_node
,
12007 unsigned_char_type_node
,
12009 tree void_ftype_ulong
12010 = build_function_type_list (void_type_node
,
12011 long_unsigned_type_node
,
12014 tree uchar_ftype_ulong_uchar_uchar
12015 = build_function_type_list (unsigned_char_type_node
,
12016 long_unsigned_type_node
,
12017 unsigned_char_type_node
,
12018 unsigned_char_type_node
,
12021 tree const_memx_void_node
12022 = build_qualified_type (void_type_node
,
12024 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
12026 tree const_memx_ptr_type_node
12027 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
12029 tree char_ftype_const_memx_ptr
12030 = build_function_type_list (char_type_node
,
12031 const_memx_ptr_type_node
,
12035 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
12037 #define FX_FTYPE_FX(fx) \
12038 tree fx##r_ftype_##fx##r \
12039 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
12040 tree fx##k_ftype_##fx##k \
12041 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
12043 #define FX_FTYPE_FX_INT(fx) \
12044 tree fx##r_ftype_##fx##r_int \
12045 = build_function_type_list (node_##fx##r, node_##fx##r, \
12046 integer_type_node, NULL); \
12047 tree fx##k_ftype_##fx##k_int \
12048 = build_function_type_list (node_##fx##k, node_##fx##k, \
12049 integer_type_node, NULL)
12051 #define INT_FTYPE_FX(fx) \
12052 tree int_ftype_##fx##r \
12053 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
12054 tree int_ftype_##fx##k \
12055 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
12057 #define INTX_FTYPE_FX(fx) \
12058 tree int##fx##r_ftype_##fx##r \
12059 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
12060 tree int##fx##k_ftype_##fx##k \
12061 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
12063 #define FX_FTYPE_INTX(fx) \
12064 tree fx##r_ftype_int##fx##r \
12065 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
12066 tree fx##k_ftype_int##fx##k \
12067 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
12069 tree node_hr
= short_fract_type_node
;
12070 tree node_nr
= fract_type_node
;
12071 tree node_lr
= long_fract_type_node
;
12072 tree node_llr
= long_long_fract_type_node
;
12074 tree node_uhr
= unsigned_short_fract_type_node
;
12075 tree node_unr
= unsigned_fract_type_node
;
12076 tree node_ulr
= unsigned_long_fract_type_node
;
12077 tree node_ullr
= unsigned_long_long_fract_type_node
;
12079 tree node_hk
= short_accum_type_node
;
12080 tree node_nk
= accum_type_node
;
12081 tree node_lk
= long_accum_type_node
;
12082 tree node_llk
= long_long_accum_type_node
;
12084 tree node_uhk
= unsigned_short_accum_type_node
;
12085 tree node_unk
= unsigned_accum_type_node
;
12086 tree node_ulk
= unsigned_long_accum_type_node
;
12087 tree node_ullk
= unsigned_long_long_accum_type_node
;
12090 /* For absfx builtins. */
/* NOTE(review): the FX_FTYPE_FX instantiations that should follow the
   absfx comment are not visible in this excerpt -- confirm against the
   complete file.  */
12097 /* For roundfx builtins. */
/* Declare "fx (fx, int)" types for every fixed-point width.  */
12099 FX_FTYPE_FX_INT (h
);
12100 FX_FTYPE_FX_INT (n
);
12101 FX_FTYPE_FX_INT (l
);
12102 FX_FTYPE_FX_INT (ll
);
12104 FX_FTYPE_FX_INT (uh
);
12105 FX_FTYPE_FX_INT (un
);
12106 FX_FTYPE_FX_INT (ul
);
12107 FX_FTYPE_FX_INT (ull
);
12109 /* For countlsfx builtins. */
/* "int (fx)" types; only the ull instantiation is visible here --
   the others are elided from this excerpt.  */
12119 INT_FTYPE_FX (ull
);
12121 /* For bitsfx builtins. */
/* "intN (fx)" types; h/n/l instantiations not visible here.  */
12126 INTX_FTYPE_FX (ll
);
12128 INTX_FTYPE_FX (uh
);
12129 INTX_FTYPE_FX (un
);
12130 INTX_FTYPE_FX (ul
);
12131 INTX_FTYPE_FX (ull
);
12133 /* For fxbits builtins. */
/* "fx (intN)" types; h/n/l instantiations not visible here.  */
12138 FX_FTYPE_INTX (ll
);
12140 FX_FTYPE_INTX (uh
);
12141 FX_FTYPE_INTX (un
);
12142 FX_FTYPE_INTX (ul
);
12143 FX_FTYPE_INTX (ull
);
/* Register every AVR builtin listed in builtins.def: build the
   lower-cased "__builtin_avr_..." name, register it as a BUILT_IN_MD
   function and record the resulting decl in avr_bdesc[] under its
   AVR_BUILTIN_* id.  NOTE(review): the enclosing braces / blank
   continuation lines of this macro and the trailing #undef are not
   visible in this excerpt -- confirm against the complete file.  */
12146 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
12148 int id = AVR_BUILTIN_ ## NAME; \
12149 const char *Name = "__builtin_avr_" #NAME; \
12150 char *name = (char*) alloca (1 + strlen (Name)); \
12152 gcc_assert (id < AVR_BUILTIN_COUNT); \
12153 avr_bdesc[id].fndecl \
12154 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
12155 BUILT_IN_MD, LIBNAME, NULL_TREE); \
12157 #include "builtins.def"
12160 avr_init_builtin_int24 ();
12164 /* Subroutine of avr_expand_builtin to expand vanilla builtins
12165 with non-void result and 1 ... 3 arguments. */
/* ICODE is the insn code of the machine insn implementing the builtin,
   EXP is the CALL_EXPR being expanded and TARGET a suggested (possibly
   unusable) result rtx.  NOTE(review): the function head (return type),
   the xop[] operand array, the switch header over n_args and the tail
   of the function are not visible in this excerpt.  */
12168 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
12171 int n
, n_args
= call_expr_nargs (exp
);
/* Result mode demanded by operand 0 of the insn.  */
12172 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
12174 gcc_assert (n_args
>= 1 && n_args
<= 3);
/* Use a fresh pseudo when TARGET is absent, has the wrong mode, or is
   rejected by the insn's operand-0 predicate.  */
12176 if (target
== NULL_RTX
12177 || GET_MODE (target
) != tmode
12178 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
12180 target
= gen_reg_rtx (tmode
);
/* Expand each call argument into an rtx acceptable as operand N+1.  */
12183 for (n
= 0; n
< n_args
; n
++)
12185 tree arg
= CALL_EXPR_ARG (exp
, n
);
12186 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12187 enum machine_mode opmode
= GET_MODE (op
);
12188 enum machine_mode mode
= insn_data
[icode
].operand
[n
+1].mode
;
/* Narrow SImode (or mode-less constant) arguments when the insn wants
   HImode input.  */
12190 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
12193 op
= gen_lowpart (HImode
, op
);
12196 /* In case the insn wants input operands in modes different from
12197 the result, abort. */
12199 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
/* Force the operand into a register when the predicate rejects it.  */
12201 if (!insn_data
[icode
].operand
[n
+1].predicate (op
, mode
))
12202 op
= copy_to_mode_reg (mode
, op
);
/* Generate the insn pattern for 1..3 operands via the insn's generator
   function.  (The enclosing switch header is elided from this excerpt.)  */
12209 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
12210 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
12211 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
/* A NULL pattern means the generator refused the operands.  */
12217 if (pat
== NULL_RTX
)
12226 /* Implement `TARGET_EXPAND_BUILTIN'. */
12227 /* Expand an expression EXP that calls a built-in function,
12228 with result going to TARGET if that's convenient
12229 (and in mode MODE if that's convenient).
12230 SUBTARGET may be used as the target for computing one of EXP's operands.
12231 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): the function head (return type), the IGNORE parameter
   line, the switch header over ID, the per-case braces/breaks and some
   local declarations (arg0, op0) are not visible in this excerpt.  */
12234 avr_expand_builtin (tree exp
, rtx target
,
12235 rtx subtarget ATTRIBUTE_UNUSED
,
12236 enum machine_mode mode ATTRIBUTE_UNUSED
,
/* Look up the builtin's descriptor by its function code.  */
12239 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
12240 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
12241 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
12242 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
12246 gcc_assert (id
< AVR_BUILTIN_COUNT
);
/* __builtin_avr_nop: emit a 1-cycle nop insn.  */
12250 case AVR_BUILTIN_NOP
:
12251 emit_insn (gen_nopv (GEN_INT(1)));
/* __builtin_avr_delay_cycles: the cycle count must be a compile-time
   integer constant.  */
12254 case AVR_BUILTIN_DELAY_CYCLES
:
12256 arg0
= CALL_EXPR_ARG (exp
, 0);
12257 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12259 if (!CONST_INT_P (op0
))
12260 error ("%s expects a compile time integer constant", bname
);
12262 avr_expand_delay_cycles (op0
);
/* __builtin_avr_insert_bits: the bit-map (1st argument) must be a
   compile-time constant.  */
12267 case AVR_BUILTIN_INSERT_BITS
:
12269 arg0
= CALL_EXPR_ARG (exp
, 0);
12270 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12272 if (!CONST_INT_P (op0
))
12274 error ("%s expects a compile time long integer constant"
12275 " as first argument", bname
);
/* The roundfx builtins for all 16 fixed-point flavors.  */
12282 case AVR_BUILTIN_ROUNDHR
: case AVR_BUILTIN_ROUNDUHR
:
12283 case AVR_BUILTIN_ROUNDR
: case AVR_BUILTIN_ROUNDUR
:
12284 case AVR_BUILTIN_ROUNDLR
: case AVR_BUILTIN_ROUNDULR
:
12285 case AVR_BUILTIN_ROUNDLLR
: case AVR_BUILTIN_ROUNDULLR
:
12287 case AVR_BUILTIN_ROUNDHK
: case AVR_BUILTIN_ROUNDUHK
:
12288 case AVR_BUILTIN_ROUNDK
: case AVR_BUILTIN_ROUNDUK
:
12289 case AVR_BUILTIN_ROUNDLK
: case AVR_BUILTIN_ROUNDULK
:
12290 case AVR_BUILTIN_ROUNDLLK
: case AVR_BUILTIN_ROUNDULLK
:
12292 /* Warn about odd rounding. Rounding points >= FBIT will have
12295 if (TREE_CODE (CALL_EXPR_ARG (exp
, 1)) != INTEGER_CST
)
/* Constant rounding point: diagnose values outside -IBIT..FBIT.  */
12298 int rbit
= (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1));
12300 if (rbit
>= (int) GET_MODE_FBIT (mode
))
12302 warning (OPT_Wextra
, "rounding to %d bits has no effect for "
12303 "fixed-point value with %d fractional bits",
12304 rbit
, GET_MODE_FBIT (mode
));
/* Rounding beyond the fractional bits is a no-op: return the value.  */
12306 return expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
, mode
,
12309 else if (rbit
<= - (int) GET_MODE_IBIT (mode
))
12311 warning (0, "rounding result will always be 0");
12312 return CONST0_RTX (mode
);
12315 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
12317 TR 18037 only specifies results for RP > 0. However, the
12318 remaining cases of -IBIT < RP <= 0 can easily be supported
12319 without any additional overhead. */
12324 /* No fold found and no insn: Call support function from libgcc. */
12326 if (d
->icode
== CODE_FOR_nothing
12327 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp
)) != NULL_TREE
)
12329 return expand_call (exp
, target
, ignore
)
;
12332 /* No special treatment needed: vanilla expand. */
12334 gcc_assert (d
->icode
!= CODE_FOR_nothing
);
12335 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
/* Zero-argument builtins are emitted directly; the rest go through
   the generic expander above.  */
12337 if (d
->n_args
== 0)
12339 emit_insn ((GEN_FCN (d
->icode
)) (target
));
12343 return avr_default_expand_builtin (d
->icode
, exp
, target
);
12347 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
/* TVAL is the (single) argument of an absfx builtin.  Returns a folded
   FIXED_CST when TVAL is a fixed-point constant.  NOTE(review): the
   function head (return type), the early returns and the negation arm
   of the conditional assignment are not visible in this excerpt.  */
12350 avr_fold_absfx (tree tval
)
/* Only fixed-point constants can be folded here.  */
12352 if (FIXED_CST
!= TREE_CODE (tval
))
12355 /* Our fixed-points have no padding: Use double_int payload directly. */
12357 FIXED_VALUE_TYPE fval
= TREE_FIXED_CST (tval
);
12358 unsigned int bits
= GET_MODE_BITSIZE (fval
.mode
);
/* Sign-extend the payload to the type's bit width.  */
12359 double_int ival
= fval
.data
.sext (bits
);
/* Non-negative values are their own absolute value.  */
12361 if (!ival
.is_negative())
12364 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
/* abs of the most-negative value saturates to the maximum.  */
12366 fval
.data
= (ival
== double_int::min_value (bits
, false).sext (bits
))
12367 ? double_int::max_value (bits
, false)
12370 return build_fixed (TREE_TYPE (tval
), fval
);
12374 /* Implement `TARGET_FOLD_BUILTIN'. */
/* Fold calls to AVR builtins at the tree level where possible.
   FNDECL is the builtin's decl, ARG its argument array.  Returns a
   folded tree or falls through for no fold.  NOTE(review): the function
   head (return type), the switch header over FCODE, per-case breaks,
   the default case and several local declarations (tmap, map, i, g) are
   not visible in this excerpt.  */
12377 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
12378 bool ignore ATTRIBUTE_UNUSED
)
12380 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
/* Type of the builtin's return value.  */
12381 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
/* __builtin_avr_swap folds to a rotate-left by 4 (nibble swap).  */
12391 case AVR_BUILTIN_SWAP
:
12393 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
12394 build_int_cst (val_type
, 4));
/* The absfx builtins: fold constants via avr_fold_absfx.  */
12397 case AVR_BUILTIN_ABSHR
:
12398 case AVR_BUILTIN_ABSR
:
12399 case AVR_BUILTIN_ABSLR
:
12400 case AVR_BUILTIN_ABSLLR
:
12402 case AVR_BUILTIN_ABSHK
:
12403 case AVR_BUILTIN_ABSK
:
12404 case AVR_BUILTIN_ABSLK
:
12405 case AVR_BUILTIN_ABSLLK
:
12406 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
12408 return avr_fold_absfx (arg
[0]);
/* The bitsfx / fxbits builtins are pure reinterpretations between a
   fixed-point type and the integer type of equal precision: fold to a
   VIEW_CONVERT_EXPR.  */
12410 case AVR_BUILTIN_BITSHR
: case AVR_BUILTIN_HRBITS
:
12411 case AVR_BUILTIN_BITSHK
: case AVR_BUILTIN_HKBITS
:
12412 case AVR_BUILTIN_BITSUHR
: case AVR_BUILTIN_UHRBITS
:
12413 case AVR_BUILTIN_BITSUHK
: case AVR_BUILTIN_UHKBITS
:
12415 case AVR_BUILTIN_BITSR
: case AVR_BUILTIN_RBITS
:
12416 case AVR_BUILTIN_BITSK
: case AVR_BUILTIN_KBITS
:
12417 case AVR_BUILTIN_BITSUR
: case AVR_BUILTIN_URBITS
:
12418 case AVR_BUILTIN_BITSUK
: case AVR_BUILTIN_UKBITS
:
12420 case AVR_BUILTIN_BITSLR
: case AVR_BUILTIN_LRBITS
:
12421 case AVR_BUILTIN_BITSLK
: case AVR_BUILTIN_LKBITS
:
12422 case AVR_BUILTIN_BITSULR
: case AVR_BUILTIN_ULRBITS
:
12423 case AVR_BUILTIN_BITSULK
: case AVR_BUILTIN_ULKBITS
:
12425 case AVR_BUILTIN_BITSLLR
: case AVR_BUILTIN_LLRBITS
:
12426 case AVR_BUILTIN_BITSLLK
: case AVR_BUILTIN_LLKBITS
:
12427 case AVR_BUILTIN_BITSULLR
: case AVR_BUILTIN_ULLRBITS
:
12428 case AVR_BUILTIN_BITSULLK
: case AVR_BUILTIN_ULLKBITS
:
12430 gcc_assert (TYPE_PRECISION (val_type
)
12431 == TYPE_PRECISION (TREE_TYPE (arg
[0])));
12433 return build1 (VIEW_CONVERT_EXPR
, val_type
, arg
[0]);
/* __builtin_avr_insert_bits (map, bits, val).  */
12435 case AVR_BUILTIN_INSERT_BITS
:
12437 tree tbits
= arg
[1];
12438 tree tval
= arg
[2];
/* Type of the first (map) parameter, taken from the decl.  */
12440 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
12442 bool changed
= false;
12444 avr_map_op_t best_g
;
12446 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
12448 /* No constant as first argument: Don't fold this and run into
12449 error in avr_expand_builtin. */
12454 tmap
= wide_int_to_tree (map_type
, arg
[0]);
12455 map
= TREE_INT_CST_LOW (tmap
);
12457 if (TREE_CODE (tval
) != INTEGER_CST
12458 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
12460 /* There are no F in the map, i.e. 3rd operand is unused.
12461 Replace that argument with some constant to render
12462 respective input unused. */
12464 tval
= build_int_cst (val_type
, 0);
12468 if (TREE_CODE (tbits
) != INTEGER_CST
12469 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
12471 /* Similar for the bits to be inserted. If they are unused,
12472 we can just as well pass 0. */
12474 tbits
= build_int_cst (val_type
, 0);
12477 if (TREE_CODE (tbits
) == INTEGER_CST
)
12479 /* Inserting bits known at compile time is easy and can be
12480 performed by AND and OR with appropriate masks. */
12482 int bits
= TREE_INT_CST_LOW (tbits
);
12483 int mask_ior
= 0, mask_and
= 0xff;
/* Build the IOR/AND masks bit by bit from the nibble map.  */
12485 for (i
= 0; i
< 8; i
++)
12487 int mi
= avr_map (map
, i
);
12491 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
12492 else mask_and
&= ~(1 << i
);
12496 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
12497 build_int_cst (val_type
, mask_ior
));
12498 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
12499 build_int_cst (val_type
, mask_and
));
/* Re-emit the call with the (possibly simplified) arguments.  */
12503 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
12505 /* If bits don't change their position we can use vanilla logic
12506 to merge the two arguments. */
12508 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
12510 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
12511 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
/* (tbits ^ tval) & mask ^ tval merges the two operands bit-wise.  */
12513 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
12514 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
12515 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
12518 /* Try to decomposing map to reduce overall cost. */
12520 if (avr_log
.builtin
)
12521 avr_edump ("\n%?: %x\n%?: ROL cost: ", map
);
/* Search avr_map_op[] for the cheapest decomposition of MAP.  */
12523 best_g
= avr_map_op
[0];
12524 best_g
.cost
= 1000;
12526 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
12529 = avr_map_decompose (map
, avr_map_op
+ i
,
12530 TREE_CODE (tval
) == INTEGER_CST
);
12532 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
12536 if (avr_log
.builtin
)
12539 if (best_g
.arg
== 0)
12540 /* No optimization found */
12543 /* Apply operation G to the 2nd argument. */
12545 if (avr_log
.builtin
)
12546 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
12547 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
12549 /* Do right-shifts arithmetically: They copy the MSB instead of
12550 shifting in a non-usable value (0) as with logic right-shift. */
12552 tbits
= fold_convert (signed_char_type_node
, tbits
);
12553 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
12554 build_int_cst (val_type
, best_g
.arg
));
12555 tbits
= fold_convert (val_type
, tbits
);
12557 /* Use map o G^-1 instead of original map to undo the effect of G. */
12559 tmap
= wide_int_to_tree (map_type
, best_g
.map
);
12561 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
12562 } /* AVR_BUILTIN_INSERT_BITS */
/* Initialize the GCC target structure.  Each pair below overrides the
   default value of a target hook with the AVR implementation; the
   values are collected into `targetm' via TARGET_INITIALIZER.  */

/* Assembler output directives.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function and libcall return values.  */
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attributes and section handling.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs, reorg and argument passing.  */
#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Builtins.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef  TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Named address spaces (__flash, __memx, ...).  */
#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef  TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

/* Operand printing.  */
#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
12729 struct gcc_target targetm
= TARGET_INITIALIZER
;
12732 #include "gt-avr.h"