1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2015 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "c-family/c-common.h"
35 #include "conditions.h"
36 #include "insn-attr.h"
40 #include "stor-layout.h"
44 #include "langhooks.h"
48 #include "tree-pass.h"
50 /* This file should be included last. */
51 #include "target-def.h"
/* Maximal allowed offset for an address in the LD command.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.  */
#define STR_PREFIX_P(STR,PREFIX)                                \
  (strncmp ((STR), (PREFIX), strlen (PREFIX)) == 0)
/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Note: the body must reference the macro parameter SYM, not a
   lower-case `sym' — the latter would silently capture whatever
   variable named `sym' happens to be in scope at the expansion site.
   Wrapped in do/while(0) so the macro expands to a single statement.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
/* 16-bit add of constant I to register pair REG1 (low) / REG2 (high)
   for AVR_TINY cores, which lack ADIW: subtract the negated constant.  */
#define TINY_ADIW(REG1, REG2, I)                \
  "subi " #REG1 ",lo8(-(" #I "))" CR_TAB       \
  "sbci " #REG2 ",hi8(-(" #I "))"

/* Likewise, a 16-bit subtract of constant I (no SBIW on AVR_TINY).  */
#define TINY_SBIW(REG1, REG2, I)                \
  "subi " #REG1 ",lo8((" #I "))" CR_TAB        \
  "sbci " #REG2 ",hi8((" #I "))"

/* Scratch and zero registers; the TINY cores use different hard
   registers than the other AVR cores.  */
#define AVR_TMP_REGNO   (AVR_TINY ? TMP_REGNO_TINY  : TMP_REGNO)
#define AVR_ZERO_REGNO  (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
93 /* Known address spaces. The order must be the same as in the respective
94 enum from avr.h (or designated initialized must be used). */
95 const avr_addrspace_t avr_addrspace
[ADDR_SPACE_COUNT
] =
97 { ADDR_SPACE_RAM
, 0, 2, "", 0, NULL
},
98 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0, ".progmem.data" },
99 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1, ".progmem1.data" },
100 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2, ".progmem2.data" },
101 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3, ".progmem3.data" },
102 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4, ".progmem4.data" },
103 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5, ".progmem5.data" },
104 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0, ".progmemx.data" },
108 /* Holding RAM addresses of some SFRs used by the compiler and that
109 are unique over all devices in an architecture like 'avr4'. */
113 /* SREG: The processor status */
116 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
122 /* RAMPZ: The high byte of 24-bit address used with ELPM */
125 /* SP: The stack pointer and its low and high byte */
130 static avr_addr_t avr_addr
;
133 /* Prototypes for local helper functions. */
135 static const char* out_movqi_r_mr (rtx_insn
*, rtx
[], int*);
136 static const char* out_movhi_r_mr (rtx_insn
*, rtx
[], int*);
137 static const char* out_movsi_r_mr (rtx_insn
*, rtx
[], int*);
138 static const char* out_movqi_mr_r (rtx_insn
*, rtx
[], int*);
139 static const char* out_movhi_mr_r (rtx_insn
*, rtx
[], int*);
140 static const char* out_movsi_mr_r (rtx_insn
*, rtx
[], int*);
142 static int get_sequence_length (rtx_insn
*insns
);
143 static int sequent_regs_live (void);
144 static const char *ptrreg_to_str (int);
145 static const char *cond_string (enum rtx_code
);
146 static int avr_num_arg_regs (machine_mode
, const_tree
);
147 static int avr_operand_rtx_cost (rtx
, machine_mode
, enum rtx_code
,
149 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
150 static struct machine_function
* avr_init_machine_status (void);
153 /* Prototypes for hook implementors if needed before their implementation. */
155 static bool avr_rtx_costs (rtx
, machine_mode
, int, int, int*, bool);
158 /* Allocate registers from r25 to r8 for parameters for function calls. */
159 #define FIRST_CUM_REG 26
161 /* Last call saved register */
162 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
164 /* Implicit target register of LPM instruction (R0) */
165 extern GTY(()) rtx lpm_reg_rtx
;
168 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
169 extern GTY(()) rtx lpm_addr_reg_rtx
;
170 rtx lpm_addr_reg_rtx
;
172 /* Temporary register RTX (reg:QI TMP_REGNO) */
173 extern GTY(()) rtx tmp_reg_rtx
;
176 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
177 extern GTY(()) rtx zero_reg_rtx
;
180 /* RTXs for all general purpose registers as QImode */
181 extern GTY(()) rtx all_regs_rtx
[32];
182 rtx all_regs_rtx
[32];
184 /* SREG, the processor status */
185 extern GTY(()) rtx sreg_rtx
;
188 /* RAMP* special function registers */
189 extern GTY(()) rtx rampd_rtx
;
190 extern GTY(()) rtx rampx_rtx
;
191 extern GTY(()) rtx rampy_rtx
;
192 extern GTY(()) rtx rampz_rtx
;
198 /* RTX containing the strings "" and "e", respectively */
199 static GTY(()) rtx xstring_empty
;
200 static GTY(()) rtx xstring_e
;
202 /* Current architecture. */
203 const avr_arch_t
*avr_arch
;
205 /* Section to put switch tables in. */
206 static GTY(()) section
*progmem_swtable_section
;
208 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
209 or to address space __flash* or __memx. Only used as singletons inside
210 avr_asm_select_section, but it must not be local there because of GTY. */
211 static GTY(()) section
*progmem_section
[ADDR_SPACE_COUNT
];
213 /* Condition for insns/expanders from avr-dimode.md. */
214 bool avr_have_dimode
= true;
216 /* To track if code will use .bss and/or .data. */
217 bool avr_need_clear_bss_p
= false;
218 bool avr_need_copy_data_p
= false;
221 /* Transform UP into lowercase and write the result to LO.
222 You must provide enough space for LO. Return LO. */
225 avr_tolower (char *lo
, const char *up
)
229 for (; *up
; up
++, lo
++)
238 /* Custom function to count number of set bits. */
241 avr_popcount (unsigned int val
)
255 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
256 Return true if the least significant N_BYTES bytes of XVAL all have a
257 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
258 of integers which contains an integer N iff bit N of POP_MASK is set. */
261 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
265 machine_mode mode
= GET_MODE (xval
);
267 if (VOIDmode
== mode
)
270 for (i
= 0; i
< n_bytes
; i
++)
272 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
273 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
275 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
283 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
284 the bit representation of X by "casting" it to CONST_INT. */
287 avr_to_int_mode (rtx x
)
289 machine_mode mode
= GET_MODE (x
);
291 return VOIDmode
== mode
293 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
297 static const pass_data avr_pass_data_recompute_notes
=
300 "", // name (will be patched)
301 OPTGROUP_NONE
, // optinfo_flags
303 0, // properties_required
304 0, // properties_provided
305 0, // properties_destroyed
306 0, // todo_flags_start
307 TODO_df_finish
| TODO_df_verify
// todo_flags_finish
311 class avr_pass_recompute_notes
: public rtl_opt_pass
314 avr_pass_recompute_notes (gcc::context
*ctxt
, const char *name
)
315 : rtl_opt_pass (avr_pass_data_recompute_notes
, ctxt
)
320 virtual unsigned int execute (function
*)
322 df_note_add_problem ();
327 }; // avr_pass_recompute_notes
331 avr_register_passes (void)
333 /* This avr-specific pass (re)computes insn notes, in particular REG_DEAD
334 notes which are used by `avr.c::reg_unused_after' and branch offset
335 computations. These notes must be correct, i.e. there must be no
336 dangling REG_DEAD notes; otherwise wrong code might result, cf. PR64331.
338 DF needs (correct) CFG, hence right before free_cfg is the last
339 opportunity to rectify notes. */
341 register_pass (new avr_pass_recompute_notes (g
, "avr-notes-free-cfg"),
342 PASS_POS_INSERT_BEFORE
, "*free_cfg", 1);
346 /* Set `avr_arch' as specified by `-mmcu='.
347 Return true on success. */
350 avr_set_core_architecture (void)
352 /* Search for mcu core architecture. */
355 avr_mmcu
= AVR_MMCU_DEFAULT
;
357 avr_arch
= &avr_arch_types
[0];
359 for (const avr_mcu_t
*mcu
= avr_mcu_types
; ; mcu
++)
361 if (NULL
== mcu
->name
)
363 /* Reached the end of `avr_mcu_types'. This should actually never
364 happen as options are provided by device-specs. It could be a
365 typo in a device-specs or calling the compiler proper directly
366 with -mmcu=<device>. */
368 error ("unknown core architecture %qs specified with %qs",
370 avr_inform_core_architectures ();
373 else if (0 == strcmp (mcu
->name
, avr_mmcu
)
374 // Is this a proper architecture ?
375 && NULL
== mcu
->macro
)
377 avr_arch
= &avr_arch_types
[mcu
->arch_id
];
379 avr_n_flash
= mcu
->n_flash
;
389 /* Implement `TARGET_OPTION_OVERRIDE'. */
392 avr_option_override (void)
394 /* Disable -fdelete-null-pointer-checks option for AVR target.
395 This option compiler assumes that dereferencing of a null pointer
396 would halt the program. For AVR this assumption is not true and
397 programs can safely dereference null pointers. Changes made by this
398 option may not work properly for AVR. So disable this option. */
400 flag_delete_null_pointer_checks
= 0;
402 /* caller-save.c looks for call-clobbered hard registers that are assigned
403 to pseudos that cross calls and tries so save-restore them around calls
404 in order to reduce the number of stack slots needed.
406 This might lead to situations where reload is no more able to cope
407 with the challenge of AVR's very few address registers and fails to
408 perform the requested spills. */
411 flag_caller_saves
= 0;
413 /* Unwind tables currently require a frame pointer for correctness,
414 see toplev.c:process_options(). */
416 if ((flag_unwind_tables
417 || flag_non_call_exceptions
418 || flag_asynchronous_unwind_tables
)
419 && !ACCUMULATE_OUTGOING_ARGS
)
421 flag_omit_frame_pointer
= 0;
425 warning (OPT_fpic
, "-fpic is not supported");
427 warning (OPT_fPIC
, "-fPIC is not supported");
429 warning (OPT_fpie
, "-fpie is not supported");
431 warning (OPT_fPIE
, "-fPIE is not supported");
433 if (!avr_set_core_architecture())
436 /* RAM addresses of some SFRs common to all devices in respective arch. */
438 /* SREG: Status Register containing flags like I (global IRQ) */
439 avr_addr
.sreg
= 0x3F + avr_arch
->sfr_offset
;
441 /* RAMPZ: Address' high part when loading via ELPM */
442 avr_addr
.rampz
= 0x3B + avr_arch
->sfr_offset
;
444 avr_addr
.rampy
= 0x3A + avr_arch
->sfr_offset
;
445 avr_addr
.rampx
= 0x39 + avr_arch
->sfr_offset
;
446 avr_addr
.rampd
= 0x38 + avr_arch
->sfr_offset
;
447 avr_addr
.ccp
= (AVR_TINY
? 0x3C : 0x34) + avr_arch
->sfr_offset
;
449 /* SP: Stack Pointer (SP_H:SP_L) */
450 avr_addr
.sp_l
= 0x3D + avr_arch
->sfr_offset
;
451 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
453 init_machine_status
= avr_init_machine_status
;
455 avr_log_set_avr_log();
457 /* Register some avr-specific pass(es). There is no canonical place for
458 pass registration. This function is convenient. */
460 avr_register_passes ();
463 /* Function to set up the backend function structure. */
465 static struct machine_function
*
466 avr_init_machine_status (void)
468 return ggc_cleared_alloc
<machine_function
> ();
472 /* Implement `INIT_EXPANDERS'. */
473 /* The function works like a singleton. */
476 avr_init_expanders (void)
480 for (regno
= 0; regno
< 32; regno
++)
481 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
483 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
484 tmp_reg_rtx
= all_regs_rtx
[AVR_TMP_REGNO
];
485 zero_reg_rtx
= all_regs_rtx
[AVR_ZERO_REGNO
];
487 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
489 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
490 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
491 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
492 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
493 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
495 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
496 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
498 /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
501 avr_have_dimode
= false;
505 /* Implement `REGNO_REG_CLASS'. */
506 /* Return register class for register R. */
509 avr_regno_reg_class (int r
)
511 static const enum reg_class reg_class_tab
[] =
515 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
516 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
517 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
518 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
520 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
521 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
523 ADDW_REGS
, ADDW_REGS
,
525 POINTER_X_REGS
, POINTER_X_REGS
,
527 POINTER_Y_REGS
, POINTER_Y_REGS
,
529 POINTER_Z_REGS
, POINTER_Z_REGS
,
535 return reg_class_tab
[r
];
541 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
544 avr_scalar_mode_supported_p (machine_mode mode
)
546 if (ALL_FIXED_POINT_MODE_P (mode
))
552 return default_scalar_mode_supported_p (mode
);
556 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
559 avr_decl_flash_p (tree decl
)
561 if (TREE_CODE (decl
) != VAR_DECL
562 || TREE_TYPE (decl
) == error_mark_node
)
567 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
571 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
572 address space and FALSE, otherwise. */
575 avr_decl_memx_p (tree decl
)
577 if (TREE_CODE (decl
) != VAR_DECL
578 || TREE_TYPE (decl
) == error_mark_node
)
583 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
587 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
590 avr_mem_flash_p (rtx x
)
593 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
597 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
598 address space and FALSE, otherwise. */
601 avr_mem_memx_p (rtx x
)
604 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
608 /* A helper for the subsequent function attribute used to dig for
609 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
612 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
614 if (FUNCTION_DECL
== TREE_CODE (func
))
616 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
621 func
= TREE_TYPE (func
);
624 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
625 || TREE_CODE (func
) == METHOD_TYPE
);
627 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
630 /* Return nonzero if FUNC is a naked function. */
633 avr_naked_function_p (tree func
)
635 return avr_lookup_function_attribute1 (func
, "naked");
638 /* Return nonzero if FUNC is an interrupt function as specified
639 by the "interrupt" attribute. */
642 avr_interrupt_function_p (tree func
)
644 return avr_lookup_function_attribute1 (func
, "interrupt");
647 /* Return nonzero if FUNC is a signal function as specified
648 by the "signal" attribute. */
651 avr_signal_function_p (tree func
)
653 return avr_lookup_function_attribute1 (func
, "signal");
656 /* Return nonzero if FUNC is an OS_task function. */
659 avr_OS_task_function_p (tree func
)
661 return avr_lookup_function_attribute1 (func
, "OS_task");
664 /* Return nonzero if FUNC is an OS_main function. */
667 avr_OS_main_function_p (tree func
)
669 return avr_lookup_function_attribute1 (func
, "OS_main");
673 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
/* Sanity checking for the above function attributes.  */
677 avr_set_current_function (tree decl
)
682 if (decl
== NULL_TREE
683 || current_function_decl
== NULL_TREE
684 || current_function_decl
== error_mark_node
686 || cfun
->machine
->attributes_checked_p
)
689 loc
= DECL_SOURCE_LOCATION (decl
);
691 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
692 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
693 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
694 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
695 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
697 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
699 /* Too much attributes make no sense as they request conflicting features. */
701 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
702 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
703 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
704 " exclusive", "OS_task", "OS_main", isr
);
706 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
708 if (cfun
->machine
->is_naked
709 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
710 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
711 " no effect on %qs function", "OS_task", "OS_main", "naked");
713 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
715 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
716 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
719 name
= DECL_ASSEMBLER_NAME_SET_P (decl
)
720 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))
721 : IDENTIFIER_POINTER (DECL_NAME (decl
));
723 /* Skip a leading '*' that might still prefix the assembler name,
724 e.g. in non-LTO runs. */
726 name
= default_strip_name_encoding (name
);
/* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC started
   using this when it switched from SIGNAL and INTERRUPT to ISR.  */
731 if (cfun
->machine
->is_interrupt
)
732 cfun
->machine
->is_signal
= 0;
734 /* Interrupt handlers must be void __vector (void) functions. */
736 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
737 error_at (loc
, "%qs function cannot have arguments", isr
);
739 if (TREE_CODE (ret
) != VOID_TYPE
)
740 error_at (loc
, "%qs function cannot return a value", isr
);
742 /* If the function has the 'signal' or 'interrupt' attribute, ensure
743 that the name of the function is "__vector_NN" so as to catch
744 when the user misspells the vector name. */
746 if (!STR_PREFIX_P (name
, "__vector"))
747 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
751 /* Don't print the above diagnostics more than once. */
753 cfun
->machine
->attributes_checked_p
= 1;
757 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
760 avr_accumulate_outgoing_args (void)
763 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
765 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
766 what offset is correct. In some cases it is relative to
767 virtual_outgoing_args_rtx and in others it is relative to
768 virtual_stack_vars_rtx. For example code see
769 gcc.c-torture/execute/built-in-setjmp.c
770 gcc.c-torture/execute/builtins/sprintf-chk.c */
772 return (TARGET_ACCUMULATE_OUTGOING_ARGS
773 && !(cfun
->calls_setjmp
774 || cfun
->has_nonlocal_label
));
778 /* Report contribution of accumulated outgoing arguments to stack size. */
781 avr_outgoing_args_size (void)
783 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
787 /* Implement `STARTING_FRAME_OFFSET'. */
788 /* This is the offset from the frame pointer register to the first stack slot
789 that contains a variable living in the frame. */
792 avr_starting_frame_offset (void)
794 return 1 + avr_outgoing_args_size ();
798 /* Return the number of hard registers to push/pop in the prologue/epilogue
799 of the current function, and optionally store these registers in SET. */
802 avr_regs_to_save (HARD_REG_SET
*set
)
805 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
808 CLEAR_HARD_REG_SET (*set
);
811 /* No need to save any registers if the function never returns or
812 has the "OS_task" or "OS_main" attribute. */
814 if (TREE_THIS_VOLATILE (current_function_decl
)
815 || cfun
->machine
->is_OS_task
816 || cfun
->machine
->is_OS_main
)
819 for (reg
= 0; reg
< 32; reg
++)
821 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
822 any global register variables. */
827 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
828 || (df_regs_ever_live_p (reg
)
829 && (int_or_sig_p
|| !call_used_regs
[reg
])
/* Don't record frame pointer registers here.  They are treated
   individually in prologue.  */
832 && !(frame_pointer_needed
833 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
836 SET_HARD_REG_BIT (*set
, reg
);
844 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
847 avr_allocate_stack_slots_for_args (void)
849 return !cfun
->machine
->is_naked
;
853 /* Return true if register FROM can be eliminated via register TO. */
856 avr_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
858 return ((frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
859 || !frame_pointer_needed
);
863 /* Implement `TARGET_WARN_FUNC_RETURN'. */
866 avr_warn_func_return (tree decl
)
868 /* Naked functions are implemented entirely in assembly, including the
869 return sequence, so suppress warnings about this. */
871 return !avr_naked_function_p (decl
);
874 /* Compute offset between arg_pointer and frame_pointer. */
877 avr_initial_elimination_offset (int from
, int to
)
879 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
883 int offset
= frame_pointer_needed
? 2 : 0;
884 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
886 offset
+= avr_regs_to_save (NULL
);
887 return (get_frame_size () + avr_outgoing_args_size()
888 + avr_pc_size
+ 1 + offset
);
893 /* Helper for the function below. */
896 avr_adjust_type_node (tree
*node
, machine_mode mode
, int sat_p
)
898 *node
= make_node (FIXED_POINT_TYPE
);
899 TYPE_SATURATING (*node
) = sat_p
;
900 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
901 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
902 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
903 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
904 TYPE_ALIGN (*node
) = 8;
905 SET_TYPE_MODE (*node
, mode
);
911 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
914 avr_build_builtin_va_list (void)
916 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
917 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
918 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
919 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
920 to the long long accum modes instead of the desired [U]TAmode.
922 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
923 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
924 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
925 libgcc to detect IBIT and FBIT. */
927 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
928 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
929 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
930 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
932 unsigned_long_long_accum_type_node
= uta_type_node
;
933 long_long_accum_type_node
= ta_type_node
;
934 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
935 sat_long_long_accum_type_node
= sat_ta_type_node
;
937 /* Dispatch to the default handler. */
939 return std_build_builtin_va_list ();
943 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
944 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
945 frame pointer by +STARTING_FRAME_OFFSET.
946 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
947 avoids creating add/sub of offset in nonlocal goto and setjmp. */
950 avr_builtin_setjmp_frame_value (void)
952 rtx xval
= gen_reg_rtx (Pmode
);
953 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
954 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
959 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
960 This is return address of function. */
963 avr_return_addr_rtx (int count
, rtx tem
)
967 /* Can only return this function's return address. Others not supported. */
973 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
974 warning (0, "%<builtin_return_address%> contains only 2 bytes"
978 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
980 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
981 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
982 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
986 /* Return 1 if the function epilogue is just a single "ret". */
989 avr_simple_epilogue (void)
991 return (! frame_pointer_needed
992 && get_frame_size () == 0
993 && avr_outgoing_args_size() == 0
994 && avr_regs_to_save (NULL
) == 0
995 && ! cfun
->machine
->is_interrupt
996 && ! cfun
->machine
->is_signal
997 && ! cfun
->machine
->is_naked
998 && ! TREE_THIS_VOLATILE (current_function_decl
));
1001 /* This function checks sequence of live registers. */
1004 sequent_regs_live (void)
1010 for (reg
= 0; reg
<= LAST_CALLEE_SAVED_REG
; ++reg
)
1012 if (fixed_regs
[reg
])
1014 /* Don't recognize sequences that contain global register
1023 if (!call_used_regs
[reg
])
1025 if (df_regs_ever_live_p (reg
))
1035 if (!frame_pointer_needed
)
1037 if (df_regs_ever_live_p (REG_Y
))
1045 if (df_regs_ever_live_p (REG_Y
+1))
1058 return (cur_seq
== live_seq
) ? live_seq
: 0;
1061 /* Obtain the length sequence of insns. */
1064 get_sequence_length (rtx_insn
*insns
)
1069 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
1070 length
+= get_attr_length (insn
);
1076 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
1079 avr_incoming_return_addr_rtx (void)
1081 /* The return address is at the top of the stack. Note that the push
1082 was via post-decrement, which means the actual address is off by one. */
1083 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
1086 /* Helper for expand_prologue. Emit a push of a byte register. */
1089 emit_push_byte (unsigned regno
, bool frame_related_p
)
1094 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
1095 mem
= gen_frame_mem (QImode
, mem
);
1096 reg
= gen_rtx_REG (QImode
, regno
);
1098 insn
= emit_insn (gen_rtx_SET (mem
, reg
));
1099 if (frame_related_p
)
1100 RTX_FRAME_RELATED_P (insn
) = 1;
1102 cfun
->machine
->stack_usage
++;
1106 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
1107 SFR is a MEM representing the memory location of the SFR.
1108 If CLR_P then clear the SFR after the push using zero_reg. */
1111 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
1115 gcc_assert (MEM_P (sfr
));
1117 /* IN __tmp_reg__, IO(SFR) */
1118 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
1119 if (frame_related_p
)
1120 RTX_FRAME_RELATED_P (insn
) = 1;
1122 /* PUSH __tmp_reg__ */
1123 emit_push_byte (AVR_TMP_REGNO
, frame_related_p
);
1127 /* OUT IO(SFR), __zero_reg__ */
1128 insn
= emit_move_insn (sfr
, const0_rtx
);
1129 if (frame_related_p
)
1130 RTX_FRAME_RELATED_P (insn
) = 1;
1135 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
1138 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1139 int live_seq
= sequent_regs_live ();
1141 HOST_WIDE_INT size_max
1142 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1144 bool minimize
= (TARGET_CALL_PROLOGUES
1148 && !cfun
->machine
->is_OS_task
1149 && !cfun
->machine
->is_OS_main
1153 && (frame_pointer_needed
1154 || avr_outgoing_args_size() > 8
1155 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1159 int first_reg
, reg
, offset
;
1161 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1162 gen_int_mode (size
, HImode
));
1164 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1165 gen_int_mode (live_seq
+size
, HImode
));
1166 insn
= emit_insn (pattern
);
1167 RTX_FRAME_RELATED_P (insn
) = 1;
1169 /* Describe the effect of the unspec_volatile call to prologue_saves.
1170 Note that this formulation assumes that add_reg_note pushes the
1171 notes to the front. Thus we build them in the reverse order of
1172 how we want dwarf2out to process them. */
1174 /* The function does always set frame_pointer_rtx, but whether that
1175 is going to be permanent in the function is frame_pointer_needed. */
1177 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1178 gen_rtx_SET ((frame_pointer_needed
1180 : stack_pointer_rtx
),
1181 plus_constant (Pmode
, stack_pointer_rtx
,
1182 -(size
+ live_seq
))));
1184 /* Note that live_seq always contains r28+r29, but the other
1185 registers to be saved are all below 18. */
1187 first_reg
= (LAST_CALLEE_SAVED_REG
+ 1) - (live_seq
- 2);
1189 for (reg
= 29, offset
= -live_seq
+ 1;
1191 reg
= (reg
== 28 ? LAST_CALLEE_SAVED_REG
: reg
- 1), ++offset
)
1195 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1197 r
= gen_rtx_REG (QImode
, reg
);
1198 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (m
, r
));
1201 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1203 else /* !minimize */
1207 for (reg
= 0; reg
< 32; ++reg
)
1208 if (TEST_HARD_REG_BIT (set
, reg
))
1209 emit_push_byte (reg
, true);
1211 if (frame_pointer_needed
1212 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1214 /* Push frame pointer. Always be consistent about the
1215 ordering of pushes -- epilogue_restores expects the
1216 register pair to be pushed low byte first. */
1218 emit_push_byte (REG_Y
, true);
1219 emit_push_byte (REG_Y
+ 1, true);
1222 if (frame_pointer_needed
1225 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1226 RTX_FRAME_RELATED_P (insn
) = 1;
1231 /* Creating a frame can be done by direct manipulation of the
1232 stack or via the frame pointer. These two methods are:
1239 the optimum method depends on function type, stack and
1240 frame size. To avoid a complex logic, both methods are
1241 tested and shortest is selected.
1243 There is also the case where SIZE != 0 and no frame pointer is
1244 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1245 In that case, insn (*) is not needed in that case.
We use the X register as scratch.  This is safe because in X
1248 In an interrupt routine, the case of SIZE != 0 together with
1249 !frame_pointer_needed can only occur if the function is not a
1250 leaf function and thus X has already been saved. */
1253 HOST_WIDE_INT size_cfa
= size
, neg_size
;
1254 rtx_insn
*fp_plus_insns
;
1257 gcc_assert (frame_pointer_needed
1261 fp
= my_fp
= (frame_pointer_needed
1263 : gen_rtx_REG (Pmode
, REG_X
));
1265 if (AVR_HAVE_8BIT_SP
)
1267 /* The high byte (r29) does not change:
1268 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1270 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1273 /* Cut down size and avoid size = 0 so that we don't run
1274 into ICE like PR52488 in the remainder. */
1276 if (size
> size_max
)
1278 /* Don't error so that insane code from newlib still compiles
1279 and does not break building newlib. As PR51345 is implemented
1280 now, there are multilib variants with -msp8.
1282 If user wants sanity checks he can use -Wstack-usage=
1285 For CFA we emit the original, non-saturated size so that
1286 the generic machinery is aware of the real stack usage and
1287 will print the above diagnostic as expected. */
1292 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1293 neg_size
= trunc_int_for_mode (-size
, GET_MODE (my_fp
));
1295 /************ Method 1: Adjust frame pointer ************/
1299 /* Normally, the dwarf2out frame-related-expr interpreter does
1300 not expect to have the CFA change once the frame pointer is
1301 set up. Thus, we avoid marking the move insn below and
1302 instead indicate that the entire operation is complete after
1303 the frame pointer subtraction is done. */
1305 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1306 if (frame_pointer_needed
)
1308 RTX_FRAME_RELATED_P (insn
) = 1;
1309 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1310 gen_rtx_SET (fp
, stack_pointer_rtx
));
1313 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1316 if (frame_pointer_needed
)
1318 RTX_FRAME_RELATED_P (insn
) = 1;
1319 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1320 gen_rtx_SET (fp
, plus_constant (Pmode
, fp
,
1324 /* Copy to stack pointer. Note that since we've already
1325 changed the CFA to the frame pointer this operation
1326 need not be annotated if frame pointer is needed.
1327 Always move through unspec, see PR50063.
1328 For meaning of irq_state see movhi_sp_r insn. */
1330 if (cfun
->machine
->is_interrupt
)
1333 if (TARGET_NO_INTERRUPTS
1334 || cfun
->machine
->is_signal
1335 || cfun
->machine
->is_OS_main
)
1338 if (AVR_HAVE_8BIT_SP
)
1341 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1342 fp
, GEN_INT (irq_state
)));
1343 if (!frame_pointer_needed
)
1345 RTX_FRAME_RELATED_P (insn
) = 1;
1346 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1347 gen_rtx_SET (stack_pointer_rtx
,
1348 plus_constant (Pmode
,
1353 fp_plus_insns
= get_insns ();
1356 /************ Method 2: Adjust Stack pointer ************/
1358 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1359 can only handle specific offsets. */
1361 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1363 rtx_insn
*sp_plus_insns
;
1367 insn
= emit_move_insn (stack_pointer_rtx
,
1368 plus_constant (Pmode
, stack_pointer_rtx
,
1370 RTX_FRAME_RELATED_P (insn
) = 1;
1371 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1372 gen_rtx_SET (stack_pointer_rtx
,
1373 plus_constant (Pmode
,
1376 if (frame_pointer_needed
)
1378 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1379 RTX_FRAME_RELATED_P (insn
) = 1;
1382 sp_plus_insns
= get_insns ();
1385 /************ Use shortest method ************/
1387 emit_insn (get_sequence_length (sp_plus_insns
)
1388 < get_sequence_length (fp_plus_insns
)
1394 emit_insn (fp_plus_insns
);
1397 cfun
->machine
->stack_usage
+= size_cfa
;
1398 } /* !minimize && size != 0 */
1403 /* Output function prologue. */
1406 avr_expand_prologue (void)
1411 size
= get_frame_size() + avr_outgoing_args_size();
1413 cfun
->machine
->stack_usage
= 0;
1415 /* Prologue: naked. */
1416 if (cfun
->machine
->is_naked
)
1421 avr_regs_to_save (&set
);
1423 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1425 /* Enable interrupts. */
1426 if (cfun
->machine
->is_interrupt
)
1427 emit_insn (gen_enable_interrupt ());
1429 /* Push zero reg. */
1430 emit_push_byte (AVR_ZERO_REGNO
, true);
1433 emit_push_byte (AVR_TMP_REGNO
, true);
1436 /* ??? There's no dwarf2 column reserved for SREG. */
1437 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1439 /* Clear zero reg. */
1440 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1442 /* Prevent any attempt to delete the setting of ZERO_REG! */
1443 emit_use (zero_reg_rtx
);
1445 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1446 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1449 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1452 && TEST_HARD_REG_BIT (set
, REG_X
)
1453 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1455 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1459 && (frame_pointer_needed
1460 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1461 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1463 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1467 && TEST_HARD_REG_BIT (set
, REG_Z
)
1468 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1470 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1472 } /* is_interrupt is_signal */
1474 avr_prologue_setup_frame (size
, set
);
1476 if (flag_stack_usage_info
)
1477 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1481 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1482 /* Output summary at end of function prologue. */
1485 avr_asm_function_end_prologue (FILE *file
)
1487 if (cfun
->machine
->is_naked
)
1489 fputs ("/* prologue: naked */\n", file
);
1493 if (cfun
->machine
->is_interrupt
)
1495 fputs ("/* prologue: Interrupt */\n", file
);
1497 else if (cfun
->machine
->is_signal
)
1499 fputs ("/* prologue: Signal */\n", file
);
1502 fputs ("/* prologue: function */\n", file
);
1505 if (ACCUMULATE_OUTGOING_ARGS
)
1506 fprintf (file
, "/* outgoing args size = %d */\n",
1507 avr_outgoing_args_size());
1509 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1511 fprintf (file
, "/* stack size = %d */\n",
1512 cfun
->machine
->stack_usage
);
1513 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1514 usage for offset so that SP + .L__stack_offset = return address. */
1515 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1519 /* Implement `EPILOGUE_USES'. */
1522 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1524 if (reload_completed
1526 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1531 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1534 emit_pop_byte (unsigned regno
)
1538 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1539 mem
= gen_frame_mem (QImode
, mem
);
1540 reg
= gen_rtx_REG (QImode
, regno
);
1542 emit_insn (gen_rtx_SET (reg
, mem
));
1545 /* Output RTL epilogue. */
1548 avr_expand_epilogue (bool sibcall_p
)
1555 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1557 size
= get_frame_size() + avr_outgoing_args_size();
1559 /* epilogue: naked */
1560 if (cfun
->machine
->is_naked
)
1562 gcc_assert (!sibcall_p
);
1564 emit_jump_insn (gen_return ());
1568 avr_regs_to_save (&set
);
1569 live_seq
= sequent_regs_live ();
1571 minimize
= (TARGET_CALL_PROLOGUES
1574 && !cfun
->machine
->is_OS_task
1575 && !cfun
->machine
->is_OS_main
1580 || frame_pointer_needed
1583 /* Get rid of frame. */
1585 if (!frame_pointer_needed
)
1587 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1592 emit_move_insn (frame_pointer_rtx
,
1593 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1596 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1602 /* Try two methods to adjust stack and select shortest. */
1606 rtx_insn
*fp_plus_insns
;
1607 HOST_WIDE_INT size_max
;
1609 gcc_assert (frame_pointer_needed
1613 fp
= my_fp
= (frame_pointer_needed
1615 : gen_rtx_REG (Pmode
, REG_X
));
1617 if (AVR_HAVE_8BIT_SP
)
1619 /* The high byte (r29) does not change:
1620 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1622 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1625 /* For rationale see comment in prologue generation. */
1627 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1628 if (size
> size_max
)
1630 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1632 /********** Method 1: Adjust fp register **********/
1636 if (!frame_pointer_needed
)
1637 emit_move_insn (fp
, stack_pointer_rtx
);
1639 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1641 /* Copy to stack pointer. */
1643 if (TARGET_NO_INTERRUPTS
)
1646 if (AVR_HAVE_8BIT_SP
)
1649 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1650 GEN_INT (irq_state
)));
1652 fp_plus_insns
= get_insns ();
1655 /********** Method 2: Adjust Stack pointer **********/
1657 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1659 rtx_insn
*sp_plus_insns
;
1663 emit_move_insn (stack_pointer_rtx
,
1664 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1666 sp_plus_insns
= get_insns ();
1669 /************ Use shortest method ************/
1671 emit_insn (get_sequence_length (sp_plus_insns
)
1672 < get_sequence_length (fp_plus_insns
)
1677 emit_insn (fp_plus_insns
);
1680 if (frame_pointer_needed
1681 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1683 /* Restore previous frame_pointer. See avr_expand_prologue for
1684 rationale for not using pophi. */
1686 emit_pop_byte (REG_Y
+ 1);
1687 emit_pop_byte (REG_Y
);
1690 /* Restore used registers. */
1692 for (reg
= 31; reg
>= 0; --reg
)
1693 if (TEST_HARD_REG_BIT (set
, reg
))
1694 emit_pop_byte (reg
);
1698 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1699 The conditions to restore them must be the same as in prologue.
1702 && TEST_HARD_REG_BIT (set
, REG_Z
)
1703 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1705 emit_pop_byte (TMP_REGNO
);
1706 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1710 && (frame_pointer_needed
1711 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1712 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1714 emit_pop_byte (TMP_REGNO
);
1715 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1719 && TEST_HARD_REG_BIT (set
, REG_X
)
1720 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1722 emit_pop_byte (TMP_REGNO
);
1723 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1728 emit_pop_byte (TMP_REGNO
);
1729 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1732 /* Restore SREG using tmp_reg as scratch. */
1734 emit_pop_byte (AVR_TMP_REGNO
);
1735 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1737 /* Restore tmp REG. */
1738 emit_pop_byte (AVR_TMP_REGNO
);
1740 /* Restore zero REG. */
1741 emit_pop_byte (AVR_ZERO_REGNO
);
1745 emit_jump_insn (gen_return ());
1749 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1758 /* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'. */
1761 avr_cannot_modify_jumps_p (void)
1764 /* Naked Functions must not have any instructions after
1765 their epilogue, see PR42240 */
1767 if (reload_completed
1769 && cfun
->machine
->is_naked
)
1778 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1781 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
, addr_space_t as
)
1783 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1784 This hook just serves to hack around PR rtl-optimization/52543 by
1785 claiming that non-generic addresses were mode-dependent so that
1786 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1787 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1788 generic address space which is not true. */
1790 return !ADDR_SPACE_GENERIC_P (as
);
1794 /* Helper function for `avr_legitimate_address_p'. */
1797 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1798 RTX_CODE outer_code
, bool strict
)
1801 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1802 as
, outer_code
, UNKNOWN
)
1804 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1808 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1809 machine for a memory operand of mode MODE. */
1812 avr_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
1814 bool ok
= CONSTANT_ADDRESS_P (x
);
1816 switch (GET_CODE (x
))
1819 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1823 && GET_MODE_SIZE (mode
) > 4
1824 && REG_X
== REGNO (x
))
1832 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1833 GET_CODE (x
), strict
);
1838 rtx reg
= XEXP (x
, 0);
1839 rtx op1
= XEXP (x
, 1);
1842 && CONST_INT_P (op1
)
1843 && INTVAL (op1
) >= 0)
1845 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1850 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1853 if (reg
== frame_pointer_rtx
1854 || reg
== arg_pointer_rtx
)
1859 else if (frame_pointer_needed
1860 && reg
== frame_pointer_rtx
)
1873 && CONSTANT_ADDRESS_P (x
))
1875 /* avrtiny's load / store instructions only cover addresses 0..0xbf:
1876 IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf. */
1878 ok
= (CONST_INT_P (x
)
1879 && IN_RANGE (INTVAL (x
), 0, 0xc0 - GET_MODE_SIZE (mode
)));
1882 if (avr_log
.legitimate_address_p
)
1884 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1885 "reload_completed=%d reload_in_progress=%d %s:",
1886 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1887 reg_renumber
? "(reg_renumber)" : "");
1889 if (GET_CODE (x
) == PLUS
1890 && REG_P (XEXP (x
, 0))
1891 && CONST_INT_P (XEXP (x
, 1))
1892 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1895 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1896 true_regnum (XEXP (x
, 0)));
1899 avr_edump ("\n%r\n", x
);
1906 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1907 now only a helper for avr_addr_space_legitimize_address. */
1908 /* Attempts to replace X with a valid
1909 memory address for an operand of mode MODE */
1912 avr_legitimize_address (rtx x
, rtx oldx
, machine_mode mode
)
1914 bool big_offset_p
= false;
1918 if (GET_CODE (oldx
) == PLUS
1919 && REG_P (XEXP (oldx
, 0)))
1921 if (REG_P (XEXP (oldx
, 1)))
1922 x
= force_reg (GET_MODE (oldx
), oldx
);
1923 else if (CONST_INT_P (XEXP (oldx
, 1)))
1925 int offs
= INTVAL (XEXP (oldx
, 1));
1926 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1927 && offs
> MAX_LD_OFFSET (mode
))
1929 big_offset_p
= true;
1930 x
= force_reg (GET_MODE (oldx
), oldx
);
1935 if (avr_log
.legitimize_address
)
1937 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1940 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1947 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1948 /* This will allow register R26/27 to be used where it is no worse than normal
1949 base pointers R28/29 or R30/31. For example, if base offset is greater
1950 than 63 bytes or for R++ or --R addressing. */
1953 avr_legitimize_reload_address (rtx
*px
, machine_mode mode
,
1954 int opnum
, int type
, int addr_type
,
1955 int ind_levels ATTRIBUTE_UNUSED
,
1956 rtx (*mk_memloc
)(rtx
,int))
1960 if (avr_log
.legitimize_reload_address
)
1961 avr_edump ("\n%?:%m %r\n", mode
, x
);
1963 if (1 && (GET_CODE (x
) == POST_INC
1964 || GET_CODE (x
) == PRE_DEC
))
1966 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1967 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1968 opnum
, RELOAD_OTHER
);
1970 if (avr_log
.legitimize_reload_address
)
1971 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1972 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1977 if (GET_CODE (x
) == PLUS
1978 && REG_P (XEXP (x
, 0))
1979 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1980 && CONST_INT_P (XEXP (x
, 1))
1981 && INTVAL (XEXP (x
, 1)) >= 1)
1983 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1987 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1989 int regno
= REGNO (XEXP (x
, 0));
1990 rtx mem
= mk_memloc (x
, regno
);
1992 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1993 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1994 1, (enum reload_type
) addr_type
);
1996 if (avr_log
.legitimize_reload_address
)
1997 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1998 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
2000 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
2001 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
2002 opnum
, (enum reload_type
) type
);
2004 if (avr_log
.legitimize_reload_address
)
2005 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2006 BASE_POINTER_REGS
, mem
, NULL_RTX
);
2011 else if (! (frame_pointer_needed
2012 && XEXP (x
, 0) == frame_pointer_rtx
))
2014 push_reload (x
, NULL_RTX
, px
, NULL
,
2015 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
2016 opnum
, (enum reload_type
) type
);
2018 if (avr_log
.legitimize_reload_address
)
2019 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2020 POINTER_REGS
, x
, NULL_RTX
);
2030 /* Implement `TARGET_SECONDARY_RELOAD' */
2033 avr_secondary_reload (bool in_p
, rtx x
,
2034 reg_class_t reload_class ATTRIBUTE_UNUSED
,
2035 machine_mode mode
, secondary_reload_info
*sri
)
2039 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
2040 && ADDR_SPACE_MEMX
!= MEM_ADDR_SPACE (x
))
2042 /* For the non-generic 16-bit spaces we need a d-class scratch. */
2049 case QImode
: sri
->icode
= CODE_FOR_reload_inqi
; break;
2050 case QQmode
: sri
->icode
= CODE_FOR_reload_inqq
; break;
2051 case UQQmode
: sri
->icode
= CODE_FOR_reload_inuqq
; break;
2053 case HImode
: sri
->icode
= CODE_FOR_reload_inhi
; break;
2054 case HQmode
: sri
->icode
= CODE_FOR_reload_inhq
; break;
2055 case HAmode
: sri
->icode
= CODE_FOR_reload_inha
; break;
2056 case UHQmode
: sri
->icode
= CODE_FOR_reload_inuhq
; break;
2057 case UHAmode
: sri
->icode
= CODE_FOR_reload_inuha
; break;
2059 case PSImode
: sri
->icode
= CODE_FOR_reload_inpsi
; break;
2061 case SImode
: sri
->icode
= CODE_FOR_reload_insi
; break;
2062 case SFmode
: sri
->icode
= CODE_FOR_reload_insf
; break;
2063 case SQmode
: sri
->icode
= CODE_FOR_reload_insq
; break;
2064 case SAmode
: sri
->icode
= CODE_FOR_reload_insa
; break;
2065 case USQmode
: sri
->icode
= CODE_FOR_reload_inusq
; break;
2066 case USAmode
: sri
->icode
= CODE_FOR_reload_inusa
; break;
2074 /* Helper function to print assembler resp. track instruction
2075 sequence lengths. Always return "".
2078 Output assembler code from template TPL with operands supplied
2079 by OPERANDS. This is just forwarding to output_asm_insn.
2082 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2083 If N_WORDS < 0 Set *PLEN to -N_WORDS.
2084 Don't output anything.
2088 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
2092 output_asm_insn (tpl
, operands
);
2106 /* Return a pointer register name as a string. */
2109 ptrreg_to_str (int regno
)
2113 case REG_X
: return "X";
2114 case REG_Y
: return "Y";
2115 case REG_Z
: return "Z";
2117 output_operand_lossage ("address operand requires constraint for"
2118 " X, Y, or Z register");
2123 /* Return the condition name as a string.
2124 Used in conditional jump constructing */
2127 cond_string (enum rtx_code code
)
2136 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2141 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2157 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2158 /* Output ADDR to FILE as address. */
2161 avr_print_operand_address (FILE *file
, rtx addr
)
2163 switch (GET_CODE (addr
))
2166 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
2170 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2174 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2178 if (CONSTANT_ADDRESS_P (addr
)
2179 && text_segment_operand (addr
, VOIDmode
))
2182 if (GET_CODE (x
) == CONST
)
2184 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
2186 /* Assembler gs() will implant word address. Make offset
2187 a byte offset inside gs() for assembler. This is
2188 needed because the more logical (constant+gs(sym)) is not
2189 accepted by gas. For 128K and smaller devices this is ok.
2190 For large devices it will create a trampoline to offset
2191 from symbol which may not be what the user really wanted. */
2193 fprintf (file
, "gs(");
2194 output_addr_const (file
, XEXP (x
,0));
2195 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
2196 2 * INTVAL (XEXP (x
, 1)));
2198 if (warning (0, "pointer offset from symbol maybe incorrect"))
2200 output_addr_const (stderr
, addr
);
2201 fprintf(stderr
,"\n");
2206 fprintf (file
, "gs(");
2207 output_addr_const (file
, addr
);
2208 fprintf (file
, ")");
2212 output_addr_const (file
, addr
);
2217 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2220 avr_print_operand_punct_valid_p (unsigned char code
)
2222 return code
== '~' || code
== '!';
2226 /* Implement `TARGET_PRINT_OPERAND'. */
2227 /* Output X as assembler operand to file FILE.
2228 For a description of supported %-codes, see top of avr.md. */
2231 avr_print_operand (FILE *file
, rtx x
, int code
)
2233 int abcd
= 0, ef
= 0, ij
= 0;
2235 if (code
>= 'A' && code
<= 'D')
2237 else if (code
== 'E' || code
== 'F')
2239 else if (code
== 'I' || code
== 'J')
2244 if (!AVR_HAVE_JMP_CALL
)
2247 else if (code
== '!')
2249 if (AVR_HAVE_EIJMP_EICALL
)
2252 else if (code
== 't'
2255 static int t_regno
= -1;
2256 static int t_nbits
= -1;
2258 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2260 t_regno
= REGNO (x
);
2261 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2263 else if (CONST_INT_P (x
) && t_regno
>= 0
2264 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2266 int bpos
= INTVAL (x
);
2268 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2270 fprintf (file
, ",%d", bpos
% 8);
2275 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2277 else if (code
== 'E' || code
== 'F')
2279 rtx op
= XEXP(x
, 0);
2280 fprintf (file
, reg_names
[REGNO (op
) + ef
]);
2282 else if (code
== 'I' || code
== 'J')
2284 rtx op
= XEXP(XEXP(x
, 0), 0);
2285 fprintf (file
, reg_names
[REGNO (op
) + ij
]);
2289 if (x
== zero_reg_rtx
)
2290 fprintf (file
, "__zero_reg__");
2291 else if (code
== 'r' && REGNO (x
) < 32)
2292 fprintf (file
, "%d", (int) REGNO (x
));
2294 fprintf (file
, reg_names
[REGNO (x
) + abcd
]);
2296 else if (CONST_INT_P (x
))
2298 HOST_WIDE_INT ival
= INTVAL (x
);
2301 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2302 else if (low_io_address_operand (x
, VOIDmode
)
2303 || high_io_address_operand (x
, VOIDmode
))
2305 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2306 fprintf (file
, "__RAMPZ__");
2307 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2308 fprintf (file
, "__RAMPY__");
2309 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2310 fprintf (file
, "__RAMPX__");
2311 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2312 fprintf (file
, "__RAMPD__");
2313 else if ((AVR_XMEGA
|| AVR_TINY
) && ival
== avr_addr
.ccp
)
2314 fprintf (file
, "__CCP__");
2315 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2316 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2317 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2320 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2321 ival
- avr_arch
->sfr_offset
);
2325 fatal_insn ("bad address, not an I/O address:", x
);
2329 rtx addr
= XEXP (x
, 0);
2333 if (!CONSTANT_P (addr
))
2334 fatal_insn ("bad address, not a constant:", addr
);
2335 /* Assembler template with m-code is data - not progmem section */
2336 if (text_segment_operand (addr
, VOIDmode
))
2337 if (warning (0, "accessing data memory with"
2338 " program memory address"))
2340 output_addr_const (stderr
, addr
);
2341 fprintf(stderr
,"\n");
2343 output_addr_const (file
, addr
);
2345 else if (code
== 'i')
2347 avr_print_operand (file
, addr
, 'i');
2349 else if (code
== 'o')
2351 if (GET_CODE (addr
) != PLUS
)
2352 fatal_insn ("bad address, not (reg+disp):", addr
);
2354 avr_print_operand (file
, XEXP (addr
, 1), 0);
2356 else if (code
== 'b')
2358 if (GET_CODE (addr
) != PLUS
)
2359 fatal_insn ("bad address, not (reg+disp):", addr
);
2361 avr_print_operand_address (file
, XEXP (addr
, 0));
2363 else if (code
== 'p' || code
== 'r')
2365 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2366 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2369 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2371 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2373 else if (GET_CODE (addr
) == PLUS
)
2375 avr_print_operand_address (file
, XEXP (addr
,0));
2376 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2377 fatal_insn ("internal compiler error. Bad address:"
2380 avr_print_operand (file
, XEXP (addr
,1), code
);
2383 avr_print_operand_address (file
, addr
);
2385 else if (code
== 'i')
2387 if (GET_CODE (x
) == SYMBOL_REF
&& (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_IO
))
2388 avr_print_operand_address
2389 (file
, plus_constant (HImode
, x
, -avr_arch
->sfr_offset
));
2391 fatal_insn ("bad address, not an I/O address:", x
);
2393 else if (code
== 'x')
2395 /* Constant progmem address - like used in jmp or call */
2396 if (0 == text_segment_operand (x
, VOIDmode
))
2397 if (warning (0, "accessing program memory"
2398 " with data memory address"))
2400 output_addr_const (stderr
, x
);
2401 fprintf(stderr
,"\n");
2403 /* Use normal symbol for direct address no linker trampoline needed */
2404 output_addr_const (file
, x
);
2406 else if (CONST_FIXED_P (x
))
2408 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2410 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2412 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2414 else if (GET_CODE (x
) == CONST_DOUBLE
)
2417 if (GET_MODE (x
) != SFmode
)
2418 fatal_insn ("internal compiler error. Unknown mode:", x
);
2419 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x
), val
);
2420 fprintf (file
, "0x%lx", val
);
2422 else if (GET_CODE (x
) == CONST_STRING
)
2423 fputs (XSTR (x
, 0), file
);
2424 else if (code
== 'j')
2425 fputs (cond_string (GET_CODE (x
)), file
);
2426 else if (code
== 'k')
2427 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2429 avr_print_operand_address (file
, x
);
2433 /* Worker function for `NOTICE_UPDATE_CC'. */
2434 /* Update the condition code in the INSN. */
2437 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx_insn
*insn
)
2440 enum attr_cc cc
= get_attr_cc (insn
);
2450 rtx
*op
= recog_data
.operand
;
2453 /* Extract insn's operands. */
2454 extract_constrain_insn_cached (insn
);
2462 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2463 cc
= (enum attr_cc
) icc
;
2468 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2469 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2470 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2472 /* Any other "r,rL" combination does not alter cc0. */
2476 } /* inner switch */
2480 } /* outer swicth */
2485 /* Special values like CC_OUT_PLUS from above have been
2486 mapped to "standard" CC_* values so we never come here. */
2492 /* Insn does not affect CC at all. */
2500 set
= single_set (insn
);
2504 cc_status
.flags
|= CC_NO_OVERFLOW
;
2505 cc_status
.value1
= SET_DEST (set
);
2510 /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
2511 of this combination, cf. also PR61055. */
2516 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2517 The V flag may or may not be known but that's ok because
2518 alter_cond will change tests to use EQ/NE. */
2519 set
= single_set (insn
);
2523 cc_status
.value1
= SET_DEST (set
);
2524 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2529 set
= single_set (insn
);
2532 cc_status
.value1
= SET_SRC (set
);
2536 /* Insn doesn't leave CC in a usable state. */
2542 /* Choose mode for jump insn:
2543 1 - relative jump in range -63 <= x <= 62 ;
2544 2 - relative jump in range -2046 <= x <= 2045 ;
2545 3 - absolute jump (only for ATmega[16]03). */
2548 avr_jump_mode (rtx x
, rtx_insn
*insn
)
2550 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2551 ? XEXP (x
, 0) : x
));
2552 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2553 int jump_distance
= cur_addr
- dest_addr
;
2555 if (-63 <= jump_distance
&& jump_distance
<= 62)
2557 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2559 else if (AVR_HAVE_JMP_CALL
)
2565 /* Return an AVR condition jump commands.
2566 X is a comparison RTX.
2567 LEN is a number returned by avr_jump_mode function.
2568 If REVERSE nonzero then condition code in X must be reversed. */
2571 ret_cond_branch (rtx x
, int len
, int reverse
)
2573 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2578 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2579 return (len
== 1 ? ("breq .+2" CR_TAB
2581 len
== 2 ? ("breq .+4" CR_TAB
2589 return (len
== 1 ? ("breq .+2" CR_TAB
2591 len
== 2 ? ("breq .+4" CR_TAB
2598 return (len
== 1 ? ("breq .+2" CR_TAB
2600 len
== 2 ? ("breq .+4" CR_TAB
2607 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2608 return (len
== 1 ? ("breq %0" CR_TAB
2610 len
== 2 ? ("breq .+2" CR_TAB
2617 return (len
== 1 ? ("breq %0" CR_TAB
2619 len
== 2 ? ("breq .+2" CR_TAB
2626 return (len
== 1 ? ("breq %0" CR_TAB
2628 len
== 2 ? ("breq .+2" CR_TAB
2642 return ("br%j1 .+2" CR_TAB
2645 return ("br%j1 .+4" CR_TAB
2656 return ("br%k1 .+2" CR_TAB
2659 return ("br%k1 .+4" CR_TAB
2668 /* Worker function for `FINAL_PRESCAN_INSN'. */
2669 /* Output insn cost for next insn. */
2672 avr_final_prescan_insn (rtx_insn
*insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2673 int num_operands ATTRIBUTE_UNUSED
)
2675 if (avr_log
.rtx_costs
)
2677 rtx set
= single_set (insn
);
2680 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2681 set_src_cost (SET_SRC (set
), GET_MODE (SET_DEST (set
)),
2682 optimize_insn_for_speed_p ()));
2684 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2685 rtx_cost (PATTERN (insn
), VOIDmode
, INSN
, 0,
2686 optimize_insn_for_speed_p()));
2690 /* Return 0 if undefined, 1 if always true or always false. */
2693 avr_simplify_comparison_p (machine_mode mode
, RTX_CODE op
, rtx x
)
2695 unsigned int max
= (mode
== QImode
? 0xff :
2696 mode
== HImode
? 0xffff :
2697 mode
== PSImode
? 0xffffff :
2698 mode
== SImode
? 0xffffffff : 0);
2699 if (max
&& op
&& CONST_INT_P (x
))
2701 if (unsigned_condition (op
) != op
)
2704 if (max
!= (INTVAL (x
) & max
)
2705 && INTVAL (x
) != 0xff)
2712 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2713 /* Returns nonzero if REGNO is the number of a hard
2714 register in which function arguments are sometimes passed. */
2717 avr_function_arg_regno_p(int r
)
2719 return (AVR_TINY
? r
>= 20 && r
<= 25 : r
>= 8 && r
<= 25);
2723 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2724 /* Initializing the variable cum for the state at the beginning
2725 of the argument list. */
2728 avr_init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2729 tree fndecl ATTRIBUTE_UNUSED
)
2731 cum
->nregs
= AVR_TINY
? 6 : 18;
2732 cum
->regno
= FIRST_CUM_REG
;
2733 if (!libname
&& stdarg_p (fntype
))
2736 /* Assume the calle may be tail called */
2738 cfun
->machine
->sibcall_fails
= 0;
2741 /* Returns the number of registers to allocate for a function argument. */
2744 avr_num_arg_regs (machine_mode mode
, const_tree type
)
2748 if (mode
== BLKmode
)
2749 size
= int_size_in_bytes (type
);
2751 size
= GET_MODE_SIZE (mode
);
2753 /* Align all function arguments to start in even-numbered registers.
2754 Odd-sized arguments leave holes above them. */
2756 return (size
+ 1) & ~1;
2760 /* Implement `TARGET_FUNCTION_ARG'. */
2761 /* Controls whether a function argument is passed
2762 in a register, and which register. */
2765 avr_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
2766 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2768 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2769 int bytes
= avr_num_arg_regs (mode
, type
);
2771 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2772 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2778 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2779 /* Update the summarizer variable CUM to advance past an argument
2780 in the argument list. */
2783 avr_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
2784 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2786 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2787 int bytes
= avr_num_arg_regs (mode
, type
);
2789 cum
->nregs
-= bytes
;
2790 cum
->regno
-= bytes
;
2792 /* A parameter is being passed in a call-saved register. As the original
2793 contents of these regs has to be restored before leaving the function,
2794 a function must not pass arguments in call-saved regs in order to get
2799 && !call_used_regs
[cum
->regno
])
2801 /* FIXME: We ship info on failing tail-call in struct machine_function.
2802 This uses internals of calls.c:expand_call() and the way args_so_far
2803 is used. targetm.function_ok_for_sibcall() needs to be extended to
2804 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2805 dependent so that such an extension is not wanted. */
2807 cfun
->machine
->sibcall_fails
= 1;
2810 /* Test if all registers needed by the ABI are actually available. If the
2811 user has fixed a GPR needed to pass an argument, an (implicit) function
2812 call will clobber that fixed register. See PR45099 for an example. */
2819 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2820 if (fixed_regs
[regno
])
2821 warning (0, "fixed register %s used to pass parameter to function",
2825 if (cum
->nregs
<= 0)
2828 cum
->regno
= FIRST_CUM_REG
;
2832 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2833 /* Decide whether we can make a sibling call to a function. DECL is the
2834 declaration of the function being targeted by the call and EXP is the
2835 CALL_EXPR representing the call. */
2838 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2842 /* Tail-calling must fail if callee-saved regs are used to pass
2843 function args. We must not tail-call when `epilogue_restores'
2844 is used. Unfortunately, we cannot tell at this point if that
2845 actually will happen or not, and we cannot step back from
2846 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2848 if (cfun
->machine
->sibcall_fails
2849 || TARGET_CALL_PROLOGUES
)
2854 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2858 decl_callee
= TREE_TYPE (decl_callee
);
2862 decl_callee
= fntype_callee
;
2864 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2865 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2867 decl_callee
= TREE_TYPE (decl_callee
);
2871 /* Ensure that caller and callee have compatible epilogues */
2873 if (cfun
->machine
->is_interrupt
2874 || cfun
->machine
->is_signal
2875 || cfun
->machine
->is_naked
2876 || avr_naked_function_p (decl_callee
)
2877 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2878 || (avr_OS_task_function_p (decl_callee
)
2879 != cfun
->machine
->is_OS_task
)
2880 || (avr_OS_main_function_p (decl_callee
)
2881 != cfun
->machine
->is_OS_main
))
2889 /***********************************************************************
2890 Functions for outputting various mov's for a various modes
2891 ************************************************************************/
2893 /* Return true if a value of mode MODE is read from flash by
2894 __load_* function from libgcc. */
2897 avr_load_libgcc_p (rtx op
)
2899 machine_mode mode
= GET_MODE (op
);
2900 int n_bytes
= GET_MODE_SIZE (mode
);
2904 && avr_mem_flash_p (op
));
2907 /* Return true if a value of mode MODE is read by __xload_* function. */
2910 avr_xload_libgcc_p (machine_mode mode
)
2912 int n_bytes
= GET_MODE_SIZE (mode
);
2915 || avr_n_flash
> 1);
2919 /* Fixme: This is a hack because secondary reloads don't works as expected.
2921 Find an unused d-register to be used as scratch in INSN.
2922 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2923 is a register, skip all possible return values that overlap EXCLUDE.
2924 The policy for the returned register is similar to that of
2925 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2928 Return a QImode d-register or NULL_RTX if nothing found. */
2931 avr_find_unused_d_reg (rtx_insn
*insn
, rtx exclude
)
2934 bool isr_p
= (avr_interrupt_function_p (current_function_decl
)
2935 || avr_signal_function_p (current_function_decl
));
2937 for (regno
= 16; regno
< 32; regno
++)
2939 rtx reg
= all_regs_rtx
[regno
];
2942 && reg_overlap_mentioned_p (exclude
, reg
))
2943 || fixed_regs
[regno
])
2948 /* Try non-live register */
2950 if (!df_regs_ever_live_p (regno
)
2951 && (TREE_THIS_VOLATILE (current_function_decl
)
2952 || cfun
->machine
->is_OS_task
2953 || cfun
->machine
->is_OS_main
2954 || (!isr_p
&& call_used_regs
[regno
])))
2959 /* Any live register can be used if it is unused after.
2960 Prologue/epilogue will care for it as needed. */
2962 if (df_regs_ever_live_p (regno
)
2963 && reg_unused_after (insn
, reg
))
2973 /* Helper function for the next function in the case where only restricted
2974 version of LPM instruction is available. */
2977 avr_out_lpm_no_lpmx (rtx_insn
*insn
, rtx
*xop
, int *plen
)
2981 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2984 regno_dest
= REGNO (dest
);
2986 /* The implicit target register of LPM. */
2987 xop
[3] = lpm_reg_rtx
;
2989 switch (GET_CODE (addr
))
2996 gcc_assert (REG_Z
== REGNO (addr
));
3004 avr_asm_len ("%4lpm", xop
, plen
, 1);
3006 if (regno_dest
!= LPM_REGNO
)
3007 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
3012 if (REGNO (dest
) == REG_Z
)
3013 return avr_asm_len ("%4lpm" CR_TAB
3018 "pop %A0", xop
, plen
, 6);
3020 avr_asm_len ("%4lpm" CR_TAB
3024 "mov %B0,%3", xop
, plen
, 5);
3026 if (!reg_unused_after (insn
, addr
))
3027 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3036 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
3039 if (regno_dest
== LPM_REGNO
)
3040 avr_asm_len ("%4lpm" CR_TAB
3041 "adiw %2,1", xop
, plen
, 2);
3043 avr_asm_len ("%4lpm" CR_TAB
3045 "adiw %2,1", xop
, plen
, 3);
3048 avr_asm_len ("%4lpm" CR_TAB
3050 "adiw %2,1", xop
, plen
, 3);
3053 avr_asm_len ("%4lpm" CR_TAB
3055 "adiw %2,1", xop
, plen
, 3);
3058 avr_asm_len ("%4lpm" CR_TAB
3060 "adiw %2,1", xop
, plen
, 3);
3062 break; /* POST_INC */
3064 } /* switch CODE (addr) */
3070 /* If PLEN == NULL: Output instructions to load a value from a memory location
3071 OP[1] in AS1 to register OP[0].
3072 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3076 avr_out_lpm (rtx_insn
*insn
, rtx
*op
, int *plen
)
3080 rtx src
= SET_SRC (single_set (insn
));
3082 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
3085 addr_space_t as
= MEM_ADDR_SPACE (src
);
3092 warning (0, "writing to address space %qs not supported",
3093 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
3098 addr
= XEXP (src
, 0);
3099 code
= GET_CODE (addr
);
3101 gcc_assert (REG_P (dest
));
3102 gcc_assert (REG
== code
|| POST_INC
== code
);
3106 xop
[2] = lpm_addr_reg_rtx
;
3107 xop
[4] = xstring_empty
;
3108 xop
[5] = tmp_reg_rtx
;
3109 xop
[6] = XEXP (rampz_rtx
, 0);
3111 segment
= avr_addrspace
[as
].segment
;
3113 /* Set RAMPZ as needed. */
3117 xop
[4] = GEN_INT (segment
);
3118 xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
);
3120 if (xop
[3] != NULL_RTX
)
3122 avr_asm_len ("ldi %3,%4" CR_TAB
3123 "out %i6,%3", xop
, plen
, 2);
3125 else if (segment
== 1)
3127 avr_asm_len ("clr %5" CR_TAB
3129 "out %i6,%5", xop
, plen
, 3);
3133 avr_asm_len ("mov %5,%2" CR_TAB
3136 "mov %2,%5", xop
, plen
, 4);
3141 if (!AVR_HAVE_ELPMX
)
3142 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3144 else if (!AVR_HAVE_LPMX
)
3146 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
3149 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
3151 switch (GET_CODE (addr
))
3158 gcc_assert (REG_Z
== REGNO (addr
));
3166 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
3169 if (REGNO (dest
) == REG_Z
)
3170 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3171 "%4lpm %B0,%a2" CR_TAB
3172 "mov %A0,%5", xop
, plen
, 3);
3175 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3176 "%4lpm %B0,%a2", xop
, plen
, 2);
3178 if (!reg_unused_after (insn
, addr
))
3179 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3186 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3187 "%4lpm %B0,%a2+" CR_TAB
3188 "%4lpm %C0,%a2", xop
, plen
, 3);
3190 if (!reg_unused_after (insn
, addr
))
3191 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
3197 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3198 "%4lpm %B0,%a2+", xop
, plen
, 2);
3200 if (REGNO (dest
) == REG_Z
- 2)
3201 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3202 "%4lpm %C0,%a2" CR_TAB
3203 "mov %D0,%5", xop
, plen
, 3);
3206 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3207 "%4lpm %D0,%a2", xop
, plen
, 2);
3209 if (!reg_unused_after (insn
, addr
))
3210 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
3220 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
3223 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
3224 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
3225 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
3226 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
3228 break; /* POST_INC */
3230 } /* switch CODE (addr) */
3232 if (xop
[4] == xstring_e
&& AVR_HAVE_RAMPD
)
3234 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3236 xop
[0] = zero_reg_rtx
;
3237 avr_asm_len ("out %i6,%0", xop
, plen
, 1);
3244 /* Worker function for xload_8 insn. */
3247 avr_out_xload (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
3253 xop
[2] = lpm_addr_reg_rtx
;
3254 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
3256 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, -1);
3258 avr_asm_len ("sbrc %1,7" CR_TAB
3259 "ld %3,%a2", xop
, plen
, 2);
3261 if (REGNO (xop
[0]) != REGNO (xop
[3]))
3262 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
3269 output_movqi (rtx_insn
*insn
, rtx operands
[], int *plen
)
3271 rtx dest
= operands
[0];
3272 rtx src
= operands
[1];
3274 if (avr_mem_flash_p (src
)
3275 || avr_mem_flash_p (dest
))
3277 return avr_out_lpm (insn
, operands
, plen
);
3280 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
3284 if (REG_P (src
)) /* mov r,r */
3286 if (test_hard_reg_class (STACK_REG
, dest
))
3287 return avr_asm_len ("out %0,%1", operands
, plen
, -1);
3288 else if (test_hard_reg_class (STACK_REG
, src
))
3289 return avr_asm_len ("in %0,%1", operands
, plen
, -1);
3291 return avr_asm_len ("mov %0,%1", operands
, plen
, -1);
3293 else if (CONSTANT_P (src
))
3295 output_reload_in_const (operands
, NULL_RTX
, plen
, false);
3298 else if (MEM_P (src
))
3299 return out_movqi_r_mr (insn
, operands
, plen
); /* mov r,m */
3301 else if (MEM_P (dest
))
3306 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3308 return out_movqi_mr_r (insn
, xop
, plen
);
3316 output_movhi (rtx_insn
*insn
, rtx xop
[], int *plen
)
3321 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
3323 if (avr_mem_flash_p (src
)
3324 || avr_mem_flash_p (dest
))
3326 return avr_out_lpm (insn
, xop
, plen
);
3329 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
3333 if (REG_P (src
)) /* mov r,r */
3335 if (test_hard_reg_class (STACK_REG
, dest
))
3337 if (AVR_HAVE_8BIT_SP
)
3338 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
3341 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3342 "out __SP_H__,%B1", xop
, plen
, -2);
3344 /* Use simple load of SP if no interrupts are used. */
3346 return TARGET_NO_INTERRUPTS
3347 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3348 "out __SP_L__,%A1", xop
, plen
, -2)
3349 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3351 "out __SP_H__,%B1" CR_TAB
3352 "out __SREG__,__tmp_reg__" CR_TAB
3353 "out __SP_L__,%A1", xop
, plen
, -5);
3355 else if (test_hard_reg_class (STACK_REG
, src
))
3357 return !AVR_HAVE_SPH
3358 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3359 "clr %B0", xop
, plen
, -2)
3361 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3362 "in %B0,__SP_H__", xop
, plen
, -2);
3365 return AVR_HAVE_MOVW
3366 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
3368 : avr_asm_len ("mov %A0,%A1" CR_TAB
3369 "mov %B0,%B1", xop
, plen
, -2);
3371 else if (CONSTANT_P (src
))
3373 return output_reload_inhi (xop
, NULL
, plen
);
3375 else if (MEM_P (src
))
3377 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
3380 else if (MEM_P (dest
))
3385 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3387 return out_movhi_mr_r (insn
, xop
, plen
);
3390 fatal_insn ("invalid insn:", insn
);
3396 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
3399 avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
3403 rtx x
= XEXP (src
, 0);
3405 avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3406 "ld %0,%b1" , op
, plen
, -3);
3408 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3409 && !reg_unused_after (insn
, XEXP (x
,0)))
3410 avr_asm_len (TINY_SBIW (%I1
, %J1
, %o1
), op
, plen
, 2);
3416 out_movqi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
3420 rtx x
= XEXP (src
, 0);
3422 if (CONSTANT_ADDRESS_P (x
))
3424 int n_words
= AVR_TINY
? 1 : 2;
3425 return optimize
> 0 && io_address_operand (x
, QImode
)
3426 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3427 : avr_asm_len ("lds %0,%m1", op
, plen
, -n_words
);
3430 if (GET_CODE (x
) == PLUS
3431 && REG_P (XEXP (x
, 0))
3432 && CONST_INT_P (XEXP (x
, 1)))
3434 /* memory access by reg+disp */
3436 int disp
= INTVAL (XEXP (x
, 1));
3439 return avr_out_movqi_r_mr_reg_disp_tiny (insn
, op
, plen
);
3441 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3443 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3444 fatal_insn ("incorrect insn:",insn
);
3446 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3447 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3448 "ldd %0,Y+63" CR_TAB
3449 "sbiw r28,%o1-63", op
, plen
, -3);
3451 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3452 "sbci r29,hi8(-%o1)" CR_TAB
3454 "subi r28,lo8(%o1)" CR_TAB
3455 "sbci r29,hi8(%o1)", op
, plen
, -5);
3457 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3459 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3460 it but I have this situation with extremal optimizing options. */
3462 avr_asm_len ("adiw r26,%o1" CR_TAB
3463 "ld %0,X", op
, plen
, -2);
3465 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3466 && !reg_unused_after (insn
, XEXP (x
,0)))
3468 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3474 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3477 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
3481 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3484 avr_out_movhi_r_mr_reg_no_disp_tiny (rtx op
[], int *plen
)
3488 rtx base
= XEXP (src
, 0);
3490 int reg_dest
= true_regnum (dest
);
3491 int reg_base
= true_regnum (base
);
3493 if (reg_dest
== reg_base
) /* R = (R) */
3494 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3496 "mov %A0,__tmp_reg__", op
, plen
, -3);
3498 return avr_asm_len ("ld %A0,%1" CR_TAB
3499 TINY_ADIW (%E1
, %F1
, 1) CR_TAB
3501 TINY_SBIW (%E1
, %F1
, 1), op
, plen
, -6);
3505 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3508 avr_out_movhi_r_mr_reg_disp_tiny (rtx op
[], int *plen
)
3512 rtx base
= XEXP (src
, 0);
3514 int reg_dest
= true_regnum (dest
);
3515 int reg_base
= true_regnum (XEXP (base
, 0));
3517 if (reg_base
== reg_dest
)
3519 return avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3520 "ld __tmp_reg__,%b1+" CR_TAB
3522 "mov %A0,__tmp_reg__", op
, plen
, -5);
3526 return avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3527 "ld %A0,%b1+" CR_TAB
3529 TINY_SBIW (%I1
, %J1
, %o1
+1), op
, plen
, -6);
3534 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3537 avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
3539 int mem_volatile_p
= 0;
3542 rtx base
= XEXP (src
, 0);
3544 /* "volatile" forces reading low byte first, even if less efficient,
3545 for correct operation with 16-bit I/O registers. */
3546 mem_volatile_p
= MEM_VOLATILE_P (src
);
3548 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3549 fatal_insn ("incorrect insn:", insn
);
3551 if (!mem_volatile_p
)
3552 return avr_asm_len ("ld %B0,%1" CR_TAB
3553 "ld %A0,%1", op
, plen
, -2);
3555 return avr_asm_len (TINY_SBIW (%I1
, %J1
, 2) CR_TAB
3556 "ld %A0,%p1+" CR_TAB
3558 TINY_SBIW (%I1
, %J1
, 1), op
, plen
, -6);
3563 out_movhi_r_mr (rtx_insn
*insn
, rtx op
[], int *plen
)
3567 rtx base
= XEXP (src
, 0);
3568 int reg_dest
= true_regnum (dest
);
3569 int reg_base
= true_regnum (base
);
3570 /* "volatile" forces reading low byte first, even if less efficient,
3571 for correct operation with 16-bit I/O registers. */
3572 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3577 return avr_out_movhi_r_mr_reg_no_disp_tiny (op
, plen
);
3579 if (reg_dest
== reg_base
) /* R = (R) */
3580 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3582 "mov %A0,__tmp_reg__", op
, plen
, -3);
3584 if (reg_base
!= REG_X
)
3585 return avr_asm_len ("ld %A0,%1" CR_TAB
3586 "ldd %B0,%1+1", op
, plen
, -2);
3588 avr_asm_len ("ld %A0,X+" CR_TAB
3589 "ld %B0,X", op
, plen
, -2);
3591 if (!reg_unused_after (insn
, base
))
3592 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3596 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3598 int disp
= INTVAL (XEXP (base
, 1));
3599 int reg_base
= true_regnum (XEXP (base
, 0));
3602 return avr_out_movhi_r_mr_reg_disp_tiny (op
, plen
);
3604 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3606 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3607 fatal_insn ("incorrect insn:",insn
);
3609 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3610 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3611 "ldd %A0,Y+62" CR_TAB
3612 "ldd %B0,Y+63" CR_TAB
3613 "sbiw r28,%o1-62", op
, plen
, -4)
3615 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3616 "sbci r29,hi8(-%o1)" CR_TAB
3618 "ldd %B0,Y+1" CR_TAB
3619 "subi r28,lo8(%o1)" CR_TAB
3620 "sbci r29,hi8(%o1)", op
, plen
, -6);
3623 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3624 it but I have this situation with extremal
3625 optimization options. */
3627 if (reg_base
== REG_X
)
3628 return reg_base
== reg_dest
3629 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3630 "ld __tmp_reg__,X+" CR_TAB
3632 "mov %A0,__tmp_reg__", op
, plen
, -4)
3634 : avr_asm_len ("adiw r26,%o1" CR_TAB
3637 "sbiw r26,%o1+1", op
, plen
, -4);
3639 return reg_base
== reg_dest
3640 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3641 "ldd %B0,%B1" CR_TAB
3642 "mov %A0,__tmp_reg__", op
, plen
, -3)
3644 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3645 "ldd %B0,%B1", op
, plen
, -2);
3647 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3650 return avr_out_movhi_r_mr_pre_dec_tiny (insn
, op
, plen
);
3652 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3653 fatal_insn ("incorrect insn:", insn
);
3655 if (!mem_volatile_p
)
3656 return avr_asm_len ("ld %B0,%1" CR_TAB
3657 "ld %A0,%1", op
, plen
, -2);
3659 return REGNO (XEXP (base
, 0)) == REG_X
3660 ? avr_asm_len ("sbiw r26,2" CR_TAB
3663 "sbiw r26,1", op
, plen
, -4)
3665 : avr_asm_len ("sbiw %r1,2" CR_TAB
3667 "ldd %B0,%p1+1", op
, plen
, -3);
3669 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3671 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3672 fatal_insn ("incorrect insn:", insn
);
3674 return avr_asm_len ("ld %A0,%1" CR_TAB
3675 "ld %B0,%1", op
, plen
, -2);
3677 else if (CONSTANT_ADDRESS_P (base
))
3679 int n_words
= AVR_TINY
? 2 : 4;
3680 return optimize
> 0 && io_address_operand (base
, HImode
)
3681 ? avr_asm_len ("in %A0,%i1" CR_TAB
3682 "in %B0,%i1+1", op
, plen
, -2)
3684 : avr_asm_len ("lds %A0,%m1" CR_TAB
3685 "lds %B0,%m1+1", op
, plen
, -n_words
);
3688 fatal_insn ("unknown move insn:",insn
);
3693 avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
3697 rtx base
= XEXP (src
, 0);
3698 int reg_dest
= true_regnum (dest
);
3699 int reg_base
= true_regnum (base
);
3701 if (reg_dest
== reg_base
)
3703 /* "ld r26,-X" is undefined */
3704 return *l
= 9, (TINY_ADIW (%E1
, %F1
, 3) CR_TAB
3707 "ld __tmp_reg__,-%1" CR_TAB
3708 TINY_SBIW (%E1
, %F1
, 1) CR_TAB
3710 "mov %B0,__tmp_reg__");
3712 else if (reg_dest
== reg_base
- 2)
3714 return *l
= 5, ("ld %A0,%1+" CR_TAB
3716 "ld __tmp_reg__,%1+" CR_TAB
3718 "mov %C0,__tmp_reg__");
3720 else if (reg_unused_after (insn
, base
))
3722 return *l
= 4, ("ld %A0,%1+" CR_TAB
3729 return *l
= 6, ("ld %A0,%1+" CR_TAB
3733 TINY_SBIW (%E1
, %F1
, 3));
3739 avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
3743 rtx base
= XEXP (src
, 0);
3744 int reg_dest
= true_regnum (dest
);
3745 int reg_base
= true_regnum (XEXP (base
, 0));
3747 if (reg_dest
== reg_base
)
3749 /* "ld r26,-X" is undefined */
3750 return *l
= 9, (TINY_ADIW (%I1
, %J1
, %o1
+3) CR_TAB
3752 "ld %C0,-%b1" CR_TAB
3753 "ld __tmp_reg__,-%b1" CR_TAB
3754 TINY_SBIW (%I1
, %J1
, 1) CR_TAB
3756 "mov %B0,__tmp_reg__");
3758 else if (reg_dest
== reg_base
- 2)
3760 return *l
= 7, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3761 "ld %A0,%b1+" CR_TAB
3762 "ld %B0,%b1+" CR_TAB
3763 "ld __tmp_reg__,%b1+" CR_TAB
3765 "mov %C0,__tmp_reg__");
3767 else if (reg_unused_after (insn
, XEXP (base
, 0)))
3769 return *l
= 6, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3770 "ld %A0,%b1+" CR_TAB
3771 "ld %B0,%b1+" CR_TAB
3772 "ld %C0,%b1+" CR_TAB
3777 return *l
= 8, (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
3778 "ld %A0,%b1+" CR_TAB
3779 "ld %B0,%b1+" CR_TAB
3780 "ld %C0,%b1+" CR_TAB
3782 TINY_SBIW (%I1
, %J1
, %o1
+3));
3787 out_movsi_r_mr (rtx_insn
*insn
, rtx op
[], int *l
)
3791 rtx base
= XEXP (src
, 0);
3792 int reg_dest
= true_regnum (dest
);
3793 int reg_base
= true_regnum (base
);
3802 return avr_out_movsi_r_mr_reg_no_disp_tiny (insn
, op
, l
);
3804 if (reg_base
== REG_X
) /* (R26) */
3806 if (reg_dest
== REG_X
)
3807 /* "ld r26,-X" is undefined */
3808 return *l
=7, ("adiw r26,3" CR_TAB
3811 "ld __tmp_reg__,-X" CR_TAB
3814 "mov r27,__tmp_reg__");
3815 else if (reg_dest
== REG_X
- 2)
3816 return *l
=5, ("ld %A0,X+" CR_TAB
3818 "ld __tmp_reg__,X+" CR_TAB
3820 "mov %C0,__tmp_reg__");
3821 else if (reg_unused_after (insn
, base
))
3822 return *l
=4, ("ld %A0,X+" CR_TAB
3827 return *l
=5, ("ld %A0,X+" CR_TAB
3835 if (reg_dest
== reg_base
)
3836 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3837 "ldd %C0,%1+2" CR_TAB
3838 "ldd __tmp_reg__,%1+1" CR_TAB
3840 "mov %B0,__tmp_reg__");
3841 else if (reg_base
== reg_dest
+ 2)
3842 return *l
=5, ("ld %A0,%1" CR_TAB
3843 "ldd %B0,%1+1" CR_TAB
3844 "ldd __tmp_reg__,%1+2" CR_TAB
3845 "ldd %D0,%1+3" CR_TAB
3846 "mov %C0,__tmp_reg__");
3848 return *l
=4, ("ld %A0,%1" CR_TAB
3849 "ldd %B0,%1+1" CR_TAB
3850 "ldd %C0,%1+2" CR_TAB
3854 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3856 int disp
= INTVAL (XEXP (base
, 1));
3859 return avr_out_movsi_r_mr_reg_disp_tiny (insn
, op
, l
);
3861 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3863 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3864 fatal_insn ("incorrect insn:",insn
);
3866 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3867 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3868 "ldd %A0,Y+60" CR_TAB
3869 "ldd %B0,Y+61" CR_TAB
3870 "ldd %C0,Y+62" CR_TAB
3871 "ldd %D0,Y+63" CR_TAB
3874 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3875 "sbci r29,hi8(-%o1)" CR_TAB
3877 "ldd %B0,Y+1" CR_TAB
3878 "ldd %C0,Y+2" CR_TAB
3879 "ldd %D0,Y+3" CR_TAB
3880 "subi r28,lo8(%o1)" CR_TAB
3881 "sbci r29,hi8(%o1)");
3884 reg_base
= true_regnum (XEXP (base
, 0));
3885 if (reg_base
== REG_X
)
3888 if (reg_dest
== REG_X
)
3891 /* "ld r26,-X" is undefined */
3892 return ("adiw r26,%o1+3" CR_TAB
3895 "ld __tmp_reg__,-X" CR_TAB
3898 "mov r27,__tmp_reg__");
3901 if (reg_dest
== REG_X
- 2)
3902 return ("adiw r26,%o1" CR_TAB
3905 "ld __tmp_reg__,X+" CR_TAB
3907 "mov r26,__tmp_reg__");
3909 return ("adiw r26,%o1" CR_TAB
3916 if (reg_dest
== reg_base
)
3917 return *l
=5, ("ldd %D0,%D1" CR_TAB
3918 "ldd %C0,%C1" CR_TAB
3919 "ldd __tmp_reg__,%B1" CR_TAB
3920 "ldd %A0,%A1" CR_TAB
3921 "mov %B0,__tmp_reg__");
3922 else if (reg_dest
== reg_base
- 2)
3923 return *l
=5, ("ldd %A0,%A1" CR_TAB
3924 "ldd %B0,%B1" CR_TAB
3925 "ldd __tmp_reg__,%C1" CR_TAB
3926 "ldd %D0,%D1" CR_TAB
3927 "mov %C0,__tmp_reg__");
3928 return *l
=4, ("ldd %A0,%A1" CR_TAB
3929 "ldd %B0,%B1" CR_TAB
3930 "ldd %C0,%C1" CR_TAB
3933 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3934 return *l
=4, ("ld %D0,%1" CR_TAB
3938 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3939 return *l
=4, ("ld %A0,%1" CR_TAB
3943 else if (CONSTANT_ADDRESS_P (base
))
3945 if (io_address_operand (base
, SImode
))
3948 return ("in %A0,%i1" CR_TAB
3949 "in %B0,%i1+1" CR_TAB
3950 "in %C0,%i1+2" CR_TAB
3955 *l
= AVR_TINY
? 4 : 8;
3956 return ("lds %A0,%m1" CR_TAB
3957 "lds %B0,%m1+1" CR_TAB
3958 "lds %C0,%m1+2" CR_TAB
3963 fatal_insn ("unknown move insn:",insn
);
3968 avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *l
)
3972 rtx base
= XEXP (dest
, 0);
3973 int reg_base
= true_regnum (base
);
3974 int reg_src
= true_regnum (src
);
3976 if (reg_base
== reg_src
)
3978 /* "ld r26,-X" is undefined */
3979 if (reg_unused_after (insn
, base
))
3981 return *l
= 7, ("mov __tmp_reg__, %B1" CR_TAB
3983 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
3984 "st %0+,__tmp_reg__" CR_TAB
3990 return *l
= 9, ("mov __tmp_reg__, %B1" CR_TAB
3992 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
3993 "st %0+,__tmp_reg__" CR_TAB
3996 TINY_SBIW (%E0
, %F0
, 3));
3999 else if (reg_base
== reg_src
+ 2)
4001 if (reg_unused_after (insn
, base
))
4002 return *l
= 7, ("mov __zero_reg__,%C1" CR_TAB
4003 "mov __tmp_reg__,%D1" CR_TAB
4006 "st %0+,__zero_reg__" CR_TAB
4007 "st %0,__tmp_reg__" CR_TAB
4008 "clr __zero_reg__");
4010 return *l
= 9, ("mov __zero_reg__,%C1" CR_TAB
4011 "mov __tmp_reg__,%D1" CR_TAB
4014 "st %0+,__zero_reg__" CR_TAB
4015 "st %0,__tmp_reg__" CR_TAB
4016 "clr __zero_reg__" CR_TAB
4017 TINY_SBIW (%E0
, %F0
, 3));
4020 return *l
= 6, ("st %0+,%A1" CR_TAB
4024 TINY_SBIW (%E0
, %F0
, 3));
4028 avr_out_movsi_mr_r_reg_disp_tiny (rtx op
[], int *l
)
4032 rtx base
= XEXP (dest
, 0);
4033 int reg_base
= REGNO (XEXP (base
, 0));
4034 int reg_src
=true_regnum (src
);
4036 if (reg_base
== reg_src
)
4039 return ("mov __tmp_reg__,%A2" CR_TAB
4040 "mov __zero_reg__,%B2" CR_TAB
4041 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4042 "st %b0+,__tmp_reg__" CR_TAB
4043 "st %b0+,__zero_reg__" CR_TAB
4044 "st %b0+,%C2" CR_TAB
4046 "clr __zero_reg__" CR_TAB
4047 TINY_SBIW (%I0
, %J0
, %o0
+3));
4049 else if (reg_src
== reg_base
- 2)
4052 return ("mov __tmp_reg__,%C2" CR_TAB
4053 "mov __zero_reg__,%D2" CR_TAB
4054 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4055 "st %b0+,%A0" CR_TAB
4056 "st %b0+,%B0" CR_TAB
4057 "st %b0+,__tmp_reg__" CR_TAB
4058 "st %b0,__zero_reg__" CR_TAB
4059 "clr __zero_reg__" CR_TAB
4060 TINY_SBIW (%I0
, %J0
, %o0
+3));
4063 return (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4064 "st %b0+,%A1" CR_TAB
4065 "st %b0+,%B1" CR_TAB
4066 "st %b0+,%C1" CR_TAB
4068 TINY_SBIW (%I0
, %J0
, %o0
+3));
4072 out_movsi_mr_r (rtx_insn
*insn
, rtx op
[], int *l
)
4076 rtx base
= XEXP (dest
, 0);
4077 int reg_base
= true_regnum (base
);
4078 int reg_src
= true_regnum (src
);
4084 if (CONSTANT_ADDRESS_P (base
))
4086 if (io_address_operand (base
, SImode
))
4088 return *l
=4,("out %i0, %A1" CR_TAB
4089 "out %i0+1,%B1" CR_TAB
4090 "out %i0+2,%C1" CR_TAB
4095 *l
= AVR_TINY
? 4 : 8;
4096 return ("sts %m0,%A1" CR_TAB
4097 "sts %m0+1,%B1" CR_TAB
4098 "sts %m0+2,%C1" CR_TAB
4103 if (reg_base
> 0) /* (r) */
4106 return avr_out_movsi_mr_r_reg_no_disp_tiny (insn
, op
, l
);
4108 if (reg_base
== REG_X
) /* (R26) */
4110 if (reg_src
== REG_X
)
4112 /* "st X+,r26" is undefined */
4113 if (reg_unused_after (insn
, base
))
4114 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
4117 "st X+,__tmp_reg__" CR_TAB
4121 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
4124 "st X+,__tmp_reg__" CR_TAB
4129 else if (reg_base
== reg_src
+ 2)
4131 if (reg_unused_after (insn
, base
))
4132 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
4133 "mov __tmp_reg__,%D1" CR_TAB
4136 "st %0+,__zero_reg__" CR_TAB
4137 "st %0,__tmp_reg__" CR_TAB
4138 "clr __zero_reg__");
4140 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
4141 "mov __tmp_reg__,%D1" CR_TAB
4144 "st %0+,__zero_reg__" CR_TAB
4145 "st %0,__tmp_reg__" CR_TAB
4146 "clr __zero_reg__" CR_TAB
4149 return *l
=5, ("st %0+,%A1" CR_TAB
4156 return *l
=4, ("st %0,%A1" CR_TAB
4157 "std %0+1,%B1" CR_TAB
4158 "std %0+2,%C1" CR_TAB
4161 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4163 int disp
= INTVAL (XEXP (base
, 1));
4166 return avr_out_movsi_mr_r_reg_disp_tiny (op
, l
);
4168 reg_base
= REGNO (XEXP (base
, 0));
4169 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4171 if (reg_base
!= REG_Y
)
4172 fatal_insn ("incorrect insn:",insn
);
4174 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4175 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
4176 "std Y+60,%A1" CR_TAB
4177 "std Y+61,%B1" CR_TAB
4178 "std Y+62,%C1" CR_TAB
4179 "std Y+63,%D1" CR_TAB
4182 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
4183 "sbci r29,hi8(-%o0)" CR_TAB
4185 "std Y+1,%B1" CR_TAB
4186 "std Y+2,%C1" CR_TAB
4187 "std Y+3,%D1" CR_TAB
4188 "subi r28,lo8(%o0)" CR_TAB
4189 "sbci r29,hi8(%o0)");
4191 if (reg_base
== REG_X
)
4194 if (reg_src
== REG_X
)
4197 return ("mov __tmp_reg__,r26" CR_TAB
4198 "mov __zero_reg__,r27" CR_TAB
4199 "adiw r26,%o0" CR_TAB
4200 "st X+,__tmp_reg__" CR_TAB
4201 "st X+,__zero_reg__" CR_TAB
4204 "clr __zero_reg__" CR_TAB
4207 else if (reg_src
== REG_X
- 2)
4210 return ("mov __tmp_reg__,r26" CR_TAB
4211 "mov __zero_reg__,r27" CR_TAB
4212 "adiw r26,%o0" CR_TAB
4215 "st X+,__tmp_reg__" CR_TAB
4216 "st X,__zero_reg__" CR_TAB
4217 "clr __zero_reg__" CR_TAB
4221 return ("adiw r26,%o0" CR_TAB
4228 return *l
=4, ("std %A0,%A1" CR_TAB
4229 "std %B0,%B1" CR_TAB
4230 "std %C0,%C1" CR_TAB
4233 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4234 return *l
=4, ("st %0,%D1" CR_TAB
4238 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4239 return *l
=4, ("st %0,%A1" CR_TAB
4243 fatal_insn ("unknown move insn:",insn
);
4248 output_movsisf (rtx_insn
*insn
, rtx operands
[], int *l
)
4251 rtx dest
= operands
[0];
4252 rtx src
= operands
[1];
4255 if (avr_mem_flash_p (src
)
4256 || avr_mem_flash_p (dest
))
4258 return avr_out_lpm (insn
, operands
, real_l
);
4264 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
4267 if (REG_P (src
)) /* mov r,r */
4269 if (true_regnum (dest
) > true_regnum (src
))
4274 return ("movw %C0,%C1" CR_TAB
4278 return ("mov %D0,%D1" CR_TAB
4279 "mov %C0,%C1" CR_TAB
4280 "mov %B0,%B1" CR_TAB
4288 return ("movw %A0,%A1" CR_TAB
4292 return ("mov %A0,%A1" CR_TAB
4293 "mov %B0,%B1" CR_TAB
4294 "mov %C0,%C1" CR_TAB
4298 else if (CONSTANT_P (src
))
4300 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
4302 else if (MEM_P (src
))
4303 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
4305 else if (MEM_P (dest
))
4309 if (src
== CONST0_RTX (GET_MODE (dest
)))
4310 operands
[1] = zero_reg_rtx
;
4312 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
4315 output_asm_insn (templ
, operands
);
4320 fatal_insn ("invalid insn:", insn
);
4325 /* Handle loads of 24-bit types from memory to register. */
4328 avr_out_load_psi_reg_no_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
4332 rtx base
= XEXP (src
, 0);
4333 int reg_dest
= true_regnum (dest
);
4334 int reg_base
= true_regnum (base
);
4336 if (reg_base
== reg_dest
)
4338 return avr_asm_len (TINY_ADIW (%E1
, %F1
, 2) CR_TAB
4340 "ld __tmp_reg__,-%1" CR_TAB
4341 TINY_SBIW (%E1
, %F1
, 1) CR_TAB
4343 "mov %B0,__tmp_reg__", op
, plen
, -8);
4347 avr_asm_len ("ld %A0,%1+" CR_TAB
4349 "ld %C0,%1", op
, plen
, -3);
4351 if (reg_dest
!= reg_base
- 2 &&
4352 !reg_unused_after (insn
, base
))
4354 avr_asm_len (TINY_SBIW (%E1
, %F1
, 2), op
, plen
, 2);
4361 avr_out_load_psi_reg_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
4365 rtx base
= XEXP (src
, 0);
4366 int reg_dest
= true_regnum (dest
);
4367 int reg_base
= true_regnum (base
);
4369 reg_base
= true_regnum (XEXP (base
, 0));
4370 if (reg_base
== reg_dest
)
4372 return avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
+2) CR_TAB
4374 "ld __tmp_reg__,-%b1" CR_TAB
4375 TINY_SBIW (%I1
, %J1
, 1) CR_TAB
4377 "mov %B0,__tmp_reg__", op
, plen
, -8);
4381 avr_asm_len (TINY_ADIW (%I1
, %J1
, %o1
) CR_TAB
4382 "ld %A0,%b1+" CR_TAB
4383 "ld %B0,%b1+" CR_TAB
4384 "ld %C0,%b1", op
, plen
, -5);
4386 if (reg_dest
!= (reg_base
- 2)
4387 && !reg_unused_after (insn
, XEXP (base
, 0)))
4388 avr_asm_len (TINY_SBIW (%I1
, %J1
, %o1
+2), op
, plen
, 2);
4395 avr_out_load_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4399 rtx base
= XEXP (src
, 0);
4400 int reg_dest
= true_regnum (dest
);
4401 int reg_base
= true_regnum (base
);
4406 return avr_out_load_psi_reg_no_disp_tiny (insn
, op
, plen
);
4408 if (reg_base
== REG_X
) /* (R26) */
4410 if (reg_dest
== REG_X
)
4411 /* "ld r26,-X" is undefined */
4412 return avr_asm_len ("adiw r26,2" CR_TAB
4414 "ld __tmp_reg__,-X" CR_TAB
4417 "mov r27,__tmp_reg__", op
, plen
, -6);
4420 avr_asm_len ("ld %A0,X+" CR_TAB
4422 "ld %C0,X", op
, plen
, -3);
4424 if (reg_dest
!= REG_X
- 2
4425 && !reg_unused_after (insn
, base
))
4427 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
4433 else /* reg_base != REG_X */
4435 if (reg_dest
== reg_base
)
4436 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
4437 "ldd __tmp_reg__,%1+1" CR_TAB
4439 "mov %B0,__tmp_reg__", op
, plen
, -4);
4441 return avr_asm_len ("ld %A0,%1" CR_TAB
4442 "ldd %B0,%1+1" CR_TAB
4443 "ldd %C0,%1+2", op
, plen
, -3);
4446 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4448 int disp
= INTVAL (XEXP (base
, 1));
4451 return avr_out_load_psi_reg_disp_tiny (insn
, op
, plen
);
4453 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
4455 if (REGNO (XEXP (base
, 0)) != REG_Y
)
4456 fatal_insn ("incorrect insn:",insn
);
4458 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
4459 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
4460 "ldd %A0,Y+61" CR_TAB
4461 "ldd %B0,Y+62" CR_TAB
4462 "ldd %C0,Y+63" CR_TAB
4463 "sbiw r28,%o1-61", op
, plen
, -5);
4465 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
4466 "sbci r29,hi8(-%o1)" CR_TAB
4468 "ldd %B0,Y+1" CR_TAB
4469 "ldd %C0,Y+2" CR_TAB
4470 "subi r28,lo8(%o1)" CR_TAB
4471 "sbci r29,hi8(%o1)", op
, plen
, -7);
4474 reg_base
= true_regnum (XEXP (base
, 0));
4475 if (reg_base
== REG_X
)
4478 if (reg_dest
== REG_X
)
4480 /* "ld r26,-X" is undefined */
4481 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
4483 "ld __tmp_reg__,-X" CR_TAB
4486 "mov r27,__tmp_reg__", op
, plen
, -6);
4489 avr_asm_len ("adiw r26,%o1" CR_TAB
4492 "ld %C0,X", op
, plen
, -4);
4494 if (reg_dest
!= REG_W
4495 && !reg_unused_after (insn
, XEXP (base
, 0)))
4496 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
4501 if (reg_dest
== reg_base
)
4502 return avr_asm_len ("ldd %C0,%C1" CR_TAB
4503 "ldd __tmp_reg__,%B1" CR_TAB
4504 "ldd %A0,%A1" CR_TAB
4505 "mov %B0,__tmp_reg__", op
, plen
, -4);
4507 return avr_asm_len ("ldd %A0,%A1" CR_TAB
4508 "ldd %B0,%B1" CR_TAB
4509 "ldd %C0,%C1", op
, plen
, -3);
4511 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4512 return avr_asm_len ("ld %C0,%1" CR_TAB
4514 "ld %A0,%1", op
, plen
, -3);
4515 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4516 return avr_asm_len ("ld %A0,%1" CR_TAB
4518 "ld %C0,%1", op
, plen
, -3);
4520 else if (CONSTANT_ADDRESS_P (base
))
4522 int n_words
= AVR_TINY
? 3 : 6;
4523 return avr_asm_len ("lds %A0,%m1" CR_TAB
4524 "lds %B0,%m1+1" CR_TAB
4525 "lds %C0,%m1+2", op
, plen
, -n_words
);
4528 fatal_insn ("unknown move insn:",insn
);
4534 avr_out_store_psi_reg_no_disp_tiny (rtx_insn
*insn
, rtx
*op
, int *plen
)
4538 rtx base
= XEXP (dest
, 0);
4539 int reg_base
= true_regnum (base
);
4540 int reg_src
= true_regnum (src
);
4542 if (reg_base
== reg_src
)
4544 avr_asm_len ("st %0,%A1" CR_TAB
4545 "mov __tmp_reg__,%B1" CR_TAB
4546 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
/* st X+, r27 is undefined */
4547 "st %0+,__tmp_reg__" CR_TAB
4548 "st %0,%C1", op
, plen
, -6);
4551 else if (reg_src
== reg_base
- 2)
4553 avr_asm_len ("st %0,%A1" CR_TAB
4554 "mov __tmp_reg__,%C1" CR_TAB
4555 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
4557 "st %0,__tmp_reg__", op
, plen
, 6);
4561 avr_asm_len ("st %0+,%A1" CR_TAB
4563 "st %0,%C1", op
, plen
, -3);
4566 if (!reg_unused_after (insn
, base
))
4567 avr_asm_len (TINY_SBIW (%E0
, %F0
, 2), op
, plen
, 2);
4573 avr_out_store_psi_reg_disp_tiny (rtx
*op
, int *plen
)
4577 rtx base
= XEXP (dest
, 0);
4578 int reg_base
= REGNO (XEXP (base
, 0));
4579 int reg_src
= true_regnum (src
);
4581 if (reg_src
== reg_base
)
4583 return avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
4584 "mov __zero_reg__,%B1" CR_TAB
4585 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4586 "st %b0+,__tmp_reg__" CR_TAB
4587 "st %b0+,__zero_reg__" CR_TAB
4589 "clr __zero_reg__" CR_TAB
4590 TINY_SBIW (%I0
, %J0
, %o0
+2), op
, plen
, -10);
4592 else if (reg_src
== reg_base
- 2)
4594 return avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
4595 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4596 "st %b0+,%A1" CR_TAB
4597 "st %b0+,%B1" CR_TAB
4598 "st %b0,__tmp_reg__" CR_TAB
4599 TINY_SBIW (%I0
, %J0
, %o0
+2), op
, plen
, -8);
4602 return avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4603 "st %b0+,%A1" CR_TAB
4604 "st %b0+,%B1" CR_TAB
4606 TINY_SBIW (%I0
, %J0
, %o0
+2), op
, plen
, -7);
4609 /* Handle store of 24-bit type from register or zero to memory. */
4612 avr_out_store_psi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4616 rtx base
= XEXP (dest
, 0);
4617 int reg_base
= true_regnum (base
);
4619 if (CONSTANT_ADDRESS_P (base
))
4621 int n_words
= AVR_TINY
? 3 : 6;
4622 return avr_asm_len ("sts %m0,%A1" CR_TAB
4623 "sts %m0+1,%B1" CR_TAB
4624 "sts %m0+2,%C1", op
, plen
, -n_words
);
4627 if (reg_base
> 0) /* (r) */
4630 return avr_out_store_psi_reg_no_disp_tiny (insn
, op
, plen
);
4632 if (reg_base
== REG_X
) /* (R26) */
4634 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
4636 avr_asm_len ("st %0+,%A1" CR_TAB
4638 "st %0,%C1", op
, plen
, -3);
4640 if (!reg_unused_after (insn
, base
))
4641 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
4646 return avr_asm_len ("st %0,%A1" CR_TAB
4647 "std %0+1,%B1" CR_TAB
4648 "std %0+2,%C1", op
, plen
, -3);
4650 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
4652 int disp
= INTVAL (XEXP (base
, 1));
4655 return avr_out_store_psi_reg_disp_tiny (op
, plen
);
4657 reg_base
= REGNO (XEXP (base
, 0));
4659 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4661 if (reg_base
!= REG_Y
)
4662 fatal_insn ("incorrect insn:",insn
);
4664 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4665 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
4666 "std Y+61,%A1" CR_TAB
4667 "std Y+62,%B1" CR_TAB
4668 "std Y+63,%C1" CR_TAB
4669 "sbiw r28,%o0-61", op
, plen
, -5);
4671 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4672 "sbci r29,hi8(-%o0)" CR_TAB
4674 "std Y+1,%B1" CR_TAB
4675 "std Y+2,%C1" CR_TAB
4676 "subi r28,lo8(%o0)" CR_TAB
4677 "sbci r29,hi8(%o0)", op
, plen
, -7);
4679 if (reg_base
== REG_X
)
4682 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
4684 avr_asm_len ("adiw r26,%o0" CR_TAB
4687 "st X,%C1", op
, plen
, -4);
4689 if (!reg_unused_after (insn
, XEXP (base
, 0)))
4690 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
4695 return avr_asm_len ("std %A0,%A1" CR_TAB
4696 "std %B0,%B1" CR_TAB
4697 "std %C0,%C1", op
, plen
, -3);
4699 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4700 return avr_asm_len ("st %0,%C1" CR_TAB
4702 "st %0,%A1", op
, plen
, -3);
4703 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4704 return avr_asm_len ("st %0,%A1" CR_TAB
4706 "st %0,%C1", op
, plen
, -3);
4708 fatal_insn ("unknown move insn:",insn
);
4713 /* Move around 24-bit stuff. */
4716 avr_out_movpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
4721 if (avr_mem_flash_p (src
)
4722 || avr_mem_flash_p (dest
))
4724 return avr_out_lpm (insn
, op
, plen
);
4727 if (register_operand (dest
, VOIDmode
))
4729 if (register_operand (src
, VOIDmode
)) /* mov r,r */
4731 if (true_regnum (dest
) > true_regnum (src
))
4733 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
4736 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
4738 return avr_asm_len ("mov %B0,%B1" CR_TAB
4739 "mov %A0,%A1", op
, plen
, 2);
4744 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
4746 avr_asm_len ("mov %A0,%A1" CR_TAB
4747 "mov %B0,%B1", op
, plen
, -2);
4749 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
4752 else if (CONSTANT_P (src
))
4754 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
4756 else if (MEM_P (src
))
4757 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
4759 else if (MEM_P (dest
))
4764 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
4766 return avr_out_store_psi (insn
, xop
, plen
);
4769 fatal_insn ("invalid insn:", insn
);
4774 avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
4778 rtx x
= XEXP (dest
, 0);
4780 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
4782 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4783 TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4784 "st %b0,__tmp_reg__", op
, plen
, -4);
4788 avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
) CR_TAB
4789 "st %b0,%1" , op
, plen
, -3);
4792 if (!reg_unused_after (insn
, XEXP (x
,0)))
4793 avr_asm_len (TINY_SBIW (%I0
, %J0
, %o0
), op
, plen
, 2);
4799 out_movqi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
4803 rtx x
= XEXP (dest
, 0);
4805 if (CONSTANT_ADDRESS_P (x
))
4807 int n_words
= AVR_TINY
? 1 : 2;
4808 return optimize
> 0 && io_address_operand (x
, QImode
)
4809 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
4810 : avr_asm_len ("sts %m0,%1", op
, plen
, -n_words
);
4812 else if (GET_CODE (x
) == PLUS
4813 && REG_P (XEXP (x
, 0))
4814 && CONST_INT_P (XEXP (x
, 1)))
4816 /* memory access by reg+disp */
4818 int disp
= INTVAL (XEXP (x
, 1));
4821 return avr_out_movqi_mr_r_reg_disp_tiny (insn
, op
, plen
);
4823 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
4825 if (REGNO (XEXP (x
, 0)) != REG_Y
)
4826 fatal_insn ("incorrect insn:",insn
);
4828 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4829 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4830 "std Y+63,%1" CR_TAB
4831 "sbiw r28,%o0-63", op
, plen
, -3);
4833 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4834 "sbci r29,hi8(-%o0)" CR_TAB
4836 "subi r28,lo8(%o0)" CR_TAB
4837 "sbci r29,hi8(%o0)", op
, plen
, -5);
4839 else if (REGNO (XEXP (x
,0)) == REG_X
)
4841 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
4843 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4844 "adiw r26,%o0" CR_TAB
4845 "st X,__tmp_reg__", op
, plen
, -3);
4849 avr_asm_len ("adiw r26,%o0" CR_TAB
4850 "st X,%1", op
, plen
, -2);
4853 if (!reg_unused_after (insn
, XEXP (x
,0)))
4854 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
4859 return avr_asm_len ("std %0,%1", op
, plen
, -1);
4862 return avr_asm_len ("st %0,%1", op
, plen
, -1);
4866 /* Helper for the next function for XMEGA. It does the same
4867 but with low byte first. */
4870 avr_out_movhi_mr_r_xmega (rtx_insn
*insn
, rtx op
[], int *plen
)
4874 rtx base
= XEXP (dest
, 0);
4875 int reg_base
= true_regnum (base
);
4876 int reg_src
= true_regnum (src
);
4878 /* "volatile" forces writing low byte first, even if less efficient,
4879 for correct operation with 16-bit I/O registers like SP. */
4880 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
4882 if (CONSTANT_ADDRESS_P (base
))
4884 int n_words
= AVR_TINY
? 2 : 4;
4885 return optimize
> 0 && io_address_operand (base
, HImode
)
4886 ? avr_asm_len ("out %i0,%A1" CR_TAB
4887 "out %i0+1,%B1", op
, plen
, -2)
4889 : avr_asm_len ("sts %m0,%A1" CR_TAB
4890 "sts %m0+1,%B1", op
, plen
, -n_words
);
4895 if (reg_base
!= REG_X
)
4896 return avr_asm_len ("st %0,%A1" CR_TAB
4897 "std %0+1,%B1", op
, plen
, -2);
4899 if (reg_src
== REG_X
)
4900 /* "st X+,r26" and "st -X,r26" are undefined. */
4901 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4904 "st X,__tmp_reg__", op
, plen
, -4);
4906 avr_asm_len ("st X+,%A1" CR_TAB
4907 "st X,%B1", op
, plen
, -2);
4909 return reg_unused_after (insn
, base
)
4911 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4913 else if (GET_CODE (base
) == PLUS
)
4915 int disp
= INTVAL (XEXP (base
, 1));
4916 reg_base
= REGNO (XEXP (base
, 0));
4917 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4919 if (reg_base
!= REG_Y
)
4920 fatal_insn ("incorrect insn:",insn
);
4922 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4923 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4924 "std Y+62,%A1" CR_TAB
4925 "std Y+63,%B1" CR_TAB
4926 "sbiw r28,%o0-62", op
, plen
, -4)
4928 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4929 "sbci r29,hi8(-%o0)" CR_TAB
4931 "std Y+1,%B1" CR_TAB
4932 "subi r28,lo8(%o0)" CR_TAB
4933 "sbci r29,hi8(%o0)", op
, plen
, -6);
4936 if (reg_base
!= REG_X
)
4937 return avr_asm_len ("std %A0,%A1" CR_TAB
4938 "std %B0,%B1", op
, plen
, -2);
4940 return reg_src
== REG_X
4941 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4942 "mov __zero_reg__,r27" CR_TAB
4943 "adiw r26,%o0" CR_TAB
4944 "st X+,__tmp_reg__" CR_TAB
4945 "st X,__zero_reg__" CR_TAB
4946 "clr __zero_reg__" CR_TAB
4947 "sbiw r26,%o0+1", op
, plen
, -7)
4949 : avr_asm_len ("adiw r26,%o0" CR_TAB
4952 "sbiw r26,%o0+1", op
, plen
, -4);
4954 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4956 if (!mem_volatile_p
)
4957 return avr_asm_len ("st %0,%B1" CR_TAB
4958 "st %0,%A1", op
, plen
, -2);
4960 return REGNO (XEXP (base
, 0)) == REG_X
4961 ? avr_asm_len ("sbiw r26,2" CR_TAB
4964 "sbiw r26,1", op
, plen
, -4)
4966 : avr_asm_len ("sbiw %r0,2" CR_TAB
4968 "std %p0+1,%B1", op
, plen
, -3);
4970 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4972 return avr_asm_len ("st %0,%A1" CR_TAB
4973 "st %0,%B1", op
, plen
, -2);
4976 fatal_insn ("unknown move insn:",insn
);
4981 avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn
*insn
, rtx op
[], int *plen
)
4985 rtx base
= XEXP (dest
, 0);
4986 int reg_base
= true_regnum (base
);
4987 int reg_src
= true_regnum (src
);
4988 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
4990 if (reg_base
== reg_src
)
4992 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4993 ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
4995 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
4996 "st %0,__tmp_reg__", op
, plen
, -5)
4997 : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
4998 TINY_ADIW (%E0
, %F0
, 1) CR_TAB
4999 "st %0,__tmp_reg__" CR_TAB
5000 TINY_SBIW (%E0
, %F0
, 1) CR_TAB
5001 "st %0, %A1", op
, plen
, -7);
5004 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
5005 ? avr_asm_len ("st %0+,%A1" CR_TAB
5006 "st %0,%B1", op
, plen
, -2)
5007 : avr_asm_len (TINY_ADIW (%E0
, %F0
, 1) CR_TAB
5009 "st -%0,%A1", op
, plen
, -4);
5013 avr_out_movhi_mr_r_reg_disp_tiny (rtx op
[], int *plen
)
5017 rtx base
= XEXP (dest
, 0);
5018 int reg_base
= REGNO (XEXP (base
, 0));
5019 int reg_src
= true_regnum (src
);
5021 return reg_src
== reg_base
5022 ? avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
5023 "mov __zero_reg__,%B1" CR_TAB
5024 TINY_ADIW (%I0
, %J0
, %o0
+1) CR_TAB
5025 "st %b0,__zero_reg__" CR_TAB
5026 "st -%b0,__tmp_reg__" CR_TAB
5027 "clr __zero_reg__" CR_TAB
5028 TINY_SBIW (%I0
, %J0
, %o0
), op
, plen
, -9)
5030 : avr_asm_len (TINY_ADIW (%I0
, %J0
, %o0
+1) CR_TAB
5032 "st -%b0,%A1" CR_TAB
5033 TINY_SBIW (%I0
, %J0
, %o0
), op
, plen
, -6);
5037 avr_out_movhi_mr_r_post_inc_tiny (rtx op
[], int *plen
)
5039 return avr_asm_len (TINY_ADIW (%I0
, %J0
, 1) CR_TAB
5041 "st -%p0,%A1" CR_TAB
5042 TINY_ADIW (%I0
, %J0
, 2), op
, plen
, -6);
5046 out_movhi_mr_r (rtx_insn
*insn
, rtx op
[], int *plen
)
5050 rtx base
= XEXP (dest
, 0);
5051 int reg_base
= true_regnum (base
);
5052 int reg_src
= true_regnum (src
);
5055 /* "volatile" forces writing high-byte first (no-xmega) resp.
5056 low-byte first (xmega) even if less efficient, for correct
5057 operation with 16-bit I/O registers like. */
5060 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
5062 mem_volatile_p
= MEM_VOLATILE_P (dest
);
5064 if (CONSTANT_ADDRESS_P (base
))
5066 int n_words
= AVR_TINY
? 2 : 4;
5067 return optimize
> 0 && io_address_operand (base
, HImode
)
5068 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
5069 "out %i0,%A1", op
, plen
, -2)
5071 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
5072 "sts %m0,%A1", op
, plen
, -n_words
);
5078 return avr_out_movhi_mr_r_reg_no_disp_tiny (insn
, op
, plen
);
5080 if (reg_base
!= REG_X
)
5081 return avr_asm_len ("std %0+1,%B1" CR_TAB
5082 "st %0,%A1", op
, plen
, -2);
5084 if (reg_src
== REG_X
)
5085 /* "st X+,r26" and "st -X,r26" are undefined. */
5086 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
5087 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5090 "st X,__tmp_reg__", op
, plen
, -4)
5092 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
5094 "st X,__tmp_reg__" CR_TAB
5096 "st X,r26", op
, plen
, -5);
5098 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
5099 ? avr_asm_len ("st X+,%A1" CR_TAB
5100 "st X,%B1", op
, plen
, -2)
5101 : avr_asm_len ("adiw r26,1" CR_TAB
5103 "st -X,%A1", op
, plen
, -3);
5105 else if (GET_CODE (base
) == PLUS
)
5107 int disp
= INTVAL (XEXP (base
, 1));
5110 return avr_out_movhi_mr_r_reg_disp_tiny (op
, plen
);
5112 reg_base
= REGNO (XEXP (base
, 0));
5113 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
5115 if (reg_base
!= REG_Y
)
5116 fatal_insn ("incorrect insn:",insn
);
5118 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
5119 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
5120 "std Y+63,%B1" CR_TAB
5121 "std Y+62,%A1" CR_TAB
5122 "sbiw r28,%o0-62", op
, plen
, -4)
5124 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
5125 "sbci r29,hi8(-%o0)" CR_TAB
5126 "std Y+1,%B1" CR_TAB
5128 "subi r28,lo8(%o0)" CR_TAB
5129 "sbci r29,hi8(%o0)", op
, plen
, -6);
5132 if (reg_base
!= REG_X
)
5133 return avr_asm_len ("std %B0,%B1" CR_TAB
5134 "std %A0,%A1", op
, plen
, -2);
5136 return reg_src
== REG_X
5137 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
5138 "mov __zero_reg__,r27" CR_TAB
5139 "adiw r26,%o0+1" CR_TAB
5140 "st X,__zero_reg__" CR_TAB
5141 "st -X,__tmp_reg__" CR_TAB
5142 "clr __zero_reg__" CR_TAB
5143 "sbiw r26,%o0", op
, plen
, -7)
5145 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
5148 "sbiw r26,%o0", op
, plen
, -4);
5150 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
5152 return avr_asm_len ("st %0,%B1" CR_TAB
5153 "st %0,%A1", op
, plen
, -2);
5155 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
5157 if (!mem_volatile_p
)
5158 return avr_asm_len ("st %0,%A1" CR_TAB
5159 "st %0,%B1", op
, plen
, -2);
5162 return avr_out_movhi_mr_r_post_inc_tiny (op
, plen
);
5164 return REGNO (XEXP (base
, 0)) == REG_X
5165 ? avr_asm_len ("adiw r26,1" CR_TAB
5168 "adiw r26,2", op
, plen
, -4)
5170 : avr_asm_len ("std %p0+1,%B1" CR_TAB
5172 "adiw %r0,2", op
, plen
, -3);
5174 fatal_insn ("unknown move insn:",insn
);
5178 /* Return 1 if frame pointer for current function required. */
5181 avr_frame_pointer_required_p (void)
5183 return (cfun
->calls_alloca
5184 || cfun
->calls_setjmp
5185 || cfun
->has_nonlocal_label
5186 || crtl
->args
.info
.nregs
== 0
5187 || get_frame_size () > 0);
5190 /* Returns the condition of compare insn INSN, or UNKNOWN. */
5193 compare_condition (rtx_insn
*insn
)
5195 rtx_insn
*next
= next_real_insn (insn
);
5197 if (next
&& JUMP_P (next
))
5199 rtx pat
= PATTERN (next
);
5200 rtx src
= SET_SRC (pat
);
5202 if (IF_THEN_ELSE
== GET_CODE (src
))
5203 return GET_CODE (XEXP (src
, 0));
5210 /* Returns true iff INSN is a tst insn that only tests the sign. */
5213 compare_sign_p (rtx_insn
*insn
)
5215 RTX_CODE cond
= compare_condition (insn
);
5216 return (cond
== GE
|| cond
== LT
);
5220 /* Returns true iff the next insn is a JUMP_INSN with a condition
5221 that needs to be swapped (GT, GTU, LE, LEU). */
5224 compare_diff_p (rtx_insn
*insn
)
5226 RTX_CODE cond
= compare_condition (insn
);
5227 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
5230 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
5233 compare_eq_p (rtx_insn
*insn
)
5235 RTX_CODE cond
= compare_condition (insn
);
5236 return (cond
== EQ
|| cond
== NE
);
5240 /* Output compare instruction
5242 compare (XOP[0], XOP[1])
5244 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
5245 XOP[2] is an 8-bit scratch register as needed.
5247 PLEN == NULL: Output instructions.
5248 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
5249 Don't output anything. */
5252 avr_out_compare (rtx_insn
*insn
, rtx
*xop
, int *plen
)
5254 /* Register to compare and value to compare against. */
5258 /* MODE of the comparison. */
5261 /* Number of bytes to operate on. */
5262 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
5264 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
5265 int clobber_val
= -1;
5267 /* Map fixed mode operands to integer operands with the same binary
5268 representation. They are easier to handle in the remainder. */
5270 if (CONST_FIXED_P (xval
))
5272 xreg
= avr_to_int_mode (xop
[0]);
5273 xval
= avr_to_int_mode (xop
[1]);
5276 mode
= GET_MODE (xreg
);
5278 gcc_assert (REG_P (xreg
));
5279 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
5280 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
5285 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
5286 against 0 by ORing the bytes. This is one instruction shorter.
5287 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
5288 and therefore don't use this. */
5290 if (!test_hard_reg_class (LD_REGS
, xreg
)
5291 && compare_eq_p (insn
)
5292 && reg_unused_after (insn
, xreg
))
5294 if (xval
== const1_rtx
)
5296 avr_asm_len ("dec %A0" CR_TAB
5297 "or %A0,%B0", xop
, plen
, 2);
5300 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
5303 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
5307 else if (xval
== constm1_rtx
)
5310 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
5313 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
5315 return avr_asm_len ("and %A0,%B0" CR_TAB
5316 "com %A0", xop
, plen
, 2);
5320 for (i
= 0; i
< n_bytes
; i
++)
5322 /* We compare byte-wise. */
5323 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
5324 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
5326 /* 8-bit value to compare with this byte. */
5327 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
5329 /* Registers R16..R31 can operate with immediate. */
5330 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
5333 xop
[1] = gen_int_mode (val8
, QImode
);
5335 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
5338 && test_hard_reg_class (ADDW_REGS
, reg8
))
5340 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
5342 if (IN_RANGE (val16
, 0, 63)
5344 || reg_unused_after (insn
, xreg
)))
5347 avr_asm_len (TINY_SBIW (%A0
, %B0
, %1), xop
, plen
, 2);
5349 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
5356 && IN_RANGE (val16
, -63, -1)
5357 && compare_eq_p (insn
)
5358 && reg_unused_after (insn
, xreg
))
5361 ? avr_asm_len (TINY_ADIW (%A0
, %B0
, %n1
), xop
, plen
, 2)
5362 : avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
5366 /* Comparing against 0 is easy. */
5371 ? "cp %0,__zero_reg__"
5372 : "cpc %0,__zero_reg__", xop
, plen
, 1);
5376 /* Upper registers can compare and subtract-with-carry immediates.
5377 Notice that compare instructions do the same as respective subtract
5378 instruction; the only difference is that comparisons don't write
5379 the result back to the target register. */
5385 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
5388 else if (reg_unused_after (insn
, xreg
))
5390 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
5395 /* Must load the value into the scratch register. */
5397 gcc_assert (REG_P (xop
[2]));
5399 if (clobber_val
!= (int) val8
)
5400 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
5401 clobber_val
= (int) val8
;
5405 : "cpc %0,%2", xop
, plen
, 1);
5412 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
5415 avr_out_compare64 (rtx_insn
*insn
, rtx
*op
, int *plen
)
5419 xop
[0] = gen_rtx_REG (DImode
, 18);
5423 return avr_out_compare (insn
, xop
, plen
);
5426 /* Output test instruction for HImode. */
5429 avr_out_tsthi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5431 if (compare_sign_p (insn
))
5433 avr_asm_len ("tst %B0", op
, plen
, -1);
5435 else if (reg_unused_after (insn
, op
[0])
5436 && compare_eq_p (insn
))
5438 /* Faster than sbiw if we can clobber the operand. */
5439 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
5443 avr_out_compare (insn
, op
, plen
);
5450 /* Output test instruction for PSImode. */
5453 avr_out_tstpsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5455 if (compare_sign_p (insn
))
5457 avr_asm_len ("tst %C0", op
, plen
, -1);
5459 else if (reg_unused_after (insn
, op
[0])
5460 && compare_eq_p (insn
))
5462 /* Faster than sbiw if we can clobber the operand. */
5463 avr_asm_len ("or %A0,%B0" CR_TAB
5464 "or %A0,%C0", op
, plen
, -2);
5468 avr_out_compare (insn
, op
, plen
);
5475 /* Output test instruction for SImode. */
5478 avr_out_tstsi (rtx_insn
*insn
, rtx
*op
, int *plen
)
5480 if (compare_sign_p (insn
))
5482 avr_asm_len ("tst %D0", op
, plen
, -1);
5484 else if (reg_unused_after (insn
, op
[0])
5485 && compare_eq_p (insn
))
5487 /* Faster than sbiw if we can clobber the operand. */
5488 avr_asm_len ("or %A0,%B0" CR_TAB
5490 "or %A0,%D0", op
, plen
, -3);
5494 avr_out_compare (insn
, op
, plen
);
5501 /* Generate asm equivalent for various shifts. This only handles cases
5502 that are not already carefully hand-optimized in ?sh??i3_out.
5504 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
5505 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
5506 OPERANDS[3] is a QImode scratch register from LD regs if
5507 available and SCRATCH, otherwise (no scratch available)
5509 TEMPL is an assembler template that shifts by one position.
5510 T_LEN is the length of this template. */
5513 out_shift_with_cnt (const char *templ
, rtx_insn
*insn
, rtx operands
[],
5514 int *plen
, int t_len
)
5516 bool second_label
= true;
5517 bool saved_in_tmp
= false;
5518 bool use_zero_reg
= false;
5521 op
[0] = operands
[0];
5522 op
[1] = operands
[1];
5523 op
[2] = operands
[2];
5524 op
[3] = operands
[3];
5529 if (CONST_INT_P (operands
[2]))
5531 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
5532 && REG_P (operands
[3]));
5533 int count
= INTVAL (operands
[2]);
5534 int max_len
= 10; /* If larger than this, always use a loop. */
5539 if (count
< 8 && !scratch
)
5540 use_zero_reg
= true;
5543 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
5545 if (t_len
* count
<= max_len
)
5547 /* Output shifts inline with no loop - faster. */
5550 avr_asm_len (templ
, op
, plen
, t_len
);
5557 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
5559 else if (use_zero_reg
)
5561 /* Hack to save one word: use __zero_reg__ as loop counter.
5562 Set one bit, then shift in a loop until it is 0 again. */
5564 op
[3] = zero_reg_rtx
;
5566 avr_asm_len ("set" CR_TAB
5567 "bld %3,%2-1", op
, plen
, 2);
5571 /* No scratch register available, use one from LD_REGS (saved in
5572 __tmp_reg__) that doesn't overlap with registers to shift. */
5574 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
5575 op
[4] = tmp_reg_rtx
;
5576 saved_in_tmp
= true;
5578 avr_asm_len ("mov %4,%3" CR_TAB
5579 "ldi %3,%2", op
, plen
, 2);
5582 second_label
= false;
5584 else if (MEM_P (op
[2]))
5588 op_mov
[0] = op
[3] = tmp_reg_rtx
;
5591 out_movqi_r_mr (insn
, op_mov
, plen
);
5593 else if (register_operand (op
[2], QImode
))
5597 if (!reg_unused_after (insn
, op
[2])
5598 || reg_overlap_mentioned_p (op
[0], op
[2]))
5600 op
[3] = tmp_reg_rtx
;
5601 avr_asm_len ("mov %3,%2", op
, plen
, 1);
5605 fatal_insn ("bad shift insn:", insn
);
5608 avr_asm_len ("rjmp 2f", op
, plen
, 1);
5610 avr_asm_len ("1:", op
, plen
, 0);
5611 avr_asm_len (templ
, op
, plen
, t_len
);
5614 avr_asm_len ("2:", op
, plen
, 0);
5616 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
5617 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
5620 avr_asm_len ("mov %3,%4", op
, plen
, 1);
5624 /* 8bit shift left ((char)x << i) */
5627 ashlqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5629 if (GET_CODE (operands
[2]) == CONST_INT
)
5636 switch (INTVAL (operands
[2]))
5639 if (INTVAL (operands
[2]) < 8)
5651 return ("lsl %0" CR_TAB
5656 return ("lsl %0" CR_TAB
5661 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5664 return ("swap %0" CR_TAB
5668 return ("lsl %0" CR_TAB
5674 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5677 return ("swap %0" CR_TAB
5682 return ("lsl %0" CR_TAB
5689 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5692 return ("swap %0" CR_TAB
5698 return ("lsl %0" CR_TAB
5707 return ("ror %0" CR_TAB
5712 else if (CONSTANT_P (operands
[2]))
5713 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5715 out_shift_with_cnt ("lsl %0",
5716 insn
, operands
, len
, 1);
5721 /* 16bit shift left ((short)x << i) */
5724 ashlhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
5726 if (GET_CODE (operands
[2]) == CONST_INT
)
5728 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5729 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5736 switch (INTVAL (operands
[2]))
5739 if (INTVAL (operands
[2]) < 16)
5743 return ("clr %B0" CR_TAB
5747 if (optimize_size
&& scratch
)
5752 return ("swap %A0" CR_TAB
5754 "andi %B0,0xf0" CR_TAB
5755 "eor %B0,%A0" CR_TAB
5756 "andi %A0,0xf0" CR_TAB
5762 return ("swap %A0" CR_TAB
5764 "ldi %3,0xf0" CR_TAB
5766 "eor %B0,%A0" CR_TAB
5770 break; /* optimize_size ? 6 : 8 */
5774 break; /* scratch ? 5 : 6 */
5778 return ("lsl %A0" CR_TAB
5782 "andi %B0,0xf0" CR_TAB
5783 "eor %B0,%A0" CR_TAB
5784 "andi %A0,0xf0" CR_TAB
5790 return ("lsl %A0" CR_TAB
5794 "ldi %3,0xf0" CR_TAB
5796 "eor %B0,%A0" CR_TAB
5804 break; /* scratch ? 5 : 6 */
5806 return ("clr __tmp_reg__" CR_TAB
5809 "ror __tmp_reg__" CR_TAB
5812 "ror __tmp_reg__" CR_TAB
5813 "mov %B0,%A0" CR_TAB
5814 "mov %A0,__tmp_reg__");
5818 return ("lsr %B0" CR_TAB
5819 "mov %B0,%A0" CR_TAB
5825 return *len
= 2, ("mov %B0,%A1" CR_TAB
5830 return ("mov %B0,%A0" CR_TAB
5836 return ("mov %B0,%A0" CR_TAB
5843 return ("mov %B0,%A0" CR_TAB
5853 return ("mov %B0,%A0" CR_TAB
5861 return ("mov %B0,%A0" CR_TAB
5864 "ldi %3,0xf0" CR_TAB
5868 return ("mov %B0,%A0" CR_TAB
5879 return ("mov %B0,%A0" CR_TAB
5885 if (AVR_HAVE_MUL
&& scratch
)
5888 return ("ldi %3,0x20" CR_TAB
5892 "clr __zero_reg__");
5894 if (optimize_size
&& scratch
)
5899 return ("mov %B0,%A0" CR_TAB
5903 "ldi %3,0xe0" CR_TAB
5909 return ("set" CR_TAB
5914 "clr __zero_reg__");
5917 return ("mov %B0,%A0" CR_TAB
5926 if (AVR_HAVE_MUL
&& ldi_ok
)
5929 return ("ldi %B0,0x40" CR_TAB
5930 "mul %A0,%B0" CR_TAB
5933 "clr __zero_reg__");
5935 if (AVR_HAVE_MUL
&& scratch
)
5938 return ("ldi %3,0x40" CR_TAB
5942 "clr __zero_reg__");
5944 if (optimize_size
&& ldi_ok
)
5947 return ("mov %B0,%A0" CR_TAB
5948 "ldi %A0,6" "\n1:\t"
5953 if (optimize_size
&& scratch
)
5956 return ("clr %B0" CR_TAB
5965 return ("clr %B0" CR_TAB
5972 out_shift_with_cnt ("lsl %A0" CR_TAB
5973 "rol %B0", insn
, operands
, len
, 2);
5978 /* 24-bit shift left */
5981 avr_out_ashlpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
5986 if (CONST_INT_P (op
[2]))
5988 switch (INTVAL (op
[2]))
5991 if (INTVAL (op
[2]) < 24)
5994 return avr_asm_len ("clr %A0" CR_TAB
5996 "clr %C0", op
, plen
, 3);
6000 int reg0
= REGNO (op
[0]);
6001 int reg1
= REGNO (op
[1]);
6004 return avr_asm_len ("mov %C0,%B1" CR_TAB
6005 "mov %B0,%A1" CR_TAB
6006 "clr %A0", op
, plen
, 3);
6008 return avr_asm_len ("clr %A0" CR_TAB
6009 "mov %B0,%A1" CR_TAB
6010 "mov %C0,%B1", op
, plen
, 3);
6015 int reg0
= REGNO (op
[0]);
6016 int reg1
= REGNO (op
[1]);
6018 if (reg0
+ 2 != reg1
)
6019 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
6021 return avr_asm_len ("clr %B0" CR_TAB
6022 "clr %A0", op
, plen
, 2);
6026 return avr_asm_len ("clr %C0" CR_TAB
6030 "clr %A0", op
, plen
, 5);
6034 out_shift_with_cnt ("lsl %A0" CR_TAB
6036 "rol %C0", insn
, op
, plen
, 3);
6041 /* 32bit shift left ((long)x << i) */
6044 ashlsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6046 if (GET_CODE (operands
[2]) == CONST_INT
)
6054 switch (INTVAL (operands
[2]))
6057 if (INTVAL (operands
[2]) < 32)
6061 return *len
= 3, ("clr %D0" CR_TAB
6065 return ("clr %D0" CR_TAB
6072 int reg0
= true_regnum (operands
[0]);
6073 int reg1
= true_regnum (operands
[1]);
6076 return ("mov %D0,%C1" CR_TAB
6077 "mov %C0,%B1" CR_TAB
6078 "mov %B0,%A1" CR_TAB
6081 return ("clr %A0" CR_TAB
6082 "mov %B0,%A1" CR_TAB
6083 "mov %C0,%B1" CR_TAB
6089 int reg0
= true_regnum (operands
[0]);
6090 int reg1
= true_regnum (operands
[1]);
6091 if (reg0
+ 2 == reg1
)
6092 return *len
= 2, ("clr %B0" CR_TAB
6095 return *len
= 3, ("movw %C0,%A1" CR_TAB
6099 return *len
= 4, ("mov %C0,%A1" CR_TAB
6100 "mov %D0,%B1" CR_TAB
6107 return ("mov %D0,%A1" CR_TAB
6114 return ("clr %D0" CR_TAB
6123 out_shift_with_cnt ("lsl %A0" CR_TAB
6126 "rol %D0", insn
, operands
, len
, 4);
6130 /* 8bit arithmetic shift right ((signed char)x >> i) */
6133 ashrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6135 if (GET_CODE (operands
[2]) == CONST_INT
)
6142 switch (INTVAL (operands
[2]))
6150 return ("asr %0" CR_TAB
6155 return ("asr %0" CR_TAB
6161 return ("asr %0" CR_TAB
6168 return ("asr %0" CR_TAB
6176 return ("bst %0,6" CR_TAB
6182 if (INTVAL (operands
[2]) < 8)
6189 return ("lsl %0" CR_TAB
6193 else if (CONSTANT_P (operands
[2]))
6194 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
6196 out_shift_with_cnt ("asr %0",
6197 insn
, operands
, len
, 1);
6202 /* 16bit arithmetic shift right ((signed short)x >> i) */
6205 ashrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6207 if (GET_CODE (operands
[2]) == CONST_INT
)
6209 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
6210 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
6217 switch (INTVAL (operands
[2]))
6221 /* XXX try to optimize this too? */
6226 break; /* scratch ? 5 : 6 */
6228 return ("mov __tmp_reg__,%A0" CR_TAB
6229 "mov %A0,%B0" CR_TAB
6230 "lsl __tmp_reg__" CR_TAB
6232 "sbc %B0,%B0" CR_TAB
6233 "lsl __tmp_reg__" CR_TAB
6239 return ("lsl %A0" CR_TAB
6240 "mov %A0,%B0" CR_TAB
6246 int reg0
= true_regnum (operands
[0]);
6247 int reg1
= true_regnum (operands
[1]);
6250 return *len
= 3, ("mov %A0,%B0" CR_TAB
6254 return *len
= 4, ("mov %A0,%B1" CR_TAB
6262 return ("mov %A0,%B0" CR_TAB
6264 "sbc %B0,%B0" CR_TAB
6269 return ("mov %A0,%B0" CR_TAB
6271 "sbc %B0,%B0" CR_TAB
6276 if (AVR_HAVE_MUL
&& ldi_ok
)
6279 return ("ldi %A0,0x20" CR_TAB
6280 "muls %B0,%A0" CR_TAB
6282 "sbc %B0,%B0" CR_TAB
6283 "clr __zero_reg__");
6285 if (optimize_size
&& scratch
)
6288 return ("mov %A0,%B0" CR_TAB
6290 "sbc %B0,%B0" CR_TAB
6296 if (AVR_HAVE_MUL
&& ldi_ok
)
6299 return ("ldi %A0,0x10" CR_TAB
6300 "muls %B0,%A0" CR_TAB
6302 "sbc %B0,%B0" CR_TAB
6303 "clr __zero_reg__");
6305 if (optimize_size
&& scratch
)
6308 return ("mov %A0,%B0" CR_TAB
6310 "sbc %B0,%B0" CR_TAB
6317 if (AVR_HAVE_MUL
&& ldi_ok
)
6320 return ("ldi %A0,0x08" CR_TAB
6321 "muls %B0,%A0" CR_TAB
6323 "sbc %B0,%B0" CR_TAB
6324 "clr __zero_reg__");
6327 break; /* scratch ? 5 : 7 */
6329 return ("mov %A0,%B0" CR_TAB
6331 "sbc %B0,%B0" CR_TAB
6340 return ("lsl %B0" CR_TAB
6341 "sbc %A0,%A0" CR_TAB
6343 "mov %B0,%A0" CR_TAB
6347 if (INTVAL (operands
[2]) < 16)
6353 return *len
= 3, ("lsl %B0" CR_TAB
6354 "sbc %A0,%A0" CR_TAB
6359 out_shift_with_cnt ("asr %B0" CR_TAB
6360 "ror %A0", insn
, operands
, len
, 2);
6365 /* 24-bit arithmetic shift right */
6368 avr_out_ashrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6370 int dest
= REGNO (op
[0]);
6371 int src
= REGNO (op
[1]);
6373 if (CONST_INT_P (op
[2]))
6378 switch (INTVAL (op
[2]))
6382 return avr_asm_len ("mov %A0,%B1" CR_TAB
6383 "mov %B0,%C1" CR_TAB
6386 "dec %C0", op
, plen
, 5);
6388 return avr_asm_len ("clr %C0" CR_TAB
6391 "mov %B0,%C1" CR_TAB
6392 "mov %A0,%B1", op
, plen
, 5);
6395 if (dest
!= src
+ 2)
6396 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
6398 return avr_asm_len ("clr %B0" CR_TAB
6401 "mov %C0,%B0", op
, plen
, 4);
6404 if (INTVAL (op
[2]) < 24)
6410 return avr_asm_len ("lsl %C0" CR_TAB
6411 "sbc %A0,%A0" CR_TAB
6412 "mov %B0,%A0" CR_TAB
6413 "mov %C0,%A0", op
, plen
, 4);
6417 out_shift_with_cnt ("asr %C0" CR_TAB
6419 "ror %A0", insn
, op
, plen
, 3);
6424 /* 32-bit arithmetic shift right ((signed long)x >> i) */
6427 ashrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6429 if (GET_CODE (operands
[2]) == CONST_INT
)
6437 switch (INTVAL (operands
[2]))
6441 int reg0
= true_regnum (operands
[0]);
6442 int reg1
= true_regnum (operands
[1]);
6445 return ("mov %A0,%B1" CR_TAB
6446 "mov %B0,%C1" CR_TAB
6447 "mov %C0,%D1" CR_TAB
6452 return ("clr %D0" CR_TAB
6455 "mov %C0,%D1" CR_TAB
6456 "mov %B0,%C1" CR_TAB
6462 int reg0
= true_regnum (operands
[0]);
6463 int reg1
= true_regnum (operands
[1]);
6465 if (reg0
== reg1
+ 2)
6466 return *len
= 4, ("clr %D0" CR_TAB
6471 return *len
= 5, ("movw %A0,%C1" CR_TAB
6477 return *len
= 6, ("mov %B0,%D1" CR_TAB
6478 "mov %A0,%C1" CR_TAB
6486 return *len
= 6, ("mov %A0,%D1" CR_TAB
6490 "mov %B0,%D0" CR_TAB
6494 if (INTVAL (operands
[2]) < 32)
6501 return *len
= 4, ("lsl %D0" CR_TAB
6502 "sbc %A0,%A0" CR_TAB
6503 "mov %B0,%A0" CR_TAB
6506 return *len
= 5, ("lsl %D0" CR_TAB
6507 "sbc %A0,%A0" CR_TAB
6508 "mov %B0,%A0" CR_TAB
6509 "mov %C0,%A0" CR_TAB
6514 out_shift_with_cnt ("asr %D0" CR_TAB
6517 "ror %A0", insn
, operands
, len
, 4);
6521 /* 8-bit logic shift right ((unsigned char)x >> i) */
6524 lshrqi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6526 if (GET_CODE (operands
[2]) == CONST_INT
)
6533 switch (INTVAL (operands
[2]))
6536 if (INTVAL (operands
[2]) < 8)
6548 return ("lsr %0" CR_TAB
6552 return ("lsr %0" CR_TAB
6557 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6560 return ("swap %0" CR_TAB
6564 return ("lsr %0" CR_TAB
6570 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6573 return ("swap %0" CR_TAB
6578 return ("lsr %0" CR_TAB
6585 if (test_hard_reg_class (LD_REGS
, operands
[0]))
6588 return ("swap %0" CR_TAB
6594 return ("lsr %0" CR_TAB
6603 return ("rol %0" CR_TAB
6608 else if (CONSTANT_P (operands
[2]))
6609 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
6611 out_shift_with_cnt ("lsr %0",
6612 insn
, operands
, len
, 1);
6616 /* 16-bit logic shift right ((unsigned short)x >> i) */
6619 lshrhi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6621 if (GET_CODE (operands
[2]) == CONST_INT
)
6623 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
6624 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
6631 switch (INTVAL (operands
[2]))
6634 if (INTVAL (operands
[2]) < 16)
6638 return ("clr %B0" CR_TAB
6642 if (optimize_size
&& scratch
)
6647 return ("swap %B0" CR_TAB
6649 "andi %A0,0x0f" CR_TAB
6650 "eor %A0,%B0" CR_TAB
6651 "andi %B0,0x0f" CR_TAB
6657 return ("swap %B0" CR_TAB
6659 "ldi %3,0x0f" CR_TAB
6661 "eor %A0,%B0" CR_TAB
6665 break; /* optimize_size ? 6 : 8 */
6669 break; /* scratch ? 5 : 6 */
6673 return ("lsr %B0" CR_TAB
6677 "andi %A0,0x0f" CR_TAB
6678 "eor %A0,%B0" CR_TAB
6679 "andi %B0,0x0f" CR_TAB
6685 return ("lsr %B0" CR_TAB
6689 "ldi %3,0x0f" CR_TAB
6691 "eor %A0,%B0" CR_TAB
6699 break; /* scratch ? 5 : 6 */
6701 return ("clr __tmp_reg__" CR_TAB
6704 "rol __tmp_reg__" CR_TAB
6707 "rol __tmp_reg__" CR_TAB
6708 "mov %A0,%B0" CR_TAB
6709 "mov %B0,__tmp_reg__");
6713 return ("lsl %A0" CR_TAB
6714 "mov %A0,%B0" CR_TAB
6716 "sbc %B0,%B0" CR_TAB
6720 return *len
= 2, ("mov %A0,%B1" CR_TAB
6725 return ("mov %A0,%B0" CR_TAB
6731 return ("mov %A0,%B0" CR_TAB
6738 return ("mov %A0,%B0" CR_TAB
6748 return ("mov %A0,%B0" CR_TAB
6756 return ("mov %A0,%B0" CR_TAB
6759 "ldi %3,0x0f" CR_TAB
6763 return ("mov %A0,%B0" CR_TAB
6774 return ("mov %A0,%B0" CR_TAB
6780 if (AVR_HAVE_MUL
&& scratch
)
6783 return ("ldi %3,0x08" CR_TAB
6787 "clr __zero_reg__");
6789 if (optimize_size
&& scratch
)
6794 return ("mov %A0,%B0" CR_TAB
6798 "ldi %3,0x07" CR_TAB
6804 return ("set" CR_TAB
6809 "clr __zero_reg__");
6812 return ("mov %A0,%B0" CR_TAB
6821 if (AVR_HAVE_MUL
&& ldi_ok
)
6824 return ("ldi %A0,0x04" CR_TAB
6825 "mul %B0,%A0" CR_TAB
6828 "clr __zero_reg__");
6830 if (AVR_HAVE_MUL
&& scratch
)
6833 return ("ldi %3,0x04" CR_TAB
6837 "clr __zero_reg__");
6839 if (optimize_size
&& ldi_ok
)
6842 return ("mov %A0,%B0" CR_TAB
6843 "ldi %B0,6" "\n1:\t"
6848 if (optimize_size
&& scratch
)
6851 return ("clr %A0" CR_TAB
6860 return ("clr %A0" CR_TAB
6867 out_shift_with_cnt ("lsr %B0" CR_TAB
6868 "ror %A0", insn
, operands
, len
, 2);
6873 /* 24-bit logic shift right */
6876 avr_out_lshrpsi3 (rtx_insn
*insn
, rtx
*op
, int *plen
)
6878 int dest
= REGNO (op
[0]);
6879 int src
= REGNO (op
[1]);
6881 if (CONST_INT_P (op
[2]))
6886 switch (INTVAL (op
[2]))
6890 return avr_asm_len ("mov %A0,%B1" CR_TAB
6891 "mov %B0,%C1" CR_TAB
6892 "clr %C0", op
, plen
, 3);
6894 return avr_asm_len ("clr %C0" CR_TAB
6895 "mov %B0,%C1" CR_TAB
6896 "mov %A0,%B1", op
, plen
, 3);
6899 if (dest
!= src
+ 2)
6900 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
6902 return avr_asm_len ("clr %B0" CR_TAB
6903 "clr %C0", op
, plen
, 2);
6906 if (INTVAL (op
[2]) < 24)
6912 return avr_asm_len ("clr %A0" CR_TAB
6916 "clr %C0", op
, plen
, 5);
6920 out_shift_with_cnt ("lsr %C0" CR_TAB
6922 "ror %A0", insn
, op
, plen
, 3);
6927 /* 32-bit logic shift right ((unsigned int)x >> i) */
6930 lshrsi3_out (rtx_insn
*insn
, rtx operands
[], int *len
)
6932 if (GET_CODE (operands
[2]) == CONST_INT
)
6940 switch (INTVAL (operands
[2]))
6943 if (INTVAL (operands
[2]) < 32)
6947 return *len
= 3, ("clr %D0" CR_TAB
6951 return ("clr %D0" CR_TAB
6958 int reg0
= true_regnum (operands
[0]);
6959 int reg1
= true_regnum (operands
[1]);
6962 return ("mov %A0,%B1" CR_TAB
6963 "mov %B0,%C1" CR_TAB
6964 "mov %C0,%D1" CR_TAB
6967 return ("clr %D0" CR_TAB
6968 "mov %C0,%D1" CR_TAB
6969 "mov %B0,%C1" CR_TAB
6975 int reg0
= true_regnum (operands
[0]);
6976 int reg1
= true_regnum (operands
[1]);
6978 if (reg0
== reg1
+ 2)
6979 return *len
= 2, ("clr %C0" CR_TAB
6982 return *len
= 3, ("movw %A0,%C1" CR_TAB
6986 return *len
= 4, ("mov %B0,%D1" CR_TAB
6987 "mov %A0,%C1" CR_TAB
6993 return *len
= 4, ("mov %A0,%D1" CR_TAB
7000 return ("clr %A0" CR_TAB
7009 out_shift_with_cnt ("lsr %D0" CR_TAB
7012 "ror %A0", insn
, operands
, len
, 4);
7017 /* Output addition of register XOP[0] and compile time constant XOP[2].
7018 CODE == PLUS: perform addition by using ADD instructions or
7019 CODE == MINUS: perform addition by using SUB instructions:
7021 XOP[0] = XOP[0] + XOP[2]
7023 Or perform addition/subtraction with register XOP[2] depending on CODE:
7025 XOP[0] = XOP[0] +/- XOP[2]
7027 If PLEN == NULL, print assembler instructions to perform the operation;
7028 otherwise, set *PLEN to the length of the instruction sequence (in words)
7029 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
7030 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7032 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7033 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7034 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
7035 the subtrahend in the original insn, provided it is a compile time constant.
7036 In all other cases, SIGN is 0.
7038 If OUT_LABEL is true, print the final 0: label which is needed for
7039 saturated addition / subtraction. The only case where OUT_LABEL = false
7040 is useful is for saturated addition / subtraction performed during
7041 fixed-point rounding, cf. `avr_out_round'. */
7044 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
7045 enum rtx_code code_sat
, int sign
, bool out_label
)
7047 /* MODE of the operation. */
7048 machine_mode mode
= GET_MODE (xop
[0]);
7050 /* INT_MODE of the same size. */
7051 machine_mode imode
= int_mode_for_mode (mode
);
7053 /* Number of bytes to operate on. */
7054 int i
, n_bytes
= GET_MODE_SIZE (mode
);
7056 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7057 int clobber_val
= -1;
7059 /* op[0]: 8-bit destination register
7060 op[1]: 8-bit const int
7061 op[2]: 8-bit scratch register */
7064 /* Started the operation? Before starting the operation we may skip
7065 adding 0. This is no more true after the operation started because
7066 carry must be taken into account. */
7067 bool started
= false;
7069 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
7072 /* Output a BRVC instruction. Only needed with saturation. */
7073 bool out_brvc
= true;
7080 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_CLOBBER
;
7082 for (i
= 0; i
< n_bytes
; i
++)
7084 /* We operate byte-wise on the destination. */
7085 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
7086 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
7089 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
7092 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
7096 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
7098 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
7107 /* Except in the case of ADIW with 16-bit register (see below)
7108 addition does not set cc0 in a usable way. */
7110 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
7112 if (CONST_FIXED_P (xval
))
7113 xval
= avr_to_int_mode (xval
);
7115 /* Adding/Subtracting zero is a no-op. */
7117 if (xval
== const0_rtx
)
7124 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
7128 if (SS_PLUS
== code_sat
&& MINUS
== code
7130 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
7131 & GET_MODE_MASK (QImode
)))
7133 /* We compute x + 0x80 by means of SUB instructions. We negated the
7134 constant subtrahend above and are left with x - (-128) so that we
7135 need something like SUBI r,128 which does not exist because SUBI sets
7136 V according to the sign of the subtrahend. Notice the only case
7137 where this must be done is when NEG overflowed in case [2s] because
7138 the V computation needs the right sign of the subtrahend. */
7140 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
7142 avr_asm_len ("subi %0,128" CR_TAB
7143 "brmi 0f", &msb
, plen
, 2);
7149 for (i
= 0; i
< n_bytes
; i
++)
7151 /* We operate byte-wise on the destination. */
7152 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
7153 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
7155 /* 8-bit value to operate with this byte. */
7156 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
7158 /* Registers R16..R31 can operate with immediate. */
7159 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
7162 op
[1] = gen_int_mode (val8
, QImode
);
7164 /* To get usable cc0 no low-bytes must have been skipped. */
7172 && test_hard_reg_class (ADDW_REGS
, reg8
))
7174 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
7175 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
7177 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
7178 i.e. operate word-wise. */
7185 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
7188 if (n_bytes
== 2 && PLUS
== code
)
7200 avr_asm_len (code
== PLUS
7201 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
7205 else if ((val8
== 1 || val8
== 0xff)
7206 && UNKNOWN
== code_sat
7208 && i
== n_bytes
- 1)
7210 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
7220 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
7222 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
7224 /* This belongs to the x + 0x80 corner case. The code with
7225 ADD instruction is not smaller, thus make this case
7226 expensive so that the caller won't pick it. */
7232 if (clobber_val
!= (int) val8
)
7233 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7234 clobber_val
= (int) val8
;
7236 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
7243 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
7246 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
7248 if (clobber_val
!= (int) val8
)
7249 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7250 clobber_val
= (int) val8
;
7252 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
7264 } /* for all sub-bytes */
7268 if (UNKNOWN
== code_sat
)
7271 *pcc
= (int) CC_CLOBBER
;
7273 /* Vanilla addition/subtraction is done. We are left with saturation.
7275 We have to compute A = A <op> B where A is a register and
7276 B is a register or a non-zero compile time constant CONST.
7277 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
7278 B stands for the original operand $2 in INSN. In the case of B = CONST,
7279 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
7281 CODE is the instruction flavor we use in the asm sequence to perform <op>.
7285 operation | code | sat if | b is | sat value | case
7286 -----------------+-------+----------+--------------+-----------+-------
7287 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
7288 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
7289 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
7290 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
7294 operation | code | sat if | b is | sat value | case
7295 -----------------+-------+----------+--------------+-----------+-------
7296 + as a + b | add | V == 1 | const, reg | s+ | [1s]
7297 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
7298 - as a - b | sub | V == 1 | const, reg | s- | [3s]
7299 - as a + (-b) | add | V == 1 | const | s- | [4s]
7301 s+ = b < 0 ? -0x80 : 0x7f
7302 s- = b < 0 ? 0x7f : -0x80
7304 The cases a - b actually perform a - (-(-b)) if B is CONST.
7307 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
7309 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
7312 bool need_copy
= true;
7313 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
7324 avr_asm_len ("brvc 0f", op
, plen
, 1);
7326 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
7331 avr_asm_len ("ldi %0,0x7f" CR_TAB
7332 "adc %0,__zero_reg__", op
, plen
, 2);
7334 avr_asm_len ("ldi %0,0x7f" CR_TAB
7335 "ldi %1,0xff" CR_TAB
7336 "adc %1,__zero_reg__" CR_TAB
7337 "adc %0,__zero_reg__", op
, plen
, 4);
7339 else if (sign
== 0 && PLUS
== code
)
7343 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
7346 avr_asm_len ("ldi %0,0x80" CR_TAB
7348 "dec %0", op
, plen
, 3);
7350 avr_asm_len ("ldi %0,0x80" CR_TAB
7353 "sbci %0,0", op
, plen
, 4);
7355 else if (sign
== 0 && MINUS
== code
)
7359 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
7362 avr_asm_len ("ldi %0,0x7f" CR_TAB
7364 "inc %0", op
, plen
, 3);
7366 avr_asm_len ("ldi %0,0x7f" CR_TAB
7369 "sbci %0,-1", op
, plen
, 4);
7371 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
7373 /* [1s,const,B < 0] [2s,B < 0] */
7374 /* [3s,const,B > 0] [4s,B > 0] */
7378 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
7382 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
7383 if (n_bytes
> 1 && need_copy
)
7384 avr_asm_len ("clr %1", op
, plen
, 1);
7386 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
7388 /* [1s,const,B > 0] [2s,B > 0] */
7389 /* [3s,const,B < 0] [4s,B < 0] */
7393 avr_asm_len ("sec" CR_TAB
7394 "%~call __sbc_8", op
, plen
, 1 + len_call
);
7398 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
7399 if (n_bytes
> 1 && need_copy
)
7400 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
7410 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
7415 avr_asm_len ("sec", op
, plen
, 1);
7416 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
7422 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
7423 avr_asm_len ("sec" CR_TAB
7424 "sbc %0,%0", op
, plen
, 2);
7426 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
7429 break; /* US_PLUS */
7434 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
7438 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
7442 avr_asm_len ("clr %0", op
, plen
, 1);
7447 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
7448 Now copy the right value to the LSBs. */
7450 if (need_copy
&& n_bytes
> 1)
7452 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
7454 avr_asm_len ("mov %1,%0", op
, plen
, 1);
7460 avr_asm_len ("movw %0,%1", op
, plen
, 1);
7462 avr_asm_len ("mov %A0,%1" CR_TAB
7463 "mov %B0,%1", op
, plen
, 2);
7466 else if (n_bytes
> 2)
7469 avr_asm_len ("mov %A0,%1" CR_TAB
7470 "mov %B0,%1", op
, plen
, 2);
7474 if (need_copy
&& n_bytes
== 8)
7477 avr_asm_len ("movw %r0+2,%0" CR_TAB
7478 "movw %r0+4,%0", xop
, plen
, 2);
7480 avr_asm_len ("mov %r0+2,%0" CR_TAB
7481 "mov %r0+3,%0" CR_TAB
7482 "mov %r0+4,%0" CR_TAB
7483 "mov %r0+5,%0", xop
, plen
, 4);
7487 avr_asm_len ("0:", op
, plen
, 0);
7491 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
7492 is ont a compile-time constant:
7494 XOP[0] = XOP[0] +/- XOP[2]
7496 This is a helper for the function below. The only insns that need this
7497 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
7500 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
7502 machine_mode mode
= GET_MODE (xop
[0]);
7504 /* Only pointer modes want to add symbols. */
7506 gcc_assert (mode
== HImode
|| mode
== PSImode
);
7508 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
7510 avr_asm_len (PLUS
== code
7511 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
7512 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
7515 if (PSImode
== mode
)
7516 avr_asm_len (PLUS
== code
7517 ? "sbci %C0,hlo8(-(%2))"
7518 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
7523 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
7525 INSN is a single_set insn or an insn pattern with a binary operation as
7526 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
7528 XOP are the operands of INSN. In the case of 64-bit operations with
7529 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
7530 The non-saturating insns up to 32 bits may or may not supply a "d" class
7533 If PLEN == NULL output the instructions.
7534 If PLEN != NULL set *PLEN to the length of the sequence in words.
7536 PCC is a pointer to store the instructions' effect on cc0.
7539 PLEN and PCC default to NULL.
7541 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
7546 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
, bool out_label
)
7548 int cc_plus
, cc_minus
, cc_dummy
;
7549 int len_plus
, len_minus
;
7551 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
7552 rtx xdest
= SET_DEST (xpattern
);
7553 machine_mode mode
= GET_MODE (xdest
);
7554 machine_mode imode
= int_mode_for_mode (mode
);
7555 int n_bytes
= GET_MODE_SIZE (mode
);
7556 enum rtx_code code_sat
= GET_CODE (SET_SRC (xpattern
));
7558 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
7564 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
7566 if (PLUS
== code_sat
|| MINUS
== code_sat
)
7569 if (n_bytes
<= 4 && REG_P (xop
[2]))
7571 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
, 0, out_label
);
7577 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
7578 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
7579 op
[2] = avr_to_int_mode (xop
[0]);
7584 && !CONST_INT_P (xop
[2])
7585 && !CONST_FIXED_P (xop
[2]))
7587 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
7590 op
[0] = avr_to_int_mode (xop
[0]);
7591 op
[1] = avr_to_int_mode (xop
[1]);
7592 op
[2] = avr_to_int_mode (xop
[2]);
7595 /* Saturations and 64-bit operations don't have a clobber operand.
7596 For the other cases, the caller will provide a proper XOP[3]. */
7598 xpattern
= INSN_P (insn
) ? PATTERN (insn
) : insn
;
7599 op
[3] = PARALLEL
== GET_CODE (xpattern
) ? xop
[3] : NULL_RTX
;
7601 /* Saturation will need the sign of the original operand. */
7603 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
7604 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
7606 /* If we subtract and the subtrahend is a constant, then negate it
7607 so that avr_out_plus_1 can be used. */
7610 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
7612 /* Work out the shortest sequence. */
7614 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_minus
, code_sat
, sign
, out_label
);
7615 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_plus
, code_sat
, sign
, out_label
);
7619 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
7620 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
7622 else if (len_minus
<= len_plus
)
7623 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
, out_label
);
7625 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
, out_label
);
7631 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
7632 time constant XOP[2]:
7634 XOP[0] = XOP[0] <op> XOP[2]
7636 and return "". If PLEN == NULL, print assembler instructions to perform the
7637 operation; otherwise, set *PLEN to the length of the instruction sequence
7638 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
7639 register or SCRATCH if no clobber register is needed for the operation.
7640 INSN is an INSN_P or a pattern of an insn. */
7643 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
7645 /* CODE and MODE of the operation. */
7646 rtx xpattern
= INSN_P (insn
) ? single_set (as_a
<rtx_insn
*> (insn
)) : insn
;
7647 enum rtx_code code
= GET_CODE (SET_SRC (xpattern
));
7648 machine_mode mode
= GET_MODE (xop
[0]);
7650 /* Number of bytes to operate on. */
7651 int i
, n_bytes
= GET_MODE_SIZE (mode
);
7653 /* Value of T-flag (0 or 1) or -1 if unknow. */
7656 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
7657 int clobber_val
= -1;
7659 /* op[0]: 8-bit destination register
7660 op[1]: 8-bit const int
7661 op[2]: 8-bit clobber register or SCRATCH
7662 op[3]: 8-bit register containing 0xff or NULL_RTX */
7671 for (i
= 0; i
< n_bytes
; i
++)
7673 /* We operate byte-wise on the destination. */
7674 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
7675 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
7677 /* 8-bit value to operate with this byte. */
7678 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
7680 /* Number of bits set in the current byte of the constant. */
7681 int pop8
= avr_popcount (val8
);
7683 /* Registers R16..R31 can operate with immediate. */
7684 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
7687 op
[1] = GEN_INT (val8
);
7696 avr_asm_len ("ori %0,%1", op
, plen
, 1);
7700 avr_asm_len ("set", op
, plen
, 1);
7703 op
[1] = GEN_INT (exact_log2 (val8
));
7704 avr_asm_len ("bld %0,%1", op
, plen
, 1);
7708 if (op
[3] != NULL_RTX
)
7709 avr_asm_len ("mov %0,%3", op
, plen
, 1);
7711 avr_asm_len ("clr %0" CR_TAB
7712 "dec %0", op
, plen
, 2);
7718 if (clobber_val
!= (int) val8
)
7719 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7720 clobber_val
= (int) val8
;
7722 avr_asm_len ("or %0,%2", op
, plen
, 1);
7732 avr_asm_len ("clr %0", op
, plen
, 1);
7734 avr_asm_len ("andi %0,%1", op
, plen
, 1);
7738 avr_asm_len ("clt", op
, plen
, 1);
7741 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
7742 avr_asm_len ("bld %0,%1", op
, plen
, 1);
7746 if (clobber_val
!= (int) val8
)
7747 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7748 clobber_val
= (int) val8
;
7750 avr_asm_len ("and %0,%2", op
, plen
, 1);
7760 avr_asm_len ("com %0", op
, plen
, 1);
7761 else if (ld_reg_p
&& val8
== (1 << 7))
7762 avr_asm_len ("subi %0,%1", op
, plen
, 1);
7765 if (clobber_val
!= (int) val8
)
7766 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
7767 clobber_val
= (int) val8
;
7769 avr_asm_len ("eor %0,%2", op
, plen
, 1);
7775 /* Unknown rtx_code */
7778 } /* for all sub-bytes */
7784 /* Output sign extension from XOP[1] to XOP[0] and return "".
7785 If PLEN == NULL, print assembler instructions to perform the operation;
7786 otherwise, set *PLEN to the length of the instruction sequence (in words)
7787 as printed with PLEN == NULL. */
7790 avr_out_sign_extend (rtx_insn
*insn
, rtx
*xop
, int *plen
)
7792 // Size in bytes of source resp. destination operand.
7793 unsigned n_src
= GET_MODE_SIZE (GET_MODE (xop
[1]));
7794 unsigned n_dest
= GET_MODE_SIZE (GET_MODE (xop
[0]));
7795 rtx r_msb
= all_regs_rtx
[REGNO (xop
[1]) + n_src
- 1];
7800 // Copy destination to source
7802 if (REGNO (xop
[0]) != REGNO (xop
[1]))
7804 gcc_assert (n_src
<= 2);
7807 avr_asm_len (AVR_HAVE_MOVW
7809 : "mov %B0,%B1", xop
, plen
, 1);
7810 if (n_src
== 1 || !AVR_HAVE_MOVW
)
7811 avr_asm_len ("mov %A0,%A1", xop
, plen
, 1);
7814 // Set Carry to the sign bit MSB.7...
7816 if (REGNO (xop
[0]) == REGNO (xop
[1])
7817 || !reg_unused_after (insn
, r_msb
))
7819 avr_asm_len ("mov __tmp_reg__,%0", &r_msb
, plen
, 1);
7820 r_msb
= tmp_reg_rtx
;
7823 avr_asm_len ("lsl %0", &r_msb
, plen
, 1);
7825 // ...and propagate it to all the new sign bits
7827 for (unsigned n
= n_src
; n
< n_dest
; n
++)
7828 avr_asm_len ("sbc %0,%0", &all_regs_rtx
[REGNO (xop
[0]) + n
], plen
, 1);
7834 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7835 PLEN != NULL: Set *PLEN to the length of that sequence.
7839 avr_out_addto_sp (rtx
*op
, int *plen
)
7841 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
7842 int addend
= INTVAL (op
[0]);
7849 if (flag_verbose_asm
|| flag_print_asm_name
)
7850 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
7852 while (addend
<= -pc_len
)
7855 avr_asm_len ("rcall .", op
, plen
, 1);
7858 while (addend
++ < 0)
7859 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
7861 else if (addend
> 0)
7863 if (flag_verbose_asm
|| flag_print_asm_name
)
7864 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
7866 while (addend
-- > 0)
7867 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
7874 /* Outputs instructions needed for fixed point type conversion.
7875 This includes converting between any fixed point type, as well
7876 as converting to any integer type. Conversion between integer
7877 types is not supported.
7879 Converting signed fractional types requires a bit shift if converting
7880 to or from any unsigned fractional type because the decimal place is
7881 shifted by 1 bit. When the destination is a signed fractional, the sign
7882 is stored in either the carry or T bit. */
7885 avr_out_fract (rtx_insn
*insn
, rtx operands
[], bool intsigned
, int *plen
)
7889 RTX_CODE shift
= UNKNOWN
;
7890 bool sign_in_carry
= false;
7891 bool msb_in_carry
= false;
7892 bool lsb_in_tmp_reg
= false;
7893 bool lsb_in_carry
= false;
7894 bool frac_rounded
= false;
7895 const char *code_ashift
= "lsl %0";
7898 #define MAY_CLOBBER(RR) \
7899 /* Shorthand used below. */ \
7901 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7902 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7903 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7904 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7908 /* bytes : Length of operand in bytes.
7909 ibyte : Length of integral part in bytes.
7910 fbyte, fbit : Length of fractional part in bytes, bits. */
7913 unsigned fbit
, bytes
, ibyte
, fbyte
;
7914 unsigned regno
, regno_msb
;
7915 } dest
, src
, *val
[2] = { &dest
, &src
};
7920 /* Step 0: Determine information on source and destination operand we
7921 ====== will need in the remainder. */
7923 for (i
= 0; i
< sizeof (val
) / sizeof (*val
); i
++)
7927 xop
[i
] = operands
[i
];
7929 mode
= GET_MODE (xop
[i
]);
7931 val
[i
]->bytes
= GET_MODE_SIZE (mode
);
7932 val
[i
]->regno
= REGNO (xop
[i
]);
7933 val
[i
]->regno_msb
= REGNO (xop
[i
]) + val
[i
]->bytes
- 1;
7935 if (SCALAR_INT_MODE_P (mode
))
7937 val
[i
]->sbit
= intsigned
;
7940 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
7942 val
[i
]->sbit
= SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
7943 val
[i
]->fbit
= GET_MODE_FBIT (mode
);
7946 fatal_insn ("unsupported fixed-point conversion", insn
);
7948 val
[i
]->fbyte
= (1 + val
[i
]->fbit
) / BITS_PER_UNIT
;
7949 val
[i
]->ibyte
= val
[i
]->bytes
- val
[i
]->fbyte
;
7952 // Byte offset of the decimal point taking into account different place
7953 // of the decimal point in input and output and different register numbers
7954 // of input and output.
7955 int offset
= dest
.regno
- src
.regno
+ dest
.fbyte
- src
.fbyte
;
7957 // Number of destination bytes that will come from sign / zero extension.
7958 int sign_bytes
= (dest
.ibyte
- src
.ibyte
) * (dest
.ibyte
> src
.ibyte
);
7960 // Number of bytes at the low end to be filled with zeros.
7961 int zero_bytes
= (dest
.fbyte
- src
.fbyte
) * (dest
.fbyte
> src
.fbyte
);
7963 // Do we have a 16-Bit register that is cleared?
7964 rtx clrw
= NULL_RTX
;
7966 bool sign_extend
= src
.sbit
&& sign_bytes
;
7968 if (0 == dest
.fbit
% 8 && 7 == src
.fbit
% 8)
7970 else if (7 == dest
.fbit
% 8 && 0 == src
.fbit
% 8)
7972 else if (dest
.fbit
% 8 == src
.fbit
% 8)
7977 /* If we need to round the fraction part, we might need to save/round it
7978 before clobbering any of it in Step 1. Also, we might want to do
7979 the rounding now to make use of LD_REGS. */
7980 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7981 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7982 && !TARGET_FRACT_CONV_TRUNC
)
7986 (offset
? dest
.regno_msb
- sign_bytes
: dest
.regno
+ zero_bytes
- 1)
7987 && dest
.regno
- offset
-1 >= dest
.regno
);
7988 unsigned s0
= dest
.regno
- offset
-1;
7989 bool use_src
= true;
7991 unsigned copied_msb
= src
.regno_msb
;
7992 bool have_carry
= false;
7994 if (src
.ibyte
> dest
.ibyte
)
7995 copied_msb
-= src
.ibyte
- dest
.ibyte
;
7997 for (sn
= s0
; sn
<= copied_msb
; sn
++)
7998 if (!IN_RANGE (sn
, dest
.regno
, dest
.regno_msb
)
7999 && !reg_unused_after (insn
, all_regs_rtx
[sn
]))
8001 if (use_src
&& TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
))
8003 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
8004 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
8008 if (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], sn
))
8009 avr_asm_len ("cpi %0,1", &all_regs_rtx
[sn
], plen
, 1);
8011 avr_asm_len ("sec" CR_TAB
8012 "cpc %0,__zero_reg__",
8013 &all_regs_rtx
[sn
], plen
, 2);
8017 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8019 avr_asm_len (have_carry
? "sbci %0,128" : "subi %0,129",
8020 &all_regs_rtx
[s0
], plen
, 1);
8021 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
8022 avr_asm_len ("sbci %0,255", &all_regs_rtx
[sn
], plen
, 1);
8023 avr_asm_len ("\n0:", NULL
, plen
, 0);
8024 frac_rounded
= true;
8026 else if (use_src
&& overlap
)
8028 avr_asm_len ("clr __tmp_reg__" CR_TAB
8030 "dec __tmp_reg__", xop
, plen
, 1);
8034 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
8039 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
8042 avr_asm_len ("clt" CR_TAB
8043 "bld __tmp_reg__,7" CR_TAB
8044 "adc %0,__tmp_reg__",
8045 &all_regs_rtx
[s0
], plen
, 1);
8047 avr_asm_len ("lsr __tmp_reg" CR_TAB
8048 "add %0,__tmp_reg__",
8049 &all_regs_rtx
[s0
], plen
, 2);
8050 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
8051 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8052 frac_rounded
= true;
8057 = (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
)
8058 && (IN_RANGE (s0
, dest
.regno
, dest
.regno_msb
)
8059 || reg_unused_after (insn
, all_regs_rtx
[s0
])));
8060 xop
[2] = all_regs_rtx
[s0
];
8061 unsigned sn
= src
.regno
;
8062 if (!use_src
|| sn
== s0
)
8063 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
8064 /* We need to consider to-be-discarded bits
8065 if the value is negative. */
8068 avr_asm_len ("tst %0" CR_TAB
8070 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
8071 /* Test to-be-discarded bytes for any nozero bits.
8072 ??? Could use OR or SBIW to test two registers at once. */
8074 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8077 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
8078 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
8080 avr_asm_len ("breq 0f" CR_TAB
8082 "\n0:\t" "mov __tmp_reg__,%2",
8085 avr_asm_len ("breq 0f" CR_TAB
8087 "bld __tmp_reg__,0\n0:",
8090 lsb_in_tmp_reg
= true;
8094 /* Step 1: Clear bytes at the low end and copy payload bits from source
8095 ====== to destination. */
8097 int step
= offset
< 0 ? 1 : -1;
8098 unsigned d0
= offset
< 0 ? dest
.regno
: dest
.regno_msb
;
8100 // We cleared at least that number of registers.
8103 for (; d0
>= dest
.regno
&& d0
<= dest
.regno_msb
; d0
+= step
)
8105 // Next regno of destination is needed for MOVW
8106 unsigned d1
= d0
+ step
;
8108 // Current and next regno of source
8109 signed s0
= d0
- offset
;
8110 signed s1
= s0
+ step
;
8112 // Must current resp. next regno be CLRed? This applies to the low
8113 // bytes of the destination that have no associated source bytes.
8114 bool clr0
= s0
< (signed) src
.regno
;
8115 bool clr1
= s1
< (signed) src
.regno
&& d1
>= dest
.regno
;
8117 // First gather what code to emit (if any) and additional step to
8118 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
8119 // is the source rtx for the current loop iteration.
8120 const char *code
= NULL
;
8125 if (AVR_HAVE_MOVW
&& clr1
&& clrw
)
8127 xop
[2] = all_regs_rtx
[d0
& ~1];
8129 code
= "movw %2,%3";
8134 xop
[2] = all_regs_rtx
[d0
];
8139 && d0
% 2 == (step
> 0))
8141 clrw
= all_regs_rtx
[d0
& ~1];
8145 else if (offset
&& s0
<= (signed) src
.regno_msb
)
8147 int movw
= AVR_HAVE_MOVW
&& offset
% 2 == 0
8148 && d0
% 2 == (offset
> 0)
8149 && d1
<= dest
.regno_msb
&& d1
>= dest
.regno
8150 && s1
<= (signed) src
.regno_msb
&& s1
>= (signed) src
.regno
;
8152 xop
[2] = all_regs_rtx
[d0
& ~movw
];
8153 xop
[3] = all_regs_rtx
[s0
& ~movw
];
8154 code
= movw
? "movw %2,%3" : "mov %2,%3";
8155 stepw
= step
* movw
;
8160 if (sign_extend
&& shift
!= ASHIFT
&& !sign_in_carry
8161 && (d0
== src
.regno_msb
|| d0
+ stepw
== src
.regno_msb
))
8163 /* We are going to override the sign bit. If we sign-extend,
8164 store the sign in the Carry flag. This is not needed if
8165 the destination will be ASHIFT in the remainder because
8166 the ASHIFT will set Carry without extra instruction. */
8168 avr_asm_len ("lsl %0", &all_regs_rtx
[src
.regno_msb
], plen
, 1);
8169 sign_in_carry
= true;
8172 unsigned src_msb
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
8174 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
8175 && src
.ibyte
> dest
.ibyte
8176 && (d0
== src_msb
|| d0
+ stepw
== src_msb
))
8178 /* We are going to override the MSB. If we shift right,
8179 store the MSB in the Carry flag. This is only needed if
8180 we don't sign-extend becaue with sign-extension the MSB
8181 (the sign) will be produced by the sign extension. */
8183 avr_asm_len ("lsr %0", &all_regs_rtx
[src_msb
], plen
, 1);
8184 msb_in_carry
= true;
8187 unsigned src_lsb
= dest
.regno
- offset
-1;
8189 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
8191 && (d0
== src_lsb
|| d0
+ stepw
== src_lsb
))
8193 /* We are going to override the new LSB; store it into carry. */
8195 avr_asm_len ("lsl %0", &all_regs_rtx
[src_lsb
], plen
, 1);
8196 code_ashift
= "rol %0";
8197 lsb_in_carry
= true;
8200 avr_asm_len (code
, xop
, plen
, 1);
8205 /* Step 2: Shift destination left by 1 bit position. This might be needed
8206 ====== for signed input and unsigned output. */
8208 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
)
8210 unsigned s0
= dest
.regno
- offset
-1;
8212 /* n1169 4.1.4 says:
8213 "Conversions from a fixed-point to an integer type round toward zero."
8214 Hence, converting a fract type to integer only gives a non-zero result
8216 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
8217 && SCALAR_FRACT_MODE_P (GET_MODE (xop
[1]))
8218 && !TARGET_FRACT_CONV_TRUNC
)
8220 gcc_assert (s0
== src
.regno_msb
);
8221 /* Check if the input is -1. We do that by checking if negating
8222 the input causes an integer overflow. */
8223 unsigned sn
= src
.regno
;
8224 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
8226 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
8228 /* Overflow goes with set carry. Clear carry otherwise. */
8229 avr_asm_len ("brvs 0f" CR_TAB
8230 "clc\n0:", NULL
, plen
, 2);
8232 /* Likewise, when converting from accumulator types to integer, we
8233 need to round up negative values. */
8234 else if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
8235 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
8236 && !TARGET_FRACT_CONV_TRUNC
8239 bool have_carry
= false;
8241 xop
[2] = all_regs_rtx
[s0
];
8242 if (!lsb_in_tmp_reg
&& !MAY_CLOBBER (s0
))
8243 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
8244 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
8245 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
8246 if (!lsb_in_tmp_reg
)
8248 unsigned sn
= src
.regno
;
8251 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
],
8256 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
], plen
, 1);
8257 lsb_in_tmp_reg
= !MAY_CLOBBER (s0
);
8259 /* Add in C and the rounding value 127. */
8260 /* If the destination msb is a sign byte, and in LD_REGS,
8261 grab it as a temporary. */
8263 && TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
],
8266 xop
[3] = all_regs_rtx
[dest
.regno_msb
];
8267 avr_asm_len ("ldi %3,127", xop
, plen
, 1);
8268 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
? "adc __tmp_reg__,%3"
8269 : have_carry
? "adc %2,%3"
8270 : lsb_in_tmp_reg
? "add __tmp_reg__,%3"
8276 /* Fall back to use __zero_reg__ as a temporary. */
8277 avr_asm_len ("dec __zero_reg__", NULL
, plen
, 1);
8279 avr_asm_len ("clt" CR_TAB
8280 "bld __zero_reg__,7", NULL
, plen
, 2);
8282 avr_asm_len ("lsr __zero_reg__", NULL
, plen
, 1);
8283 avr_asm_len (have_carry
&& lsb_in_tmp_reg
8284 ? "adc __tmp_reg__,__zero_reg__"
8285 : have_carry
? "adc %2,__zero_reg__"
8286 : lsb_in_tmp_reg
? "add __tmp_reg__,__zero_reg__"
8287 : "add %2,__zero_reg__",
8289 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL
, plen
, 1);
8292 for (d0
= dest
.regno
+ zero_bytes
;
8293 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
8294 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[d0
], plen
, 1);
8296 avr_asm_len (lsb_in_tmp_reg
8297 ? "\n0:\t" "lsl __tmp_reg__"
8298 : "\n0:\t" "lsl %2",
8301 else if (MAY_CLOBBER (s0
))
8302 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
8304 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8305 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
8307 code_ashift
= "rol %0";
8308 lsb_in_carry
= true;
8311 if (shift
== ASHIFT
)
8313 for (d0
= dest
.regno
+ zero_bytes
;
8314 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
8316 avr_asm_len (code_ashift
, &all_regs_rtx
[d0
], plen
, 1);
8317 code_ashift
= "rol %0";
8320 lsb_in_carry
= false;
8321 sign_in_carry
= true;
8324 /* Step 4a: Store MSB in carry if we don't already have it or will produce
8325 ======= it in sign-extension below. */
8327 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
8328 && src
.ibyte
> dest
.ibyte
)
8330 unsigned s0
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
8332 if (MAY_CLOBBER (s0
))
8333 avr_asm_len ("lsr %0", &all_regs_rtx
[s0
], plen
, 1);
8335 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8336 "lsr __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
8338 msb_in_carry
= true;
8341 /* Step 3: Sign-extend or zero-extend the destination as needed.
8344 if (sign_extend
&& !sign_in_carry
)
8346 unsigned s0
= src
.regno_msb
;
8348 if (MAY_CLOBBER (s0
))
8349 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
8351 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8352 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
8354 sign_in_carry
= true;
8357 gcc_assert (sign_in_carry
+ msb_in_carry
+ lsb_in_carry
<= 1);
8359 unsigned copies
= 0;
8360 rtx movw
= sign_extend
? NULL_RTX
: clrw
;
8362 for (d0
= dest
.regno_msb
- sign_bytes
+ 1; d0
<= dest
.regno_msb
; d0
++)
8364 if (AVR_HAVE_MOVW
&& movw
8365 && d0
% 2 == 0 && d0
+ 1 <= dest
.regno_msb
)
8367 xop
[2] = all_regs_rtx
[d0
];
8369 avr_asm_len ("movw %2,%3", xop
, plen
, 1);
8374 avr_asm_len (sign_extend
? "sbc %0,%0" : "clr %0",
8375 &all_regs_rtx
[d0
], plen
, 1);
8377 if (++copies
>= 2 && !movw
&& d0
% 2 == 1)
8378 movw
= all_regs_rtx
[d0
-1];
8383 /* Step 4: Right shift the destination. This might be needed for
8384 ====== conversions from unsigned to signed. */
8386 if (shift
== ASHIFTRT
)
8388 const char *code_ashiftrt
= "lsr %0";
8390 if (sign_extend
|| msb_in_carry
)
8391 code_ashiftrt
= "ror %0";
8393 if (src
.sbit
&& src
.ibyte
== dest
.ibyte
)
8394 code_ashiftrt
= "asr %0";
8396 for (d0
= dest
.regno_msb
- sign_bytes
;
8397 d0
>= dest
.regno
+ zero_bytes
- 1 && d0
>= dest
.regno
; d0
--)
8399 avr_asm_len (code_ashiftrt
, &all_regs_rtx
[d0
], plen
, 1);
8400 code_ashiftrt
= "ror %0";
8410 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
8411 XOP[2] is the rounding point, a CONST_INT. The function prints the
8412 instruction sequence if PLEN = NULL and computes the length in words
8413 of the sequence if PLEN != NULL. Most of this function deals with
8414 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
8417 avr_out_round (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*xop
, int *plen
)
8419 machine_mode mode
= GET_MODE (xop
[0]);
8420 machine_mode imode
= int_mode_for_mode (mode
);
8421 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
8422 int fbit
= (int) GET_MODE_FBIT (mode
);
8423 double_int i_add
= double_int_zero
.set_bit (fbit
-1 - INTVAL (xop
[2]));
8424 wide_int wi_add
= wi::set_bit_in_zero (fbit
-1 - INTVAL (xop
[2]),
8425 GET_MODE_PRECISION (imode
));
8426 // Lengths of PLUS and AND parts.
8427 int len_add
= 0, *plen_add
= plen
? &len_add
: NULL
;
8428 int len_and
= 0, *plen_and
= plen
? &len_and
: NULL
;
8430 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
8431 // the saturated addition so that we can emit the "rjmp 1f" before the
8434 rtx xadd
= const_fixed_from_double_int (i_add
, mode
);
8435 rtx xpattern
, xsrc
, op
[4];
8437 xsrc
= SIGNED_FIXED_POINT_MODE_P (mode
)
8438 ? gen_rtx_SS_PLUS (mode
, xop
[1], xadd
)
8439 : gen_rtx_US_PLUS (mode
, xop
[1], xadd
);
8440 xpattern
= gen_rtx_SET (xop
[0], xsrc
);
8445 avr_out_plus (xpattern
, op
, plen_add
, NULL
, false /* Don't print "0:" */);
8447 avr_asm_len ("rjmp 1f" CR_TAB
8448 "0:", NULL
, plen_add
, 1);
8450 // Keep all bits from RP and higher: ... 2^(-RP)
8451 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
8452 // Rounding point ^^^^^^^
8453 // Added above ^^^^^^^^^
8454 rtx xreg
= simplify_gen_subreg (imode
, xop
[0], mode
, 0);
8455 rtx xmask
= immed_wide_int_const (-wi_add
- wi_add
, imode
);
8457 xpattern
= gen_rtx_SET (xreg
, gen_rtx_AND (imode
, xreg
, xmask
));
8462 op
[3] = gen_rtx_SCRATCH (QImode
);
8463 avr_out_bitop (xpattern
, op
, plen_and
);
8464 avr_asm_len ("1:", NULL
, plen
, 0);
8467 *plen
= len_add
+ len_and
;
8473 /* Create RTL split patterns for byte sized rotate expressions. This
8474 produces a series of move instructions and considers overlap situations.
8475 Overlapping non-HImode operands need a scratch register. */
8478 avr_rotate_bytes (rtx operands
[])
8481 machine_mode mode
= GET_MODE (operands
[0]);
8482 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
8483 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
8484 int num
= INTVAL (operands
[2]);
8485 rtx scratch
= operands
[3];
8486 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
8487 Word move if no scratch is needed, otherwise use size of scratch. */
8488 machine_mode move_mode
= QImode
;
8489 int move_size
, offset
, size
;
8493 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
8496 move_mode
= GET_MODE (scratch
);
8498 /* Force DI rotate to use QI moves since other DI moves are currently split
8499 into QI moves so forward propagation works better. */
8502 /* Make scratch smaller if needed. */
8503 if (SCRATCH
!= GET_CODE (scratch
)
8504 && HImode
== GET_MODE (scratch
)
8505 && QImode
== move_mode
)
8506 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
8508 move_size
= GET_MODE_SIZE (move_mode
);
8509 /* Number of bytes/words to rotate. */
8510 offset
= (num
>> 3) / move_size
;
8511 /* Number of moves needed. */
8512 size
= GET_MODE_SIZE (mode
) / move_size
;
8513 /* Himode byte swap is special case to avoid a scratch register. */
8514 if (mode
== HImode
&& same_reg
)
8516 /* HImode byte swap, using xor. This is as quick as using scratch. */
8518 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
8519 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
8520 if (!rtx_equal_p (dst
, src
))
8522 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
8523 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
8524 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
8529 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
8530 /* Create linked list of moves to determine move order. */
8534 } move
[MAX_SIZE
+ 8];
8537 gcc_assert (size
<= MAX_SIZE
);
8538 /* Generate list of subreg moves. */
8539 for (i
= 0; i
< size
; i
++)
8542 int to
= (from
+ offset
) % size
;
8543 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
8544 mode
, from
* move_size
);
8545 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
8546 mode
, to
* move_size
);
8549 /* Mark dependence where a dst of one move is the src of another move.
8550 The first move is a conflict as it must wait until second is
8551 performed. We ignore moves to self - we catch this later. */
8553 for (i
= 0; i
< size
; i
++)
8554 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
8555 for (j
= 0; j
< size
; j
++)
8556 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
8558 /* The dst of move i is the src of move j. */
8565 /* Go through move list and perform non-conflicting moves. As each
8566 non-overlapping move is made, it may remove other conflicts
8567 so the process is repeated until no conflicts remain. */
8572 /* Emit move where dst is not also a src or we have used that
8574 for (i
= 0; i
< size
; i
++)
8575 if (move
[i
].src
!= NULL_RTX
)
8577 if (move
[i
].links
== -1
8578 || move
[move
[i
].links
].src
== NULL_RTX
)
8581 /* Ignore NOP moves to self. */
8582 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
8583 emit_move_insn (move
[i
].dst
, move
[i
].src
);
8585 /* Remove conflict from list. */
8586 move
[i
].src
= NULL_RTX
;
8592 /* Check for deadlock. This is when no moves occurred and we have
8593 at least one blocked move. */
8594 if (moves
== 0 && blocked
!= -1)
8596 /* Need to use scratch register to break deadlock.
8597 Add move to put dst of blocked move into scratch.
8598 When this move occurs, it will break chain deadlock.
8599 The scratch register is substituted for real move. */
8601 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
8603 move
[size
].src
= move
[blocked
].dst
;
8604 move
[size
].dst
= scratch
;
8605 /* Scratch move is never blocked. */
8606 move
[size
].links
= -1;
8607 /* Make sure we have valid link. */
8608 gcc_assert (move
[blocked
].links
!= -1);
8609 /* Replace src of blocking move with scratch reg. */
8610 move
[move
[blocked
].links
].src
= scratch
;
8611 /* Make dependent on scratch move occurring. */
8612 move
[blocked
].links
= size
;
8616 while (blocked
!= -1);
8622 /* Worker function for `ADJUST_INSN_LENGTH'. */
8623 /* Modifies the length assigned to instruction INSN
8624 LEN is the initially computed length of the insn. */
8627 avr_adjust_insn_length (rtx_insn
*insn
, int len
)
8629 rtx
*op
= recog_data
.operand
;
8630 enum attr_adjust_len adjust_len
;
8632 /* Some complex insns don't need length adjustment and therefore
8633 the length need not/must not be adjusted for these insns.
8634 It is easier to state this in an insn attribute "adjust_len" than
8635 to clutter up code here... */
8637 if (!NONDEBUG_INSN_P (insn
)
8638 || -1 == recog_memoized (insn
))
8643 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
8645 adjust_len
= get_attr_adjust_len (insn
);
8647 if (adjust_len
== ADJUST_LEN_NO
)
8649 /* Nothing to adjust: The length from attribute "length" is fine.
8650 This is the default. */
8655 /* Extract insn's operands. */
8657 extract_constrain_insn_cached (insn
);
8659 /* Dispatch to right function. */
8663 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
8664 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
8665 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
8667 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
8669 case ADJUST_LEN_PLUS
: avr_out_plus (insn
, op
, &len
); break;
8670 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
8672 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
8673 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
8674 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
8675 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
8676 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
8677 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
8678 case ADJUST_LEN_LPM
: avr_out_lpm (insn
, op
, &len
); break;
8679 case ADJUST_LEN_SEXT
: avr_out_sign_extend (insn
, op
, &len
); break;
8681 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
8682 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
8683 case ADJUST_LEN_ROUND
: avr_out_round (insn
, op
, &len
); break;
8685 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
8686 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
8687 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
8688 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
8689 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
8691 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
8692 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
8693 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
8695 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
8696 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
8697 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
8699 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
8700 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
8701 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
8703 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
8704 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
8705 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
8707 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
8709 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
8718 /* Return nonzero if register REG dead after INSN. */
8721 reg_unused_after (rtx_insn
*insn
, rtx reg
)
8723 return (dead_or_set_p (insn
, reg
)
8724 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
8727 /* Return nonzero if REG is not used after INSN.
8728 We assume REG is a reload reg, and therefore does
8729 not live past labels. It may live past calls or jumps though. */
8732 _reg_unused_after (rtx_insn
*insn
, rtx reg
)
8737 /* If the reg is set by this instruction, then it is safe for our
8738 case. Disregard the case where this is a store to memory, since
8739 we are checking a register used in the store address. */
8740 set
= single_set (insn
);
8741 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
8742 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
8745 while ((insn
= NEXT_INSN (insn
)))
8748 code
= GET_CODE (insn
);
8751 /* If this is a label that existed before reload, then the register
8752 if dead here. However, if this is a label added by reorg, then
8753 the register may still be live here. We can't tell the difference,
8754 so we just ignore labels completely. */
8755 if (code
== CODE_LABEL
)
8763 if (code
== JUMP_INSN
)
8766 /* If this is a sequence, we must handle them all at once.
8767 We could have for instance a call that sets the target register,
8768 and an insn in a delay slot that uses the register. In this case,
8769 we must return 0. */
8770 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
8772 rtx_sequence
*seq
= as_a
<rtx_sequence
*> (PATTERN (insn
));
8776 for (i
= 0; i
< seq
->len (); i
++)
8778 rtx_insn
*this_insn
= seq
->insn (i
);
8779 rtx set
= single_set (this_insn
);
8781 if (CALL_P (this_insn
))
8783 else if (JUMP_P (this_insn
))
8785 if (INSN_ANNULLED_BRANCH_P (this_insn
))
8790 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
8792 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
8794 if (GET_CODE (SET_DEST (set
)) != MEM
)
8800 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
8805 else if (code
== JUMP_INSN
)
8809 if (code
== CALL_INSN
)
8812 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
8813 if (GET_CODE (XEXP (tem
, 0)) == USE
8814 && REG_P (XEXP (XEXP (tem
, 0), 0))
8815 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
8817 if (call_used_regs
[REGNO (reg
)])
8821 set
= single_set (insn
);
8823 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
8825 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
8826 return GET_CODE (SET_DEST (set
)) != MEM
;
8827 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
8834 /* Implement `TARGET_ASM_INTEGER'. */
8835 /* Target hook for assembling integer objects. The AVR version needs
8836 special handling for references to certain labels. */
8839 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
8841 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
8842 && text_segment_operand (x
, VOIDmode
))
8844 fputs ("\t.word\tgs(", asm_out_file
);
8845 output_addr_const (asm_out_file
, x
);
8846 fputs (")\n", asm_out_file
);
8850 else if (GET_MODE (x
) == PSImode
)
8852 /* This needs binutils 2.23+, see PR binutils/13503 */
8854 fputs ("\t.byte\tlo8(", asm_out_file
);
8855 output_addr_const (asm_out_file
, x
);
8856 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8858 fputs ("\t.byte\thi8(", asm_out_file
);
8859 output_addr_const (asm_out_file
, x
);
8860 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8862 fputs ("\t.byte\thh8(", asm_out_file
);
8863 output_addr_const (asm_out_file
, x
);
8864 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8868 else if (CONST_FIXED_P (x
))
8872 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8874 for (n
= 0; n
< size
; n
++)
8876 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
8877 default_assemble_integer (xn
, 1, aligned_p
);
8883 return default_assemble_integer (x
, size
, aligned_p
);
8887 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8888 /* Return value is nonzero if pseudos that have been
8889 assigned to registers of class CLASS would likely be spilled
8890 because registers of CLASS are needed for spill registers. */
8893 avr_class_likely_spilled_p (reg_class_t c
)
8895 return (c
!= ALL_REGS
&&
8896 (AVR_TINY
? 1 : c
!= ADDW_REGS
));
8900 /* Valid attributes:
8901 progmem - Put data to program memory.
8902 signal - Make a function to be hardware interrupt.
8903 After function prologue interrupts remain disabled.
8904 interrupt - Make a function to be hardware interrupt. Before function
8905 prologue interrupts are enabled by means of SEI.
8906 naked - Don't generate function prologue/epilogue and RET
8909 /* Handle a "progmem" attribute; arguments as in
8910 struct attribute_spec.handler. */
8913 avr_handle_progmem_attribute (tree
*node
, tree name
,
8914 tree args ATTRIBUTE_UNUSED
,
8915 int flags ATTRIBUTE_UNUSED
,
8920 if (TREE_CODE (*node
) == TYPE_DECL
)
8922 /* This is really a decl attribute, not a type attribute,
8923 but try to handle it for GCC 3.0 backwards compatibility. */
8925 tree type
= TREE_TYPE (*node
);
8926 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
8927 tree newtype
= build_type_attribute_variant (type
, attr
);
8929 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
8930 TREE_TYPE (*node
) = newtype
;
8931 *no_add_attrs
= true;
8933 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
8935 *no_add_attrs
= false;
8939 warning (OPT_Wattributes
, "%qE attribute ignored",
8941 *no_add_attrs
= true;
8948 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8949 struct attribute_spec.handler. */
8952 avr_handle_fndecl_attribute (tree
*node
, tree name
,
8953 tree args ATTRIBUTE_UNUSED
,
8954 int flags ATTRIBUTE_UNUSED
,
8957 if (TREE_CODE (*node
) != FUNCTION_DECL
)
8959 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8961 *no_add_attrs
= true;
8968 avr_handle_fntype_attribute (tree
*node
, tree name
,
8969 tree args ATTRIBUTE_UNUSED
,
8970 int flags ATTRIBUTE_UNUSED
,
8973 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
8975 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8977 *no_add_attrs
= true;
8984 avr_handle_addr_attribute (tree
*node
, tree name
, tree args
,
8985 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
8987 bool io_p
= (strncmp (IDENTIFIER_POINTER (name
), "io", 2) == 0);
8988 location_t loc
= DECL_SOURCE_LOCATION (*node
);
8990 if (TREE_CODE (*node
) != VAR_DECL
)
8992 warning_at (loc
, 0, "%qE attribute only applies to variables", name
);
8996 if (args
!= NULL_TREE
)
8998 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
8999 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
9000 tree arg
= TREE_VALUE (args
);
9001 if (TREE_CODE (arg
) != INTEGER_CST
)
9003 warning (0, "%qE attribute allows only an integer constant argument",
9008 && (!tree_fits_shwi_p (arg
)
9009 || !(strcmp (IDENTIFIER_POINTER (name
), "io_low") == 0
9010 ? low_io_address_operand
: io_address_operand
)
9011 (GEN_INT (TREE_INT_CST_LOW (arg
)), QImode
)))
9013 warning_at (loc
, 0, "%qE attribute address out of range", name
);
9018 tree attribs
= DECL_ATTRIBUTES (*node
);
9019 const char *names
[] = { "io", "io_low", "address", NULL
} ;
9020 for (const char **p
= names
; *p
; p
++)
9022 tree other
= lookup_attribute (*p
, attribs
);
9023 if (other
&& TREE_VALUE (other
))
9026 "both %s and %qE attribute provide address",
9035 if (*no_add
== false && io_p
&& !TREE_THIS_VOLATILE (*node
))
9036 warning_at (loc
, 0, "%qE attribute on non-volatile variable", name
);
9042 avr_eval_addr_attrib (rtx x
)
9044 if (GET_CODE (x
) == SYMBOL_REF
9045 && (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_ADDRESS
))
9047 tree decl
= SYMBOL_REF_DECL (x
);
9048 tree attr
= NULL_TREE
;
9050 if (SYMBOL_REF_FLAGS (x
) & SYMBOL_FLAG_IO
)
9052 attr
= lookup_attribute ("io", DECL_ATTRIBUTES (decl
));
9053 if (!attr
|| !TREE_VALUE (attr
))
9054 attr
= lookup_attribute ("io_low", DECL_ATTRIBUTES (decl
));
9057 if (!attr
|| !TREE_VALUE (attr
))
9058 attr
= lookup_attribute ("address", DECL_ATTRIBUTES (decl
));
9059 gcc_assert (attr
&& TREE_VALUE (attr
) && TREE_VALUE (TREE_VALUE (attr
)));
9060 return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
))));
9066 /* AVR attributes. */
9067 static const struct attribute_spec
9068 avr_attribute_table
[] =
9070 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
9071 affects_type_identity } */
9072 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
9074 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
9076 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
9078 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
9080 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
9082 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
9084 { "io", 0, 1, false, false, false, avr_handle_addr_attribute
,
9086 { "io_low", 0, 1, false, false, false, avr_handle_addr_attribute
,
9088 { "address", 1, 1, false, false, false, avr_handle_addr_attribute
,
9090 { NULL
, 0, 0, false, false, false, NULL
, false }
9094 /* Look if DECL shall be placed in program memory space by
9095 means of attribute `progmem' or some address-space qualifier.
9096 Return non-zero if DECL is data that must end up in Flash and
9097 zero if the data lives in RAM (.bss, .data, .rodata, ...).
9099 Return 2 if DECL is located in 24-bit flash address-space
9100 Return 1 if DECL is located in 16-bit flash address-space
9101 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
9102 Return 0 otherwise */
9105 avr_progmem_p (tree decl
, tree attributes
)
9109 if (TREE_CODE (decl
) != VAR_DECL
)
9112 if (avr_decl_memx_p (decl
))
9115 if (avr_decl_flash_p (decl
))
9119 != lookup_attribute ("progmem", attributes
))
9126 while (TREE_CODE (a
) == ARRAY_TYPE
);
9128 if (a
== error_mark_node
)
9131 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
9138 /* Scan type TYP for pointer references to address space ASn.
9139 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
9140 the AS are also declared to be CONST.
9141 Otherwise, return the respective address space, i.e. a value != 0. */
9144 avr_nonconst_pointer_addrspace (tree typ
)
9146 while (ARRAY_TYPE
== TREE_CODE (typ
))
9147 typ
= TREE_TYPE (typ
);
9149 if (POINTER_TYPE_P (typ
))
9152 tree target
= TREE_TYPE (typ
);
9154 /* Pointer to function: Test the function's return type. */
9156 if (FUNCTION_TYPE
== TREE_CODE (target
))
9157 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
9159 /* "Ordinary" pointers... */
9161 while (TREE_CODE (target
) == ARRAY_TYPE
)
9162 target
= TREE_TYPE (target
);
9164 /* Pointers to non-generic address space must be const.
9165 Refuse address spaces outside the device's flash. */
9167 as
= TYPE_ADDR_SPACE (target
);
9169 if (!ADDR_SPACE_GENERIC_P (as
)
9170 && (!TYPE_READONLY (target
)
9171 || avr_addrspace
[as
].segment
>= avr_n_flash
9172 /* Also refuse __memx address space if we can't support it. */
9173 || (!AVR_HAVE_LPM
&& avr_addrspace
[as
].pointer_size
> 2)))
9178 /* Scan pointer's target type. */
9180 return avr_nonconst_pointer_addrspace (target
);
9183 return ADDR_SPACE_GENERIC
;
9187 /* Sanity check NODE so that all pointers targeting non-generic address spaces
9188 go along with CONST qualifier. Writing to these address spaces should
9189 be detected and complained about as early as possible. */
9192 avr_pgm_check_var_decl (tree node
)
9194 const char *reason
= NULL
;
9196 addr_space_t as
= ADDR_SPACE_GENERIC
;
9198 gcc_assert (as
== 0);
9200 if (avr_log
.progmem
)
9201 avr_edump ("%?: %t\n", node
);
9203 switch (TREE_CODE (node
))
9209 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
9210 reason
= "variable";
9214 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
9215 reason
= "function parameter";
9219 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
9220 reason
= "structure field";
9224 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
9226 reason
= "return type of function";
9230 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
9237 if (avr_addrspace
[as
].segment
>= avr_n_flash
)
9240 error ("%qT uses address space %qs beyond flash of %d KiB",
9241 node
, avr_addrspace
[as
].name
, 64 * avr_n_flash
);
9243 error ("%s %q+D uses address space %qs beyond flash of %d KiB",
9244 reason
, node
, avr_addrspace
[as
].name
, 64 * avr_n_flash
);
9249 error ("pointer targeting address space %qs must be const in %qT",
9250 avr_addrspace
[as
].name
, node
);
9252 error ("pointer targeting address space %qs must be const"
9254 avr_addrspace
[as
].name
, reason
, node
);
9258 return reason
== NULL
;
9262 /* Add the section attribute if the variable is in progmem. */
9265 avr_insert_attributes (tree node
, tree
*attributes
)
9267 avr_pgm_check_var_decl (node
);
9269 if (TREE_CODE (node
) == VAR_DECL
9270 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
9271 && avr_progmem_p (node
, *attributes
))
9276 /* For C++, we have to peel arrays in order to get correct
9277 determination of readonlyness. */
9280 node0
= TREE_TYPE (node0
);
9281 while (TREE_CODE (node0
) == ARRAY_TYPE
);
9283 if (error_mark_node
== node0
)
9286 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
9288 if (avr_addrspace
[as
].segment
>= avr_n_flash
)
9290 error ("variable %q+D located in address space %qs beyond flash "
9291 "of %d KiB", node
, avr_addrspace
[as
].name
, 64 * avr_n_flash
);
9293 else if (!AVR_HAVE_LPM
&& avr_addrspace
[as
].pointer_size
> 2)
9295 error ("variable %q+D located in address space %qs"
9296 " which is not supported for architecture %qs",
9297 node
, avr_addrspace
[as
].name
, avr_arch
->name
);
9300 if (!TYPE_READONLY (node0
)
9301 && !TREE_READONLY (node
))
9303 const char *reason
= "__attribute__((progmem))";
9305 if (!ADDR_SPACE_GENERIC_P (as
))
9306 reason
= avr_addrspace
[as
].name
;
9308 if (avr_log
.progmem
)
9309 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
9311 error ("variable %q+D must be const in order to be put into"
9312 " read-only section by means of %qs", node
, reason
);
9318 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
9319 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
9320 /* Track need of __do_clear_bss. */
9323 avr_asm_output_aligned_decl_common (FILE * stream
,
9326 unsigned HOST_WIDE_INT size
,
9327 unsigned int align
, bool local_p
)
9329 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
9332 if (mem
!= NULL_RTX
&& MEM_P (mem
)
9333 && GET_CODE ((symbol
= XEXP (mem
, 0))) == SYMBOL_REF
9334 && (SYMBOL_REF_FLAGS (symbol
) & (SYMBOL_FLAG_IO
| SYMBOL_FLAG_ADDRESS
)))
9339 fprintf (stream
, "\t.globl\t");
9340 assemble_name (stream
, name
);
9341 fprintf (stream
, "\n");
9343 if (SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_ADDRESS
)
9345 assemble_name (stream
, name
);
9346 fprintf (stream
, " = %ld\n",
9347 (long) INTVAL (avr_eval_addr_attrib (symbol
)));
9350 error_at (DECL_SOURCE_LOCATION (decl
),
9351 "static IO declaration for %q+D needs an address", decl
);
9355 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
9356 There is no need to trigger __do_clear_bss code for them. */
9358 if (!STR_PREFIX_P (name
, "__gnu_lto"))
9359 avr_need_clear_bss_p
= true;
9362 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
9364 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
9368 avr_asm_asm_output_aligned_bss (FILE *file
, tree decl
, const char *name
,
9369 unsigned HOST_WIDE_INT size
, int align
,
9370 void (*default_func
)
9371 (FILE *, tree
, const char *,
9372 unsigned HOST_WIDE_INT
, int))
9374 rtx mem
= decl
== NULL_TREE
? NULL_RTX
: DECL_RTL (decl
);
9377 if (mem
!= NULL_RTX
&& MEM_P (mem
)
9378 && GET_CODE ((symbol
= XEXP (mem
, 0))) == SYMBOL_REF
9379 && (SYMBOL_REF_FLAGS (symbol
) & (SYMBOL_FLAG_IO
| SYMBOL_FLAG_ADDRESS
)))
9381 if (!(SYMBOL_REF_FLAGS (symbol
) & SYMBOL_FLAG_ADDRESS
))
9382 error_at (DECL_SOURCE_LOCATION (decl
),
9383 "IO definition for %q+D needs an address", decl
);
9384 avr_asm_output_aligned_decl_common (file
, decl
, name
, size
, align
, false);
9387 default_func (file
, decl
, name
, size
, align
);
9391 /* Unnamed section callback for data_section
9392 to track need of __do_copy_data. */
9395 avr_output_data_section_asm_op (const void *data
)
9397 avr_need_copy_data_p
= true;
9399 /* Dispatch to default. */
9400 output_section_asm_op (data
);
9404 /* Unnamed section callback for bss_section
9405 to track need of __do_clear_bss. */
9408 avr_output_bss_section_asm_op (const void *data
)
9410 avr_need_clear_bss_p
= true;
9412 /* Dispatch to default. */
9413 output_section_asm_op (data
);
9417 /* Unnamed section callback for progmem*.data sections. */
9420 avr_output_progmem_section_asm_op (const void *data
)
9422 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
9423 (const char*) data
);
9427 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
9430 avr_asm_init_sections (void)
9432 /* Set up a section for jump tables. Alignment is handled by
9433 ASM_OUTPUT_BEFORE_CASE_LABEL. */
9435 if (AVR_HAVE_JMP_CALL
)
9437 progmem_swtable_section
9438 = get_unnamed_section (0, output_section_asm_op
,
9439 "\t.section\t.progmem.gcc_sw_table"
9440 ",\"a\",@progbits");
9444 progmem_swtable_section
9445 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
9446 "\t.section\t.progmem.gcc_sw_table"
9447 ",\"ax\",@progbits");
9450 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
9451 resp. `avr_need_copy_data_p'. */
9453 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
9454 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
9455 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
9459 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
9462 avr_asm_function_rodata_section (tree decl
)
9464 /* If a function is unused and optimized out by -ffunction-sections
9465 and --gc-sections, ensure that the same will happen for its jump
9466 tables by putting them into individual sections. */
9471 /* Get the frodata section from the default function in varasm.c
9472 but treat function-associated data-like jump tables as code
9473 rather than as user defined data. AVR has no constant pools. */
9475 int fdata
= flag_data_sections
;
9477 flag_data_sections
= flag_function_sections
;
9478 frodata
= default_function_rodata_section (decl
);
9479 flag_data_sections
= fdata
;
9480 flags
= frodata
->common
.flags
;
9483 if (frodata
!= readonly_data_section
9484 && flags
& SECTION_NAMED
)
9486 /* Adjust section flags and replace section name prefix. */
9490 static const char* const prefix
[] =
9492 ".rodata", ".progmem.gcc_sw_table",
9493 ".gnu.linkonce.r.", ".gnu.linkonce.t."
9496 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
9498 const char * old_prefix
= prefix
[i
];
9499 const char * new_prefix
= prefix
[i
+1];
9500 const char * name
= frodata
->named
.name
;
9502 if (STR_PREFIX_P (name
, old_prefix
))
9504 const char *rname
= ACONCAT ((new_prefix
,
9505 name
+ strlen (old_prefix
), NULL
));
9506 flags
&= ~SECTION_CODE
;
9507 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
9509 return get_section (rname
, flags
, frodata
->named
.decl
);
9514 return progmem_swtable_section
;
9518 /* Implement `TARGET_ASM_NAMED_SECTION'. */
9519 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
9522 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
9524 if (flags
& AVR_SECTION_PROGMEM
)
9526 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
9527 const char *old_prefix
= ".rodata";
9528 const char *new_prefix
= avr_addrspace
[as
].section_name
;
9530 if (STR_PREFIX_P (name
, old_prefix
))
9532 const char *sname
= ACONCAT ((new_prefix
,
9533 name
+ strlen (old_prefix
), NULL
));
9534 default_elf_asm_named_section (sname
, flags
, decl
);
9538 default_elf_asm_named_section (new_prefix
, flags
, decl
);
9542 if (!avr_need_copy_data_p
)
9543 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
9544 || STR_PREFIX_P (name
, ".rodata")
9545 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
9547 if (!avr_need_clear_bss_p
)
9548 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
9550 default_elf_asm_named_section (name
, flags
, decl
);
9554 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
9557 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
9559 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
9561 if (STR_PREFIX_P (name
, ".noinit"))
9563 if (decl
&& TREE_CODE (decl
) == VAR_DECL
9564 && DECL_INITIAL (decl
) == NULL_TREE
)
9565 flags
|= SECTION_BSS
; /* @nobits */
9567 warning (0, "only uninitialized variables can be placed in the "
9571 if (decl
&& DECL_P (decl
)
9572 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
9574 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
9576 /* Attribute progmem puts data in generic address space.
9577 Set section flags as if it was in __flash to get the right
9578 section prefix in the remainder. */
9580 if (ADDR_SPACE_GENERIC_P (as
))
9581 as
= ADDR_SPACE_FLASH
;
9583 flags
|= as
* SECTION_MACH_DEP
;
9584 flags
&= ~SECTION_WRITE
;
9585 flags
&= ~SECTION_BSS
;
9592 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
9595 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
9597 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
9598 readily available, see PR34734. So we postpone the warning
9599 about uninitialized data in program memory section until here. */
9602 && decl
&& DECL_P (decl
)
9603 && NULL_TREE
== DECL_INITIAL (decl
)
9604 && !DECL_EXTERNAL (decl
)
9605 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
9607 warning (OPT_Wuninitialized
,
9608 "uninitialized variable %q+D put into "
9609 "program memory area", decl
);
9612 default_encode_section_info (decl
, rtl
, new_decl_p
);
9614 if (decl
&& DECL_P (decl
)
9615 && TREE_CODE (decl
) != FUNCTION_DECL
9617 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
9619 rtx sym
= XEXP (rtl
, 0);
9620 tree type
= TREE_TYPE (decl
);
9621 tree attr
= DECL_ATTRIBUTES (decl
);
9622 if (type
== error_mark_node
)
9625 addr_space_t as
= TYPE_ADDR_SPACE (type
);
9627 /* PSTR strings are in generic space but located in flash:
9628 patch address space. */
9630 if (-1 == avr_progmem_p (decl
, attr
))
9631 as
= ADDR_SPACE_FLASH
;
9633 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
9635 tree io_low_attr
= lookup_attribute ("io_low", attr
);
9636 tree io_attr
= lookup_attribute ("io", attr
);
9639 && TREE_VALUE (io_low_attr
) && TREE_VALUE (TREE_VALUE (io_low_attr
)))
9640 addr_attr
= io_attr
;
9642 && TREE_VALUE (io_attr
) && TREE_VALUE (TREE_VALUE (io_attr
)))
9643 addr_attr
= io_attr
;
9645 addr_attr
= lookup_attribute ("address", attr
);
9647 || (io_attr
&& addr_attr
9648 && low_io_address_operand
9649 (GEN_INT (TREE_INT_CST_LOW
9650 (TREE_VALUE (TREE_VALUE (addr_attr
)))), QImode
)))
9651 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_IO_LOW
;
9652 if (io_attr
|| io_low_attr
)
9653 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_IO
;
9654 /* If we have an (io) address attribute specification, but the variable
9655 is external, treat the address as only a tentative definition
9656 to be used to determine if an io port is in the lower range, but
9657 don't use the exact value for constant propagation. */
9658 if (addr_attr
&& !DECL_EXTERNAL (decl
))
9659 SYMBOL_REF_FLAGS (sym
) |= SYMBOL_FLAG_ADDRESS
;
9664 /* Implement `TARGET_ASM_SELECT_SECTION' */
9667 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
9669 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
9671 if (decl
&& DECL_P (decl
)
9672 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
9674 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
9676 /* __progmem__ goes in generic space but shall be allocated to
9679 if (ADDR_SPACE_GENERIC_P (as
))
9680 as
= ADDR_SPACE_FLASH
;
9682 if (sect
->common
.flags
& SECTION_NAMED
)
9684 const char * name
= sect
->named
.name
;
9685 const char * old_prefix
= ".rodata";
9686 const char * new_prefix
= avr_addrspace
[as
].section_name
;
9688 if (STR_PREFIX_P (name
, old_prefix
))
9690 const char *sname
= ACONCAT ((new_prefix
,
9691 name
+ strlen (old_prefix
), NULL
));
9692 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
9696 if (!progmem_section
[as
])
9699 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
9700 avr_addrspace
[as
].section_name
);
9703 return progmem_section
[as
];
9709 /* Implement `TARGET_ASM_FILE_START'. */
9710 /* Outputs some text at the start of each assembler file. */
9713 avr_file_start (void)
9715 int sfr_offset
= avr_arch
->sfr_offset
;
9717 if (avr_arch
->asm_only
)
9718 error ("architecture %qs supported for assembler only", avr_mmcu
);
9720 default_file_start ();
9722 /* Print I/O addresses of some SFRs used with IN and OUT. */
9725 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
9727 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
9728 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
9730 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
9732 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
9734 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
9736 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
9737 if (AVR_XMEGA
|| AVR_TINY
)
9738 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
9739 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", AVR_TMP_REGNO
);
9740 fprintf (asm_out_file
, "__zero_reg__ = %d\n", AVR_ZERO_REGNO
);
9744 /* Implement `TARGET_ASM_FILE_END'. */
9745 /* Outputs to the stdio stream FILE some
9746 appropriate text to go at the end of an assembler file. */
9751 /* Output these only if there is anything in the
9752 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
9753 input section(s) - some code size can be saved by not
9754 linking in the initialization code from libgcc if resp.
9755 sections are empty, see PR18145. */
9757 if (avr_need_copy_data_p
)
9758 fputs (".global __do_copy_data\n", asm_out_file
);
9760 if (avr_need_clear_bss_p
)
9761 fputs (".global __do_clear_bss\n", asm_out_file
);
9765 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
9766 /* Choose the order in which to allocate hard registers for
9767 pseudo-registers local to a basic block.
9769 Store the desired register order in the array `reg_alloc_order'.
9770 Element 0 should be the register to allocate first; element 1, the
9771 next register; and so on. */
9774 avr_adjust_reg_alloc_order (void)
9777 static const int order_0
[] =
9780 18, 19, 20, 21, 22, 23,
9783 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9787 static const int tiny_order_0
[] = {
9797 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
9799 static const int order_1
[] =
9801 18, 19, 20, 21, 22, 23, 24, 25,
9804 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9808 static const int tiny_order_1
[] = {
9817 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
9819 static const int order_2
[] =
9821 25, 24, 23, 22, 21, 20, 19, 18,
9824 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
9829 /* Select specific register allocation order.
9830 Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
9831 so different allocation order should be used. */
9833 const int *order
= (TARGET_ORDER_1
? (AVR_TINY
? tiny_order_1
: order_1
)
9834 : TARGET_ORDER_2
? (AVR_TINY
? tiny_order_0
: order_2
)
9835 : (AVR_TINY
? tiny_order_0
: order_0
));
9837 for (i
= 0; i
< ARRAY_SIZE (order_0
); ++i
)
9838 reg_alloc_order
[i
] = order
[i
];
9842 /* Implement `TARGET_REGISTER_MOVE_COST' */
9845 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
9846 reg_class_t from
, reg_class_t to
)
9848 return (from
== STACK_REG
? 6
9849 : to
== STACK_REG
? 12
9854 /* Implement `TARGET_MEMORY_MOVE_COST' */
9857 avr_memory_move_cost (machine_mode mode
,
9858 reg_class_t rclass ATTRIBUTE_UNUSED
,
9859 bool in ATTRIBUTE_UNUSED
)
9861 return (mode
== QImode
? 2
9862 : mode
== HImode
? 4
9863 : mode
== SImode
? 8
9864 : mode
== SFmode
? 8
9869 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
9870 cost of an RTX operand given its context. X is the rtx of the
9871 operand, MODE is its mode, and OUTER is the rtx_code of this
9872 operand's parent operator. */
9875 avr_operand_rtx_cost (rtx x
, machine_mode mode
, enum rtx_code outer
,
9876 int opno
, bool speed
)
9878 enum rtx_code code
= GET_CODE (x
);
9890 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9897 avr_rtx_costs (x
, mode
, outer
, opno
, &total
, speed
);
9901 /* Worker function for AVR backend's rtx_cost function.
9902 X is rtx expression whose cost is to be calculated.
9903 Return true if the complete cost has been computed.
9904 Return false if subexpressions should be scanned.
9905 In either case, *TOTAL contains the cost result. */
9908 avr_rtx_costs_1 (rtx x
, machine_mode mode
, int outer_code ATTRIBUTE_UNUSED
,
9909 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
9911 enum rtx_code code
= GET_CODE (x
);
9922 /* Immediate constants are as cheap as registers. */
9927 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9935 *total
= COSTS_N_INSNS (1);
9941 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
9947 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9955 *total
= COSTS_N_INSNS (1);
9961 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9965 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9966 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9970 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
9971 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
9972 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
9977 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
9978 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
9979 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
9988 && MULT
== GET_CODE (XEXP (x
, 0))
9989 && register_operand (XEXP (x
, 1), QImode
))
9992 *total
= COSTS_N_INSNS (speed
? 4 : 3);
9993 /* multiply-add with constant: will be split and load constant. */
9994 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9995 *total
= COSTS_N_INSNS (1) + *total
;
9998 *total
= COSTS_N_INSNS (1);
9999 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10000 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
10005 && (MULT
== GET_CODE (XEXP (x
, 0))
10006 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
10007 && register_operand (XEXP (x
, 1), HImode
)
10008 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
10009 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
10012 *total
= COSTS_N_INSNS (speed
? 5 : 4);
10013 /* multiply-add with constant: will be split and load constant. */
10014 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
10015 *total
= COSTS_N_INSNS (1) + *total
;
10018 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10020 *total
= COSTS_N_INSNS (2);
10021 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10024 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
10025 *total
= COSTS_N_INSNS (1);
10027 *total
= COSTS_N_INSNS (2);
10031 if (!CONST_INT_P (XEXP (x
, 1)))
10033 *total
= COSTS_N_INSNS (3);
10034 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10037 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
10038 *total
= COSTS_N_INSNS (2);
10040 *total
= COSTS_N_INSNS (3);
10044 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10046 *total
= COSTS_N_INSNS (4);
10047 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10050 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
10051 *total
= COSTS_N_INSNS (1);
10053 *total
= COSTS_N_INSNS (4);
10059 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10065 && register_operand (XEXP (x
, 0), QImode
)
10066 && MULT
== GET_CODE (XEXP (x
, 1)))
10069 *total
= COSTS_N_INSNS (speed
? 4 : 3);
10070 /* multiply-sub with constant: will be split and load constant. */
10071 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
10072 *total
= COSTS_N_INSNS (1) + *total
;
10077 && register_operand (XEXP (x
, 0), HImode
)
10078 && (MULT
== GET_CODE (XEXP (x
, 1))
10079 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
10080 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
10081 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
10084 *total
= COSTS_N_INSNS (speed
? 5 : 4);
10085 /* multiply-sub with constant: will be split and load constant. */
10086 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
10087 *total
= COSTS_N_INSNS (1) + *total
;
10093 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10094 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10095 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10096 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
10100 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10101 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10102 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
10110 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
10112 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
10120 rtx op0
= XEXP (x
, 0);
10121 rtx op1
= XEXP (x
, 1);
10122 enum rtx_code code0
= GET_CODE (op0
);
10123 enum rtx_code code1
= GET_CODE (op1
);
10124 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
10125 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
10128 && (u8_operand (op1
, HImode
)
10129 || s8_operand (op1
, HImode
)))
10131 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
10135 && register_operand (op1
, HImode
))
10137 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
10140 else if (ex0
|| ex1
)
10142 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
10145 else if (register_operand (op0
, HImode
)
10146 && (u8_operand (op1
, HImode
)
10147 || s8_operand (op1
, HImode
)))
10149 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
10153 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
10156 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
10163 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
10173 /* Add some additional costs besides CALL like moves etc. */
10175 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
10179 /* Just a rough estimate. Even with -O2 we don't want bulky
10180 code expanded inline. */
10182 *total
= COSTS_N_INSNS (25);
10188 *total
= COSTS_N_INSNS (300);
10190 /* Add some additional costs besides CALL like moves etc. */
10191 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
10199 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10200 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
10208 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
10210 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
10211 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10212 /* For div/mod with const-int divisor we have at least the cost of
10213 loading the divisor. */
10214 if (CONST_INT_P (XEXP (x
, 1)))
10215 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
10216 /* Add some overall penaly for clobbering and moving around registers */
10217 *total
+= COSTS_N_INSNS (2);
10224 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
10225 *total
= COSTS_N_INSNS (1);
10230 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
10231 *total
= COSTS_N_INSNS (3);
10236 if (CONST_INT_P (XEXP (x
, 1)))
10237 switch (INTVAL (XEXP (x
, 1)))
10241 *total
= COSTS_N_INSNS (5);
10244 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
10252 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10259 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10261 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
10262 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10267 val
= INTVAL (XEXP (x
, 1));
10269 *total
= COSTS_N_INSNS (3);
10270 else if (val
>= 0 && val
<= 7)
10271 *total
= COSTS_N_INSNS (val
);
10273 *total
= COSTS_N_INSNS (1);
10280 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
10281 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
10282 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
10284 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
10289 if (const1_rtx
== (XEXP (x
, 1))
10290 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
10292 *total
= COSTS_N_INSNS (2);
10296 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10298 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10299 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10303 switch (INTVAL (XEXP (x
, 1)))
10310 *total
= COSTS_N_INSNS (2);
10313 *total
= COSTS_N_INSNS (3);
10319 *total
= COSTS_N_INSNS (4);
10324 *total
= COSTS_N_INSNS (5);
10327 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
10330 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
10333 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
10336 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10337 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10343 if (!CONST_INT_P (XEXP (x
, 1)))
10345 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
10348 switch (INTVAL (XEXP (x
, 1)))
10356 *total
= COSTS_N_INSNS (3);
10359 *total
= COSTS_N_INSNS (5);
10362 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
10368 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10370 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
10371 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10375 switch (INTVAL (XEXP (x
, 1)))
10381 *total
= COSTS_N_INSNS (3);
10386 *total
= COSTS_N_INSNS (4);
10389 *total
= COSTS_N_INSNS (6);
10392 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
10395 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
10396 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10404 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10411 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10413 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
10414 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10419 val
= INTVAL (XEXP (x
, 1));
10421 *total
= COSTS_N_INSNS (4);
10423 *total
= COSTS_N_INSNS (2);
10424 else if (val
>= 0 && val
<= 7)
10425 *total
= COSTS_N_INSNS (val
);
10427 *total
= COSTS_N_INSNS (1);
10432 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10434 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10435 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10439 switch (INTVAL (XEXP (x
, 1)))
10445 *total
= COSTS_N_INSNS (2);
10448 *total
= COSTS_N_INSNS (3);
10454 *total
= COSTS_N_INSNS (4);
10458 *total
= COSTS_N_INSNS (5);
10461 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
10464 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
10468 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
10471 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10472 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10478 if (!CONST_INT_P (XEXP (x
, 1)))
10480 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
10483 switch (INTVAL (XEXP (x
, 1)))
10489 *total
= COSTS_N_INSNS (3);
10493 *total
= COSTS_N_INSNS (5);
10496 *total
= COSTS_N_INSNS (4);
10499 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
10505 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10507 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
10508 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10512 switch (INTVAL (XEXP (x
, 1)))
10518 *total
= COSTS_N_INSNS (4);
10523 *total
= COSTS_N_INSNS (6);
10526 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
10529 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
10532 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
10533 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10541 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10548 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10550 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
10551 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10556 val
= INTVAL (XEXP (x
, 1));
10558 *total
= COSTS_N_INSNS (3);
10559 else if (val
>= 0 && val
<= 7)
10560 *total
= COSTS_N_INSNS (val
);
10562 *total
= COSTS_N_INSNS (1);
10567 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10569 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10570 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10574 switch (INTVAL (XEXP (x
, 1)))
10581 *total
= COSTS_N_INSNS (2);
10584 *total
= COSTS_N_INSNS (3);
10589 *total
= COSTS_N_INSNS (4);
10593 *total
= COSTS_N_INSNS (5);
10599 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
10602 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
10606 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
10609 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
10610 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10616 if (!CONST_INT_P (XEXP (x
, 1)))
10618 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
10621 switch (INTVAL (XEXP (x
, 1)))
10629 *total
= COSTS_N_INSNS (3);
10632 *total
= COSTS_N_INSNS (5);
10635 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
10641 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10643 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
10644 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10648 switch (INTVAL (XEXP (x
, 1)))
10654 *total
= COSTS_N_INSNS (4);
10657 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
10662 *total
= COSTS_N_INSNS (4);
10665 *total
= COSTS_N_INSNS (6);
10668 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
10669 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
10677 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
10681 switch (GET_MODE (XEXP (x
, 0)))
10684 *total
= COSTS_N_INSNS (1);
10685 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10686 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), QImode
, code
,
10691 *total
= COSTS_N_INSNS (2);
10692 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10693 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), HImode
, code
,
10695 else if (INTVAL (XEXP (x
, 1)) != 0)
10696 *total
+= COSTS_N_INSNS (1);
10700 *total
= COSTS_N_INSNS (3);
10701 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
10702 *total
+= COSTS_N_INSNS (2);
10706 *total
= COSTS_N_INSNS (4);
10707 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
10708 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), SImode
, code
,
10710 else if (INTVAL (XEXP (x
, 1)) != 0)
10711 *total
+= COSTS_N_INSNS (3);
10717 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)),
10723 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
10724 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
10725 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
10727 if (QImode
== mode
|| HImode
== mode
)
10729 *total
= COSTS_N_INSNS (2);
10742 /* Implement `TARGET_RTX_COSTS'. */
10745 avr_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
10746 int opno
, int *total
, bool speed
)
10748 bool done
= avr_rtx_costs_1 (x
, mode
, outer_code
,
10749 opno
, total
, speed
);
10751 if (avr_log
.rtx_costs
)
10753 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
10754 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
10761 /* Implement `TARGET_ADDRESS_COST'. */
10764 avr_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
10765 addr_space_t as ATTRIBUTE_UNUSED
,
10766 bool speed ATTRIBUTE_UNUSED
)
10770 if (GET_CODE (x
) == PLUS
10771 && CONST_INT_P (XEXP (x
, 1))
10772 && (REG_P (XEXP (x
, 0))
10773 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
10775 if (INTVAL (XEXP (x
, 1)) >= 61)
10778 else if (CONSTANT_ADDRESS_P (x
))
10781 && io_address_operand (x
, QImode
))
10785 if (avr_log
.address_cost
)
10786 avr_edump ("\n%?: %d = %r\n", cost
, x
);
10791 /* Test for extra memory constraint 'Q'.
10792 It's a memory address based on Y or Z pointer with valid displacement. */
10795 extra_constraint_Q (rtx x
)
10799 if (GET_CODE (XEXP (x
,0)) == PLUS
10800 && REG_P (XEXP (XEXP (x
,0), 0))
10801 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
10802 && (INTVAL (XEXP (XEXP (x
,0), 1))
10803 <= MAX_LD_OFFSET (GET_MODE (x
))))
10805 rtx xx
= XEXP (XEXP (x
,0), 0);
10806 int regno
= REGNO (xx
);
10808 ok
= (/* allocate pseudos */
10809 regno
>= FIRST_PSEUDO_REGISTER
10810 /* strictly check */
10811 || regno
== REG_Z
|| regno
== REG_Y
10812 /* XXX frame & arg pointer checks */
10813 || xx
== frame_pointer_rtx
10814 || xx
== arg_pointer_rtx
);
10816 if (avr_log
.constraints
)
10817 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
10818 ok
, reload_completed
, reload_in_progress
, x
);
10824 /* Convert condition code CONDITION to the valid AVR condition code. */
10827 avr_normalize_condition (RTX_CODE condition
)
10840 gcc_unreachable ();
10844 /* Helper function for `avr_reorg'. */
10847 avr_compare_pattern (rtx_insn
*insn
)
10849 rtx pattern
= single_set (insn
);
10852 && NONJUMP_INSN_P (insn
)
10853 && SET_DEST (pattern
) == cc0_rtx
10854 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
10856 machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
10857 machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
10859 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
10860 They must not be swapped, thus skip them. */
10862 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
10863 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
10870 /* Helper function for `avr_reorg'. */
10872 /* Expansion of switch/case decision trees leads to code like
10874 cc0 = compare (Reg, Num)
10878 cc0 = compare (Reg, Num)
10882 The second comparison is superfluous and can be deleted.
10883 The second jump condition can be transformed from a
10884 "difficult" one to a "simple" one because "cc0 > 0" and
10885 "cc0 >= 0" will have the same effect here.
10887 This function relies on the way switch/case is being expaned
10888 as binary decision tree. For example code see PR 49903.
10890 Return TRUE if optimization performed.
10891 Return FALSE if nothing changed.
10893 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
10895 We don't want to do this in text peephole because it is
10896 tedious to work out jump offsets there and the second comparison
10897 might have been transormed by `avr_reorg'.
10899 RTL peephole won't do because peephole2 does not scan across
10903 avr_reorg_remove_redundant_compare (rtx_insn
*insn1
)
10905 rtx comp1
, ifelse1
, xcond1
;
10907 rtx comp2
, ifelse2
, xcond2
;
10908 rtx_insn
*branch2
, *insn2
;
10909 enum rtx_code code
;
10913 /* Look out for: compare1 - branch1 - compare2 - branch2 */
10915 branch1
= next_nonnote_nondebug_insn (insn1
);
10916 if (!branch1
|| !JUMP_P (branch1
))
10919 insn2
= next_nonnote_nondebug_insn (branch1
);
10920 if (!insn2
|| !avr_compare_pattern (insn2
))
10923 branch2
= next_nonnote_nondebug_insn (insn2
);
10924 if (!branch2
|| !JUMP_P (branch2
))
10927 comp1
= avr_compare_pattern (insn1
);
10928 comp2
= avr_compare_pattern (insn2
);
10929 xcond1
= single_set (branch1
);
10930 xcond2
= single_set (branch2
);
10932 if (!comp1
|| !comp2
10933 || !rtx_equal_p (comp1
, comp2
)
10934 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
10935 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
10936 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
10937 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
10942 comp1
= SET_SRC (comp1
);
10943 ifelse1
= SET_SRC (xcond1
);
10944 ifelse2
= SET_SRC (xcond2
);
10946 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
10948 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
10949 || !REG_P (XEXP (comp1
, 0))
10950 || !CONST_INT_P (XEXP (comp1
, 1))
10951 || XEXP (ifelse1
, 2) != pc_rtx
10952 || XEXP (ifelse2
, 2) != pc_rtx
10953 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
10954 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
10955 || !COMPARISON_P (XEXP (ifelse2
, 0))
10956 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
10957 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
10958 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
10959 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
10964 /* We filtered the insn sequence to look like
10970 (if_then_else (eq (cc0)
10979 (if_then_else (CODE (cc0)
10985 code
= GET_CODE (XEXP (ifelse2
, 0));
10987 /* Map GT/GTU to GE/GEU which is easier for AVR.
10988 The first two instructions compare/branch on EQ
10989 so we may replace the difficult
10991 if (x == VAL) goto L1;
10992 if (x > VAL) goto L2;
10996 if (x == VAL) goto L1;
10997 if (x >= VAL) goto L2;
10999 Similarly, replace LE/LEU by LT/LTU. */
11010 code
= avr_normalize_condition (code
);
11017 /* Wrap the branches into UNSPECs so they won't be changed or
11018 optimized in the remainder. */
11020 target
= XEXP (XEXP (ifelse1
, 1), 0);
11021 cond
= XEXP (ifelse1
, 0);
11022 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
11024 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
11026 target
= XEXP (XEXP (ifelse2
, 1), 0);
11027 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
11028 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
11030 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
11032 /* The comparisons in insn1 and insn2 are exactly the same;
11033 insn2 is superfluous so delete it. */
11035 delete_insn (insn2
);
11036 delete_insn (branch1
);
11037 delete_insn (branch2
);
11043 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
11044 /* Optimize conditional jumps. */
11049 rtx_insn
*insn
= get_insns();
11051 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
11053 rtx pattern
= avr_compare_pattern (insn
);
11059 && avr_reorg_remove_redundant_compare (insn
))
11064 if (compare_diff_p (insn
))
11066 /* Now we work under compare insn with difficult branch. */
11068 rtx_insn
*next
= next_real_insn (insn
);
11069 rtx pat
= PATTERN (next
);
11071 pattern
= SET_SRC (pattern
);
11073 if (true_regnum (XEXP (pattern
, 0)) >= 0
11074 && true_regnum (XEXP (pattern
, 1)) >= 0)
11076 rtx x
= XEXP (pattern
, 0);
11077 rtx src
= SET_SRC (pat
);
11078 rtx t
= XEXP (src
,0);
11079 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
11080 XEXP (pattern
, 0) = XEXP (pattern
, 1);
11081 XEXP (pattern
, 1) = x
;
11082 INSN_CODE (next
) = -1;
11084 else if (true_regnum (XEXP (pattern
, 0)) >= 0
11085 && XEXP (pattern
, 1) == const0_rtx
)
11087 /* This is a tst insn, we can reverse it. */
11088 rtx src
= SET_SRC (pat
);
11089 rtx t
= XEXP (src
,0);
11091 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
11092 XEXP (pattern
, 1) = XEXP (pattern
, 0);
11093 XEXP (pattern
, 0) = const0_rtx
;
11094 INSN_CODE (next
) = -1;
11095 INSN_CODE (insn
) = -1;
11097 else if (true_regnum (XEXP (pattern
, 0)) >= 0
11098 && CONST_INT_P (XEXP (pattern
, 1)))
11100 rtx x
= XEXP (pattern
, 1);
11101 rtx src
= SET_SRC (pat
);
11102 rtx t
= XEXP (src
,0);
11103 machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
11105 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
11107 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
11108 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
11109 INSN_CODE (next
) = -1;
11110 INSN_CODE (insn
) = -1;
11117 /* Returns register number for function return value.*/
11119 static inline unsigned int
11120 avr_ret_register (void)
11126 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
11129 avr_function_value_regno_p (const unsigned int regno
)
11131 return (regno
== avr_ret_register ());
11135 /* Implement `TARGET_LIBCALL_VALUE'. */
11136 /* Create an RTX representing the place where a
11137 library function returns a value of mode MODE. */
11140 avr_libcall_value (machine_mode mode
,
11141 const_rtx func ATTRIBUTE_UNUSED
)
11143 int offs
= GET_MODE_SIZE (mode
);
11146 offs
= (offs
+ 1) & ~1;
11148 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
11152 /* Implement `TARGET_FUNCTION_VALUE'. */
11153 /* Create an RTX representing the place where a
11154 function returns a value of data type VALTYPE. */
11157 avr_function_value (const_tree type
,
11158 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
11159 bool outgoing ATTRIBUTE_UNUSED
)
11163 if (TYPE_MODE (type
) != BLKmode
)
11164 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
11166 offs
= int_size_in_bytes (type
);
11169 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
11170 offs
= GET_MODE_SIZE (SImode
);
11171 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
11172 offs
= GET_MODE_SIZE (DImode
);
11174 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
11178 test_hard_reg_class (enum reg_class rclass
, rtx x
)
11180 int regno
= true_regnum (x
);
11184 if (TEST_HARD_REG_CLASS (rclass
, regno
))
11191 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
11192 and thus is suitable to be skipped by CPSE, SBRC, etc. */
11195 avr_2word_insn_p (rtx_insn
*insn
)
11197 if (TARGET_SKIP_BUG
11199 || 2 != get_attr_length (insn
))
11204 switch (INSN_CODE (insn
))
11209 case CODE_FOR_movqi_insn
:
11210 case CODE_FOR_movuqq_insn
:
11211 case CODE_FOR_movqq_insn
:
11213 rtx set
= single_set (insn
);
11214 rtx src
= SET_SRC (set
);
11215 rtx dest
= SET_DEST (set
);
11217 /* Factor out LDS and STS from movqi_insn. */
11220 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
11222 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
11224 else if (REG_P (dest
)
11227 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
11233 case CODE_FOR_call_insn
:
11234 case CODE_FOR_call_value_insn
:
11241 jump_over_one_insn_p (rtx_insn
*insn
, rtx dest
)
11243 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
11246 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
11247 int dest_addr
= INSN_ADDRESSES (uid
);
11248 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
11250 return (jump_offset
== 1
11251 || (jump_offset
== 2
11252 && avr_2word_insn_p (next_active_insn (insn
))));
11256 /* Worker function for `HARD_REGNO_MODE_OK'. */
11257 /* Returns 1 if a value of mode MODE can be stored starting with hard
11258 register number REGNO. On the enhanced core, anything larger than
11259 1 byte must start in even numbered register for "movw" to work
11260 (this way we don't have to check for odd registers everywhere). */
11263 avr_hard_regno_mode_ok (int regno
, machine_mode mode
)
11265 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
11266 Disallowing QI et al. in these regs might lead to code like
11267 (set (subreg:QI (reg:HI 28) n) ...)
11268 which will result in wrong code because reload does not
11269 handle SUBREGs of hard regsisters like this.
11270 This could be fixed in reload. However, it appears
11271 that fixing reload is not wanted by reload people. */
11273 /* Any GENERAL_REGS register can hold 8-bit values. */
11275 if (GET_MODE_SIZE (mode
) == 1)
11278 /* FIXME: Ideally, the following test is not needed.
11279 However, it turned out that it can reduce the number
11280 of spill fails. AVR and it's poor endowment with
11281 address registers is extreme stress test for reload. */
11283 if (GET_MODE_SIZE (mode
) >= 4
11287 /* All modes larger than 8 bits should start in an even register. */
11289 return !(regno
& 1);
11293 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
11296 avr_hard_regno_call_part_clobbered (unsigned regno
, machine_mode mode
)
11298 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
11299 represent valid hard registers like, e.g. HI:29. Returning TRUE
11300 for such registers can lead to performance degradation as mentioned
11301 in PR53595. Thus, report invalid hard registers as FALSE. */
11303 if (!avr_hard_regno_mode_ok (regno
, mode
))
11306 /* Return true if any of the following boundaries is crossed:
11307 17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30. */
11309 return ((regno
<= LAST_CALLEE_SAVED_REG
&&
11310 regno
+ GET_MODE_SIZE (mode
) > (LAST_CALLEE_SAVED_REG
+ 1))
11311 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
11312 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
11316 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
11319 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED
,
11320 addr_space_t as
, RTX_CODE outer_code
,
11321 RTX_CODE index_code ATTRIBUTE_UNUSED
)
11323 if (!ADDR_SPACE_GENERIC_P (as
))
11325 return POINTER_Z_REGS
;
11329 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
11331 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
11335 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
11338 avr_regno_mode_code_ok_for_base_p (int regno
,
11339 machine_mode mode ATTRIBUTE_UNUSED
,
11340 addr_space_t as ATTRIBUTE_UNUSED
,
11341 RTX_CODE outer_code
,
11342 RTX_CODE index_code ATTRIBUTE_UNUSED
)
11346 if (!ADDR_SPACE_GENERIC_P (as
))
11348 if (regno
< FIRST_PSEUDO_REGISTER
11356 regno
= reg_renumber
[regno
];
11358 if (regno
== REG_Z
)
11367 if (regno
< FIRST_PSEUDO_REGISTER
11371 || regno
== ARG_POINTER_REGNUM
))
11375 else if (reg_renumber
)
11377 regno
= reg_renumber
[regno
];
11382 || regno
== ARG_POINTER_REGNUM
)
11389 && PLUS
== outer_code
11399 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
11400 /* Set 32-bit register OP[0] to compile-time constant OP[1].
11401 CLOBBER_REG is a QI clobber register or NULL_RTX.
11402 LEN == NULL: output instructions.
11403 LEN != NULL: set *LEN to the length of the instruction sequence
11404 (in words) printed with LEN = NULL.
11405 If CLEAR_P is true, OP[0] had been cleard to Zero already.
11406 If CLEAR_P is false, nothing is known about OP[0].
11408 The effect on cc0 is as follows:
11410 Load 0 to any register except ZERO_REG : NONE
11411 Load ld register with any value : NONE
11412 Anything else: : CLOBBER */
11415 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
11419 rtx xval
, xdest
[4];
11421 int clobber_val
= 1234;
11422 bool cooked_clobber_p
= false;
11423 bool set_p
= false;
11424 machine_mode mode
= GET_MODE (dest
);
11425 int n
, n_bytes
= GET_MODE_SIZE (mode
);
11427 gcc_assert (REG_P (dest
)
11428 && CONSTANT_P (src
));
11433 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
11434 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
11436 if (REGNO (dest
) < 16
11437 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
11439 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
11442 /* We might need a clobber reg but don't have one. Look at the value to
11443 be loaded more closely. A clobber is only needed if it is a symbol
11444 or contains a byte that is neither 0, -1 or a power of 2. */
11446 if (NULL_RTX
== clobber_reg
11447 && !test_hard_reg_class (LD_REGS
, dest
)
11448 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
11449 || !avr_popcount_each_byte (src
, n_bytes
,
11450 (1 << 0) | (1 << 1) | (1 << 8))))
11452 /* We have no clobber register but need one. Cook one up.
11453 That's cheaper than loading from constant pool. */
11455 cooked_clobber_p
= true;
11456 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
11457 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
11460 /* Now start filling DEST from LSB to MSB. */
11462 for (n
= 0; n
< n_bytes
; n
++)
11465 bool done_byte
= false;
11469 /* Crop the n-th destination byte. */
11471 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
11472 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
11474 if (!CONST_INT_P (src
)
11475 && !CONST_FIXED_P (src
)
11476 && !CONST_DOUBLE_P (src
))
11478 static const char* const asm_code
[][2] =
11480 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
11481 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
11482 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
11483 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
11488 xop
[2] = clobber_reg
;
11490 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
11495 /* Crop the n-th source byte. */
11497 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
11498 ival
[n
] = INTVAL (xval
);
11500 /* Look if we can reuse the low word by means of MOVW. */
11506 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
11507 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
11509 if (INTVAL (lo16
) == INTVAL (hi16
))
11511 if (0 != INTVAL (lo16
)
11514 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
11521 /* Don't use CLR so that cc0 is set as expected. */
11526 avr_asm_len (ldreg_p
? "ldi %0,0"
11527 : AVR_ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
11528 : "mov %0,__zero_reg__",
11529 &xdest
[n
], len
, 1);
11533 if (clobber_val
== ival
[n
]
11534 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
11539 /* LD_REGS can use LDI to move a constant value */
11545 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
11549 /* Try to reuse value already loaded in some lower byte. */
11551 for (j
= 0; j
< n
; j
++)
11552 if (ival
[j
] == ival
[n
])
11557 avr_asm_len ("mov %0,%1", xop
, len
, 1);
11565 /* Need no clobber reg for -1: Use CLR/DEC */
11570 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
11572 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
11575 else if (1 == ival
[n
])
11578 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
11580 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
11584 /* Use T flag or INC to manage powers of 2 if we have
11587 if (NULL_RTX
== clobber_reg
11588 && single_one_operand (xval
, QImode
))
11591 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
11593 gcc_assert (constm1_rtx
!= xop
[1]);
11598 avr_asm_len ("set", xop
, len
, 1);
11602 avr_asm_len ("clr %0", xop
, len
, 1);
11604 avr_asm_len ("bld %0,%1", xop
, len
, 1);
11608 /* We actually need the LD_REGS clobber reg. */
11610 gcc_assert (NULL_RTX
!= clobber_reg
);
11614 xop
[2] = clobber_reg
;
11615 clobber_val
= ival
[n
];
11617 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
11618 "mov %0,%2", xop
, len
, 2);
11621 /* If we cooked up a clobber reg above, restore it. */
11623 if (cooked_clobber_p
)
11625 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
11630 /* Reload the constant OP[1] into the HI register OP[0].
11631 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11632 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
11633 need a clobber reg or have to cook one up.
11635 PLEN == NULL: Output instructions.
11636 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
11637 by the insns printed.
11642 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
11644 output_reload_in_const (op
, clobber_reg
, plen
, false);
11649 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
11650 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11651 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
11652 need a clobber reg or have to cook one up.
11654 LEN == NULL: Output instructions.
11656 LEN != NULL: Output nothing. Set *LEN to number of words occupied
11657 by the insns printed.
11662 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
11665 && !test_hard_reg_class (LD_REGS
, op
[0])
11666 && (CONST_INT_P (op
[1])
11667 || CONST_FIXED_P (op
[1])
11668 || CONST_DOUBLE_P (op
[1])))
11670 int len_clr
, len_noclr
;
11672 /* In some cases it is better to clear the destination beforehand, e.g.
11674 CLR R2 CLR R3 MOVW R4,R2 INC R2
11678 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
11680 We find it too tedious to work that out in the print function.
11681 Instead, we call the print function twice to get the lengths of
11682 both methods and use the shortest one. */
11684 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
11685 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
11687 if (len_noclr
- len_clr
== 4)
11689 /* Default needs 4 CLR instructions: clear register beforehand. */
11691 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
11692 "mov %B0,__zero_reg__" CR_TAB
11693 "movw %C0,%A0", &op
[0], len
, 3);
11695 output_reload_in_const (op
, clobber_reg
, len
, true);
11704 /* Default: destination not pre-cleared. */
11706 output_reload_in_const (op
, clobber_reg
, len
, false);
11711 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
11713 output_reload_in_const (op
, clobber_reg
, len
, false);
11718 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
11721 avr_output_addr_vec_elt (FILE *stream
, int value
)
11723 if (AVR_HAVE_JMP_CALL
)
11724 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
11726 fprintf (stream
, "\trjmp .L%d\n", value
);
11730 avr_conditional_register_usage(void)
11736 const int tiny_reg_alloc_order
[] = {
11745 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
11748 /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
11749 - R0-R15 are not available in Tiny Core devices
11750 - R16 and R17 are fixed registers. */
11752 for (i
= 0; i
<= 17; i
++)
11755 call_used_regs
[i
] = 1;
11758 /* Set R18 to R21 as callee saved registers
11759 - R18, R19, R20 and R21 are the callee saved registers in
11760 Tiny Core devices */
11762 for (i
= 18; i
<= LAST_CALLEE_SAVED_REG
; i
++)
11764 call_used_regs
[i
] = 0;
11767 /* Update register allocation order for Tiny Core devices */
11769 for (i
= 0; i
< ARRAY_SIZE (tiny_reg_alloc_order
); i
++)
11771 reg_alloc_order
[i
] = tiny_reg_alloc_order
[i
];
11774 CLEAR_HARD_REG_SET (reg_class_contents
[(int) ADDW_REGS
]);
11775 CLEAR_HARD_REG_SET (reg_class_contents
[(int) NO_LD_REGS
]);
11779 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
11780 /* Returns true if SCRATCH are safe to be allocated as a scratch
11781 registers (for a define_peephole2) in the current function. */
11784 avr_hard_regno_scratch_ok (unsigned int regno
)
11786 /* Interrupt functions can only use registers that have already been saved
11787 by the prologue, even if they would normally be call-clobbered. */
11789 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
11790 && !df_regs_ever_live_p (regno
))
11793 /* Don't allow hard registers that might be part of the frame pointer.
11794 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11795 and don't care for a frame pointer that spans more than one register. */
11797 if ((!reload_completed
|| frame_pointer_needed
)
11798 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
11807 /* Worker function for `HARD_REGNO_RENAME_OK'. */
11808 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
11811 avr_hard_regno_rename_ok (unsigned int old_reg
,
11812 unsigned int new_reg
)
11814 /* Interrupt functions can only use registers that have already been
11815 saved by the prologue, even if they would normally be
11818 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
11819 && !df_regs_ever_live_p (new_reg
))
11822 /* Don't allow hard registers that might be part of the frame pointer.
11823 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11824 and don't care for a frame pointer that spans more than one register. */
11826 if ((!reload_completed
|| frame_pointer_needed
)
11827 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
11828 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
11836 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
11837 or memory location in the I/O space (QImode only).
11839 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
11840 Operand 1: register operand to test, or CONST_INT memory address.
11841 Operand 2: bit number.
11842 Operand 3: label to jump to if the test is true. */
11845 avr_out_sbxx_branch (rtx_insn
*insn
, rtx operands
[])
11847 enum rtx_code comp
= GET_CODE (operands
[0]);
11848 bool long_jump
= get_attr_length (insn
) >= 4;
11849 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
11853 else if (comp
== LT
)
11857 comp
= reverse_condition (comp
);
11859 switch (GET_CODE (operands
[1]))
11868 if (low_io_address_operand (operands
[1], QImode
))
11871 output_asm_insn ("sbis %i1,%2", operands
);
11873 output_asm_insn ("sbic %i1,%2", operands
);
11877 gcc_assert (io_address_operand (operands
[1], QImode
));
11878 output_asm_insn ("in __tmp_reg__,%i1", operands
);
11880 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
11882 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
11885 break; /* CONST_INT */
11890 output_asm_insn ("sbrs %T1%T2", operands
);
11892 output_asm_insn ("sbrc %T1%T2", operands
);
11898 return ("rjmp .+4" CR_TAB
11907 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
11910 avr_asm_out_ctor (rtx symbol
, int priority
)
11912 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
11913 default_ctor_section_asm_out_constructor (symbol
, priority
);
11917 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
11920 avr_asm_out_dtor (rtx symbol
, int priority
)
11922 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
11923 default_dtor_section_asm_out_destructor (symbol
, priority
);
11927 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
11930 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
11932 HOST_WIDE_INT size
= int_size_in_bytes (type
);
11933 HOST_WIDE_INT ret_size_limit
= AVR_TINY
? 4 : 8;
11935 /* In avr, there are 8 return registers. But, for Tiny Core
11936 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
11937 Return true if size is unknown or greater than the limit. */
11939 if (size
== -1 || size
> ret_size_limit
)
11950 /* Implement `CASE_VALUES_THRESHOLD'. */
11951 /* Supply the default for --param case-values-threshold=0 */
11953 static unsigned int
11954 avr_case_values_threshold (void)
11956 /* The exact break-even point between a jump table and an if-else tree
11957 depends on several factors not available here like, e.g. if 8-bit
11958 comparisons can be used in the if-else tree or not, on the
11959 range of the case values, if the case value can be reused, on the
11960 register allocation, etc. '7' appears to be a good choice. */
11966 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
11968 static machine_mode
11969 avr_addr_space_address_mode (addr_space_t as
)
11971 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
11975 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
11977 static machine_mode
11978 avr_addr_space_pointer_mode (addr_space_t as
)
11980 return avr_addr_space_address_mode (as
);
11984 /* Helper for following function. */
11987 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
11989 gcc_assert (REG_P (reg
));
11993 return REGNO (reg
) == REG_Z
;
11996 /* Avoid combine to propagate hard regs. */
11998 if (can_create_pseudo_p()
11999 && REGNO (reg
) < REG_Z
)
12008 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
12011 avr_addr_space_legitimate_address_p (machine_mode mode
, rtx x
,
12012 bool strict
, addr_space_t as
)
12021 case ADDR_SPACE_GENERIC
:
12022 return avr_legitimate_address_p (mode
, x
, strict
);
12024 case ADDR_SPACE_FLASH
:
12025 case ADDR_SPACE_FLASH1
:
12026 case ADDR_SPACE_FLASH2
:
12027 case ADDR_SPACE_FLASH3
:
12028 case ADDR_SPACE_FLASH4
:
12029 case ADDR_SPACE_FLASH5
:
12031 switch (GET_CODE (x
))
12034 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
12038 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
12047 case ADDR_SPACE_MEMX
:
12050 && can_create_pseudo_p());
12052 if (LO_SUM
== GET_CODE (x
))
12054 rtx hi
= XEXP (x
, 0);
12055 rtx lo
= XEXP (x
, 1);
12058 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
12060 && REGNO (lo
) == REG_Z
);
12066 if (avr_log
.legitimate_address_p
)
12068 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
12069 "reload_completed=%d reload_in_progress=%d %s:",
12070 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
12071 reg_renumber
? "(reg_renumber)" : "");
12073 if (GET_CODE (x
) == PLUS
12074 && REG_P (XEXP (x
, 0))
12075 && CONST_INT_P (XEXP (x
, 1))
12076 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
12079 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
12080 true_regnum (XEXP (x
, 0)));
12083 avr_edump ("\n%r\n", x
);
12090 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
12093 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
12094 machine_mode mode
, addr_space_t as
)
12096 if (ADDR_SPACE_GENERIC_P (as
))
12097 return avr_legitimize_address (x
, old_x
, mode
);
12099 if (avr_log
.legitimize_address
)
12101 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
12108 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
12111 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
12113 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
12114 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
12116 if (avr_log
.progmem
)
12117 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
12118 src
, type_from
, type_to
);
12120 /* Up-casting from 16-bit to 24-bit pointer. */
12122 if (as_from
!= ADDR_SPACE_MEMX
12123 && as_to
== ADDR_SPACE_MEMX
)
12127 rtx reg
= gen_reg_rtx (PSImode
);
12129 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
12130 sym
= XEXP (sym
, 0);
12132 /* Look at symbol flags: avr_encode_section_info set the flags
12133 also if attribute progmem was seen so that we get the right
12134 promotion for, e.g. PSTR-like strings that reside in generic space
12135 but are located in flash. In that case we patch the incoming
12138 if (SYMBOL_REF
== GET_CODE (sym
)
12139 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
12141 as_from
= ADDR_SPACE_FLASH
;
12144 /* Linearize memory: RAM has bit 23 set. */
12146 msb
= ADDR_SPACE_GENERIC_P (as_from
)
12148 : avr_addrspace
[as_from
].segment
;
12150 src
= force_reg (Pmode
, src
);
12152 emit_insn (msb
== 0
12153 ? gen_zero_extendhipsi2 (reg
, src
)
12154 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
12159 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
12161 if (as_from
== ADDR_SPACE_MEMX
12162 && as_to
!= ADDR_SPACE_MEMX
)
12164 rtx new_src
= gen_reg_rtx (Pmode
);
12166 src
= force_reg (PSImode
, src
);
12168 emit_move_insn (new_src
,
12169 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
12177 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
12180 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
12181 addr_space_t superset ATTRIBUTE_UNUSED
)
12183 /* Allow any kind of pointer mess. */
12189 /* Implement `TARGET_CONVERT_TO_TYPE'. */
12192 avr_convert_to_type (tree type
, tree expr
)
12194 /* Print a diagnose for pointer conversion that changes the address
12195 space of the pointer target to a non-enclosing address space,
12196 provided -Waddr-space-convert is on.
12198 FIXME: Filter out cases where the target object is known to
12199 be located in the right memory, like in
12201 (const __flash*) PSTR ("text")
12203 Also try to distinguish between explicit casts requested by
12204 the user and implicit casts like
12206 void f (const __flash char*);
12208 void g (const char *p)
12210 f ((const __flash*) p);
12213 under the assumption that an explicit casts means that the user
12214 knows what he is doing, e.g. interface with PSTR or old style
12215 code with progmem and pgm_read_xxx.
12218 if (avr_warn_addr_space_convert
12219 && expr
!= error_mark_node
12220 && POINTER_TYPE_P (type
)
12221 && POINTER_TYPE_P (TREE_TYPE (expr
)))
12223 addr_space_t as_old
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr
)));
12224 addr_space_t as_new
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
12226 if (avr_log
.progmem
)
12227 avr_edump ("%?: type = %t\nexpr = %t\n\n", type
, expr
);
12229 if (as_new
!= ADDR_SPACE_MEMX
12230 && as_new
!= as_old
)
12232 location_t loc
= EXPR_LOCATION (expr
);
12233 const char *name_old
= avr_addrspace
[as_old
].name
;
12234 const char *name_new
= avr_addrspace
[as_new
].name
;
12236 warning (OPT_Waddr_space_convert
,
12237 "conversion from address space %qs to address space %qs",
12238 ADDR_SPACE_GENERIC_P (as_old
) ? "generic" : name_old
,
12239 ADDR_SPACE_GENERIC_P (as_new
) ? "generic" : name_new
);
12241 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, expr
);
12249 /* PR63633: The middle-end might come up with hard regs as input operands.
12251 RMASK is a bit mask representing a subset of hard registers R0...R31:
12252 Rn is an element of that set iff bit n of RMASK is set.
12253 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
12254 OP[n] has to be fixed; otherwise OP[n] is left alone.
12256 For each element of OPMASK which is a hard register overlapping RMASK,
12257 replace OP[n] with a newly created pseudo register
12259 HREG == 0: Also emit a move insn that copies the contents of that
12260 hard register into the new pseudo.
12262 HREG != 0: Also set HREG[n] to the hard register. */
12265 avr_fix_operands (rtx
*op
, rtx
*hreg
, unsigned opmask
, unsigned rmask
)
12267 for (; opmask
; opmask
>>= 1, op
++)
12276 && REGNO (reg
) < FIRST_PSEUDO_REGISTER
12277 // This hard-reg overlaps other prohibited hard regs?
12278 && (rmask
& regmask (GET_MODE (reg
), REGNO (reg
))))
12280 *op
= gen_reg_rtx (GET_MODE (reg
));
12282 emit_move_insn (*op
, reg
);
12294 avr_fix_inputs (rtx
*op
, unsigned opmask
, unsigned rmask
)
12296 avr_fix_operands (op
, NULL
, opmask
, rmask
);
12300 /* Helper for the function below: If bit n of MASK is set and
12301 HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
12302 Otherwise do nothing for that n. Return TRUE. */
12305 avr_move_fixed_operands (rtx
*op
, rtx
*hreg
, unsigned mask
)
12307 for (; mask
; mask
>>= 1, op
++, hreg
++)
12310 emit_move_insn (*hreg
, *op
);
12316 /* PR63633: The middle-end might come up with hard regs as output operands.
12318 GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
12319 RMASK is a bit mask representing a subset of hard registers R0...R31:
12320 Rn is an element of that set iff bit n of RMASK is set.
12321 OPMASK describes a subset of OP[]: If bit n of OPMASK is 1 then
12322 OP[n] has to be fixed; otherwise OP[n] is left alone.
12324 Emit the insn sequence as generated by GEN() with all elements of OPMASK
12325 which are hard registers overlapping RMASK replaced by newly created
12326 pseudo registers. After the sequence has been emitted, emit insns that
12327 move the contents of respective pseudos to their hard regs. */
12330 avr_emit3_fix_outputs (rtx (*gen
)(rtx
,rtx
,rtx
), rtx
*op
,
12331 unsigned opmask
, unsigned rmask
)
12336 /* It is letigimate for GEN to call this function, and in order not to
12337 get self-recursive we use the following static kludge. This is the
12338 only way not to duplicate all expanders and to avoid ugly and
12339 hard-to-maintain C-code instead of the much more appreciated RTL
12340 representation as supplied by define_expand. */
12341 static bool lock
= false;
12343 gcc_assert (opmask
< (1u << n
));
12348 avr_fix_operands (op
, hreg
, opmask
, rmask
);
12351 emit_insn (gen (op
[0], op
[1], op
[2]));
12354 return avr_move_fixed_operands (op
, hreg
, opmask
);
12358 /* Worker function for movmemhi expander.
12359 XOP[0] Destination as MEM:BLK
12361 XOP[2] # Bytes to copy
12363 Return TRUE if the expansion is accomplished.
12364 Return FALSE if the operand compination is not supported. */
12367 avr_emit_movmemhi (rtx
*xop
)
12369 HOST_WIDE_INT count
;
12370 machine_mode loop_mode
;
12371 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
12372 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
12373 rtx a_hi8
= NULL_RTX
;
12375 if (avr_mem_flash_p (xop
[0]))
12378 if (!CONST_INT_P (xop
[2]))
12381 count
= INTVAL (xop
[2]);
12385 a_src
= XEXP (xop
[1], 0);
12386 a_dest
= XEXP (xop
[0], 0);
12388 if (PSImode
== GET_MODE (a_src
))
12390 gcc_assert (as
== ADDR_SPACE_MEMX
);
12392 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
12393 loop_reg
= gen_rtx_REG (loop_mode
, 24);
12394 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
12396 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
12397 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
12401 int segment
= avr_addrspace
[as
].segment
;
12404 && avr_n_flash
> 1)
12406 a_hi8
= GEN_INT (segment
);
12407 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
12409 else if (!ADDR_SPACE_GENERIC_P (as
))
12411 as
= ADDR_SPACE_FLASH
;
12416 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
12417 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
12420 xas
= GEN_INT (as
);
12422 /* FIXME: Register allocator might come up with spill fails if it is left
12423 on its own. Thus, we allocate the pointer registers by hand:
12425 X = destination address */
12427 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
12428 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
12430 /* FIXME: Register allocator does a bad job and might spill address
12431 register(s) inside the loop leading to additional move instruction
12432 to/from stack which could clobber tmp_reg. Thus, do *not* emit
12433 load and store as separate insns. Instead, we perform the copy
12434 by means of one monolithic insn. */
12436 gcc_assert (TMP_REGNO
== LPM_REGNO
);
12438 if (as
!= ADDR_SPACE_MEMX
)
12440 /* Load instruction ([E]LPM or LD) is known at compile time:
12441 Do the copy-loop inline. */
12443 rtx (*fun
) (rtx
, rtx
, rtx
)
12444 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
12446 insn
= fun (xas
, loop_reg
, loop_reg
);
12450 rtx (*fun
) (rtx
, rtx
)
12451 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
12453 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
12455 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
12458 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
12465 /* Print assembler for movmem_qi, movmem_hi insns...
12467 $1, $2 : Loop register
12469 X : Destination address
12473 avr_out_movmem (rtx_insn
*insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
12475 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
12476 machine_mode loop_mode
= GET_MODE (op
[1]);
12477 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
12485 xop
[2] = tmp_reg_rtx
;
12489 avr_asm_len ("0:", xop
, plen
, 0);
12491 /* Load with post-increment */
12498 case ADDR_SPACE_GENERIC
:
12500 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
12503 case ADDR_SPACE_FLASH
:
12506 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
12508 avr_asm_len ("lpm" CR_TAB
12509 "adiw r30,1", xop
, plen
, 2);
12512 case ADDR_SPACE_FLASH1
:
12513 case ADDR_SPACE_FLASH2
:
12514 case ADDR_SPACE_FLASH3
:
12515 case ADDR_SPACE_FLASH4
:
12516 case ADDR_SPACE_FLASH5
:
12518 if (AVR_HAVE_ELPMX
)
12519 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
12521 avr_asm_len ("elpm" CR_TAB
12522 "adiw r30,1", xop
, plen
, 2);
12526 /* Store with post-increment */
12528 avr_asm_len ("st X+,%2", xop
, plen
, 1);
12530 /* Decrement loop-counter and set Z-flag */
12532 if (QImode
== loop_mode
)
12534 avr_asm_len ("dec %1", xop
, plen
, 1);
12538 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
12542 avr_asm_len ("subi %A1,1" CR_TAB
12543 "sbci %B1,0", xop
, plen
, 2);
12546 /* Loop until zero */
12548 return avr_asm_len ("brne 0b", xop
, plen
, 1);
12553 /* Helper for __builtin_avr_delay_cycles */
12556 avr_mem_clobber (void)
12558 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
12559 MEM_VOLATILE_P (mem
) = 1;
12564 avr_expand_delay_cycles (rtx operands0
)
12566 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
12567 unsigned HOST_WIDE_INT cycles_used
;
12568 unsigned HOST_WIDE_INT loop_count
;
12570 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
12572 loop_count
= ((cycles
- 9) / 6) + 1;
12573 cycles_used
= ((loop_count
- 1) * 6) + 9;
12574 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
12575 avr_mem_clobber()));
12576 cycles
-= cycles_used
;
12579 if (IN_RANGE (cycles
, 262145, 83886081))
12581 loop_count
= ((cycles
- 7) / 5) + 1;
12582 if (loop_count
> 0xFFFFFF)
12583 loop_count
= 0xFFFFFF;
12584 cycles_used
= ((loop_count
- 1) * 5) + 7;
12585 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
12586 avr_mem_clobber()));
12587 cycles
-= cycles_used
;
12590 if (IN_RANGE (cycles
, 768, 262144))
12592 loop_count
= ((cycles
- 5) / 4) + 1;
12593 if (loop_count
> 0xFFFF)
12594 loop_count
= 0xFFFF;
12595 cycles_used
= ((loop_count
- 1) * 4) + 5;
12596 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
12597 avr_mem_clobber()));
12598 cycles
-= cycles_used
;
12601 if (IN_RANGE (cycles
, 6, 767))
12603 loop_count
= cycles
/ 3;
12604 if (loop_count
> 255)
12606 cycles_used
= loop_count
* 3;
12607 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
12608 avr_mem_clobber()));
12609 cycles
-= cycles_used
;
12612 while (cycles
>= 2)
12614 emit_insn (gen_nopv (GEN_INT(2)));
12620 emit_insn (gen_nopv (GEN_INT(1)));
/* Compute the image of x under f, i.e. perform   x --> f(x)

   F encodes a map on { 0 ... 7 } as 8 nibbles:  nibble number X of F
   (counting from the least significant nibble) is the image of X.
   Any X outside the domain { 0 ... 7 } maps to 0.  */

static unsigned
avr_map (unsigned int f, int x)
{
  /* Reject both ends of the domain explicitly:  the former `x < 8' test
     let a negative X through to a negative shift count, which is
     undefined behavior in C.  Callers pass 0 ... 15 today, but make the
     function total to be safe.  */

  if (x < 0 || x >= 8)
    return 0;

  return (f >> (4 * x)) & 0xf;
}
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned metric = 0;

  for (unsigned i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      switch (mode)
        {
        case MAP_FIXED_0_7:
          metric += ai == i;
          break;

        case MAP_NONFIXED_0_7:
          metric += ai < 8 && ai != i;
          break;

        case MAP_MASK_FIXED_0_7:
          metric |= ((unsigned) (ai == i)) << i;
          break;

        case MAP_PREIMAGE_0_7:
          metric += ai < 8;
          break;

        case MAP_MASK_PREIMAGE_F:
          metric |= ((unsigned) (ai == 0xf)) << i;
          break;
        }
    }

  return metric;
}
12682 /* Return true if IVAL has a 0xf in its hexadecimal representation
12683 and false, otherwise. Only nibbles 0..7 are taken into account.
12684 Used as constraint helper for C0f and Cxf. */
12687 avr_has_nibble_0xf (rtx ival
)
12689 unsigned int map
= UINTVAL (ival
) & GET_MODE_MASK (SImode
);
12690 return 0 != avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
12694 /* We have a set of bits that are mapped by a function F.
12695 Try to decompose F by means of a second function G so that
12701 cost (F o G^-1) + cost (G) < cost (F)
12703 Example: Suppose builtin insert_bits supplies us with the map
12704 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
12705 nibble of the result, we can just as well rotate the bits before inserting
12706 them and use the map 0x7654ffff which is cheaper than the original map.
12707 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
12711 /* tree code of binary function G */
12712 enum tree_code code
;
12714 /* The constant second argument of G */
12717 /* G^-1, the inverse of G (*, arg) */
12720 /* The cost of appplying G (*, arg) */
12723 /* The composition F o G^-1 (*, arg) for some function F */
12726 /* For debug purpose only */
12730 static const avr_map_op_t avr_map_op
[] =
12732 { LROTATE_EXPR
, 0, 0x76543210, 0, 0, "id" },
12733 { LROTATE_EXPR
, 1, 0x07654321, 2, 0, "<<<" },
12734 { LROTATE_EXPR
, 2, 0x10765432, 4, 0, "<<<" },
12735 { LROTATE_EXPR
, 3, 0x21076543, 4, 0, "<<<" },
12736 { LROTATE_EXPR
, 4, 0x32107654, 1, 0, "<<<" },
12737 { LROTATE_EXPR
, 5, 0x43210765, 3, 0, "<<<" },
12738 { LROTATE_EXPR
, 6, 0x54321076, 5, 0, "<<<" },
12739 { LROTATE_EXPR
, 7, 0x65432107, 3, 0, "<<<" },
12740 { RSHIFT_EXPR
, 1, 0x6543210c, 1, 0, ">>" },
12741 { RSHIFT_EXPR
, 1, 0x7543210c, 1, 0, ">>" },
12742 { RSHIFT_EXPR
, 2, 0x543210cc, 2, 0, ">>" },
12743 { RSHIFT_EXPR
, 2, 0x643210cc, 2, 0, ">>" },
12744 { RSHIFT_EXPR
, 2, 0x743210cc, 2, 0, ">>" },
12745 { LSHIFT_EXPR
, 1, 0xc7654321, 1, 0, "<<" },
12746 { LSHIFT_EXPR
, 2, 0xcc765432, 2, 0, "<<" }
12750 /* Try to decompose F as F = (F o G^-1) o G as described above.
12751 The result is a struct representing F o G^-1 and G.
12752 If result.cost < 0 then such a decomposition does not exist. */
12754 static avr_map_op_t
12755 avr_map_decompose (unsigned int f
, const avr_map_op_t
*g
, bool val_const_p
)
12758 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
12759 avr_map_op_t f_ginv
= *g
;
12760 unsigned int ginv
= g
->ginv
;
12764 /* Step 1: Computing F o G^-1 */
12766 for (i
= 7; i
>= 0; i
--)
12768 int x
= avr_map (f
, i
);
12772 x
= avr_map (ginv
, x
);
12774 /* The bit is no element of the image of G: no avail (cost = -1) */
12780 f_ginv
.map
= (f_ginv
.map
<< 4) + x
;
12783 /* Step 2: Compute the cost of the operations.
12784 The overall cost of doing an operation prior to the insertion is
12785 the cost of the insertion plus the cost of the operation. */
12787 /* Step 2a: Compute cost of F o G^-1 */
12789 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
12791 /* The mapping consists only of fixed points and can be folded
12792 to AND/OR logic in the remainder. Reasonable cost is 3. */
12794 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
12800 /* Get the cost of the insn by calling the output worker with some
12801 fake values. Mimic effect of reloading xop[3]: Unused operands
12802 are mapped to 0 and used operands are reloaded to xop[0]. */
12804 xop
[0] = all_regs_rtx
[24];
12805 xop
[1] = gen_int_mode (f_ginv
.map
, SImode
);
12806 xop
[2] = all_regs_rtx
[25];
12807 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
12809 avr_out_insert_bits (xop
, &f_ginv
.cost
);
12811 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
12814 /* Step 2b: Add cost of G */
12816 f_ginv
.cost
+= g
->cost
;
12818 if (avr_log
.builtin
)
12819 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
12825 /* Insert bits from XOP[1] into XOP[0] according to MAP.
12826 XOP[0] and XOP[1] don't overlap.
12827 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
12828 If FIXP_P = false: Just move the bit if its position in the destination
12829 is different to its source position. */
12832 avr_move_bits (rtx
*xop
, unsigned int map
, bool fixp_p
, int *plen
)
12836 /* T-flag contains this bit of the source, i.e. of XOP[1] */
12837 int t_bit_src
= -1;
12839 /* We order the operations according to the requested source bit b. */
12841 for (b
= 0; b
< 8; b
++)
12842 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
12844 int bit_src
= avr_map (map
, bit_dest
);
12848 /* Same position: No need to copy as requested by FIXP_P. */
12849 || (bit_dest
== bit_src
&& !fixp_p
))
12852 if (t_bit_src
!= bit_src
)
12854 /* Source bit is not yet in T: Store it to T. */
12856 t_bit_src
= bit_src
;
12858 xop
[3] = GEN_INT (bit_src
);
12859 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
12862 /* Load destination bit with T. */
12864 xop
[3] = GEN_INT (bit_dest
);
12865 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
12870 /* PLEN == 0: Print assembler code for `insert_bits'.
12871 PLEN != 0: Compute code length in bytes.
12874 OP[1]: The mapping composed of nibbles. If nibble no. N is
12875 0: Bit N of result is copied from bit OP[2].0
12877 7: Bit N of result is copied from bit OP[2].7
12878 0xf: Bit N of result is copied from bit OP[3].N
12879 OP[2]: Bits to be inserted
12880 OP[3]: Target value */
12883 avr_out_insert_bits (rtx
*op
, int *plen
)
12885 unsigned int map
= UINTVAL (op
[1]) & GET_MODE_MASK (SImode
);
12886 unsigned mask_fixed
;
12887 bool fixp_p
= true;
12894 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
12898 else if (flag_print_asm_name
)
12899 fprintf (asm_out_file
, ASM_COMMENT_START
"map = 0x%08x\n", map
);
12901 /* If MAP has fixed points it might be better to initialize the result
12902 with the bits to be inserted instead of moving all bits by hand. */
12904 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
12906 if (REGNO (xop
[0]) == REGNO (xop
[1]))
12908 /* Avoid early-clobber conflicts */
12910 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
12911 xop
[1] = tmp_reg_rtx
;
12915 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
12917 /* XOP[2] is used and reloaded to XOP[0] already */
12919 int n_fix
= 0, n_nofix
= 0;
12921 gcc_assert (REG_P (xop
[2]));
12923 /* Get the code size of the bit insertions; once with all bits
12924 moved and once with fixed points omitted. */
12926 avr_move_bits (xop
, map
, true, &n_fix
);
12927 avr_move_bits (xop
, map
, false, &n_nofix
);
12929 if (fixp_p
&& n_fix
- n_nofix
> 3)
12931 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
12933 avr_asm_len ("eor %0,%1" CR_TAB
12934 "andi %0,%3" CR_TAB
12935 "eor %0,%1", xop
, plen
, 3);
12941 /* XOP[2] is unused */
12943 if (fixp_p
&& mask_fixed
)
12945 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
12950 /* Move/insert remaining bits. */
12952 avr_move_bits (xop
, map
, fixp_p
, plen
);
12958 /* IDs for all the AVR builtins. */
12960 enum avr_builtin_id
12962 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
12963 AVR_BUILTIN_ ## NAME,
12964 #include "builtins.def"
12970 struct GTY(()) avr_builtin_description
12972 enum insn_code icode
;
12978 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
12979 that a built-in's ID can be used to access the built-in by means of
12982 static GTY(()) struct avr_builtin_description
12983 avr_bdesc
[AVR_BUILTIN_COUNT
] =
12985 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
12986 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
12987 #include "builtins.def"
12992 /* Implement `TARGET_BUILTIN_DECL'. */
12995 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
12997 if (id
< AVR_BUILTIN_COUNT
)
12998 return avr_bdesc
[id
].fndecl
;
13000 return error_mark_node
;
13005 avr_init_builtin_int24 (void)
13007 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
13008 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
13010 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
13011 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
13015 /* Implement `TARGET_INIT_BUILTINS' */
13016 /* Set up all builtin functions for this target. */
13019 avr_init_builtins (void)
13021 tree void_ftype_void
13022 = build_function_type_list (void_type_node
, NULL_TREE
);
13023 tree uchar_ftype_uchar
13024 = build_function_type_list (unsigned_char_type_node
,
13025 unsigned_char_type_node
,
13027 tree uint_ftype_uchar_uchar
13028 = build_function_type_list (unsigned_type_node
,
13029 unsigned_char_type_node
,
13030 unsigned_char_type_node
,
13032 tree int_ftype_char_char
13033 = build_function_type_list (integer_type_node
,
13037 tree int_ftype_char_uchar
13038 = build_function_type_list (integer_type_node
,
13040 unsigned_char_type_node
,
13042 tree void_ftype_ulong
13043 = build_function_type_list (void_type_node
,
13044 long_unsigned_type_node
,
13047 tree uchar_ftype_ulong_uchar_uchar
13048 = build_function_type_list (unsigned_char_type_node
,
13049 long_unsigned_type_node
,
13050 unsigned_char_type_node
,
13051 unsigned_char_type_node
,
13054 tree const_memx_void_node
13055 = build_qualified_type (void_type_node
,
13057 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
13059 tree const_memx_ptr_type_node
13060 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
13062 tree char_ftype_const_memx_ptr
13063 = build_function_type_list (char_type_node
,
13064 const_memx_ptr_type_node
,
13068 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
13070 #define FX_FTYPE_FX(fx) \
13071 tree fx##r_ftype_##fx##r \
13072 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
13073 tree fx##k_ftype_##fx##k \
13074 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
13076 #define FX_FTYPE_FX_INT(fx) \
13077 tree fx##r_ftype_##fx##r_int \
13078 = build_function_type_list (node_##fx##r, node_##fx##r, \
13079 integer_type_node, NULL); \
13080 tree fx##k_ftype_##fx##k_int \
13081 = build_function_type_list (node_##fx##k, node_##fx##k, \
13082 integer_type_node, NULL)
13084 #define INT_FTYPE_FX(fx) \
13085 tree int_ftype_##fx##r \
13086 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
13087 tree int_ftype_##fx##k \
13088 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
13090 #define INTX_FTYPE_FX(fx) \
13091 tree int##fx##r_ftype_##fx##r \
13092 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
13093 tree int##fx##k_ftype_##fx##k \
13094 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
13096 #define FX_FTYPE_INTX(fx) \
13097 tree fx##r_ftype_int##fx##r \
13098 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
13099 tree fx##k_ftype_int##fx##k \
13100 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
13102 tree node_hr
= short_fract_type_node
;
13103 tree node_nr
= fract_type_node
;
13104 tree node_lr
= long_fract_type_node
;
13105 tree node_llr
= long_long_fract_type_node
;
13107 tree node_uhr
= unsigned_short_fract_type_node
;
13108 tree node_unr
= unsigned_fract_type_node
;
13109 tree node_ulr
= unsigned_long_fract_type_node
;
13110 tree node_ullr
= unsigned_long_long_fract_type_node
;
13112 tree node_hk
= short_accum_type_node
;
13113 tree node_nk
= accum_type_node
;
13114 tree node_lk
= long_accum_type_node
;
13115 tree node_llk
= long_long_accum_type_node
;
13117 tree node_uhk
= unsigned_short_accum_type_node
;
13118 tree node_unk
= unsigned_accum_type_node
;
13119 tree node_ulk
= unsigned_long_accum_type_node
;
13120 tree node_ullk
= unsigned_long_long_accum_type_node
;
13123 /* For absfx builtins. */
13130 /* For roundfx builtins. */
13132 FX_FTYPE_FX_INT (h
);
13133 FX_FTYPE_FX_INT (n
);
13134 FX_FTYPE_FX_INT (l
);
13135 FX_FTYPE_FX_INT (ll
);
13137 FX_FTYPE_FX_INT (uh
);
13138 FX_FTYPE_FX_INT (un
);
13139 FX_FTYPE_FX_INT (ul
);
13140 FX_FTYPE_FX_INT (ull
);
13142 /* For countlsfx builtins. */
13152 INT_FTYPE_FX (ull
);
13154 /* For bitsfx builtins. */
13159 INTX_FTYPE_FX (ll
);
13161 INTX_FTYPE_FX (uh
);
13162 INTX_FTYPE_FX (un
);
13163 INTX_FTYPE_FX (ul
);
13164 INTX_FTYPE_FX (ull
);
13166 /* For fxbits builtins. */
13171 FX_FTYPE_INTX (ll
);
13173 FX_FTYPE_INTX (uh
);
13174 FX_FTYPE_INTX (un
);
13175 FX_FTYPE_INTX (ul
);
13176 FX_FTYPE_INTX (ull
);
13179 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
13181 int id = AVR_BUILTIN_ ## NAME; \
13182 const char *Name = "__builtin_avr_" #NAME; \
13183 char *name = (char*) alloca (1 + strlen (Name)); \
13185 gcc_assert (id < AVR_BUILTIN_COUNT); \
13186 avr_bdesc[id].fndecl \
13187 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
13188 BUILT_IN_MD, LIBNAME, NULL_TREE); \
13190 #include "builtins.def"
13193 avr_init_builtin_int24 ();
13197 /* Subroutine of avr_expand_builtin to expand vanilla builtins
13198 with non-void result and 1 ... 3 arguments. */
13201 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
13204 int n
, n_args
= call_expr_nargs (exp
);
13205 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
13207 gcc_assert (n_args
>= 1 && n_args
<= 3);
13209 if (target
== NULL_RTX
13210 || GET_MODE (target
) != tmode
13211 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
13213 target
= gen_reg_rtx (tmode
);
13216 for (n
= 0; n
< n_args
; n
++)
13218 tree arg
= CALL_EXPR_ARG (exp
, n
);
13219 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13220 machine_mode opmode
= GET_MODE (op
);
13221 machine_mode mode
= insn_data
[icode
].operand
[n
+1].mode
;
13223 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
13226 op
= gen_lowpart (HImode
, op
);
13229 /* In case the insn wants input operands in modes different from
13230 the result, abort. */
13232 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
13234 if (!insn_data
[icode
].operand
[n
+1].predicate (op
, mode
))
13235 op
= copy_to_mode_reg (mode
, op
);
13242 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
13243 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
13244 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
13250 if (pat
== NULL_RTX
)
13259 /* Implement `TARGET_EXPAND_BUILTIN'. */
13260 /* Expand an expression EXP that calls a built-in function,
13261 with result going to TARGET if that's convenient
13262 (and in mode MODE if that's convenient).
13263 SUBTARGET may be used as the target for computing one of EXP's operands.
13264 IGNORE is nonzero if the value is to be ignored. */
13267 avr_expand_builtin (tree exp
, rtx target
,
13268 rtx subtarget ATTRIBUTE_UNUSED
,
13269 machine_mode mode ATTRIBUTE_UNUSED
,
13272 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
13273 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
13274 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
13275 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
13279 gcc_assert (id
< AVR_BUILTIN_COUNT
);
13283 case AVR_BUILTIN_NOP
:
13284 emit_insn (gen_nopv (GEN_INT(1)));
13287 case AVR_BUILTIN_DELAY_CYCLES
:
13289 arg0
= CALL_EXPR_ARG (exp
, 0);
13290 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13292 if (!CONST_INT_P (op0
))
13293 error ("%s expects a compile time integer constant", bname
);
13295 avr_expand_delay_cycles (op0
);
13300 case AVR_BUILTIN_INSERT_BITS
:
13302 arg0
= CALL_EXPR_ARG (exp
, 0);
13303 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
13305 if (!CONST_INT_P (op0
))
13307 error ("%s expects a compile time long integer constant"
13308 " as first argument", bname
);
13315 case AVR_BUILTIN_ROUNDHR
: case AVR_BUILTIN_ROUNDUHR
:
13316 case AVR_BUILTIN_ROUNDR
: case AVR_BUILTIN_ROUNDUR
:
13317 case AVR_BUILTIN_ROUNDLR
: case AVR_BUILTIN_ROUNDULR
:
13318 case AVR_BUILTIN_ROUNDLLR
: case AVR_BUILTIN_ROUNDULLR
:
13320 case AVR_BUILTIN_ROUNDHK
: case AVR_BUILTIN_ROUNDUHK
:
13321 case AVR_BUILTIN_ROUNDK
: case AVR_BUILTIN_ROUNDUK
:
13322 case AVR_BUILTIN_ROUNDLK
: case AVR_BUILTIN_ROUNDULK
:
13323 case AVR_BUILTIN_ROUNDLLK
: case AVR_BUILTIN_ROUNDULLK
:
13325 /* Warn about odd rounding. Rounding points >= FBIT will have
13328 if (TREE_CODE (CALL_EXPR_ARG (exp
, 1)) != INTEGER_CST
)
13331 int rbit
= (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1));
13333 if (rbit
>= (int) GET_MODE_FBIT (mode
))
13335 warning (OPT_Wextra
, "rounding to %d bits has no effect for "
13336 "fixed-point value with %d fractional bits",
13337 rbit
, GET_MODE_FBIT (mode
));
13339 return expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
, mode
,
13342 else if (rbit
<= - (int) GET_MODE_IBIT (mode
))
13344 warning (0, "rounding result will always be 0");
13345 return CONST0_RTX (mode
);
13348 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
13350 TR 18037 only specifies results for RP > 0. However, the
13351 remaining cases of -IBIT < RP <= 0 can easily be supported
13352 without any additional overhead. */
13357 /* No fold found and no insn: Call support function from libgcc. */
13359 if (d
->icode
== CODE_FOR_nothing
13360 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp
)) != NULL_TREE
)
13362 return expand_call (exp
, target
, ignore
);
13365 /* No special treatment needed: vanilla expand. */
13367 gcc_assert (d
->icode
!= CODE_FOR_nothing
);
13368 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
13370 if (d
->n_args
== 0)
13372 emit_insn ((GEN_FCN (d
->icode
)) (target
));
13376 return avr_default_expand_builtin (d
->icode
, exp
, target
);
13380 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
13383 avr_fold_absfx (tree tval
)
13385 if (FIXED_CST
!= TREE_CODE (tval
))
13388 /* Our fixed-points have no padding: Use double_int payload directly. */
13390 FIXED_VALUE_TYPE fval
= TREE_FIXED_CST (tval
);
13391 unsigned int bits
= GET_MODE_BITSIZE (fval
.mode
);
13392 double_int ival
= fval
.data
.sext (bits
);
13394 if (!ival
.is_negative())
13397 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
13399 fval
.data
= (ival
== double_int::min_value (bits
, false).sext (bits
))
13400 ? double_int::max_value (bits
, false)
13403 return build_fixed (TREE_TYPE (tval
), fval
);
13407 /* Implement `TARGET_FOLD_BUILTIN'. */
13410 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
13411 bool ignore ATTRIBUTE_UNUSED
)
13413 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
13414 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
13424 case AVR_BUILTIN_SWAP
:
13426 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
13427 build_int_cst (val_type
, 4));
13430 case AVR_BUILTIN_ABSHR
:
13431 case AVR_BUILTIN_ABSR
:
13432 case AVR_BUILTIN_ABSLR
:
13433 case AVR_BUILTIN_ABSLLR
:
13435 case AVR_BUILTIN_ABSHK
:
13436 case AVR_BUILTIN_ABSK
:
13437 case AVR_BUILTIN_ABSLK
:
13438 case AVR_BUILTIN_ABSLLK
:
13439 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
13441 return avr_fold_absfx (arg
[0]);
13443 case AVR_BUILTIN_BITSHR
: case AVR_BUILTIN_HRBITS
:
13444 case AVR_BUILTIN_BITSHK
: case AVR_BUILTIN_HKBITS
:
13445 case AVR_BUILTIN_BITSUHR
: case AVR_BUILTIN_UHRBITS
:
13446 case AVR_BUILTIN_BITSUHK
: case AVR_BUILTIN_UHKBITS
:
13448 case AVR_BUILTIN_BITSR
: case AVR_BUILTIN_RBITS
:
13449 case AVR_BUILTIN_BITSK
: case AVR_BUILTIN_KBITS
:
13450 case AVR_BUILTIN_BITSUR
: case AVR_BUILTIN_URBITS
:
13451 case AVR_BUILTIN_BITSUK
: case AVR_BUILTIN_UKBITS
:
13453 case AVR_BUILTIN_BITSLR
: case AVR_BUILTIN_LRBITS
:
13454 case AVR_BUILTIN_BITSLK
: case AVR_BUILTIN_LKBITS
:
13455 case AVR_BUILTIN_BITSULR
: case AVR_BUILTIN_ULRBITS
:
13456 case AVR_BUILTIN_BITSULK
: case AVR_BUILTIN_ULKBITS
:
13458 case AVR_BUILTIN_BITSLLR
: case AVR_BUILTIN_LLRBITS
:
13459 case AVR_BUILTIN_BITSLLK
: case AVR_BUILTIN_LLKBITS
:
13460 case AVR_BUILTIN_BITSULLR
: case AVR_BUILTIN_ULLRBITS
:
13461 case AVR_BUILTIN_BITSULLK
: case AVR_BUILTIN_ULLKBITS
:
13463 gcc_assert (TYPE_PRECISION (val_type
)
13464 == TYPE_PRECISION (TREE_TYPE (arg
[0])));
13466 return build1 (VIEW_CONVERT_EXPR
, val_type
, arg
[0]);
13468 case AVR_BUILTIN_INSERT_BITS
:
13470 tree tbits
= arg
[1];
13471 tree tval
= arg
[2];
13473 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
13475 bool changed
= false;
13477 avr_map_op_t best_g
;
13479 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
13481 /* No constant as first argument: Don't fold this and run into
13482 error in avr_expand_builtin. */
13487 tmap
= wide_int_to_tree (map_type
, arg
[0]);
13488 map
= TREE_INT_CST_LOW (tmap
);
13490 if (TREE_CODE (tval
) != INTEGER_CST
13491 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
13493 /* There are no F in the map, i.e. 3rd operand is unused.
13494 Replace that argument with some constant to render
13495 respective input unused. */
13497 tval
= build_int_cst (val_type
, 0);
13501 if (TREE_CODE (tbits
) != INTEGER_CST
13502 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
13504 /* Similar for the bits to be inserted. If they are unused,
13505 we can just as well pass 0. */
13507 tbits
= build_int_cst (val_type
, 0);
13510 if (TREE_CODE (tbits
) == INTEGER_CST
)
13512 /* Inserting bits known at compile time is easy and can be
13513 performed by AND and OR with appropriate masks. */
13515 int bits
= TREE_INT_CST_LOW (tbits
);
13516 int mask_ior
= 0, mask_and
= 0xff;
13518 for (i
= 0; i
< 8; i
++)
13520 int mi
= avr_map (map
, i
);
13524 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
13525 else mask_and
&= ~(1 << i
);
13529 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
13530 build_int_cst (val_type
, mask_ior
));
13531 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
13532 build_int_cst (val_type
, mask_and
));
13536 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
13538 /* If bits don't change their position we can use vanilla logic
13539 to merge the two arguments. */
13541 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
13543 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
13544 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
13546 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
13547 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
13548 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
13551 /* Try to decompose map to reduce overall cost.  */
13553 if (avr_log
.builtin
)
13554 avr_edump ("\n%?: %x\n%?: ROL cost: ", map
);
13556 best_g
= avr_map_op
[0];
13557 best_g
.cost
= 1000;
13559 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
13562 = avr_map_decompose (map
, avr_map_op
+ i
,
13563 TREE_CODE (tval
) == INTEGER_CST
);
13565 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
13569 if (avr_log
.builtin
)
13572 if (best_g
.arg
== 0)
13573 /* No optimization found */
13576 /* Apply operation G to the 2nd argument. */
13578 if (avr_log
.builtin
)
13579 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
13580 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
13582 /* Do right-shifts arithmetically: They copy the MSB instead of
13583 shifting in a non-usable value (0) as with a logical right-shift.  */
13585 tbits
= fold_convert (signed_char_type_node
, tbits
);
13586 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
13587 build_int_cst (val_type
, best_g
.arg
));
13588 tbits
= fold_convert (val_type
, tbits
);
13590 /* Use map o G^-1 instead of original map to undo the effect of G. */
13592 tmap
= wide_int_to_tree (map_type
, best_g
.map
);
13594 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
13595 } /* AVR_BUILTIN_INSERT_BITS */
/* Initialize the GCC target structure.
   Each pair below overrides a default target hook (from target-def.h,
   included at the top of this file) with the AVR-specific implementation
   defined earlier in this file; the table is then instantiated once by
   TARGET_INITIALIZER in the `targetm' definition that follows.  */

/* Assembler directives used to emit 16- and 32-bit integer data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

/* Hooks run at the prologue/epilogue boundaries of each function.  */
#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function return values and return-register classification.  */
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attribute handling.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

/* Section selection and section-info encoding (see the .progmem /
   SECTION_MACH_DEP machinery near the top of this file).  */
#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Cost models used by the RTL optimizers and register allocator.  */
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg

/* Argument passing.  */
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

/* Frame layout and register elimination.  */
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* AVR built-in functions (init / decl / expand / fold).  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins
#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin
#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

/* AVR supports the ISO/IEC TR 18037 fixed-point types.  */
#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Named address spaces; the non-generic ones all live in flash
   (see the AVR_SECTION_PROGMEM comment near the top of this file).  */
#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
  avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

/* Operand printing for the assembler output templates.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
/* The one and only target hook vector for this backend, filled in by
   TARGET_INITIALIZER (from target-def.h) using the TARGET_* macro
   overrides defined in this file.  */
struct gcc_target targetm = TARGET_INITIALIZER;
13768 #include "gt-avr.h"