1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2014 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
35 #include "print-tree.h"
37 #include "stor-layout.h"
38 #include "stringpool.h"
41 #include "c-family/c-common.h"
42 #include "diagnostic-core.h"
48 #include "langhooks.h"
51 #include "target-def.h"
55 /* Maximal allowed offset for an address in the LD command */
56 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
58 /* Return true if STR starts with PREFIX and false, otherwise. */
59 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
61 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
62 address space where data is to be located.
63 As the only non-generic address spaces are all located in flash,
64 this can be used to test if data shall go into some .progmem* section.
65 This must be the rightmost field of machine dependent section flags. */
66 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
68 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
69 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
71 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
72 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
73 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
75 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
76 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
79 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
80 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
81 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
82 / SYMBOL_FLAG_MACH_DEP)
84 /* Known address spaces. The order must be the same as in the respective
85 enum from avr.h (or designated initializers must be used). */
86 const avr_addrspace_t avr_addrspace
[ADDR_SPACE_COUNT
] =
88 { ADDR_SPACE_RAM
, 0, 2, "", 0, NULL
},
89 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0, ".progmem.data" },
90 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1, ".progmem1.data" },
91 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2, ".progmem2.data" },
92 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3, ".progmem3.data" },
93 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4, ".progmem4.data" },
94 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5, ".progmem5.data" },
95 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0, ".progmemx.data" },
99 /* Holding RAM addresses of some SFRs used by the compiler and that
100 are unique over all devices in an architecture like 'avr4'. */
104 /* SREG: The processor status */
107 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
113 /* RAMPZ: The high byte of 24-bit address used with ELPM */
116 /* SP: The stack pointer and its low and high byte */
121 static avr_addr_t avr_addr
;
124 /* Prototypes for local helper functions. */
126 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
127 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
128 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
129 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
130 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
131 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
133 static int get_sequence_length (rtx insns
);
134 static int sequent_regs_live (void);
135 static const char *ptrreg_to_str (int);
136 static const char *cond_string (enum rtx_code
);
137 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
138 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
140 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
141 static struct machine_function
* avr_init_machine_status (void);
144 /* Prototypes for hook implementors if needed before their implementation. */
146 static bool avr_rtx_costs (rtx
, int, int, int, int*, bool);
149 /* Allocate registers from r25 to r8 for parameters for function calls. */
150 #define FIRST_CUM_REG 26
152 /* Implicit target register of LPM instruction (R0) */
153 extern GTY(()) rtx lpm_reg_rtx
;
156 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
157 extern GTY(()) rtx lpm_addr_reg_rtx
;
158 rtx lpm_addr_reg_rtx
;
160 /* Temporary register RTX (reg:QI TMP_REGNO) */
161 extern GTY(()) rtx tmp_reg_rtx
;
164 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
165 extern GTY(()) rtx zero_reg_rtx
;
168 /* RTXs for all general purpose registers as QImode */
169 extern GTY(()) rtx all_regs_rtx
[32];
170 rtx all_regs_rtx
[32];
172 /* SREG, the processor status */
173 extern GTY(()) rtx sreg_rtx
;
176 /* RAMP* special function registers */
177 extern GTY(()) rtx rampd_rtx
;
178 extern GTY(()) rtx rampx_rtx
;
179 extern GTY(()) rtx rampy_rtx
;
180 extern GTY(()) rtx rampz_rtx
;
186 /* RTX containing the strings "" and "e", respectively */
187 static GTY(()) rtx xstring_empty
;
188 static GTY(()) rtx xstring_e
;
190 /* Current architecture. */
191 const avr_arch_t
*avr_current_arch
;
193 /* Current device. */
194 const avr_mcu_t
*avr_current_device
;
196 /* Section to put switch tables in. */
197 static GTY(()) section
*progmem_swtable_section
;
199 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
200 or to address space __flash* or __memx. Only used as singletons inside
201 avr_asm_select_section, but it must not be local there because of GTY. */
202 static GTY(()) section
*progmem_section
[ADDR_SPACE_COUNT
];
204 /* Condition for insns/expanders from avr-dimode.md. */
205 bool avr_have_dimode
= true;
207 /* To track if code will use .bss and/or .data. */
208 bool avr_need_clear_bss_p
= false;
209 bool avr_need_copy_data_p
= false;
/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO.  Return LO.  */

static char*
avr_tolower (char *lo, const char *up)
{
  /* Remember the start of the destination so it can be returned.  */
  char *lo0 = lo;

  for (; *up; up++, lo++)
    *lo = TOLOWER (*up);

  /* NUL-terminate the destination string.  */
  *lo = '\0';

  return lo0;
}
/* Custom function to count number of set bits in VAL.  */

static int
avr_popcount (unsigned int val)
{
  int pop = 0;

  /* Kernighan's method: each iteration clears the lowest set bit,
     so the loop runs once per set bit.  */
  for (; val; val &= val - 1)
    pop++;

  return pop;
}
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
252 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
256 enum machine_mode mode
= GET_MODE (xval
);
258 if (VOIDmode
== mode
)
261 for (i
= 0; i
< n_bytes
; i
++)
263 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
264 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
266 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
274 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
275 the bit representation of X by "casting" it to CONST_INT. */
278 avr_to_int_mode (rtx x
)
280 enum machine_mode mode
= GET_MODE (x
);
282 return VOIDmode
== mode
284 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
288 /* Implement `TARGET_OPTION_OVERRIDE'. */
291 avr_option_override (void)
293 flag_delete_null_pointer_checks
= 0;
295 /* caller-save.c looks for call-clobbered hard registers that are assigned
296 to pseudos that cross calls and tries so save-restore them around calls
297 in order to reduce the number of stack slots needed.
299 This might lead to situations where reload is no more able to cope
300 with the challenge of AVR's very few address registers and fails to
301 perform the requested spills. */
304 flag_caller_saves
= 0;
306 /* Unwind tables currently require a frame pointer for correctness,
307 see toplev.c:process_options(). */
309 if ((flag_unwind_tables
310 || flag_non_call_exceptions
311 || flag_asynchronous_unwind_tables
)
312 && !ACCUMULATE_OUTGOING_ARGS
)
314 flag_omit_frame_pointer
= 0;
318 warning (OPT_fpic
, "-fpic is not supported");
320 warning (OPT_fPIC
, "-fPIC is not supported");
322 warning (OPT_fpie
, "-fpie is not supported");
324 warning (OPT_fPIE
, "-fPIE is not supported");
326 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
327 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
329 /* RAM addresses of some SFRs common to all devices in respective arch. */
331 /* SREG: Status Register containing flags like I (global IRQ) */
332 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
334 /* RAMPZ: Address' high part when loading via ELPM */
335 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
337 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
338 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
339 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
340 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
342 /* SP: Stack Pointer (SP_H:SP_L) */
343 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
344 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
346 init_machine_status
= avr_init_machine_status
;
348 avr_log_set_avr_log();
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  /* Zero-initialized, garbage-collected per-function backend data.  */
  return ggc_alloc_cleared_machine_function ();
}
360 /* Implement `INIT_EXPANDERS'. */
361 /* The function works like a singleton. */
364 avr_init_expanders (void)
368 for (regno
= 0; regno
< 32; regno
++)
369 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
371 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
372 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
373 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
375 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
377 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
378 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
379 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
380 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
381 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
383 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
384 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
388 /* Implement `REGNO_REG_CLASS'. */
389 /* Return register class for register R. */
392 avr_regno_reg_class (int r
)
394 static const enum reg_class reg_class_tab
[] =
398 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
399 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
400 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
401 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
403 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
404 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
406 ADDW_REGS
, ADDW_REGS
,
408 POINTER_X_REGS
, POINTER_X_REGS
,
410 POINTER_Y_REGS
, POINTER_Y_REGS
,
412 POINTER_Z_REGS
, POINTER_Z_REGS
,
418 return reg_class_tab
[r
];
424 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
427 avr_scalar_mode_supported_p (enum machine_mode mode
)
429 if (ALL_FIXED_POINT_MODE_P (mode
))
435 return default_scalar_mode_supported_p (mode
);
439 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
442 avr_decl_flash_p (tree decl
)
444 if (TREE_CODE (decl
) != VAR_DECL
445 || TREE_TYPE (decl
) == error_mark_node
)
450 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
454 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
455 address space and FALSE, otherwise. */
458 avr_decl_memx_p (tree decl
)
460 if (TREE_CODE (decl
) != VAR_DECL
461 || TREE_TYPE (decl
) == error_mark_node
)
466 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
470 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
473 avr_mem_flash_p (rtx x
)
476 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
480 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
481 address space and FALSE, otherwise. */
484 avr_mem_memx_p (rtx x
)
487 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
491 /* A helper for the subsequent function attribute used to dig for
492 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
495 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
497 if (FUNCTION_DECL
== TREE_CODE (func
))
499 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
504 func
= TREE_TYPE (func
);
507 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
508 || TREE_CODE (func
) == METHOD_TYPE
);
510 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
513 /* Return nonzero if FUNC is a naked function. */
516 avr_naked_function_p (tree func
)
518 return avr_lookup_function_attribute1 (func
, "naked");
521 /* Return nonzero if FUNC is an interrupt function as specified
522 by the "interrupt" attribute. */
525 avr_interrupt_function_p (tree func
)
527 return avr_lookup_function_attribute1 (func
, "interrupt");
530 /* Return nonzero if FUNC is a signal function as specified
531 by the "signal" attribute. */
534 avr_signal_function_p (tree func
)
536 return avr_lookup_function_attribute1 (func
, "signal");
539 /* Return nonzero if FUNC is an OS_task function. */
542 avr_OS_task_function_p (tree func
)
544 return avr_lookup_function_attribute1 (func
, "OS_task");
547 /* Return nonzero if FUNC is an OS_main function. */
550 avr_OS_main_function_p (tree func
)
552 return avr_lookup_function_attribute1 (func
, "OS_main");
556 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
557 /* Sanity checking for above function attributes. */
560 avr_set_current_function (tree decl
)
565 if (decl
== NULL_TREE
566 || current_function_decl
== NULL_TREE
567 || current_function_decl
== error_mark_node
569 || cfun
->machine
->attributes_checked_p
)
572 loc
= DECL_SOURCE_LOCATION (decl
);
574 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
575 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
576 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
577 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
578 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
580 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
582 /* Too many attributes make no sense as they request conflicting features. */
584 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
585 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
586 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
587 " exclusive", "OS_task", "OS_main", isr
);
589 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
591 if (cfun
->machine
->is_naked
592 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
593 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
594 " no effect on %qs function", "OS_task", "OS_main", "naked");
596 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
598 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
599 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
602 name
= DECL_ASSEMBLER_NAME_SET_P (decl
)
603 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))
604 : IDENTIFIER_POINTER (DECL_NAME (decl
));
606 /* Skip a leading '*' that might still prefix the assembler name,
607 e.g. in non-LTO runs. */
609 name
= default_strip_name_encoding (name
);
611 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC started
612 using this when it switched from SIGNAL and INTERRUPT to ISR. */
614 if (cfun
->machine
->is_interrupt
)
615 cfun
->machine
->is_signal
= 0;
617 /* Interrupt handlers must be void __vector (void) functions. */
619 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
620 error_at (loc
, "%qs function cannot have arguments", isr
);
622 if (TREE_CODE (ret
) != VOID_TYPE
)
623 error_at (loc
, "%qs function cannot return a value", isr
);
625 /* If the function has the 'signal' or 'interrupt' attribute, ensure
626 that the name of the function is "__vector_NN" so as to catch
627 when the user misspells the vector name. */
629 if (!STR_PREFIX_P (name
, "__vector"))
630 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
634 /* Don't print the above diagnostics more than once. */
636 cfun
->machine
->attributes_checked_p
= 1;
640 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
643 avr_accumulate_outgoing_args (void)
646 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
648 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
649 what offset is correct. In some cases it is relative to
650 virtual_outgoing_args_rtx and in others it is relative to
651 virtual_stack_vars_rtx. For example code see
652 gcc.c-torture/execute/built-in-setjmp.c
653 gcc.c-torture/execute/builtins/sprintf-chk.c */
655 return (TARGET_ACCUMULATE_OUTGOING_ARGS
656 && !(cfun
->calls_setjmp
657 || cfun
->has_nonlocal_label
));
661 /* Report contribution of accumulated outgoing arguments to stack size. */
664 avr_outgoing_args_size (void)
666 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  /* NOTE(review): the "+1" matches AVR's post-decrement PUSH (SP points
     one below the top of stack); accumulated outgoing arguments, if any,
     sit between SP and the frame variables.  Confirm against avr.h.  */
  return 1 + avr_outgoing_args_size ();
}
681 /* Return the number of hard registers to push/pop in the prologue/epilogue
682 of the current function, and optionally store these registers in SET. */
685 avr_regs_to_save (HARD_REG_SET
*set
)
688 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
691 CLEAR_HARD_REG_SET (*set
);
694 /* No need to save any registers if the function never returns or
695 has the "OS_task" or "OS_main" attribute. */
697 if (TREE_THIS_VOLATILE (current_function_decl
)
698 || cfun
->machine
->is_OS_task
699 || cfun
->machine
->is_OS_main
)
702 for (reg
= 0; reg
< 32; reg
++)
704 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
705 any global register variables. */
710 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
711 || (df_regs_ever_live_p (reg
)
712 && (int_or_sig_p
|| !call_used_regs
[reg
])
713 /* Don't record frame pointer registers here. They are treated
714 individually in prologue. */
715 && !(frame_pointer_needed
716 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
719 SET_HARD_REG_BIT (*set
, reg
);
727 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
730 avr_allocate_stack_slots_for_args (void)
732 return !cfun
->machine
->is_naked
;
736 /* Return true if register FROM can be eliminated via register TO. */
739 avr_can_eliminate (const int from
, const int to
)
741 return ((frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
742 || !frame_pointer_needed
);
746 /* Implement `TARGET_WARN_FUNC_RETURN'. */
749 avr_warn_func_return (tree decl
)
751 /* Naked functions are implemented entirely in assembly, including the
752 return sequence, so suppress warnings about this. */
754 return !avr_naked_function_p (decl
);
757 /* Compute offset between arg_pointer and frame_pointer. */
760 avr_initial_elimination_offset (int from
, int to
)
762 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
766 int offset
= frame_pointer_needed
? 2 : 0;
767 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
769 offset
+= avr_regs_to_save (NULL
);
770 return (get_frame_size () + avr_outgoing_args_size()
771 + avr_pc_size
+ 1 + offset
);
776 /* Helper for the function below. */
779 avr_adjust_type_node (tree
*node
, enum machine_mode mode
, int sat_p
)
781 *node
= make_node (FIXED_POINT_TYPE
);
782 TYPE_SATURATING (*node
) = sat_p
;
783 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
784 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
785 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
786 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
787 TYPE_ALIGN (*node
) = 8;
788 SET_TYPE_MODE (*node
, mode
);
794 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
797 avr_build_builtin_va_list (void)
799 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
800 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
801 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
802 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
803 to the long long accum modes instead of the desired [U]TAmode.
805 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
806 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
807 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
808 libgcc to detect IBIT and FBIT. */
810 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
811 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
812 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
813 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
815 unsigned_long_long_accum_type_node
= uta_type_node
;
816 long_long_accum_type_node
= ta_type_node
;
817 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
818 sat_long_long_accum_type_node
= sat_ta_type_node
;
820 /* Dispatch to the default handler. */
822 return std_build_builtin_va_list ();
826 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
827 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
828 frame pointer by +STARTING_FRAME_OFFSET.
829 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
830 avoids creating add/sub of offset in nonlocal goto and setjmp. */
833 avr_builtin_setjmp_frame_value (void)
835 rtx xval
= gen_reg_rtx (Pmode
);
836 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
837 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
842 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
843 This is return address of function. */
846 avr_return_addr_rtx (int count
, rtx tem
)
850 /* Can only return this function's return address. Others not supported. */
856 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
857 warning (0, "%<builtin_return_address%> contains only 2 bytes"
861 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
863 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
864 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
865 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
869 /* Return 1 if the function epilogue is just a single "ret". */
872 avr_simple_epilogue (void)
874 return (! frame_pointer_needed
875 && get_frame_size () == 0
876 && avr_outgoing_args_size() == 0
877 && avr_regs_to_save (NULL
) == 0
878 && ! cfun
->machine
->is_interrupt
879 && ! cfun
->machine
->is_signal
880 && ! cfun
->machine
->is_naked
881 && ! TREE_THIS_VOLATILE (current_function_decl
));
884 /* This function checks sequence of live registers. */
887 sequent_regs_live (void)
893 for (reg
= 0; reg
< 18; ++reg
)
897 /* Don't recognize sequences that contain global register
906 if (!call_used_regs
[reg
])
908 if (df_regs_ever_live_p (reg
))
918 if (!frame_pointer_needed
)
920 if (df_regs_ever_live_p (REG_Y
))
928 if (df_regs_ever_live_p (REG_Y
+1))
941 return (cur_seq
== live_seq
) ? live_seq
: 0;
944 /* Obtain the length sequence of insns. */
947 get_sequence_length (rtx insns
)
952 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
953 length
+= get_attr_length (insn
);
959 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
962 avr_incoming_return_addr_rtx (void)
964 /* The return address is at the top of the stack. Note that the push
965 was via post-decrement, which means the actual address is off by one. */
966 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
969 /* Helper for expand_prologue. Emit a push of a byte register. */
972 emit_push_byte (unsigned regno
, bool frame_related_p
)
976 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
977 mem
= gen_frame_mem (QImode
, mem
);
978 reg
= gen_rtx_REG (QImode
, regno
);
980 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
982 RTX_FRAME_RELATED_P (insn
) = 1;
984 cfun
->machine
->stack_usage
++;
988 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
989 SFR is a MEM representing the memory location of the SFR.
990 If CLR_P then clear the SFR after the push using zero_reg. */
993 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
997 gcc_assert (MEM_P (sfr
));
999 /* IN __tmp_reg__, IO(SFR) */
1000 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
1001 if (frame_related_p
)
1002 RTX_FRAME_RELATED_P (insn
) = 1;
1004 /* PUSH __tmp_reg__ */
1005 emit_push_byte (TMP_REGNO
, frame_related_p
);
1009 /* OUT IO(SFR), __zero_reg__ */
1010 insn
= emit_move_insn (sfr
, const0_rtx
);
1011 if (frame_related_p
)
1012 RTX_FRAME_RELATED_P (insn
) = 1;
1017 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
1020 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1021 int live_seq
= sequent_regs_live ();
1023 HOST_WIDE_INT size_max
1024 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1026 bool minimize
= (TARGET_CALL_PROLOGUES
1030 && !cfun
->machine
->is_OS_task
1031 && !cfun
->machine
->is_OS_main
);
1034 && (frame_pointer_needed
1035 || avr_outgoing_args_size() > 8
1036 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1040 int first_reg
, reg
, offset
;
1042 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1043 gen_int_mode (size
, HImode
));
1045 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1046 gen_int_mode (live_seq
+size
, HImode
));
1047 insn
= emit_insn (pattern
);
1048 RTX_FRAME_RELATED_P (insn
) = 1;
1050 /* Describe the effect of the unspec_volatile call to prologue_saves.
1051 Note that this formulation assumes that add_reg_note pushes the
1052 notes to the front. Thus we build them in the reverse order of
1053 how we want dwarf2out to process them. */
1055 /* The function does always set frame_pointer_rtx, but whether that
1056 is going to be permanent in the function is frame_pointer_needed. */
1058 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1059 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
1061 : stack_pointer_rtx
),
1062 plus_constant (Pmode
, stack_pointer_rtx
,
1063 -(size
+ live_seq
))));
1065 /* Note that live_seq always contains r28+r29, but the other
1066 registers to be saved are all below 18. */
1068 first_reg
= 18 - (live_seq
- 2);
1070 for (reg
= 29, offset
= -live_seq
+ 1;
1072 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1076 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1078 r
= gen_rtx_REG (QImode
, reg
);
1079 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1082 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1084 else /* !minimize */
1088 for (reg
= 0; reg
< 32; ++reg
)
1089 if (TEST_HARD_REG_BIT (set
, reg
))
1090 emit_push_byte (reg
, true);
1092 if (frame_pointer_needed
1093 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1095 /* Push frame pointer. Always be consistent about the
1096 ordering of pushes -- epilogue_restores expects the
1097 register pair to be pushed low byte first. */
1099 emit_push_byte (REG_Y
, true);
1100 emit_push_byte (REG_Y
+ 1, true);
1103 if (frame_pointer_needed
1106 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1107 RTX_FRAME_RELATED_P (insn
) = 1;
1112 /* Creating a frame can be done by direct manipulation of the
1113 stack or via the frame pointer. These two methods are:
1120 the optimum method depends on function type, stack and
1121 frame size. To avoid a complex logic, both methods are
1122 tested and shortest is selected.
1124 There is also the case where SIZE != 0 and no frame pointer is
1125 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1126 In that case, insn (*) is not needed.
1127 We use the X register as scratch. This is safe because in X
1129 In an interrupt routine, the case of SIZE != 0 together with
1130 !frame_pointer_needed can only occur if the function is not a
1131 leaf function and thus X has already been saved. */
1134 HOST_WIDE_INT size_cfa
= size
, neg_size
;
1135 rtx fp_plus_insns
, fp
, my_fp
;
1137 gcc_assert (frame_pointer_needed
1141 fp
= my_fp
= (frame_pointer_needed
1143 : gen_rtx_REG (Pmode
, REG_X
));
1145 if (AVR_HAVE_8BIT_SP
)
1147 /* The high byte (r29) does not change:
1148 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1150 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1153 /* Cut down size and avoid size = 0 so that we don't run
1154 into ICE like PR52488 in the remainder. */
1156 if (size
> size_max
)
1158 /* Don't error so that insane code from newlib still compiles
1159 and does not break building newlib. As PR51345 is implemented
1160 now, there are multilib variants with -msp8.
1162 If user wants sanity checks he can use -Wstack-usage=
1165 For CFA we emit the original, non-saturated size so that
1166 the generic machinery is aware of the real stack usage and
1167 will print the above diagnostic as expected. */
1172 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1173 neg_size
= trunc_int_for_mode (-size
, GET_MODE (my_fp
));
1175 /************ Method 1: Adjust frame pointer ************/
1179 /* Normally, the dwarf2out frame-related-expr interpreter does
1180 not expect to have the CFA change once the frame pointer is
1181 set up. Thus, we avoid marking the move insn below and
1182 instead indicate that the entire operation is complete after
1183 the frame pointer subtraction is done. */
1185 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1186 if (frame_pointer_needed
)
1188 RTX_FRAME_RELATED_P (insn
) = 1;
1189 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1190 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1193 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1196 if (frame_pointer_needed
)
1198 RTX_FRAME_RELATED_P (insn
) = 1;
1199 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1200 gen_rtx_SET (VOIDmode
, fp
,
1201 plus_constant (Pmode
, fp
,
1205 /* Copy to stack pointer. Note that since we've already
1206 changed the CFA to the frame pointer this operation
1207 need not be annotated if frame pointer is needed.
1208 Always move through unspec, see PR50063.
1209 For meaning of irq_state see movhi_sp_r insn. */
1211 if (cfun
->machine
->is_interrupt
)
1214 if (TARGET_NO_INTERRUPTS
1215 || cfun
->machine
->is_signal
1216 || cfun
->machine
->is_OS_main
)
1219 if (AVR_HAVE_8BIT_SP
)
1222 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1223 fp
, GEN_INT (irq_state
)));
1224 if (!frame_pointer_needed
)
1226 RTX_FRAME_RELATED_P (insn
) = 1;
1227 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1228 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1229 plus_constant (Pmode
,
1234 fp_plus_insns
= get_insns ();
1237 /************ Method 2: Adjust Stack pointer ************/
1239 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1240 can only handle specific offsets. */
1242 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1248 insn
= emit_move_insn (stack_pointer_rtx
,
1249 plus_constant (Pmode
, stack_pointer_rtx
,
1251 RTX_FRAME_RELATED_P (insn
) = 1;
1252 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1253 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1254 plus_constant (Pmode
,
1257 if (frame_pointer_needed
)
1259 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1260 RTX_FRAME_RELATED_P (insn
) = 1;
1263 sp_plus_insns
= get_insns ();
1266 /************ Use shortest method ************/
1268 emit_insn (get_sequence_length (sp_plus_insns
)
1269 < get_sequence_length (fp_plus_insns
)
1275 emit_insn (fp_plus_insns
);
1278 cfun
->machine
->stack_usage
+= size_cfa
;
1279 } /* !minimize && size != 0 */
1284 /* Output function prologue. */
1287 avr_expand_prologue (void)
1292 size
= get_frame_size() + avr_outgoing_args_size();
1294 cfun
->machine
->stack_usage
= 0;
1296 /* Prologue: naked. */
1297 if (cfun
->machine
->is_naked
)
1302 avr_regs_to_save (&set
);
1304 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1306 /* Enable interrupts. */
1307 if (cfun
->machine
->is_interrupt
)
1308 emit_insn (gen_enable_interrupt ());
1310 /* Push zero reg. */
1311 emit_push_byte (ZERO_REGNO
, true);
1314 emit_push_byte (TMP_REGNO
, true);
1317 /* ??? There's no dwarf2 column reserved for SREG. */
1318 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1320 /* Clear zero reg. */
1321 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1323 /* Prevent any attempt to delete the setting of ZERO_REG! */
1324 emit_use (zero_reg_rtx
);
1326 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1327 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1330 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1333 && TEST_HARD_REG_BIT (set
, REG_X
)
1334 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1336 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1340 && (frame_pointer_needed
1341 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1342 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1344 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1348 && TEST_HARD_REG_BIT (set
, REG_Z
)
1349 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1351 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1353 } /* is_interrupt is_signal */
1355 avr_prologue_setup_frame (size
, set
);
1357 if (flag_stack_usage_info
)
1358 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1362 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1363 /* Output summary at end of function prologue. */
1366 avr_asm_function_end_prologue (FILE *file
)
1368 if (cfun
->machine
->is_naked
)
1370 fputs ("/* prologue: naked */\n", file
);
1374 if (cfun
->machine
->is_interrupt
)
1376 fputs ("/* prologue: Interrupt */\n", file
);
1378 else if (cfun
->machine
->is_signal
)
1380 fputs ("/* prologue: Signal */\n", file
);
1383 fputs ("/* prologue: function */\n", file
);
1386 if (ACCUMULATE_OUTGOING_ARGS
)
1387 fprintf (file
, "/* outgoing args size = %d */\n",
1388 avr_outgoing_args_size());
1390 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1392 fprintf (file
, "/* stack size = %d */\n",
1393 cfun
->machine
->stack_usage
);
1394 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1395 usage for offset so that SP + .L__stack_offset = return address. */
1396 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1400 /* Implement `EPILOGUE_USES'. */
1403 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1405 if (reload_completed
1407 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1412 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1415 emit_pop_byte (unsigned regno
)
1419 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1420 mem
= gen_frame_mem (QImode
, mem
);
1421 reg
= gen_rtx_REG (QImode
, regno
);
1423 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1426 /* Output RTL epilogue. */
1429 avr_expand_epilogue (bool sibcall_p
)
1436 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1438 size
= get_frame_size() + avr_outgoing_args_size();
1440 /* epilogue: naked */
1441 if (cfun
->machine
->is_naked
)
1443 gcc_assert (!sibcall_p
);
1445 emit_jump_insn (gen_return ());
1449 avr_regs_to_save (&set
);
1450 live_seq
= sequent_regs_live ();
1452 minimize
= (TARGET_CALL_PROLOGUES
1455 && !cfun
->machine
->is_OS_task
1456 && !cfun
->machine
->is_OS_main
);
1460 || frame_pointer_needed
1463 /* Get rid of frame. */
1465 if (!frame_pointer_needed
)
1467 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1472 emit_move_insn (frame_pointer_rtx
,
1473 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1476 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1482 /* Try two methods to adjust stack and select shortest. */
1487 HOST_WIDE_INT size_max
;
1489 gcc_assert (frame_pointer_needed
1493 fp
= my_fp
= (frame_pointer_needed
1495 : gen_rtx_REG (Pmode
, REG_X
));
1497 if (AVR_HAVE_8BIT_SP
)
1499 /* The high byte (r29) does not change:
1500 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1502 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1505 /* For rationale see comment in prologue generation. */
1507 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1508 if (size
> size_max
)
1510 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1512 /********** Method 1: Adjust fp register **********/
1516 if (!frame_pointer_needed
)
1517 emit_move_insn (fp
, stack_pointer_rtx
);
1519 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1521 /* Copy to stack pointer. */
1523 if (TARGET_NO_INTERRUPTS
)
1526 if (AVR_HAVE_8BIT_SP
)
1529 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1530 GEN_INT (irq_state
)));
1532 fp_plus_insns
= get_insns ();
1535 /********** Method 2: Adjust Stack pointer **********/
1537 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1543 emit_move_insn (stack_pointer_rtx
,
1544 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1546 sp_plus_insns
= get_insns ();
1549 /************ Use shortest method ************/
1551 emit_insn (get_sequence_length (sp_plus_insns
)
1552 < get_sequence_length (fp_plus_insns
)
1557 emit_insn (fp_plus_insns
);
1560 if (frame_pointer_needed
1561 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1563 /* Restore previous frame_pointer. See avr_expand_prologue for
1564 rationale for not using pophi. */
1566 emit_pop_byte (REG_Y
+ 1);
1567 emit_pop_byte (REG_Y
);
1570 /* Restore used registers. */
1572 for (reg
= 31; reg
>= 0; --reg
)
1573 if (TEST_HARD_REG_BIT (set
, reg
))
1574 emit_pop_byte (reg
);
1578 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1579 The conditions to restore them must be tha same as in prologue. */
1582 && TEST_HARD_REG_BIT (set
, REG_Z
)
1583 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1585 emit_pop_byte (TMP_REGNO
);
1586 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1590 && (frame_pointer_needed
1591 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1592 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1594 emit_pop_byte (TMP_REGNO
);
1595 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1599 && TEST_HARD_REG_BIT (set
, REG_X
)
1600 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1602 emit_pop_byte (TMP_REGNO
);
1603 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1608 emit_pop_byte (TMP_REGNO
);
1609 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1612 /* Restore SREG using tmp_reg as scratch. */
1614 emit_pop_byte (TMP_REGNO
);
1615 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1617 /* Restore tmp REG. */
1618 emit_pop_byte (TMP_REGNO
);
1620 /* Restore zero REG. */
1621 emit_pop_byte (ZERO_REGNO
);
1625 emit_jump_insn (gen_return ());
1629 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
1632 avr_asm_function_begin_epilogue (FILE *file
)
1634 fprintf (file
, "/* epilogue start */\n");
1638 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1641 avr_cannot_modify_jumps_p (void)
1644 /* Naked Functions must not have any instructions after
1645 their epilogue, see PR42240 */
1647 if (reload_completed
1649 && cfun
->machine
->is_naked
)
1658 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1661 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
, addr_space_t as
)
1663 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1664 This hook just serves to hack around PR rtl-optimization/52543 by
1665 claiming that non-generic addresses were mode-dependent so that
1666 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1667 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1668 generic address space which is not true. */
1670 return !ADDR_SPACE_GENERIC_P (as
);
1674 /* Helper function for `avr_legitimate_address_p'. */
1677 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1678 RTX_CODE outer_code
, bool strict
)
1681 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1682 as
, outer_code
, UNKNOWN
)
1684 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1688 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1689 machine for a memory operand of mode MODE. */
1692 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1694 bool ok
= CONSTANT_ADDRESS_P (x
);
1696 switch (GET_CODE (x
))
1699 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1703 && GET_MODE_SIZE (mode
) > 4
1704 && REG_X
== REGNO (x
))
1712 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1713 GET_CODE (x
), strict
);
1718 rtx reg
= XEXP (x
, 0);
1719 rtx op1
= XEXP (x
, 1);
1722 && CONST_INT_P (op1
)
1723 && INTVAL (op1
) >= 0)
1725 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1730 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1733 if (reg
== frame_pointer_rtx
1734 || reg
== arg_pointer_rtx
)
1739 else if (frame_pointer_needed
1740 && reg
== frame_pointer_rtx
)
1752 if (avr_log
.legitimate_address_p
)
1754 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1755 "reload_completed=%d reload_in_progress=%d %s:",
1756 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1757 reg_renumber
? "(reg_renumber)" : "");
1759 if (GET_CODE (x
) == PLUS
1760 && REG_P (XEXP (x
, 0))
1761 && CONST_INT_P (XEXP (x
, 1))
1762 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1765 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1766 true_regnum (XEXP (x
, 0)));
1769 avr_edump ("\n%r\n", x
);
1776 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1777 now only a helper for avr_addr_space_legitimize_address. */
1778 /* Attempts to replace X with a valid
1779 memory address for an operand of mode MODE */
1782 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1784 bool big_offset_p
= false;
1788 if (GET_CODE (oldx
) == PLUS
1789 && REG_P (XEXP (oldx
, 0)))
1791 if (REG_P (XEXP (oldx
, 1)))
1792 x
= force_reg (GET_MODE (oldx
), oldx
);
1793 else if (CONST_INT_P (XEXP (oldx
, 1)))
1795 int offs
= INTVAL (XEXP (oldx
, 1));
1796 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1797 && offs
> MAX_LD_OFFSET (mode
))
1799 big_offset_p
= true;
1800 x
= force_reg (GET_MODE (oldx
), oldx
);
1805 if (avr_log
.legitimize_address
)
1807 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1810 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1817 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1818 /* This will allow register R26/27 to be used where it is no worse than normal
1819 base pointers R28/29 or R30/31. For example, if base offset is greater
1820 than 63 bytes or for R++ or --R addressing. */
1823 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1824 int opnum
, int type
, int addr_type
,
1825 int ind_levels ATTRIBUTE_UNUSED
,
1826 rtx (*mk_memloc
)(rtx
,int))
1830 if (avr_log
.legitimize_reload_address
)
1831 avr_edump ("\n%?:%m %r\n", mode
, x
);
1833 if (1 && (GET_CODE (x
) == POST_INC
1834 || GET_CODE (x
) == PRE_DEC
))
1836 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1837 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1838 opnum
, RELOAD_OTHER
);
1840 if (avr_log
.legitimize_reload_address
)
1841 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1842 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1847 if (GET_CODE (x
) == PLUS
1848 && REG_P (XEXP (x
, 0))
1849 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1850 && CONST_INT_P (XEXP (x
, 1))
1851 && INTVAL (XEXP (x
, 1)) >= 1)
1853 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1857 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1859 int regno
= REGNO (XEXP (x
, 0));
1860 rtx mem
= mk_memloc (x
, regno
);
1862 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1863 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1864 1, (enum reload_type
) addr_type
);
1866 if (avr_log
.legitimize_reload_address
)
1867 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1868 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1870 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1871 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1872 opnum
, (enum reload_type
) type
);
1874 if (avr_log
.legitimize_reload_address
)
1875 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1876 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1881 else if (! (frame_pointer_needed
1882 && XEXP (x
, 0) == frame_pointer_rtx
))
1884 push_reload (x
, NULL_RTX
, px
, NULL
,
1885 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1886 opnum
, (enum reload_type
) type
);
1888 if (avr_log
.legitimize_reload_address
)
1889 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1890 POINTER_REGS
, x
, NULL_RTX
);
1900 /* Implement `TARGET_SECONDARY_RELOAD' */
1903 avr_secondary_reload (bool in_p
, rtx x
,
1904 reg_class_t reload_class ATTRIBUTE_UNUSED
,
1905 enum machine_mode mode
, secondary_reload_info
*sri
)
1909 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1910 && ADDR_SPACE_MEMX
!= MEM_ADDR_SPACE (x
))
1912 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1919 case QImode
: sri
->icode
= CODE_FOR_reload_inqi
; break;
1920 case QQmode
: sri
->icode
= CODE_FOR_reload_inqq
; break;
1921 case UQQmode
: sri
->icode
= CODE_FOR_reload_inuqq
; break;
1923 case HImode
: sri
->icode
= CODE_FOR_reload_inhi
; break;
1924 case HQmode
: sri
->icode
= CODE_FOR_reload_inhq
; break;
1925 case HAmode
: sri
->icode
= CODE_FOR_reload_inha
; break;
1926 case UHQmode
: sri
->icode
= CODE_FOR_reload_inuhq
; break;
1927 case UHAmode
: sri
->icode
= CODE_FOR_reload_inuha
; break;
1929 case PSImode
: sri
->icode
= CODE_FOR_reload_inpsi
; break;
1931 case SImode
: sri
->icode
= CODE_FOR_reload_insi
; break;
1932 case SFmode
: sri
->icode
= CODE_FOR_reload_insf
; break;
1933 case SQmode
: sri
->icode
= CODE_FOR_reload_insq
; break;
1934 case SAmode
: sri
->icode
= CODE_FOR_reload_insa
; break;
1935 case USQmode
: sri
->icode
= CODE_FOR_reload_inusq
; break;
1936 case USAmode
: sri
->icode
= CODE_FOR_reload_inusa
; break;
1944 /* Helper function to print assembler resp. track instruction
1945 sequence lengths. Always return "".
1948 Output assembler code from template TPL with operands supplied
1949 by OPERANDS. This is just forwarding to output_asm_insn.
1952 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1953 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1954 Don't output anything.
1958 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1962 output_asm_insn (tpl
, operands
);
1976 /* Return a pointer register name as a string. */
1979 ptrreg_to_str (int regno
)
1983 case REG_X
: return "X";
1984 case REG_Y
: return "Y";
1985 case REG_Z
: return "Z";
1987 output_operand_lossage ("address operand requires constraint for"
1988 " X, Y, or Z register");
1993 /* Return the condition name as a string.
1994 Used in conditional jump constructing */
1997 cond_string (enum rtx_code code
)
2006 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2011 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2027 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2028 /* Output ADDR to FILE as address. */
2031 avr_print_operand_address (FILE *file
, rtx addr
)
2033 switch (GET_CODE (addr
))
2036 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
2040 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2044 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2048 if (CONSTANT_ADDRESS_P (addr
)
2049 && text_segment_operand (addr
, VOIDmode
))
2052 if (GET_CODE (x
) == CONST
)
2054 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
2056 /* Assembler gs() will implant word address. Make offset
2057 a byte offset inside gs() for assembler. This is
2058 needed because the more logical (constant+gs(sym)) is not
2059 accepted by gas. For 128K and smaller devices this is ok.
2060 For large devices it will create a trampoline to offset
2061 from symbol which may not be what the user really wanted. */
2063 fprintf (file
, "gs(");
2064 output_addr_const (file
, XEXP (x
,0));
2065 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
2066 2 * INTVAL (XEXP (x
, 1)));
2068 if (warning (0, "pointer offset from symbol maybe incorrect"))
2070 output_addr_const (stderr
, addr
);
2071 fprintf(stderr
,"\n");
2076 fprintf (file
, "gs(");
2077 output_addr_const (file
, addr
);
2078 fprintf (file
, ")");
2082 output_addr_const (file
, addr
);
2087 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2090 avr_print_operand_punct_valid_p (unsigned char code
)
2092 return code
== '~' || code
== '!';
2096 /* Implement `TARGET_PRINT_OPERAND'. */
2097 /* Output X as assembler operand to file FILE.
2098 For a description of supported %-codes, see top of avr.md. */
2101 avr_print_operand (FILE *file
, rtx x
, int code
)
2105 if (code
>= 'A' && code
<= 'D')
2110 if (!AVR_HAVE_JMP_CALL
)
2113 else if (code
== '!')
2115 if (AVR_HAVE_EIJMP_EICALL
)
2118 else if (code
== 't'
2121 static int t_regno
= -1;
2122 static int t_nbits
= -1;
2124 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2126 t_regno
= REGNO (x
);
2127 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2129 else if (CONST_INT_P (x
) && t_regno
>= 0
2130 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2132 int bpos
= INTVAL (x
);
2134 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2136 fprintf (file
, ",%d", bpos
% 8);
2141 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2145 if (x
== zero_reg_rtx
)
2146 fprintf (file
, "__zero_reg__");
2147 else if (code
== 'r' && REGNO (x
) < 32)
2148 fprintf (file
, "%d", (int) REGNO (x
));
2150 fprintf (file
, reg_names
[REGNO (x
) + abcd
]);
2152 else if (CONST_INT_P (x
))
2154 HOST_WIDE_INT ival
= INTVAL (x
);
2157 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2158 else if (low_io_address_operand (x
, VOIDmode
)
2159 || high_io_address_operand (x
, VOIDmode
))
2161 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2162 fprintf (file
, "__RAMPZ__");
2163 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2164 fprintf (file
, "__RAMPY__");
2165 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2166 fprintf (file
, "__RAMPX__");
2167 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2168 fprintf (file
, "__RAMPD__");
2169 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2170 fprintf (file
, "__CCP__");
2171 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2172 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2173 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2176 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2177 ival
- avr_current_arch
->sfr_offset
);
2181 fatal_insn ("bad address, not an I/O address:", x
);
2185 rtx addr
= XEXP (x
, 0);
2189 if (!CONSTANT_P (addr
))
2190 fatal_insn ("bad address, not a constant:", addr
);
2191 /* Assembler template with m-code is data - not progmem section */
2192 if (text_segment_operand (addr
, VOIDmode
))
2193 if (warning (0, "accessing data memory with"
2194 " program memory address"))
2196 output_addr_const (stderr
, addr
);
2197 fprintf(stderr
,"\n");
2199 output_addr_const (file
, addr
);
2201 else if (code
== 'i')
2203 avr_print_operand (file
, addr
, 'i');
2205 else if (code
== 'o')
2207 if (GET_CODE (addr
) != PLUS
)
2208 fatal_insn ("bad address, not (reg+disp):", addr
);
2210 avr_print_operand (file
, XEXP (addr
, 1), 0);
2212 else if (code
== 'p' || code
== 'r')
2214 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2215 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2218 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2220 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2222 else if (GET_CODE (addr
) == PLUS
)
2224 avr_print_operand_address (file
, XEXP (addr
,0));
2225 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2226 fatal_insn ("internal compiler error. Bad address:"
2229 avr_print_operand (file
, XEXP (addr
,1), code
);
2232 avr_print_operand_address (file
, addr
);
2234 else if (code
== 'i')
2236 fatal_insn ("bad address, not an I/O address:", x
);
2238 else if (code
== 'x')
2240 /* Constant progmem address - like used in jmp or call */
2241 if (0 == text_segment_operand (x
, VOIDmode
))
2242 if (warning (0, "accessing program memory"
2243 " with data memory address"))
2245 output_addr_const (stderr
, x
);
2246 fprintf(stderr
,"\n");
2248 /* Use normal symbol for direct address no linker trampoline needed */
2249 output_addr_const (file
, x
);
2251 else if (CONST_FIXED_P (x
))
2253 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2255 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2257 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2259 else if (GET_CODE (x
) == CONST_DOUBLE
)
2263 if (GET_MODE (x
) != SFmode
)
2264 fatal_insn ("internal compiler error. Unknown mode:", x
);
2265 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2266 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2267 fprintf (file
, "0x%lx", val
);
2269 else if (GET_CODE (x
) == CONST_STRING
)
2270 fputs (XSTR (x
, 0), file
);
2271 else if (code
== 'j')
2272 fputs (cond_string (GET_CODE (x
)), file
);
2273 else if (code
== 'k')
2274 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2276 avr_print_operand_address (file
, x
);
2280 /* Worker function for `NOTICE_UPDATE_CC'. */
2281 /* Update the condition code in the INSN. */
2284 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2287 enum attr_cc cc
= get_attr_cc (insn
);
2297 rtx
*op
= recog_data
.operand
;
2300 /* Extract insn's operands. */
2301 extract_constrain_insn_cached (insn
);
2309 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2310 cc
= (enum attr_cc
) icc
;
2315 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2316 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2317 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2319 /* Any other "r,rL" combination does not alter cc0. */
2323 } /* inner switch */
2327 } /* outer swicth */
2332 /* Special values like CC_OUT_PLUS from above have been
2333 mapped to "standard" CC_* values so we never come here. */
2339 /* Insn does not affect CC at all. */
2347 set
= single_set (insn
);
2351 cc_status
.flags
|= CC_NO_OVERFLOW
;
2352 cc_status
.value1
= SET_DEST (set
);
2357 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2358 The V flag may or may not be known but that's ok because
2359 alter_cond will change tests to use EQ/NE. */
2360 set
= single_set (insn
);
2364 cc_status
.value1
= SET_DEST (set
);
2365 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2370 set
= single_set (insn
);
2373 cc_status
.value1
= SET_SRC (set
);
2377 /* Insn doesn't leave CC in a usable state. */
2383 /* Choose mode for jump insn:
2384 1 - relative jump in range -63 <= x <= 62 ;
2385 2 - relative jump in range -2046 <= x <= 2045 ;
2386 3 - absolute jump (only for ATmega[16]03). */
2389 avr_jump_mode (rtx x
, rtx insn
)
2391 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2392 ? XEXP (x
, 0) : x
));
2393 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2394 int jump_distance
= cur_addr
- dest_addr
;
2396 if (-63 <= jump_distance
&& jump_distance
<= 62)
2398 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2400 else if (AVR_HAVE_JMP_CALL
)
2406 /* Return an AVR condition jump commands.
2407 X is a comparison RTX.
2408 LEN is a number returned by avr_jump_mode function.
2409 If REVERSE nonzero then condition code in X must be reversed. */
2412 ret_cond_branch (rtx x
, int len
, int reverse
)
2414 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2419 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2420 return (len
== 1 ? ("breq .+2" CR_TAB
2422 len
== 2 ? ("breq .+4" CR_TAB
2430 return (len
== 1 ? ("breq .+2" CR_TAB
2432 len
== 2 ? ("breq .+4" CR_TAB
2439 return (len
== 1 ? ("breq .+2" CR_TAB
2441 len
== 2 ? ("breq .+4" CR_TAB
2448 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2449 return (len
== 1 ? ("breq %0" CR_TAB
2451 len
== 2 ? ("breq .+2" CR_TAB
2458 return (len
== 1 ? ("breq %0" CR_TAB
2460 len
== 2 ? ("breq .+2" CR_TAB
2467 return (len
== 1 ? ("breq %0" CR_TAB
2469 len
== 2 ? ("breq .+2" CR_TAB
2483 return ("br%j1 .+2" CR_TAB
2486 return ("br%j1 .+4" CR_TAB
2497 return ("br%k1 .+2" CR_TAB
2500 return ("br%k1 .+4" CR_TAB
2509 /* Worker function for `FINAL_PRESCAN_INSN'. */
2510 /* Output insn cost for next insn. */
2513 avr_final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2514 int num_operands ATTRIBUTE_UNUSED
)
2516 if (avr_log
.rtx_costs
)
2518 rtx set
= single_set (insn
);
2521 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2522 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2524 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2525 rtx_cost (PATTERN (insn
), INSN
, 0,
2526 optimize_insn_for_speed_p()));
2530 /* Return 0 if undefined, 1 if always true or always false. */
2533 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2535 unsigned int max
= (mode
== QImode
? 0xff :
2536 mode
== HImode
? 0xffff :
2537 mode
== PSImode
? 0xffffff :
2538 mode
== SImode
? 0xffffffff : 0);
2539 if (max
&& op
&& CONST_INT_P (x
))
2541 if (unsigned_condition (op
) != op
)
2544 if (max
!= (INTVAL (x
) & max
)
2545 && INTVAL (x
) != 0xff)
2552 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2553 /* Returns nonzero if REGNO is the number of a hard
2554 register in which function arguments are sometimes passed. */
2557 avr_function_arg_regno_p(int r
)
2559 return (r
>= 8 && r
<= 25);
2563 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2564 /* Initializing the variable cum for the state at the beginning
2565 of the argument list. */
2568 avr_init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2569 tree fndecl ATTRIBUTE_UNUSED
)
2572 cum
->regno
= FIRST_CUM_REG
;
2573 if (!libname
&& stdarg_p (fntype
))
2576 /* Assume the calle may be tail called */
2578 cfun
->machine
->sibcall_fails
= 0;
2581 /* Returns the number of registers to allocate for a function argument. */
2584 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2588 if (mode
== BLKmode
)
2589 size
= int_size_in_bytes (type
);
2591 size
= GET_MODE_SIZE (mode
);
2593 /* Align all function arguments to start in even-numbered registers.
2594 Odd-sized arguments leave holes above them. */
2596 return (size
+ 1) & ~1;
2600 /* Implement `TARGET_FUNCTION_ARG'. */
2601 /* Controls whether a function argument is passed
2602 in a register, and which register. */
2605 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2606 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2608 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2609 int bytes
= avr_num_arg_regs (mode
, type
);
2611 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2612 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2618 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2619 /* Update the summarizer variable CUM to advance past an argument
2620 in the argument list. */
2623 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2624 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2626 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2627 int bytes
= avr_num_arg_regs (mode
, type
);
2629 cum
->nregs
-= bytes
;
2630 cum
->regno
-= bytes
;
2632 /* A parameter is being passed in a call-saved register. As the original
2633 contents of these regs has to be restored before leaving the function,
2634 a function must not pass arguments in call-saved regs in order to get
2639 && !call_used_regs
[cum
->regno
])
2641 /* FIXME: We ship info on failing tail-call in struct machine_function.
2642 This uses internals of calls.c:expand_call() and the way args_so_far
2643 is used. targetm.function_ok_for_sibcall() needs to be extended to
2644 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2645 dependent so that such an extension is not wanted. */
2647 cfun
->machine
->sibcall_fails
= 1;
2650 /* Test if all registers needed by the ABI are actually available. If the
2651 user has fixed a GPR needed to pass an argument, an (implicit) function
2652 call will clobber that fixed register. See PR45099 for an example. */
2659 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2660 if (fixed_regs
[regno
])
2661 warning (0, "fixed register %s used to pass parameter to function",
2665 if (cum
->nregs
<= 0)
2668 cum
->regno
= FIRST_CUM_REG
;
2672 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2673 /* Decide whether we can make a sibling call to a function. DECL is the
2674 declaration of the function being targeted by the call and EXP is the
2675 CALL_EXPR representing the call. */
2678 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2682 /* Tail-calling must fail if callee-saved regs are used to pass
2683 function args. We must not tail-call when `epilogue_restores'
2684 is used. Unfortunately, we cannot tell at this point if that
2685 actually will happen or not, and we cannot step back from
2686 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2688 if (cfun
->machine
->sibcall_fails
2689 || TARGET_CALL_PROLOGUES
)
2694 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2698 decl_callee
= TREE_TYPE (decl_callee
);
2702 decl_callee
= fntype_callee
;
2704 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2705 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2707 decl_callee
= TREE_TYPE (decl_callee
);
2711 /* Ensure that caller and callee have compatible epilogues */
2713 if (cfun
->machine
->is_interrupt
2714 || cfun
->machine
->is_signal
2715 || cfun
->machine
->is_naked
2716 || avr_naked_function_p (decl_callee
)
2717 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2718 || (avr_OS_task_function_p (decl_callee
)
2719 != cfun
->machine
->is_OS_task
)
2720 || (avr_OS_main_function_p (decl_callee
)
2721 != cfun
->machine
->is_OS_main
))
2729 /***********************************************************************
2730 Functions for outputting various mov's for a various modes
2731 ************************************************************************/
2733 /* Return true if a value of mode MODE is read from flash by
2734 __load_* function from libgcc. */
2737 avr_load_libgcc_p (rtx op
)
2739 enum machine_mode mode
= GET_MODE (op
);
2740 int n_bytes
= GET_MODE_SIZE (mode
);
2744 && avr_mem_flash_p (op
));
2747 /* Return true if a value of mode MODE is read by __xload_* function. */
2750 avr_xload_libgcc_p (enum machine_mode mode
)
2752 int n_bytes
= GET_MODE_SIZE (mode
);
2755 || avr_current_device
->n_flash
> 1);
2759 /* Fixme: This is a hack because secondary reloads don't works as expected.
2761 Find an unused d-register to be used as scratch in INSN.
2762 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2763 is a register, skip all possible return values that overlap EXCLUDE.
2764 The policy for the returned register is similar to that of
2765 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2768 Return a QImode d-register or NULL_RTX if nothing found. */
2771 avr_find_unused_d_reg (rtx insn
, rtx exclude
)
2774 bool isr_p
= (avr_interrupt_function_p (current_function_decl
)
2775 || avr_signal_function_p (current_function_decl
));
2777 for (regno
= 16; regno
< 32; regno
++)
2779 rtx reg
= all_regs_rtx
[regno
];
2782 && reg_overlap_mentioned_p (exclude
, reg
))
2783 || fixed_regs
[regno
])
2788 /* Try non-live register */
2790 if (!df_regs_ever_live_p (regno
)
2791 && (TREE_THIS_VOLATILE (current_function_decl
)
2792 || cfun
->machine
->is_OS_task
2793 || cfun
->machine
->is_OS_main
2794 || (!isr_p
&& call_used_regs
[regno
])))
2799 /* Any live register can be used if it is unused after.
2800 Prologue/epilogue will care for it as needed. */
2802 if (df_regs_ever_live_p (regno
)
2803 && reg_unused_after (insn
, reg
))
2813 /* Helper function for the next function in the case where only restricted
2814 version of LPM instruction is available. */
2817 avr_out_lpm_no_lpmx (rtx insn
, rtx
*xop
, int *plen
)
2821 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2824 regno_dest
= REGNO (dest
);
2826 /* The implicit target register of LPM. */
2827 xop
[3] = lpm_reg_rtx
;
2829 switch (GET_CODE (addr
))
2836 gcc_assert (REG_Z
== REGNO (addr
));
2844 avr_asm_len ("%4lpm", xop
, plen
, 1);
2846 if (regno_dest
!= LPM_REGNO
)
2847 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2852 if (REGNO (dest
) == REG_Z
)
2853 return avr_asm_len ("%4lpm" CR_TAB
2858 "pop %A0", xop
, plen
, 6);
2860 avr_asm_len ("%4lpm" CR_TAB
2864 "mov %B0,%3", xop
, plen
, 5);
2866 if (!reg_unused_after (insn
, addr
))
2867 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2876 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2879 if (regno_dest
== LPM_REGNO
)
2880 avr_asm_len ("%4lpm" CR_TAB
2881 "adiw %2,1", xop
, plen
, 2);
2883 avr_asm_len ("%4lpm" CR_TAB
2885 "adiw %2,1", xop
, plen
, 3);
2888 avr_asm_len ("%4lpm" CR_TAB
2890 "adiw %2,1", xop
, plen
, 3);
2893 avr_asm_len ("%4lpm" CR_TAB
2895 "adiw %2,1", xop
, plen
, 3);
2898 avr_asm_len ("%4lpm" CR_TAB
2900 "adiw %2,1", xop
, plen
, 3);
2902 break; /* POST_INC */
2904 } /* switch CODE (addr) */
2910 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2911 OP[1] in AS1 to register OP[0].
2912 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2916 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2920 rtx src
= SET_SRC (single_set (insn
));
2922 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2925 addr_space_t as
= MEM_ADDR_SPACE (src
);
2932 warning (0, "writing to address space %qs not supported",
2933 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2938 addr
= XEXP (src
, 0);
2939 code
= GET_CODE (addr
);
2941 gcc_assert (REG_P (dest
));
2942 gcc_assert (REG
== code
|| POST_INC
== code
);
2946 xop
[2] = lpm_addr_reg_rtx
;
2947 xop
[4] = xstring_empty
;
2948 xop
[5] = tmp_reg_rtx
;
2949 xop
[6] = XEXP (rampz_rtx
, 0);
2951 segment
= avr_addrspace
[as
].segment
;
2953 /* Set RAMPZ as needed. */
2957 xop
[4] = GEN_INT (segment
);
2958 xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
);
2960 if (xop
[3] != NULL_RTX
)
2962 avr_asm_len ("ldi %3,%4" CR_TAB
2963 "out %i6,%3", xop
, plen
, 2);
2965 else if (segment
== 1)
2967 avr_asm_len ("clr %5" CR_TAB
2969 "out %i6,%5", xop
, plen
, 3);
2973 avr_asm_len ("mov %5,%2" CR_TAB
2976 "mov %2,%5", xop
, plen
, 4);
2981 if (!AVR_HAVE_ELPMX
)
2982 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2984 else if (!AVR_HAVE_LPMX
)
2986 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2989 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2991 switch (GET_CODE (addr
))
2998 gcc_assert (REG_Z
== REGNO (addr
));
3006 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
3009 if (REGNO (dest
) == REG_Z
)
3010 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3011 "%4lpm %B0,%a2" CR_TAB
3012 "mov %A0,%5", xop
, plen
, 3);
3015 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3016 "%4lpm %B0,%a2", xop
, plen
, 2);
3018 if (!reg_unused_after (insn
, addr
))
3019 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3026 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3027 "%4lpm %B0,%a2+" CR_TAB
3028 "%4lpm %C0,%a2", xop
, plen
, 3);
3030 if (!reg_unused_after (insn
, addr
))
3031 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
3037 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3038 "%4lpm %B0,%a2+", xop
, plen
, 2);
3040 if (REGNO (dest
) == REG_Z
- 2)
3041 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3042 "%4lpm %C0,%a2" CR_TAB
3043 "mov %D0,%5", xop
, plen
, 3);
3046 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3047 "%4lpm %D0,%a2", xop
, plen
, 2);
3049 if (!reg_unused_after (insn
, addr
))
3050 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
3060 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
3063 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
3064 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
3065 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
3066 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
3068 break; /* POST_INC */
3070 } /* switch CODE (addr) */
3072 if (xop
[4] == xstring_e
&& AVR_HAVE_RAMPD
)
3074 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3076 xop
[0] = zero_reg_rtx
;
3077 avr_asm_len ("out %i6,%0", xop
, plen
, 1);
3084 /* Worker function for xload_8 insn. */
3087 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
3093 xop
[2] = lpm_addr_reg_rtx
;
3094 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
3096 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, -1);
3098 avr_asm_len ("sbrc %1,7" CR_TAB
3099 "ld %3,%a2", xop
, plen
, 2);
3101 if (REGNO (xop
[0]) != REGNO (xop
[3]))
3102 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
3109 output_movqi (rtx insn
, rtx operands
[], int *plen
)
3111 rtx dest
= operands
[0];
3112 rtx src
= operands
[1];
3114 if (avr_mem_flash_p (src
)
3115 || avr_mem_flash_p (dest
))
3117 return avr_out_lpm (insn
, operands
, plen
);
3120 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
3124 if (REG_P (src
)) /* mov r,r */
3126 if (test_hard_reg_class (STACK_REG
, dest
))
3127 return avr_asm_len ("out %0,%1", operands
, plen
, -1);
3128 else if (test_hard_reg_class (STACK_REG
, src
))
3129 return avr_asm_len ("in %0,%1", operands
, plen
, -1);
3131 return avr_asm_len ("mov %0,%1", operands
, plen
, -1);
3133 else if (CONSTANT_P (src
))
3135 output_reload_in_const (operands
, NULL_RTX
, plen
, false);
3138 else if (MEM_P (src
))
3139 return out_movqi_r_mr (insn
, operands
, plen
); /* mov r,m */
3141 else if (MEM_P (dest
))
3146 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3148 return out_movqi_mr_r (insn
, xop
, plen
);
3156 output_movhi (rtx insn
, rtx xop
[], int *plen
)
3161 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
3163 if (avr_mem_flash_p (src
)
3164 || avr_mem_flash_p (dest
))
3166 return avr_out_lpm (insn
, xop
, plen
);
3169 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
3173 if (REG_P (src
)) /* mov r,r */
3175 if (test_hard_reg_class (STACK_REG
, dest
))
3177 if (AVR_HAVE_8BIT_SP
)
3178 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
3181 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3182 "out __SP_H__,%B1", xop
, plen
, -2);
3184 /* Use simple load of SP if no interrupts are used. */
3186 return TARGET_NO_INTERRUPTS
3187 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3188 "out __SP_L__,%A1", xop
, plen
, -2)
3189 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3191 "out __SP_H__,%B1" CR_TAB
3192 "out __SREG__,__tmp_reg__" CR_TAB
3193 "out __SP_L__,%A1", xop
, plen
, -5);
3195 else if (test_hard_reg_class (STACK_REG
, src
))
3197 return !AVR_HAVE_SPH
3198 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3199 "clr %B0", xop
, plen
, -2)
3201 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3202 "in %B0,__SP_H__", xop
, plen
, -2);
3205 return AVR_HAVE_MOVW
3206 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
3208 : avr_asm_len ("mov %A0,%A1" CR_TAB
3209 "mov %B0,%B1", xop
, plen
, -2);
3211 else if (CONSTANT_P (src
))
3213 return output_reload_inhi (xop
, NULL
, plen
);
3215 else if (MEM_P (src
))
3217 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
3220 else if (MEM_P (dest
))
3225 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3227 return out_movhi_mr_r (insn
, xop
, plen
);
3230 fatal_insn ("invalid insn:", insn
);
3236 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
3240 rtx x
= XEXP (src
, 0);
3242 if (CONSTANT_ADDRESS_P (x
))
3244 return optimize
> 0 && io_address_operand (x
, QImode
)
3245 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3246 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3248 else if (GET_CODE (x
) == PLUS
3249 && REG_P (XEXP (x
, 0))
3250 && CONST_INT_P (XEXP (x
, 1)))
3252 /* memory access by reg+disp */
3254 int disp
= INTVAL (XEXP (x
, 1));
3256 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3258 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3259 fatal_insn ("incorrect insn:",insn
);
3261 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3262 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3263 "ldd %0,Y+63" CR_TAB
3264 "sbiw r28,%o1-63", op
, plen
, -3);
3266 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3267 "sbci r29,hi8(-%o1)" CR_TAB
3269 "subi r28,lo8(%o1)" CR_TAB
3270 "sbci r29,hi8(%o1)", op
, plen
, -5);
3272 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3274 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3275 it but I have this situation with extremal optimizing options. */
3277 avr_asm_len ("adiw r26,%o1" CR_TAB
3278 "ld %0,X", op
, plen
, -2);
3280 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3281 && !reg_unused_after (insn
, XEXP (x
,0)))
3283 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3289 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3292 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
3296 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
3300 rtx base
= XEXP (src
, 0);
3301 int reg_dest
= true_regnum (dest
);
3302 int reg_base
= true_regnum (base
);
3303 /* "volatile" forces reading low byte first, even if less efficient,
3304 for correct operation with 16-bit I/O registers. */
3305 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3309 if (reg_dest
== reg_base
) /* R = (R) */
3310 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3312 "mov %A0,__tmp_reg__", op
, plen
, -3);
3314 if (reg_base
!= REG_X
)
3315 return avr_asm_len ("ld %A0,%1" CR_TAB
3316 "ldd %B0,%1+1", op
, plen
, -2);
3318 avr_asm_len ("ld %A0,X+" CR_TAB
3319 "ld %B0,X", op
, plen
, -2);
3321 if (!reg_unused_after (insn
, base
))
3322 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3326 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3328 int disp
= INTVAL (XEXP (base
, 1));
3329 int reg_base
= true_regnum (XEXP (base
, 0));
3331 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3333 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3334 fatal_insn ("incorrect insn:",insn
);
3336 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3337 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3338 "ldd %A0,Y+62" CR_TAB
3339 "ldd %B0,Y+63" CR_TAB
3340 "sbiw r28,%o1-62", op
, plen
, -4)
3342 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3343 "sbci r29,hi8(-%o1)" CR_TAB
3345 "ldd %B0,Y+1" CR_TAB
3346 "subi r28,lo8(%o1)" CR_TAB
3347 "sbci r29,hi8(%o1)", op
, plen
, -6);
3350 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3351 it but I have this situation with extremal
3352 optimization options. */
3354 if (reg_base
== REG_X
)
3355 return reg_base
== reg_dest
3356 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3357 "ld __tmp_reg__,X+" CR_TAB
3359 "mov %A0,__tmp_reg__", op
, plen
, -4)
3361 : avr_asm_len ("adiw r26,%o1" CR_TAB
3364 "sbiw r26,%o1+1", op
, plen
, -4);
3366 return reg_base
== reg_dest
3367 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3368 "ldd %B0,%B1" CR_TAB
3369 "mov %A0,__tmp_reg__", op
, plen
, -3)
3371 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3372 "ldd %B0,%B1", op
, plen
, -2);
3374 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3376 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3377 fatal_insn ("incorrect insn:", insn
);
3379 if (!mem_volatile_p
)
3380 return avr_asm_len ("ld %B0,%1" CR_TAB
3381 "ld %A0,%1", op
, plen
, -2);
3383 return REGNO (XEXP (base
, 0)) == REG_X
3384 ? avr_asm_len ("sbiw r26,2" CR_TAB
3387 "sbiw r26,1", op
, plen
, -4)
3389 : avr_asm_len ("sbiw %r1,2" CR_TAB
3391 "ldd %B0,%p1+1", op
, plen
, -3);
3393 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3395 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3396 fatal_insn ("incorrect insn:", insn
);
3398 return avr_asm_len ("ld %A0,%1" CR_TAB
3399 "ld %B0,%1", op
, plen
, -2);
3401 else if (CONSTANT_ADDRESS_P (base
))
3403 return optimize
> 0 && io_address_operand (base
, HImode
)
3404 ? avr_asm_len ("in %A0,%i1" CR_TAB
3405 "in %B0,%i1+1", op
, plen
, -2)
3407 : avr_asm_len ("lds %A0,%m1" CR_TAB
3408 "lds %B0,%m1+1", op
, plen
, -4);
3411 fatal_insn ("unknown move insn:",insn
);
3416 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3420 rtx base
= XEXP (src
, 0);
3421 int reg_dest
= true_regnum (dest
);
3422 int reg_base
= true_regnum (base
);
3430 if (reg_base
== REG_X
) /* (R26) */
3432 if (reg_dest
== REG_X
)
3433 /* "ld r26,-X" is undefined */
3434 return *l
=7, ("adiw r26,3" CR_TAB
3437 "ld __tmp_reg__,-X" CR_TAB
3440 "mov r27,__tmp_reg__");
3441 else if (reg_dest
== REG_X
- 2)
3442 return *l
=5, ("ld %A0,X+" CR_TAB
3444 "ld __tmp_reg__,X+" CR_TAB
3446 "mov %C0,__tmp_reg__");
3447 else if (reg_unused_after (insn
, base
))
3448 return *l
=4, ("ld %A0,X+" CR_TAB
3453 return *l
=5, ("ld %A0,X+" CR_TAB
3461 if (reg_dest
== reg_base
)
3462 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3463 "ldd %C0,%1+2" CR_TAB
3464 "ldd __tmp_reg__,%1+1" CR_TAB
3466 "mov %B0,__tmp_reg__");
3467 else if (reg_base
== reg_dest
+ 2)
3468 return *l
=5, ("ld %A0,%1" CR_TAB
3469 "ldd %B0,%1+1" CR_TAB
3470 "ldd __tmp_reg__,%1+2" CR_TAB
3471 "ldd %D0,%1+3" CR_TAB
3472 "mov %C0,__tmp_reg__");
3474 return *l
=4, ("ld %A0,%1" CR_TAB
3475 "ldd %B0,%1+1" CR_TAB
3476 "ldd %C0,%1+2" CR_TAB
3480 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3482 int disp
= INTVAL (XEXP (base
, 1));
3484 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3486 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3487 fatal_insn ("incorrect insn:",insn
);
3489 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3490 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3491 "ldd %A0,Y+60" CR_TAB
3492 "ldd %B0,Y+61" CR_TAB
3493 "ldd %C0,Y+62" CR_TAB
3494 "ldd %D0,Y+63" CR_TAB
3497 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3498 "sbci r29,hi8(-%o1)" CR_TAB
3500 "ldd %B0,Y+1" CR_TAB
3501 "ldd %C0,Y+2" CR_TAB
3502 "ldd %D0,Y+3" CR_TAB
3503 "subi r28,lo8(%o1)" CR_TAB
3504 "sbci r29,hi8(%o1)");
3507 reg_base
= true_regnum (XEXP (base
, 0));
3508 if (reg_base
== REG_X
)
3511 if (reg_dest
== REG_X
)
3514 /* "ld r26,-X" is undefined */
3515 return ("adiw r26,%o1+3" CR_TAB
3518 "ld __tmp_reg__,-X" CR_TAB
3521 "mov r27,__tmp_reg__");
3524 if (reg_dest
== REG_X
- 2)
3525 return ("adiw r26,%o1" CR_TAB
3528 "ld __tmp_reg__,X+" CR_TAB
3530 "mov r26,__tmp_reg__");
3532 return ("adiw r26,%o1" CR_TAB
3539 if (reg_dest
== reg_base
)
3540 return *l
=5, ("ldd %D0,%D1" CR_TAB
3541 "ldd %C0,%C1" CR_TAB
3542 "ldd __tmp_reg__,%B1" CR_TAB
3543 "ldd %A0,%A1" CR_TAB
3544 "mov %B0,__tmp_reg__");
3545 else if (reg_dest
== reg_base
- 2)
3546 return *l
=5, ("ldd %A0,%A1" CR_TAB
3547 "ldd %B0,%B1" CR_TAB
3548 "ldd __tmp_reg__,%C1" CR_TAB
3549 "ldd %D0,%D1" CR_TAB
3550 "mov %C0,__tmp_reg__");
3551 return *l
=4, ("ldd %A0,%A1" CR_TAB
3552 "ldd %B0,%B1" CR_TAB
3553 "ldd %C0,%C1" CR_TAB
3556 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3557 return *l
=4, ("ld %D0,%1" CR_TAB
3561 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3562 return *l
=4, ("ld %A0,%1" CR_TAB
3566 else if (CONSTANT_ADDRESS_P (base
))
3567 return *l
=8, ("lds %A0,%m1" CR_TAB
3568 "lds %B0,%m1+1" CR_TAB
3569 "lds %C0,%m1+2" CR_TAB
3572 fatal_insn ("unknown move insn:",insn
);
3577 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3581 rtx base
= XEXP (dest
, 0);
3582 int reg_base
= true_regnum (base
);
3583 int reg_src
= true_regnum (src
);
3589 if (CONSTANT_ADDRESS_P (base
))
3590 return *l
=8,("sts %m0,%A1" CR_TAB
3591 "sts %m0+1,%B1" CR_TAB
3592 "sts %m0+2,%C1" CR_TAB
3594 if (reg_base
> 0) /* (r) */
3596 if (reg_base
== REG_X
) /* (R26) */
3598 if (reg_src
== REG_X
)
3600 /* "st X+,r26" is undefined */
3601 if (reg_unused_after (insn
, base
))
3602 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3605 "st X+,__tmp_reg__" CR_TAB
3609 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3612 "st X+,__tmp_reg__" CR_TAB
3617 else if (reg_base
== reg_src
+ 2)
3619 if (reg_unused_after (insn
, base
))
3620 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3621 "mov __tmp_reg__,%D1" CR_TAB
3624 "st %0+,__zero_reg__" CR_TAB
3625 "st %0,__tmp_reg__" CR_TAB
3626 "clr __zero_reg__");
3628 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3629 "mov __tmp_reg__,%D1" CR_TAB
3632 "st %0+,__zero_reg__" CR_TAB
3633 "st %0,__tmp_reg__" CR_TAB
3634 "clr __zero_reg__" CR_TAB
3637 return *l
=5, ("st %0+,%A1" CR_TAB
3644 return *l
=4, ("st %0,%A1" CR_TAB
3645 "std %0+1,%B1" CR_TAB
3646 "std %0+2,%C1" CR_TAB
3649 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3651 int disp
= INTVAL (XEXP (base
, 1));
3652 reg_base
= REGNO (XEXP (base
, 0));
3653 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3655 if (reg_base
!= REG_Y
)
3656 fatal_insn ("incorrect insn:",insn
);
3658 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3659 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3660 "std Y+60,%A1" CR_TAB
3661 "std Y+61,%B1" CR_TAB
3662 "std Y+62,%C1" CR_TAB
3663 "std Y+63,%D1" CR_TAB
3666 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3667 "sbci r29,hi8(-%o0)" CR_TAB
3669 "std Y+1,%B1" CR_TAB
3670 "std Y+2,%C1" CR_TAB
3671 "std Y+3,%D1" CR_TAB
3672 "subi r28,lo8(%o0)" CR_TAB
3673 "sbci r29,hi8(%o0)");
3675 if (reg_base
== REG_X
)
3678 if (reg_src
== REG_X
)
3681 return ("mov __tmp_reg__,r26" CR_TAB
3682 "mov __zero_reg__,r27" CR_TAB
3683 "adiw r26,%o0" CR_TAB
3684 "st X+,__tmp_reg__" CR_TAB
3685 "st X+,__zero_reg__" CR_TAB
3688 "clr __zero_reg__" CR_TAB
3691 else if (reg_src
== REG_X
- 2)
3694 return ("mov __tmp_reg__,r26" CR_TAB
3695 "mov __zero_reg__,r27" CR_TAB
3696 "adiw r26,%o0" CR_TAB
3699 "st X+,__tmp_reg__" CR_TAB
3700 "st X,__zero_reg__" CR_TAB
3701 "clr __zero_reg__" CR_TAB
3705 return ("adiw r26,%o0" CR_TAB
3712 return *l
=4, ("std %A0,%A1" CR_TAB
3713 "std %B0,%B1" CR_TAB
3714 "std %C0,%C1" CR_TAB
3717 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3718 return *l
=4, ("st %0,%D1" CR_TAB
3722 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3723 return *l
=4, ("st %0,%A1" CR_TAB
3727 fatal_insn ("unknown move insn:",insn
);
3732 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3735 rtx dest
= operands
[0];
3736 rtx src
= operands
[1];
3739 if (avr_mem_flash_p (src
)
3740 || avr_mem_flash_p (dest
))
3742 return avr_out_lpm (insn
, operands
, real_l
);
3748 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
3751 if (REG_P (src
)) /* mov r,r */
3753 if (true_regnum (dest
) > true_regnum (src
))
3758 return ("movw %C0,%C1" CR_TAB
3762 return ("mov %D0,%D1" CR_TAB
3763 "mov %C0,%C1" CR_TAB
3764 "mov %B0,%B1" CR_TAB
3772 return ("movw %A0,%A1" CR_TAB
3776 return ("mov %A0,%A1" CR_TAB
3777 "mov %B0,%B1" CR_TAB
3778 "mov %C0,%C1" CR_TAB
3782 else if (CONSTANT_P (src
))
3784 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3786 else if (MEM_P (src
))
3787 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3789 else if (MEM_P (dest
))
3793 if (src
== CONST0_RTX (GET_MODE (dest
)))
3794 operands
[1] = zero_reg_rtx
;
3796 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3799 output_asm_insn (templ
, operands
);
3804 fatal_insn ("invalid insn:", insn
);
3809 /* Handle loads of 24-bit types from memory to register. */
3812 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3816 rtx base
= XEXP (src
, 0);
3817 int reg_dest
= true_regnum (dest
);
3818 int reg_base
= true_regnum (base
);
3822 if (reg_base
== REG_X
) /* (R26) */
3824 if (reg_dest
== REG_X
)
3825 /* "ld r26,-X" is undefined */
3826 return avr_asm_len ("adiw r26,2" CR_TAB
3828 "ld __tmp_reg__,-X" CR_TAB
3831 "mov r27,__tmp_reg__", op
, plen
, -6);
3834 avr_asm_len ("ld %A0,X+" CR_TAB
3836 "ld %C0,X", op
, plen
, -3);
3838 if (reg_dest
!= REG_X
- 2
3839 && !reg_unused_after (insn
, base
))
3841 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3847 else /* reg_base != REG_X */
3849 if (reg_dest
== reg_base
)
3850 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3851 "ldd __tmp_reg__,%1+1" CR_TAB
3853 "mov %B0,__tmp_reg__", op
, plen
, -4);
3855 return avr_asm_len ("ld %A0,%1" CR_TAB
3856 "ldd %B0,%1+1" CR_TAB
3857 "ldd %C0,%1+2", op
, plen
, -3);
3860 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3862 int disp
= INTVAL (XEXP (base
, 1));
3864 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3866 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3867 fatal_insn ("incorrect insn:",insn
);
3869 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3870 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3871 "ldd %A0,Y+61" CR_TAB
3872 "ldd %B0,Y+62" CR_TAB
3873 "ldd %C0,Y+63" CR_TAB
3874 "sbiw r28,%o1-61", op
, plen
, -5);
3876 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3877 "sbci r29,hi8(-%o1)" CR_TAB
3879 "ldd %B0,Y+1" CR_TAB
3880 "ldd %C0,Y+2" CR_TAB
3881 "subi r28,lo8(%o1)" CR_TAB
3882 "sbci r29,hi8(%o1)", op
, plen
, -7);
3885 reg_base
= true_regnum (XEXP (base
, 0));
3886 if (reg_base
== REG_X
)
3889 if (reg_dest
== REG_X
)
3891 /* "ld r26,-X" is undefined */
3892 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3894 "ld __tmp_reg__,-X" CR_TAB
3897 "mov r27,__tmp_reg__", op
, plen
, -6);
3900 avr_asm_len ("adiw r26,%o1" CR_TAB
3903 "ld %C0,X", op
, plen
, -4);
3905 if (reg_dest
!= REG_W
3906 && !reg_unused_after (insn
, XEXP (base
, 0)))
3907 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3912 if (reg_dest
== reg_base
)
3913 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3914 "ldd __tmp_reg__,%B1" CR_TAB
3915 "ldd %A0,%A1" CR_TAB
3916 "mov %B0,__tmp_reg__", op
, plen
, -4);
3918 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3919 "ldd %B0,%B1" CR_TAB
3920 "ldd %C0,%C1", op
, plen
, -3);
3922 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3923 return avr_asm_len ("ld %C0,%1" CR_TAB
3925 "ld %A0,%1", op
, plen
, -3);
3926 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3927 return avr_asm_len ("ld %A0,%1" CR_TAB
3929 "ld %C0,%1", op
, plen
, -3);
3931 else if (CONSTANT_ADDRESS_P (base
))
3932 return avr_asm_len ("lds %A0,%m1" CR_TAB
3933 "lds %B0,%m1+1" CR_TAB
3934 "lds %C0,%m1+2", op
, plen
, -6);
3936 fatal_insn ("unknown move insn:",insn
);
3940 /* Handle store of 24-bit type from register or zero to memory. */
3943 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3947 rtx base
= XEXP (dest
, 0);
3948 int reg_base
= true_regnum (base
);
3950 if (CONSTANT_ADDRESS_P (base
))
3951 return avr_asm_len ("sts %m0,%A1" CR_TAB
3952 "sts %m0+1,%B1" CR_TAB
3953 "sts %m0+2,%C1", op
, plen
, -6);
3955 if (reg_base
> 0) /* (r) */
3957 if (reg_base
== REG_X
) /* (R26) */
3959 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3961 avr_asm_len ("st %0+,%A1" CR_TAB
3963 "st %0,%C1", op
, plen
, -3);
3965 if (!reg_unused_after (insn
, base
))
3966 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3971 return avr_asm_len ("st %0,%A1" CR_TAB
3972 "std %0+1,%B1" CR_TAB
3973 "std %0+2,%C1", op
, plen
, -3);
3975 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3977 int disp
= INTVAL (XEXP (base
, 1));
3978 reg_base
= REGNO (XEXP (base
, 0));
3980 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3982 if (reg_base
!= REG_Y
)
3983 fatal_insn ("incorrect insn:",insn
);
3985 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3986 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3987 "std Y+61,%A1" CR_TAB
3988 "std Y+62,%B1" CR_TAB
3989 "std Y+63,%C1" CR_TAB
3990 "sbiw r28,%o0-60", op
, plen
, -5);
3992 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3993 "sbci r29,hi8(-%o0)" CR_TAB
3995 "std Y+1,%B1" CR_TAB
3996 "std Y+2,%C1" CR_TAB
3997 "subi r28,lo8(%o0)" CR_TAB
3998 "sbci r29,hi8(%o0)", op
, plen
, -7);
4000 if (reg_base
== REG_X
)
4003 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
4005 avr_asm_len ("adiw r26,%o0" CR_TAB
4008 "st X,%C1", op
, plen
, -4);
4010 if (!reg_unused_after (insn
, XEXP (base
, 0)))
4011 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
4016 return avr_asm_len ("std %A0,%A1" CR_TAB
4017 "std %B0,%B1" CR_TAB
4018 "std %C0,%C1", op
, plen
, -3);
4020 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4021 return avr_asm_len ("st %0,%C1" CR_TAB
4023 "st %0,%A1", op
, plen
, -3);
4024 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4025 return avr_asm_len ("st %0,%A1" CR_TAB
4027 "st %0,%C1", op
, plen
, -3);
4029 fatal_insn ("unknown move insn:",insn
);
4034 /* Move around 24-bit stuff. */
4037 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
4042 if (avr_mem_flash_p (src
)
4043 || avr_mem_flash_p (dest
))
4045 return avr_out_lpm (insn
, op
, plen
);
4048 if (register_operand (dest
, VOIDmode
))
4050 if (register_operand (src
, VOIDmode
)) /* mov r,r */
4052 if (true_regnum (dest
) > true_regnum (src
))
4054 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
4057 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
4059 return avr_asm_len ("mov %B0,%B1" CR_TAB
4060 "mov %A0,%A1", op
, plen
, 2);
4065 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
4067 avr_asm_len ("mov %A0,%A1" CR_TAB
4068 "mov %B0,%B1", op
, plen
, -2);
4070 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
4073 else if (CONSTANT_P (src
))
4075 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
4077 else if (MEM_P (src
))
4078 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
4080 else if (MEM_P (dest
))
4085 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
4087 return avr_out_store_psi (insn
, xop
, plen
);
4090 fatal_insn ("invalid insn:", insn
);
4096 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
4100 rtx x
= XEXP (dest
, 0);
4102 if (CONSTANT_ADDRESS_P (x
))
4104 return optimize
> 0 && io_address_operand (x
, QImode
)
4105 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
4106 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
4108 else if (GET_CODE (x
) == PLUS
4109 && REG_P (XEXP (x
, 0))
4110 && CONST_INT_P (XEXP (x
, 1)))
4112 /* memory access by reg+disp */
4114 int disp
= INTVAL (XEXP (x
, 1));
4116 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
4118 if (REGNO (XEXP (x
, 0)) != REG_Y
)
4119 fatal_insn ("incorrect insn:",insn
);
4121 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4122 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4123 "std Y+63,%1" CR_TAB
4124 "sbiw r28,%o0-63", op
, plen
, -3);
4126 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4127 "sbci r29,hi8(-%o0)" CR_TAB
4129 "subi r28,lo8(%o0)" CR_TAB
4130 "sbci r29,hi8(%o0)", op
, plen
, -5);
4132 else if (REGNO (XEXP (x
,0)) == REG_X
)
4134 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
4136 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4137 "adiw r26,%o0" CR_TAB
4138 "st X,__tmp_reg__", op
, plen
, -3);
4142 avr_asm_len ("adiw r26,%o0" CR_TAB
4143 "st X,%1", op
, plen
, -2);
4146 if (!reg_unused_after (insn
, XEXP (x
,0)))
4147 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
4152 return avr_asm_len ("std %0,%1", op
, plen
, -1);
4155 return avr_asm_len ("st %0,%1", op
, plen
, -1);
4159 /* Helper for the next function for XMEGA. It does the same
4160 but with low byte first. */
4163 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
4167 rtx base
= XEXP (dest
, 0);
4168 int reg_base
= true_regnum (base
);
4169 int reg_src
= true_regnum (src
);
4171 /* "volatile" forces writing low byte first, even if less efficient,
4172 for correct operation with 16-bit I/O registers like SP. */
4173 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
4175 if (CONSTANT_ADDRESS_P (base
))
4176 return optimize
> 0 && io_address_operand (base
, HImode
)
4177 ? avr_asm_len ("out %i0,%A1" CR_TAB
4178 "out %i0+1,%B1", op
, plen
, -2)
4180 : avr_asm_len ("sts %m0,%A1" CR_TAB
4181 "sts %m0+1,%B1", op
, plen
, -4);
4185 if (reg_base
!= REG_X
)
4186 return avr_asm_len ("st %0,%A1" CR_TAB
4187 "std %0+1,%B1", op
, plen
, -2);
4189 if (reg_src
== REG_X
)
4190 /* "st X+,r26" and "st -X,r26" are undefined. */
4191 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4194 "st X,__tmp_reg__", op
, plen
, -4);
4196 avr_asm_len ("st X+,%A1" CR_TAB
4197 "st X,%B1", op
, plen
, -2);
4199 return reg_unused_after (insn
, base
)
4201 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4203 else if (GET_CODE (base
) == PLUS
)
4205 int disp
= INTVAL (XEXP (base
, 1));
4206 reg_base
= REGNO (XEXP (base
, 0));
4207 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4209 if (reg_base
!= REG_Y
)
4210 fatal_insn ("incorrect insn:",insn
);
4212 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4213 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4214 "std Y+62,%A1" CR_TAB
4215 "std Y+63,%B1" CR_TAB
4216 "sbiw r28,%o0-62", op
, plen
, -4)
4218 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4219 "sbci r29,hi8(-%o0)" CR_TAB
4221 "std Y+1,%B1" CR_TAB
4222 "subi r28,lo8(%o0)" CR_TAB
4223 "sbci r29,hi8(%o0)", op
, plen
, -6);
4226 if (reg_base
!= REG_X
)
4227 return avr_asm_len ("std %A0,%A1" CR_TAB
4228 "std %B0,%B1", op
, plen
, -2);
4230 return reg_src
== REG_X
4231 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4232 "mov __zero_reg__,r27" CR_TAB
4233 "adiw r26,%o0" CR_TAB
4234 "st X+,__tmp_reg__" CR_TAB
4235 "st X,__zero_reg__" CR_TAB
4236 "clr __zero_reg__" CR_TAB
4237 "sbiw r26,%o0+1", op
, plen
, -7)
4239 : avr_asm_len ("adiw r26,%o0" CR_TAB
4242 "sbiw r26,%o0+1", op
, plen
, -4);
4244 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4246 if (!mem_volatile_p
)
4247 return avr_asm_len ("st %0,%B1" CR_TAB
4248 "st %0,%A1", op
, plen
, -2);
4250 return REGNO (XEXP (base
, 0)) == REG_X
4251 ? avr_asm_len ("sbiw r26,2" CR_TAB
4254 "sbiw r26,1", op
, plen
, -4)
4256 : avr_asm_len ("sbiw %r0,2" CR_TAB
4258 "std %p0+1,%B1", op
, plen
, -3);
4260 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4262 return avr_asm_len ("st %0,%A1" CR_TAB
4263 "st %0,%B1", op
, plen
, -2);
4266 fatal_insn ("unknown move insn:",insn
);
4272 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
4276 rtx base
= XEXP (dest
, 0);
4277 int reg_base
= true_regnum (base
);
4278 int reg_src
= true_regnum (src
);
4281 /* "volatile" forces writing high-byte first (no-xmega) resp.
4282 low-byte first (xmega) even if less efficient, for correct
4283 operation with 16-bit I/O registers like. */
4286 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4288 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4290 if (CONSTANT_ADDRESS_P (base
))
4291 return optimize
> 0 && io_address_operand (base
, HImode
)
4292 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4293 "out %i0,%A1", op
, plen
, -2)
4295 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4296 "sts %m0,%A1", op
, plen
, -4);
4300 if (reg_base
!= REG_X
)
4301 return avr_asm_len ("std %0+1,%B1" CR_TAB
4302 "st %0,%A1", op
, plen
, -2);
4304 if (reg_src
== REG_X
)
4305 /* "st X+,r26" and "st -X,r26" are undefined. */
4306 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4307 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4310 "st X,__tmp_reg__", op
, plen
, -4)
4312 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4314 "st X,__tmp_reg__" CR_TAB
4316 "st X,r26", op
, plen
, -5);
4318 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4319 ? avr_asm_len ("st X+,%A1" CR_TAB
4320 "st X,%B1", op
, plen
, -2)
4321 : avr_asm_len ("adiw r26,1" CR_TAB
4323 "st -X,%A1", op
, plen
, -3);
4325 else if (GET_CODE (base
) == PLUS
)
4327 int disp
= INTVAL (XEXP (base
, 1));
4328 reg_base
= REGNO (XEXP (base
, 0));
4329 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4331 if (reg_base
!= REG_Y
)
4332 fatal_insn ("incorrect insn:",insn
);
4334 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4335 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4336 "std Y+63,%B1" CR_TAB
4337 "std Y+62,%A1" CR_TAB
4338 "sbiw r28,%o0-62", op
, plen
, -4)
4340 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4341 "sbci r29,hi8(-%o0)" CR_TAB
4342 "std Y+1,%B1" CR_TAB
4344 "subi r28,lo8(%o0)" CR_TAB
4345 "sbci r29,hi8(%o0)", op
, plen
, -6);
4348 if (reg_base
!= REG_X
)
4349 return avr_asm_len ("std %B0,%B1" CR_TAB
4350 "std %A0,%A1", op
, plen
, -2);
4352 return reg_src
== REG_X
4353 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4354 "mov __zero_reg__,r27" CR_TAB
4355 "adiw r26,%o0+1" CR_TAB
4356 "st X,__zero_reg__" CR_TAB
4357 "st -X,__tmp_reg__" CR_TAB
4358 "clr __zero_reg__" CR_TAB
4359 "sbiw r26,%o0", op
, plen
, -7)
4361 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4364 "sbiw r26,%o0", op
, plen
, -4);
4366 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4368 return avr_asm_len ("st %0,%B1" CR_TAB
4369 "st %0,%A1", op
, plen
, -2);
4371 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4373 if (!mem_volatile_p
)
4374 return avr_asm_len ("st %0,%A1" CR_TAB
4375 "st %0,%B1", op
, plen
, -2);
4377 return REGNO (XEXP (base
, 0)) == REG_X
4378 ? avr_asm_len ("adiw r26,1" CR_TAB
4381 "adiw r26,2", op
, plen
, -4)
4383 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4385 "adiw %r0,2", op
, plen
, -3);
4387 fatal_insn ("unknown move insn:",insn
);
4391 /* Return 1 if frame pointer for current function required. */
4394 avr_frame_pointer_required_p (void)
4396 return (cfun
->calls_alloca
4397 || cfun
->calls_setjmp
4398 || cfun
->has_nonlocal_label
4399 || crtl
->args
.info
.nregs
== 0
4400 || get_frame_size () > 0);
4403 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4406 compare_condition (rtx insn
)
4408 rtx next
= next_real_insn (insn
);
4410 if (next
&& JUMP_P (next
))
4412 rtx pat
= PATTERN (next
);
4413 rtx src
= SET_SRC (pat
);
4415 if (IF_THEN_ELSE
== GET_CODE (src
))
4416 return GET_CODE (XEXP (src
, 0));
4423 /* Returns true iff INSN is a tst insn that only tests the sign. */
4426 compare_sign_p (rtx insn
)
4428 RTX_CODE cond
= compare_condition (insn
);
4429 return (cond
== GE
|| cond
== LT
);
4433 /* Returns true iff the next insn is a JUMP_INSN with a condition
4434 that needs to be swapped (GT, GTU, LE, LEU). */
4437 compare_diff_p (rtx insn
)
4439 RTX_CODE cond
= compare_condition (insn
);
4440 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4443 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4446 compare_eq_p (rtx insn
)
4448 RTX_CODE cond
= compare_condition (insn
);
4449 return (cond
== EQ
|| cond
== NE
);
4453 /* Output compare instruction
4455 compare (XOP[0], XOP[1])
4457 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
4458 XOP[2] is an 8-bit scratch register as needed.
4460 PLEN == NULL: Output instructions.
4461 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4462 Don't output anything. */
4465 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4467 /* Register to compare and value to compare against. */
4471 /* MODE of the comparison. */
4472 enum machine_mode mode
;
4474 /* Number of bytes to operate on. */
4475 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
4477 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4478 int clobber_val
= -1;
4480 /* Map fixed mode operands to integer operands with the same binary
4481 representation. They are easier to handle in the remainder. */
4483 if (CONST_FIXED_P (xval
))
4485 xreg
= avr_to_int_mode (xop
[0]);
4486 xval
= avr_to_int_mode (xop
[1]);
4489 mode
= GET_MODE (xreg
);
4491 gcc_assert (REG_P (xreg
));
4492 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4493 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4498 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4499 against 0 by ORing the bytes. This is one instruction shorter.
4500 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4501 and therefore don't use this. */
4503 if (!test_hard_reg_class (LD_REGS
, xreg
)
4504 && compare_eq_p (insn
)
4505 && reg_unused_after (insn
, xreg
))
4507 if (xval
== const1_rtx
)
4509 avr_asm_len ("dec %A0" CR_TAB
4510 "or %A0,%B0", xop
, plen
, 2);
4513 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4516 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4520 else if (xval
== constm1_rtx
)
4523 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4526 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4528 return avr_asm_len ("and %A0,%B0" CR_TAB
4529 "com %A0", xop
, plen
, 2);
4533 for (i
= 0; i
< n_bytes
; i
++)
4535 /* We compare byte-wise. */
4536 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4537 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4539 /* 8-bit value to compare with this byte. */
4540 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4542 /* Registers R16..R31 can operate with immediate. */
4543 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4546 xop
[1] = gen_int_mode (val8
, QImode
);
4548 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4551 && test_hard_reg_class (ADDW_REGS
, reg8
))
4553 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4555 if (IN_RANGE (val16
, 0, 63)
4557 || reg_unused_after (insn
, xreg
)))
4559 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4565 && IN_RANGE (val16
, -63, -1)
4566 && compare_eq_p (insn
)
4567 && reg_unused_after (insn
, xreg
))
4569 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4573 /* Comparing against 0 is easy. */
4578 ? "cp %0,__zero_reg__"
4579 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4583 /* Upper registers can compare and subtract-with-carry immediates.
4584 Notice that compare instructions do the same as respective subtract
4585 instruction; the only difference is that comparisons don't write
4586 the result back to the target register. */
4592 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4595 else if (reg_unused_after (insn
, xreg
))
4597 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4602 /* Must load the value into the scratch register. */
4604 gcc_assert (REG_P (xop
[2]));
4606 if (clobber_val
!= (int) val8
)
4607 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4608 clobber_val
= (int) val8
;
4612 : "cpc %0,%2", xop
, plen
, 1);
4619 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4622 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4626 xop
[0] = gen_rtx_REG (DImode
, 18);
4630 return avr_out_compare (insn
, xop
, plen
);
4633 /* Output test instruction for HImode. */
4636 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4638 if (compare_sign_p (insn
))
4640 avr_asm_len ("tst %B0", op
, plen
, -1);
4642 else if (reg_unused_after (insn
, op
[0])
4643 && compare_eq_p (insn
))
4645 /* Faster than sbiw if we can clobber the operand. */
4646 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4650 avr_out_compare (insn
, op
, plen
);
4657 /* Output test instruction for PSImode. */
4660 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4662 if (compare_sign_p (insn
))
4664 avr_asm_len ("tst %C0", op
, plen
, -1);
4666 else if (reg_unused_after (insn
, op
[0])
4667 && compare_eq_p (insn
))
4669 /* Faster than sbiw if we can clobber the operand. */
4670 avr_asm_len ("or %A0,%B0" CR_TAB
4671 "or %A0,%C0", op
, plen
, -2);
4675 avr_out_compare (insn
, op
, plen
);
4682 /* Output test instruction for SImode. */
4685 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4687 if (compare_sign_p (insn
))
4689 avr_asm_len ("tst %D0", op
, plen
, -1);
4691 else if (reg_unused_after (insn
, op
[0])
4692 && compare_eq_p (insn
))
4694 /* Faster than sbiw if we can clobber the operand. */
4695 avr_asm_len ("or %A0,%B0" CR_TAB
4697 "or %A0,%D0", op
, plen
, -3);
4701 avr_out_compare (insn
, op
, plen
);
4708 /* Generate asm equivalent for various shifts. This only handles cases
4709 that are not already carefully hand-optimized in ?sh??i3_out.
4711 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4712 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4713 OPERANDS[3] is a QImode scratch register from LD regs if
4714 available and SCRATCH, otherwise (no scratch available)
4716 TEMPL is an assembler template that shifts by one position.
4717 T_LEN is the length of this template. */
4720 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4721 int *plen
, int t_len
)
4723 bool second_label
= true;
4724 bool saved_in_tmp
= false;
4725 bool use_zero_reg
= false;
4728 op
[0] = operands
[0];
4729 op
[1] = operands
[1];
4730 op
[2] = operands
[2];
4731 op
[3] = operands
[3];
4736 if (CONST_INT_P (operands
[2]))
4738 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4739 && REG_P (operands
[3]));
4740 int count
= INTVAL (operands
[2]);
4741 int max_len
= 10; /* If larger than this, always use a loop. */
4746 if (count
< 8 && !scratch
)
4747 use_zero_reg
= true;
4750 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4752 if (t_len
* count
<= max_len
)
4754 /* Output shifts inline with no loop - faster. */
4757 avr_asm_len (templ
, op
, plen
, t_len
);
4764 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4766 else if (use_zero_reg
)
4768 /* Hack to save one word: use __zero_reg__ as loop counter.
4769 Set one bit, then shift in a loop until it is 0 again. */
4771 op
[3] = zero_reg_rtx
;
4773 avr_asm_len ("set" CR_TAB
4774 "bld %3,%2-1", op
, plen
, 2);
4778 /* No scratch register available, use one from LD_REGS (saved in
4779 __tmp_reg__) that doesn't overlap with registers to shift. */
4781 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4782 op
[4] = tmp_reg_rtx
;
4783 saved_in_tmp
= true;
4785 avr_asm_len ("mov %4,%3" CR_TAB
4786 "ldi %3,%2", op
, plen
, 2);
4789 second_label
= false;
4791 else if (MEM_P (op
[2]))
4795 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4798 out_movqi_r_mr (insn
, op_mov
, plen
);
4800 else if (register_operand (op
[2], QImode
))
4804 if (!reg_unused_after (insn
, op
[2])
4805 || reg_overlap_mentioned_p (op
[0], op
[2]))
4807 op
[3] = tmp_reg_rtx
;
4808 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4812 fatal_insn ("bad shift insn:", insn
);
4815 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4817 avr_asm_len ("1:", op
, plen
, 0);
4818 avr_asm_len (templ
, op
, plen
, t_len
);
4821 avr_asm_len ("2:", op
, plen
, 0);
4823 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4824 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4827 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4831 /* 8bit shift left ((char)x << i) */
4834 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4836 if (GET_CODE (operands
[2]) == CONST_INT
)
4843 switch (INTVAL (operands
[2]))
4846 if (INTVAL (operands
[2]) < 8)
4858 return ("lsl %0" CR_TAB
4863 return ("lsl %0" CR_TAB
4868 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4871 return ("swap %0" CR_TAB
4875 return ("lsl %0" CR_TAB
4881 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4884 return ("swap %0" CR_TAB
4889 return ("lsl %0" CR_TAB
4896 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4899 return ("swap %0" CR_TAB
4905 return ("lsl %0" CR_TAB
4914 return ("ror %0" CR_TAB
4919 else if (CONSTANT_P (operands
[2]))
4920 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4922 out_shift_with_cnt ("lsl %0",
4923 insn
, operands
, len
, 1);
4928 /* 16bit shift left ((short)x << i) */
4931 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4933 if (GET_CODE (operands
[2]) == CONST_INT
)
4935 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4936 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4943 switch (INTVAL (operands
[2]))
4946 if (INTVAL (operands
[2]) < 16)
4950 return ("clr %B0" CR_TAB
4954 if (optimize_size
&& scratch
)
4959 return ("swap %A0" CR_TAB
4961 "andi %B0,0xf0" CR_TAB
4962 "eor %B0,%A0" CR_TAB
4963 "andi %A0,0xf0" CR_TAB
4969 return ("swap %A0" CR_TAB
4971 "ldi %3,0xf0" CR_TAB
4973 "eor %B0,%A0" CR_TAB
4977 break; /* optimize_size ? 6 : 8 */
4981 break; /* scratch ? 5 : 6 */
4985 return ("lsl %A0" CR_TAB
4989 "andi %B0,0xf0" CR_TAB
4990 "eor %B0,%A0" CR_TAB
4991 "andi %A0,0xf0" CR_TAB
4997 return ("lsl %A0" CR_TAB
5001 "ldi %3,0xf0" CR_TAB
5003 "eor %B0,%A0" CR_TAB
5011 break; /* scratch ? 5 : 6 */
5013 return ("clr __tmp_reg__" CR_TAB
5016 "ror __tmp_reg__" CR_TAB
5019 "ror __tmp_reg__" CR_TAB
5020 "mov %B0,%A0" CR_TAB
5021 "mov %A0,__tmp_reg__");
5025 return ("lsr %B0" CR_TAB
5026 "mov %B0,%A0" CR_TAB
5032 return *len
= 2, ("mov %B0,%A1" CR_TAB
5037 return ("mov %B0,%A0" CR_TAB
5043 return ("mov %B0,%A0" CR_TAB
5050 return ("mov %B0,%A0" CR_TAB
5060 return ("mov %B0,%A0" CR_TAB
5068 return ("mov %B0,%A0" CR_TAB
5071 "ldi %3,0xf0" CR_TAB
5075 return ("mov %B0,%A0" CR_TAB
5086 return ("mov %B0,%A0" CR_TAB
5092 if (AVR_HAVE_MUL
&& scratch
)
5095 return ("ldi %3,0x20" CR_TAB
5099 "clr __zero_reg__");
5101 if (optimize_size
&& scratch
)
5106 return ("mov %B0,%A0" CR_TAB
5110 "ldi %3,0xe0" CR_TAB
5116 return ("set" CR_TAB
5121 "clr __zero_reg__");
5124 return ("mov %B0,%A0" CR_TAB
5133 if (AVR_HAVE_MUL
&& ldi_ok
)
5136 return ("ldi %B0,0x40" CR_TAB
5137 "mul %A0,%B0" CR_TAB
5140 "clr __zero_reg__");
5142 if (AVR_HAVE_MUL
&& scratch
)
5145 return ("ldi %3,0x40" CR_TAB
5149 "clr __zero_reg__");
5151 if (optimize_size
&& ldi_ok
)
5154 return ("mov %B0,%A0" CR_TAB
5155 "ldi %A0,6" "\n1:\t"
5160 if (optimize_size
&& scratch
)
5163 return ("clr %B0" CR_TAB
5172 return ("clr %B0" CR_TAB
5179 out_shift_with_cnt ("lsl %A0" CR_TAB
5180 "rol %B0", insn
, operands
, len
, 2);
5185 /* 24-bit shift left */
5188 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
5193 if (CONST_INT_P (op
[2]))
5195 switch (INTVAL (op
[2]))
5198 if (INTVAL (op
[2]) < 24)
5201 return avr_asm_len ("clr %A0" CR_TAB
5203 "clr %C0", op
, plen
, 3);
5207 int reg0
= REGNO (op
[0]);
5208 int reg1
= REGNO (op
[1]);
5211 return avr_asm_len ("mov %C0,%B1" CR_TAB
5212 "mov %B0,%A1" CR_TAB
5213 "clr %A0", op
, plen
, 3);
5215 return avr_asm_len ("clr %A0" CR_TAB
5216 "mov %B0,%A1" CR_TAB
5217 "mov %C0,%B1", op
, plen
, 3);
5222 int reg0
= REGNO (op
[0]);
5223 int reg1
= REGNO (op
[1]);
5225 if (reg0
+ 2 != reg1
)
5226 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
5228 return avr_asm_len ("clr %B0" CR_TAB
5229 "clr %A0", op
, plen
, 2);
5233 return avr_asm_len ("clr %C0" CR_TAB
5237 "clr %A0", op
, plen
, 5);
5241 out_shift_with_cnt ("lsl %A0" CR_TAB
5243 "rol %C0", insn
, op
, plen
, 3);
5248 /* 32bit shift left ((long)x << i) */
5251 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
5253 if (GET_CODE (operands
[2]) == CONST_INT
)
5261 switch (INTVAL (operands
[2]))
5264 if (INTVAL (operands
[2]) < 32)
5268 return *len
= 3, ("clr %D0" CR_TAB
5272 return ("clr %D0" CR_TAB
5279 int reg0
= true_regnum (operands
[0]);
5280 int reg1
= true_regnum (operands
[1]);
5283 return ("mov %D0,%C1" CR_TAB
5284 "mov %C0,%B1" CR_TAB
5285 "mov %B0,%A1" CR_TAB
5288 return ("clr %A0" CR_TAB
5289 "mov %B0,%A1" CR_TAB
5290 "mov %C0,%B1" CR_TAB
5296 int reg0
= true_regnum (operands
[0]);
5297 int reg1
= true_regnum (operands
[1]);
5298 if (reg0
+ 2 == reg1
)
5299 return *len
= 2, ("clr %B0" CR_TAB
5302 return *len
= 3, ("movw %C0,%A1" CR_TAB
5306 return *len
= 4, ("mov %C0,%A1" CR_TAB
5307 "mov %D0,%B1" CR_TAB
5314 return ("mov %D0,%A1" CR_TAB
5321 return ("clr %D0" CR_TAB
5330 out_shift_with_cnt ("lsl %A0" CR_TAB
5333 "rol %D0", insn
, operands
, len
, 4);
5337 /* 8bit arithmetic shift right ((signed char)x >> i) */
5340 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
5342 if (GET_CODE (operands
[2]) == CONST_INT
)
5349 switch (INTVAL (operands
[2]))
5357 return ("asr %0" CR_TAB
5362 return ("asr %0" CR_TAB
5368 return ("asr %0" CR_TAB
5375 return ("asr %0" CR_TAB
5383 return ("bst %0,6" CR_TAB
5389 if (INTVAL (operands
[2]) < 8)
5396 return ("lsl %0" CR_TAB
5400 else if (CONSTANT_P (operands
[2]))
5401 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5403 out_shift_with_cnt ("asr %0",
5404 insn
, operands
, len
, 1);
5409 /* 16bit arithmetic shift right ((signed short)x >> i) */
5412 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
5414 if (GET_CODE (operands
[2]) == CONST_INT
)
5416 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5417 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5424 switch (INTVAL (operands
[2]))
5428 /* XXX try to optimize this too? */
5433 break; /* scratch ? 5 : 6 */
5435 return ("mov __tmp_reg__,%A0" CR_TAB
5436 "mov %A0,%B0" CR_TAB
5437 "lsl __tmp_reg__" CR_TAB
5439 "sbc %B0,%B0" CR_TAB
5440 "lsl __tmp_reg__" CR_TAB
5446 return ("lsl %A0" CR_TAB
5447 "mov %A0,%B0" CR_TAB
5453 int reg0
= true_regnum (operands
[0]);
5454 int reg1
= true_regnum (operands
[1]);
5457 return *len
= 3, ("mov %A0,%B0" CR_TAB
5461 return *len
= 4, ("mov %A0,%B1" CR_TAB
5469 return ("mov %A0,%B0" CR_TAB
5471 "sbc %B0,%B0" CR_TAB
5476 return ("mov %A0,%B0" CR_TAB
5478 "sbc %B0,%B0" CR_TAB
5483 if (AVR_HAVE_MUL
&& ldi_ok
)
5486 return ("ldi %A0,0x20" CR_TAB
5487 "muls %B0,%A0" CR_TAB
5489 "sbc %B0,%B0" CR_TAB
5490 "clr __zero_reg__");
5492 if (optimize_size
&& scratch
)
5495 return ("mov %A0,%B0" CR_TAB
5497 "sbc %B0,%B0" CR_TAB
5503 if (AVR_HAVE_MUL
&& ldi_ok
)
5506 return ("ldi %A0,0x10" CR_TAB
5507 "muls %B0,%A0" CR_TAB
5509 "sbc %B0,%B0" CR_TAB
5510 "clr __zero_reg__");
5512 if (optimize_size
&& scratch
)
5515 return ("mov %A0,%B0" CR_TAB
5517 "sbc %B0,%B0" CR_TAB
5524 if (AVR_HAVE_MUL
&& ldi_ok
)
5527 return ("ldi %A0,0x08" CR_TAB
5528 "muls %B0,%A0" CR_TAB
5530 "sbc %B0,%B0" CR_TAB
5531 "clr __zero_reg__");
5534 break; /* scratch ? 5 : 7 */
5536 return ("mov %A0,%B0" CR_TAB
5538 "sbc %B0,%B0" CR_TAB
5547 return ("lsl %B0" CR_TAB
5548 "sbc %A0,%A0" CR_TAB
5550 "mov %B0,%A0" CR_TAB
5554 if (INTVAL (operands
[2]) < 16)
5560 return *len
= 3, ("lsl %B0" CR_TAB
5561 "sbc %A0,%A0" CR_TAB
5566 out_shift_with_cnt ("asr %B0" CR_TAB
5567 "ror %A0", insn
, operands
, len
, 2);
5572 /* 24-bit arithmetic shift right */
5575 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5577 int dest
= REGNO (op
[0]);
5578 int src
= REGNO (op
[1]);
5580 if (CONST_INT_P (op
[2]))
5585 switch (INTVAL (op
[2]))
5589 return avr_asm_len ("mov %A0,%B1" CR_TAB
5590 "mov %B0,%C1" CR_TAB
5593 "dec %C0", op
, plen
, 5);
5595 return avr_asm_len ("clr %C0" CR_TAB
5598 "mov %B0,%C1" CR_TAB
5599 "mov %A0,%B1", op
, plen
, 5);
5602 if (dest
!= src
+ 2)
5603 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5605 return avr_asm_len ("clr %B0" CR_TAB
5608 "mov %C0,%B0", op
, plen
, 4);
5611 if (INTVAL (op
[2]) < 24)
5617 return avr_asm_len ("lsl %C0" CR_TAB
5618 "sbc %A0,%A0" CR_TAB
5619 "mov %B0,%A0" CR_TAB
5620 "mov %C0,%A0", op
, plen
, 4);
5624 out_shift_with_cnt ("asr %C0" CR_TAB
5626 "ror %A0", insn
, op
, plen
, 3);
5631 /* 32-bit arithmetic shift right ((signed long)x >> i) */
5634 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5636 if (GET_CODE (operands
[2]) == CONST_INT
)
5644 switch (INTVAL (operands
[2]))
5648 int reg0
= true_regnum (operands
[0]);
5649 int reg1
= true_regnum (operands
[1]);
5652 return ("mov %A0,%B1" CR_TAB
5653 "mov %B0,%C1" CR_TAB
5654 "mov %C0,%D1" CR_TAB
5659 return ("clr %D0" CR_TAB
5662 "mov %C0,%D1" CR_TAB
5663 "mov %B0,%C1" CR_TAB
5669 int reg0
= true_regnum (operands
[0]);
5670 int reg1
= true_regnum (operands
[1]);
5672 if (reg0
== reg1
+ 2)
5673 return *len
= 4, ("clr %D0" CR_TAB
5678 return *len
= 5, ("movw %A0,%C1" CR_TAB
5684 return *len
= 6, ("mov %B0,%D1" CR_TAB
5685 "mov %A0,%C1" CR_TAB
5693 return *len
= 6, ("mov %A0,%D1" CR_TAB
5697 "mov %B0,%D0" CR_TAB
5701 if (INTVAL (operands
[2]) < 32)
5708 return *len
= 4, ("lsl %D0" CR_TAB
5709 "sbc %A0,%A0" CR_TAB
5710 "mov %B0,%A0" CR_TAB
5713 return *len
= 5, ("lsl %D0" CR_TAB
5714 "sbc %A0,%A0" CR_TAB
5715 "mov %B0,%A0" CR_TAB
5716 "mov %C0,%A0" CR_TAB
5721 out_shift_with_cnt ("asr %D0" CR_TAB
5724 "ror %A0", insn
, operands
, len
, 4);
5728 /* 8-bit logic shift right ((unsigned char)x >> i) */
5731 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5733 if (GET_CODE (operands
[2]) == CONST_INT
)
5740 switch (INTVAL (operands
[2]))
5743 if (INTVAL (operands
[2]) < 8)
5755 return ("lsr %0" CR_TAB
5759 return ("lsr %0" CR_TAB
5764 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5767 return ("swap %0" CR_TAB
5771 return ("lsr %0" CR_TAB
5777 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5780 return ("swap %0" CR_TAB
5785 return ("lsr %0" CR_TAB
5792 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5795 return ("swap %0" CR_TAB
5801 return ("lsr %0" CR_TAB
5810 return ("rol %0" CR_TAB
5815 else if (CONSTANT_P (operands
[2]))
5816 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5818 out_shift_with_cnt ("lsr %0",
5819 insn
, operands
, len
, 1);
5823 /* 16-bit logic shift right ((unsigned short)x >> i) */
5826 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5828 if (GET_CODE (operands
[2]) == CONST_INT
)
5830 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5831 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5838 switch (INTVAL (operands
[2]))
5841 if (INTVAL (operands
[2]) < 16)
5845 return ("clr %B0" CR_TAB
5849 if (optimize_size
&& scratch
)
5854 return ("swap %B0" CR_TAB
5856 "andi %A0,0x0f" CR_TAB
5857 "eor %A0,%B0" CR_TAB
5858 "andi %B0,0x0f" CR_TAB
5864 return ("swap %B0" CR_TAB
5866 "ldi %3,0x0f" CR_TAB
5868 "eor %A0,%B0" CR_TAB
5872 break; /* optimize_size ? 6 : 8 */
5876 break; /* scratch ? 5 : 6 */
5880 return ("lsr %B0" CR_TAB
5884 "andi %A0,0x0f" CR_TAB
5885 "eor %A0,%B0" CR_TAB
5886 "andi %B0,0x0f" CR_TAB
5892 return ("lsr %B0" CR_TAB
5896 "ldi %3,0x0f" CR_TAB
5898 "eor %A0,%B0" CR_TAB
5906 break; /* scratch ? 5 : 6 */
5908 return ("clr __tmp_reg__" CR_TAB
5911 "rol __tmp_reg__" CR_TAB
5914 "rol __tmp_reg__" CR_TAB
5915 "mov %A0,%B0" CR_TAB
5916 "mov %B0,__tmp_reg__");
5920 return ("lsl %A0" CR_TAB
5921 "mov %A0,%B0" CR_TAB
5923 "sbc %B0,%B0" CR_TAB
5927 return *len
= 2, ("mov %A0,%B1" CR_TAB
5932 return ("mov %A0,%B0" CR_TAB
5938 return ("mov %A0,%B0" CR_TAB
5945 return ("mov %A0,%B0" CR_TAB
5955 return ("mov %A0,%B0" CR_TAB
5963 return ("mov %A0,%B0" CR_TAB
5966 "ldi %3,0x0f" CR_TAB
5970 return ("mov %A0,%B0" CR_TAB
5981 return ("mov %A0,%B0" CR_TAB
5987 if (AVR_HAVE_MUL
&& scratch
)
5990 return ("ldi %3,0x08" CR_TAB
5994 "clr __zero_reg__");
5996 if (optimize_size
&& scratch
)
6001 return ("mov %A0,%B0" CR_TAB
6005 "ldi %3,0x07" CR_TAB
6011 return ("set" CR_TAB
6016 "clr __zero_reg__");
6019 return ("mov %A0,%B0" CR_TAB
6028 if (AVR_HAVE_MUL
&& ldi_ok
)
6031 return ("ldi %A0,0x04" CR_TAB
6032 "mul %B0,%A0" CR_TAB
6035 "clr __zero_reg__");
6037 if (AVR_HAVE_MUL
&& scratch
)
6040 return ("ldi %3,0x04" CR_TAB
6044 "clr __zero_reg__");
6046 if (optimize_size
&& ldi_ok
)
6049 return ("mov %A0,%B0" CR_TAB
6050 "ldi %B0,6" "\n1:\t"
6055 if (optimize_size
&& scratch
)
6058 return ("clr %A0" CR_TAB
6067 return ("clr %A0" CR_TAB
6074 out_shift_with_cnt ("lsr %B0" CR_TAB
6075 "ror %A0", insn
, operands
, len
, 2);
6080 /* 24-bit logic shift right */
6083 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
6085 int dest
= REGNO (op
[0]);
6086 int src
= REGNO (op
[1]);
6088 if (CONST_INT_P (op
[2]))
6093 switch (INTVAL (op
[2]))
6097 return avr_asm_len ("mov %A0,%B1" CR_TAB
6098 "mov %B0,%C1" CR_TAB
6099 "clr %C0", op
, plen
, 3);
6101 return avr_asm_len ("clr %C0" CR_TAB
6102 "mov %B0,%C1" CR_TAB
6103 "mov %A0,%B1", op
, plen
, 3);
6106 if (dest
!= src
+ 2)
6107 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
6109 return avr_asm_len ("clr %B0" CR_TAB
6110 "clr %C0", op
, plen
, 2);
6113 if (INTVAL (op
[2]) < 24)
6119 return avr_asm_len ("clr %A0" CR_TAB
6123 "clr %C0", op
, plen
, 5);
6127 out_shift_with_cnt ("lsr %C0" CR_TAB
6129 "ror %A0", insn
, op
, plen
, 3);
6134 /* 32-bit logic shift right ((unsigned int)x >> i) */
6137 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
6139 if (GET_CODE (operands
[2]) == CONST_INT
)
6147 switch (INTVAL (operands
[2]))
6150 if (INTVAL (operands
[2]) < 32)
6154 return *len
= 3, ("clr %D0" CR_TAB
6158 return ("clr %D0" CR_TAB
6165 int reg0
= true_regnum (operands
[0]);
6166 int reg1
= true_regnum (operands
[1]);
6169 return ("mov %A0,%B1" CR_TAB
6170 "mov %B0,%C1" CR_TAB
6171 "mov %C0,%D1" CR_TAB
6174 return ("clr %D0" CR_TAB
6175 "mov %C0,%D1" CR_TAB
6176 "mov %B0,%C1" CR_TAB
6182 int reg0
= true_regnum (operands
[0]);
6183 int reg1
= true_regnum (operands
[1]);
6185 if (reg0
== reg1
+ 2)
6186 return *len
= 2, ("clr %C0" CR_TAB
6189 return *len
= 3, ("movw %A0,%C1" CR_TAB
6193 return *len
= 4, ("mov %B0,%D1" CR_TAB
6194 "mov %A0,%C1" CR_TAB
6200 return *len
= 4, ("mov %A0,%D1" CR_TAB
6207 return ("clr %A0" CR_TAB
6216 out_shift_with_cnt ("lsr %D0" CR_TAB
6219 "ror %A0", insn
, operands
, len
, 4);
6224 /* Output addition of register XOP[0] and compile time constant XOP[2].
6225 CODE == PLUS: perform addition by using ADD instructions or
6226 CODE == MINUS: perform addition by using SUB instructions:
6228 XOP[0] = XOP[0] + XOP[2]
6230 Or perform addition/subtraction with register XOP[2] depending on CODE:
6232 XOP[0] = XOP[0] +/- XOP[2]
6234 If PLEN == NULL, print assembler instructions to perform the operation;
6235 otherwise, set *PLEN to the length of the instruction sequence (in words)
6236 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6237 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
6239 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6240 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6241 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6242 the subtrahend in the original insn, provided it is a compile time constant.
6243 In all other cases, SIGN is 0.
6245 If OUT_LABEL is true, print the final 0: label which is needed for
6246 saturated addition / subtraction. The only case where OUT_LABEL = false
6247 is useful is for saturated addition / subtraction performed during
6248 fixed-point rounding, cf. `avr_out_round'. */
6251 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
6252 enum rtx_code code_sat
, int sign
, bool out_label
)
6254 /* MODE of the operation. */
6255 enum machine_mode mode
= GET_MODE (xop
[0]);
6257 /* INT_MODE of the same size. */
6258 enum machine_mode imode
= int_mode_for_mode (mode
);
6260 /* Number of bytes to operate on. */
6261 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6263 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6264 int clobber_val
= -1;
6266 /* op[0]: 8-bit destination register
6267 op[1]: 8-bit const int
6268 op[2]: 8-bit scratch register */
6271 /* Started the operation? Before starting the operation we may skip
6272 adding 0. This is no more true after the operation started because
6273 carry must be taken into account. */
6274 bool started
= false;
6276 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6279 /* Output a BRVC instruction. Only needed with saturation. */
6280 bool out_brvc
= true;
6287 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6289 for (i
= 0; i
< n_bytes
; i
++)
6291 /* We operate byte-wise on the destination. */
6292 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6293 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6296 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
6299 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
6303 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6305 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
6314 /* Except in the case of ADIW with 16-bit register (see below)
6315 addition does not set cc0 in a usable way. */
6317 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6319 if (CONST_FIXED_P (xval
))
6320 xval
= avr_to_int_mode (xval
);
6322 /* Adding/Subtracting zero is a no-op. */
6324 if (xval
== const0_rtx
)
6331 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
6335 if (SS_PLUS
== code_sat
&& MINUS
== code
6337 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
6338 & GET_MODE_MASK (QImode
)))
6340 /* We compute x + 0x80 by means of SUB instructions. We negated the
6341 constant subtrahend above and are left with x - (-128) so that we
6342 need something like SUBI r,128 which does not exist because SUBI sets
6343 V according to the sign of the subtrahend. Notice the only case
6344 where this must be done is when NEG overflowed in case [2s] because
6345 the V computation needs the right sign of the subtrahend. */
6347 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6349 avr_asm_len ("subi %0,128" CR_TAB
6350 "brmi 0f", &msb
, plen
, 2);
6356 for (i
= 0; i
< n_bytes
; i
++)
6358 /* We operate byte-wise on the destination. */
6359 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6360 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
6362 /* 8-bit value to operate with this byte. */
6363 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6365 /* Registers R16..R31 can operate with immediate. */
6366 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6369 op
[1] = gen_int_mode (val8
, QImode
);
6371 /* To get usable cc0 no low-bytes must have been skipped. */
6379 && test_hard_reg_class (ADDW_REGS
, reg8
))
6381 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
6382 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6384 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6385 i.e. operate word-wise. */
6392 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6395 if (n_bytes
== 2 && PLUS
== code
)
6407 avr_asm_len (code
== PLUS
6408 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6412 else if ((val8
== 1 || val8
== 0xff)
6413 && UNKNOWN
== code_sat
6415 && i
== n_bytes
- 1)
6417 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6426 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
6428 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
6430 /* This belongs to the x + 0x80 corner case. The code with
6431 ADD instruction is not smaller, thus make this case
6432 expensive so that the caller won't pick it. */
6438 if (clobber_val
!= (int) val8
)
6439 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6440 clobber_val
= (int) val8
;
6442 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6449 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6452 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6454 if (clobber_val
!= (int) val8
)
6455 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6456 clobber_val
= (int) val8
;
6458 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6470 } /* for all sub-bytes */
6474 if (UNKNOWN
== code_sat
)
6477 *pcc
= (int) CC_CLOBBER
;
6479 /* Vanilla addition/subtraction is done. We are left with saturation.
6481 We have to compute A = A <op> B where A is a register and
6482 B is a register or a non-zero compile time constant CONST.
6483 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6484 B stands for the original operand $2 in INSN. In the case of B = CONST,
6485 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6487 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6491 operation | code | sat if | b is | sat value | case
6492 -----------------+-------+----------+--------------+-----------+-------
6493 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6494 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6495 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6496 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6500 operation | code | sat if | b is | sat value | case
6501 -----------------+-------+----------+--------------+-----------+-------
6502 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6503 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6504 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6505 - as a + (-b) | add | V == 1 | const | s- | [4s]
6507 s+ = b < 0 ? -0x80 : 0x7f
6508 s- = b < 0 ? 0x7f : -0x80
6510 The cases a - b actually perform a - (-(-b)) if B is CONST.
6513 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6515 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
6518 bool need_copy
= true;
6519 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
6530 avr_asm_len ("brvc 0f", op
, plen
, 1);
6532 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6537 avr_asm_len ("ldi %0,0x7f" CR_TAB
6538 "adc %0,__zero_reg__", op
, plen
, 2);
6540 avr_asm_len ("ldi %0,0x7f" CR_TAB
6541 "ldi %1,0xff" CR_TAB
6542 "adc %1,__zero_reg__" CR_TAB
6543 "adc %0,__zero_reg__", op
, plen
, 4);
6545 else if (sign
== 0 && PLUS
== code
)
6549 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6552 avr_asm_len ("ldi %0,0x80" CR_TAB
6554 "dec %0", op
, plen
, 3);
6556 avr_asm_len ("ldi %0,0x80" CR_TAB
6559 "sbci %0,0", op
, plen
, 4);
6561 else if (sign
== 0 && MINUS
== code
)
6565 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6568 avr_asm_len ("ldi %0,0x7f" CR_TAB
6570 "inc %0", op
, plen
, 3);
6572 avr_asm_len ("ldi %0,0x7f" CR_TAB
6575 "sbci %0,-1", op
, plen
, 4);
6577 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
6579 /* [1s,const,B < 0] [2s,B < 0] */
6580 /* [3s,const,B > 0] [4s,B > 0] */
6584 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6588 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
6589 if (n_bytes
> 1 && need_copy
)
6590 avr_asm_len ("clr %1", op
, plen
, 1);
6592 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
6594 /* [1s,const,B > 0] [2s,B > 0] */
6595 /* [3s,const,B < 0] [4s,B < 0] */
6599 avr_asm_len ("sec" CR_TAB
6600 "%~call __sbc_8", op
, plen
, 1 + len_call
);
6604 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
6605 if (n_bytes
> 1 && need_copy
)
6606 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
6616 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
6621 avr_asm_len ("sec", op
, plen
, 1);
6622 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
6628 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
6629 avr_asm_len ("sec" CR_TAB
"sbc %0,%0", op
, plen
, 2);
6631 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
6634 break; /* US_PLUS */
6639 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
6643 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6647 avr_asm_len ("clr %0", op
, plen
, 1);
6652 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6653 Now copy the right value to the LSBs. */
6655 if (need_copy
&& n_bytes
> 1)
6657 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
6659 avr_asm_len ("mov %1,%0", op
, plen
, 1);
6665 avr_asm_len ("movw %0,%1", op
, plen
, 1);
6667 avr_asm_len ("mov %A0,%1" CR_TAB
6668 "mov %B0,%1", op
, plen
, 2);
6671 else if (n_bytes
> 2)
6674 avr_asm_len ("mov %A0,%1" CR_TAB
6675 "mov %B0,%1", op
, plen
, 2);
6679 if (need_copy
&& n_bytes
== 8)
6682 avr_asm_len ("movw %r0+2,%0" CR_TAB
6683 "movw %r0+4,%0", xop
, plen
, 2);
6685 avr_asm_len ("mov %r0+2,%0" CR_TAB
6686 "mov %r0+3,%0" CR_TAB
6687 "mov %r0+4,%0" CR_TAB
6688 "mov %r0+5,%0", xop
, plen
, 4);
6692 avr_asm_len ("0:", op
, plen
, 0);
6696 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6697 is not a compile-time constant:
6699 XOP[0] = XOP[0] +/- XOP[2]
6701 This is a helper for the function below. The only insns that need this
6702 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6705 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
6707 enum machine_mode mode
= GET_MODE (xop
[0]);
6709 /* Only pointer modes want to add symbols. */
6711 gcc_assert (mode
== HImode
|| mode
== PSImode
);
6713 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6715 avr_asm_len (PLUS
== code
6716 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
6717 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
6720 if (PSImode
== mode
)
6721 avr_asm_len (PLUS
== code
6722 ? "sbci %C0,hlo8(-(%2))"
6723 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
6728 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6730 INSN is a single_set insn or an insn pattern with a binary operation as
6731 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6733 XOP are the operands of INSN. In the case of 64-bit operations with
6734 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6735 The non-saturating insns up to 32 bits may or may not supply a "d" class
6738 If PLEN == NULL output the instructions.
6739 If PLEN != NULL set *PLEN to the length of the sequence in words.
6741 PCC is a pointer to store the instructions' effect on cc0.
6744 PLEN and PCC default to NULL.
6746 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
6751 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
, bool out_label
)
6753 int cc_plus
, cc_minus
, cc_dummy
;
6754 int len_plus
, len_minus
;
6756 rtx xpattern
= INSN_P (insn
) ? single_set (insn
) : insn
;
6757 rtx xdest
= SET_DEST (xpattern
);
6758 enum machine_mode mode
= GET_MODE (xdest
);
6759 enum machine_mode imode
= int_mode_for_mode (mode
);
6760 int n_bytes
= GET_MODE_SIZE (mode
);
6761 enum rtx_code code_sat
= GET_CODE (SET_SRC (xpattern
));
6763 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
6769 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6771 if (PLUS
== code_sat
|| MINUS
== code_sat
)
6774 if (n_bytes
<= 4 && REG_P (xop
[2]))
6776 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
, 0, out_label
);
6782 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
6783 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
6784 op
[2] = avr_to_int_mode (xop
[0]);
6789 && !CONST_INT_P (xop
[2])
6790 && !CONST_FIXED_P (xop
[2]))
6792 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
6795 op
[0] = avr_to_int_mode (xop
[0]);
6796 op
[1] = avr_to_int_mode (xop
[1]);
6797 op
[2] = avr_to_int_mode (xop
[2]);
6800 /* Saturations and 64-bit operations don't have a clobber operand.
6801 For the other cases, the caller will provide a proper XOP[3]. */
6803 xpattern
= INSN_P (insn
) ? PATTERN (insn
) : insn
;
6804 op
[3] = PARALLEL
== GET_CODE (xpattern
) ? xop
[3] : NULL_RTX
;
6806 /* Saturation will need the sign of the original operand. */
6808 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
6809 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
6811 /* If we subtract and the subtrahend is a constant, then negate it
6812 so that avr_out_plus_1 can be used. */
6815 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
6817 /* Work out the shortest sequence. */
6819 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_minus
, code_sat
, sign
, out_label
);
6820 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_plus
, code_sat
, sign
, out_label
);
6824 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6825 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6827 else if (len_minus
<= len_plus
)
6828 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
, out_label
);
6830 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
, out_label
);
6836 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6837 time constant XOP[2]:
6839 XOP[0] = XOP[0] <op> XOP[2]
6841 and return "". If PLEN == NULL, print assembler instructions to perform the
6842 operation; otherwise, set *PLEN to the length of the instruction sequence
6843 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6844 register or SCRATCH if no clobber register is needed for the operation.
6845 INSN is an INSN_P or a pattern of an insn. */
6848 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6850 /* CODE and MODE of the operation. */
6851 rtx xpattern
= INSN_P (insn
) ? single_set (insn
) : insn
;
6852 enum rtx_code code
= GET_CODE (SET_SRC (xpattern
));
6853 enum machine_mode mode
= GET_MODE (xop
[0]);
6855 /* Number of bytes to operate on. */
6856 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6858 /* Value of T-flag (0 or 1) or -1 if unknow. */
6861 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6862 int clobber_val
= -1;
6864 /* op[0]: 8-bit destination register
6865 op[1]: 8-bit const int
6866 op[2]: 8-bit clobber register or SCRATCH
6867 op[3]: 8-bit register containing 0xff or NULL_RTX */
6876 for (i
= 0; i
< n_bytes
; i
++)
6878 /* We operate byte-wise on the destination. */
6879 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6880 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6882 /* 8-bit value to operate with this byte. */
6883 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6885 /* Number of bits set in the current byte of the constant. */
6886 int pop8
= avr_popcount (val8
);
6888 /* Registers R16..R31 can operate with immediate. */
6889 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6892 op
[1] = GEN_INT (val8
);
6901 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6905 avr_asm_len ("set", op
, plen
, 1);
6908 op
[1] = GEN_INT (exact_log2 (val8
));
6909 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6913 if (op
[3] != NULL_RTX
)
6914 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6916 avr_asm_len ("clr %0" CR_TAB
6917 "dec %0", op
, plen
, 2);
6923 if (clobber_val
!= (int) val8
)
6924 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6925 clobber_val
= (int) val8
;
6927 avr_asm_len ("or %0,%2", op
, plen
, 1);
6937 avr_asm_len ("clr %0", op
, plen
, 1);
6939 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6943 avr_asm_len ("clt", op
, plen
, 1);
6946 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6947 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6951 if (clobber_val
!= (int) val8
)
6952 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6953 clobber_val
= (int) val8
;
6955 avr_asm_len ("and %0,%2", op
, plen
, 1);
6965 avr_asm_len ("com %0", op
, plen
, 1);
6966 else if (ld_reg_p
&& val8
== (1 << 7))
6967 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6970 if (clobber_val
!= (int) val8
)
6971 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6972 clobber_val
= (int) val8
;
6974 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6980 /* Unknown rtx_code */
6983 } /* for all sub-bytes */
6989 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6990 PLEN != NULL: Set *PLEN to the length of that sequence.
6994 avr_out_addto_sp (rtx
*op
, int *plen
)
6996 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6997 int addend
= INTVAL (op
[0]);
7004 if (flag_verbose_asm
|| flag_print_asm_name
)
7005 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
7007 while (addend
<= -pc_len
)
7010 avr_asm_len ("rcall .", op
, plen
, 1);
7013 while (addend
++ < 0)
7014 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
7016 else if (addend
> 0)
7018 if (flag_verbose_asm
|| flag_print_asm_name
)
7019 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
7021 while (addend
-- > 0)
7022 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
7029 /* Outputs instructions needed for fixed point type conversion.
7030 This includes converting between any fixed point type, as well
7031 as converting to any integer type. Conversion between integer
7032 types is not supported.
7034 Converting signed fractional types requires a bit shift if converting
7035 to or from any unsigned fractional type because the decimal place is
7036 shifted by 1 bit. When the destination is a signed fractional, the sign
7037 is stored in either the carry or T bit. */
7040 avr_out_fract (rtx insn
, rtx operands
[], bool intsigned
, int *plen
)
7044 RTX_CODE shift
= UNKNOWN
;
7045 bool sign_in_carry
= false;
7046 bool msb_in_carry
= false;
7047 bool lsb_in_tmp_reg
= false;
7048 bool lsb_in_carry
= false;
7049 bool frac_rounded
= false;
7050 const char *code_ashift
= "lsl %0";
7053 #define MAY_CLOBBER(RR) \
7054 /* Shorthand used below. */ \
7056 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7057 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7058 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7059 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7063 /* bytes : Length of operand in bytes.
7064 ibyte : Length of integral part in bytes.
7065 fbyte, fbit : Length of fractional part in bytes, bits. */
7068 unsigned fbit
, bytes
, ibyte
, fbyte
;
7069 unsigned regno
, regno_msb
;
7070 } dest
, src
, *val
[2] = { &dest
, &src
};
7075 /* Step 0: Determine information on source and destination operand we
7076 ====== will need in the remainder. */
7078 for (i
= 0; i
< sizeof (val
) / sizeof (*val
); i
++)
7080 enum machine_mode mode
;
7082 xop
[i
] = operands
[i
];
7084 mode
= GET_MODE (xop
[i
]);
7086 val
[i
]->bytes
= GET_MODE_SIZE (mode
);
7087 val
[i
]->regno
= REGNO (xop
[i
]);
7088 val
[i
]->regno_msb
= REGNO (xop
[i
]) + val
[i
]->bytes
- 1;
7090 if (SCALAR_INT_MODE_P (mode
))
7092 val
[i
]->sbit
= intsigned
;
7095 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
7097 val
[i
]->sbit
= SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
7098 val
[i
]->fbit
= GET_MODE_FBIT (mode
);
7101 fatal_insn ("unsupported fixed-point conversion", insn
);
7103 val
[i
]->fbyte
= (1 + val
[i
]->fbit
) / BITS_PER_UNIT
;
7104 val
[i
]->ibyte
= val
[i
]->bytes
- val
[i
]->fbyte
;
7107 // Byte offset of the decimal point taking into account different place
7108 // of the decimal point in input and output and different register numbers
7109 // of input and output.
7110 int offset
= dest
.regno
- src
.regno
+ dest
.fbyte
- src
.fbyte
;
7112 // Number of destination bytes that will come from sign / zero extension.
7113 int sign_bytes
= (dest
.ibyte
- src
.ibyte
) * (dest
.ibyte
> src
.ibyte
);
7115 // Number of bytes at the low end to be filled with zeros.
7116 int zero_bytes
= (dest
.fbyte
- src
.fbyte
) * (dest
.fbyte
> src
.fbyte
);
7118 // Do we have a 16-Bit register that is cleared?
7119 rtx clrw
= NULL_RTX
;
7121 bool sign_extend
= src
.sbit
&& sign_bytes
;
7123 if (0 == dest
.fbit
% 8 && 7 == src
.fbit
% 8)
7125 else if (7 == dest
.fbit
% 8 && 0 == src
.fbit
% 8)
7127 else if (dest
.fbit
% 8 == src
.fbit
% 8)
7132 /* If we need to round the fraction part, we might need to save/round it
7133 before clobbering any of it in Step 1. Also, we might to want to do
7134 the rounding now to make use of LD_REGS. */
7135 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7136 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7137 && !TARGET_FRACT_CONV_TRUNC
)
7141 (offset
? dest
.regno_msb
- sign_bytes
: dest
.regno
+ zero_bytes
- 1)
7142 && dest
.regno
- offset
-1 >= dest
.regno
);
7143 unsigned s0
= dest
.regno
- offset
-1;
7144 bool use_src
= true;
7146 unsigned copied_msb
= src
.regno_msb
;
7147 bool have_carry
= false;
7149 if (src
.ibyte
> dest
.ibyte
)
7150 copied_msb
-= src
.ibyte
- dest
.ibyte
;
7152 for (sn
= s0
; sn
<= copied_msb
; sn
++)
7153 if (!IN_RANGE (sn
, dest
.regno
, dest
.regno_msb
)
7154 && !reg_unused_after (insn
, all_regs_rtx
[sn
]))
7156 if (use_src
&& TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
))
7158 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7159 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7163 if (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], sn
))
7164 avr_asm_len ("cpi %0,1", &all_regs_rtx
[sn
], plen
, 1);
7166 avr_asm_len ("sec" CR_TAB
"cpc %0,__zero_reg__",
7167 &all_regs_rtx
[sn
], plen
, 2);
7171 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7172 avr_asm_len (have_carry
? "sbci %0,128" : "subi %0,129",
7173 &all_regs_rtx
[s0
], plen
, 1);
7174 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
7175 avr_asm_len ("sbci %0,255", &all_regs_rtx
[sn
], plen
, 1);
7176 avr_asm_len ("\n0:", NULL
, plen
, 0);
7177 frac_rounded
= true;
7179 else if (use_src
&& overlap
)
7181 avr_asm_len ("clr __tmp_reg__" CR_TAB
7182 "sbrc %1,0" CR_TAB
"dec __tmp_reg__", xop
, plen
, 1);
7186 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
7190 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
7192 avr_asm_len ("clt" CR_TAB
"bld __tmp_reg__,7" CR_TAB
7193 "adc %0,__tmp_reg__",
7194 &all_regs_rtx
[s0
], plen
, 1);
7196 avr_asm_len ("lsr __tmp_reg" CR_TAB
"add %0,__tmp_reg__",
7197 &all_regs_rtx
[s0
], plen
, 2);
7198 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
7199 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7200 frac_rounded
= true;
7205 = (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
)
7206 && (IN_RANGE (s0
, dest
.regno
, dest
.regno_msb
)
7207 || reg_unused_after (insn
, all_regs_rtx
[s0
])));
7208 xop
[2] = all_regs_rtx
[s0
];
7209 unsigned sn
= src
.regno
;
7210 if (!use_src
|| sn
== s0
)
7211 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
7212 /* We need to consider to-be-discarded bits
7213 if the value is negative. */
7216 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7217 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7218 /* Test to-be-discarded bytes for any nozero bits.
7219 ??? Could use OR or SBIW to test two registers at once. */
7221 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7223 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7224 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7226 avr_asm_len ("breq 0f" CR_TAB
7227 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7230 avr_asm_len ("breq 0f" CR_TAB
7231 "set" CR_TAB
"bld __tmp_reg__,0\n0:",
7234 lsb_in_tmp_reg
= true;
7238 /* Step 1: Clear bytes at the low end and copy payload bits from source
7239 ====== to destination. */
7241 int step
= offset
< 0 ? 1 : -1;
7242 unsigned d0
= offset
< 0 ? dest
.regno
: dest
.regno_msb
;
7244 // We cleared at least that number of registers.
7247 for (; d0
>= dest
.regno
&& d0
<= dest
.regno_msb
; d0
+= step
)
7249 // Next regno of destination is needed for MOVW
7250 unsigned d1
= d0
+ step
;
7252 // Current and next regno of source
7253 signed s0
= d0
- offset
;
7254 signed s1
= s0
+ step
;
7256 // Must current resp. next regno be CLRed? This applies to the low
7257 // bytes of the destination that have no associated source bytes.
7258 bool clr0
= s0
< (signed) src
.regno
;
7259 bool clr1
= s1
< (signed) src
.regno
&& d1
>= dest
.regno
;
7261 // First gather what code to emit (if any) and additional step to
7262 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7263 // is the source rtx for the current loop iteration.
7264 const char *code
= NULL
;
7269 if (AVR_HAVE_MOVW
&& clr1
&& clrw
)
7271 xop
[2] = all_regs_rtx
[d0
& ~1];
7273 code
= "movw %2,%3";
7278 xop
[2] = all_regs_rtx
[d0
];
7283 && d0
% 2 == (step
> 0))
7285 clrw
= all_regs_rtx
[d0
& ~1];
7289 else if (offset
&& s0
<= (signed) src
.regno_msb
)
7291 int movw
= AVR_HAVE_MOVW
&& offset
% 2 == 0
7292 && d0
% 2 == (offset
> 0)
7293 && d1
<= dest
.regno_msb
&& d1
>= dest
.regno
7294 && s1
<= (signed) src
.regno_msb
&& s1
>= (signed) src
.regno
;
7296 xop
[2] = all_regs_rtx
[d0
& ~movw
];
7297 xop
[3] = all_regs_rtx
[s0
& ~movw
];
7298 code
= movw
? "movw %2,%3" : "mov %2,%3";
7299 stepw
= step
* movw
;
7304 if (sign_extend
&& shift
!= ASHIFT
&& !sign_in_carry
7305 && (d0
== src
.regno_msb
|| d0
+ stepw
== src
.regno_msb
))
7307 /* We are going to override the sign bit. If we sign-extend,
7308 store the sign in the Carry flag. This is not needed if
7309 the destination will be ASHIFT is the remainder because
7310 the ASHIFT will set Carry without extra instruction. */
7312 avr_asm_len ("lsl %0", &all_regs_rtx
[src
.regno_msb
], plen
, 1);
7313 sign_in_carry
= true;
7316 unsigned src_msb
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
7318 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
7319 && src
.ibyte
> dest
.ibyte
7320 && (d0
== src_msb
|| d0
+ stepw
== src_msb
))
7322 /* We are going to override the MSB. If we shift right,
7323 store the MSB in the Carry flag. This is only needed if
7324 we don't sign-extend becaue with sign-extension the MSB
7325 (the sign) will be produced by the sign extension. */
7327 avr_asm_len ("lsr %0", &all_regs_rtx
[src_msb
], plen
, 1);
7328 msb_in_carry
= true;
7331 unsigned src_lsb
= dest
.regno
- offset
-1;
7333 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
7335 && (d0
== src_lsb
|| d0
+ stepw
== src_lsb
))
7337 /* We are going to override the new LSB; store it into carry. */
7339 avr_asm_len ("lsl %0", &all_regs_rtx
[src_lsb
], plen
, 1);
7340 code_ashift
= "rol %0";
7341 lsb_in_carry
= true;
7344 avr_asm_len (code
, xop
, plen
, 1);
7349 /* Step 2: Shift destination left by 1 bit position. This might be needed
7350 ====== for signed input and unsigned output. */
7352 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
)
7354 unsigned s0
= dest
.regno
- offset
-1;
7356 /* n1169 4.1.4 says:
7357 "Conversions from a fixed-point to an integer type round toward zero."
7358 Hence, converting a fract type to integer only gives a non-zero result
7360 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7361 && SCALAR_FRACT_MODE_P (GET_MODE (xop
[1]))
7362 && !TARGET_FRACT_CONV_TRUNC
)
7364 gcc_assert (s0
== src
.regno_msb
);
7365 /* Check if the input is -1. We do that by checking if negating
7366 the input causes an integer overflow. */
7367 unsigned sn
= src
.regno
;
7368 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
7370 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
7372 /* Overflow goes with set carry. Clear carry otherwise. */
7373 avr_asm_len ("brvs 0f" CR_TAB
"clc\n0:", NULL
, plen
, 2);
7375 /* Likewise, when converting from accumulator types to integer, we
7376 need to round up negative values. */
7377 else if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7378 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7379 && !TARGET_FRACT_CONV_TRUNC
7382 bool have_carry
= false;
7384 xop
[2] = all_regs_rtx
[s0
];
7385 if (!lsb_in_tmp_reg
&& !MAY_CLOBBER (s0
))
7386 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
7387 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7388 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7389 if (!lsb_in_tmp_reg
)
7391 unsigned sn
= src
.regno
;
7394 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
],
7399 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
], plen
, 1);
7400 lsb_in_tmp_reg
= !MAY_CLOBBER (s0
);
7402 /* Add in C and the rounding value 127. */
7403 /* If the destination msb is a sign byte, and in LD_REGS,
7404 grab it as a temporary. */
7406 && TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
],
7409 xop
[3] = all_regs_rtx
[dest
.regno_msb
];
7410 avr_asm_len ("ldi %3,127", xop
, plen
, 1);
7411 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
? "adc __tmp_reg__,%3"
7412 : have_carry
? "adc %2,%3"
7413 : lsb_in_tmp_reg
? "add __tmp_reg__,%3"
7419 /* Fall back to use __zero_reg__ as a temporary. */
7420 avr_asm_len ("dec __zero_reg__", NULL
, plen
, 1);
7422 avr_asm_len ("clt" CR_TAB
"bld __zero_reg__,7", NULL
, plen
, 2);
7424 avr_asm_len ("lsr __zero_reg__", NULL
, plen
, 1);
7425 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
7426 ? "adc __tmp_reg__,__zero_reg__"
7427 : have_carry
? "adc %2,__zero_reg__"
7428 : lsb_in_tmp_reg
? "add __tmp_reg__,__zero_reg__"
7429 : "add %2,__zero_reg__"),
7431 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL
, plen
, 1);
7433 for (d0
= dest
.regno
+ zero_bytes
;
7434 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
7435 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[d0
], plen
, 1);
7436 avr_asm_len (lsb_in_tmp_reg
7437 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7440 else if (MAY_CLOBBER (s0
))
7441 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
7443 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7444 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7446 code_ashift
= "rol %0";
7447 lsb_in_carry
= true;
7450 if (shift
== ASHIFT
)
7452 for (d0
= dest
.regno
+ zero_bytes
;
7453 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
7455 avr_asm_len (code_ashift
, &all_regs_rtx
[d0
], plen
, 1);
7456 code_ashift
= "rol %0";
7459 lsb_in_carry
= false;
7460 sign_in_carry
= true;
7463 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7464 ======= it in sign-extension below. */
7466 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
7467 && src
.ibyte
> dest
.ibyte
)
7469 unsigned s0
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
7471 if (MAY_CLOBBER (s0
))
7472 avr_asm_len ("lsr %0", &all_regs_rtx
[s0
], plen
, 1);
7474 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7475 "lsr __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7477 msb_in_carry
= true;
7480 /* Step 3: Sign-extend or zero-extend the destination as needed.
7483 if (sign_extend
&& !sign_in_carry
)
7485 unsigned s0
= src
.regno_msb
;
7487 if (MAY_CLOBBER (s0
))
7488 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
7490 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7491 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7493 sign_in_carry
= true;
7496 gcc_assert (sign_in_carry
+ msb_in_carry
+ lsb_in_carry
<= 1);
7498 unsigned copies
= 0;
7499 rtx movw
= sign_extend
? NULL_RTX
: clrw
;
7501 for (d0
= dest
.regno_msb
- sign_bytes
+ 1; d0
<= dest
.regno_msb
; d0
++)
7503 if (AVR_HAVE_MOVW
&& movw
7504 && d0
% 2 == 0 && d0
+ 1 <= dest
.regno_msb
)
7506 xop
[2] = all_regs_rtx
[d0
];
7508 avr_asm_len ("movw %2,%3", xop
, plen
, 1);
7513 avr_asm_len (sign_extend
? "sbc %0,%0" : "clr %0",
7514 &all_regs_rtx
[d0
], plen
, 1);
7516 if (++copies
>= 2 && !movw
&& d0
% 2 == 1)
7517 movw
= all_regs_rtx
[d0
-1];
7522 /* Step 4: Right shift the destination. This might be needed for
7523 ====== conversions from unsigned to signed. */
7525 if (shift
== ASHIFTRT
)
7527 const char *code_ashiftrt
= "lsr %0";
7529 if (sign_extend
|| msb_in_carry
)
7530 code_ashiftrt
= "ror %0";
7532 if (src
.sbit
&& src
.ibyte
== dest
.ibyte
)
7533 code_ashiftrt
= "asr %0";
7535 for (d0
= dest
.regno_msb
- sign_bytes
;
7536 d0
>= dest
.regno
+ zero_bytes
- 1 && d0
>= dest
.regno
; d0
--)
7538 avr_asm_len (code_ashiftrt
, &all_regs_rtx
[d0
], plen
, 1);
7539 code_ashiftrt
= "ror %0";
7549 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
7550 XOP[2] is the rounding point, a CONST_INT. The function prints the
7551 instruction sequence if PLEN = NULL and computes the length in words
7552 of the sequence if PLEN != NULL. Most of this function deals with
7553 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
7556 avr_out_round (rtx insn ATTRIBUTE_UNUSED
, rtx
*xop
, int *plen
)
7558 enum machine_mode mode
= GET_MODE (xop
[0]);
7559 enum machine_mode imode
= int_mode_for_mode (mode
);
7560 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
7561 int fbit
= (int) GET_MODE_FBIT (mode
);
7562 double_int i_add
= double_int_zero
.set_bit (fbit
-1 - INTVAL (xop
[2]));
7563 wide_int wi_add
= wi::set_bit_in_zero (fbit
-1 - INTVAL (xop
[2]),
7564 GET_MODE_PRECISION (imode
));
7565 // Lengths of PLUS and AND parts.
7566 int len_add
= 0, *plen_add
= plen
? &len_add
: NULL
;
7567 int len_and
= 0, *plen_and
= plen
? &len_and
: NULL
;
7569 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
7570 // the saturated addition so that we can emit the "rjmp 1f" before the
7573 rtx xadd
= const_fixed_from_double_int (i_add
, mode
);
7574 rtx xpattern
, xsrc
, op
[4];
7576 xsrc
= SIGNED_FIXED_POINT_MODE_P (mode
)
7577 ? gen_rtx_SS_PLUS (mode
, xop
[1], xadd
)
7578 : gen_rtx_US_PLUS (mode
, xop
[1], xadd
);
7579 xpattern
= gen_rtx_SET (VOIDmode
, xop
[0], xsrc
);
7584 avr_out_plus (xpattern
, op
, plen_add
, NULL
, false /* Don't print "0:" */);
7586 avr_asm_len ("rjmp 1f" CR_TAB
7587 "0:", NULL
, plen_add
, 1);
7589 // Keep all bits from RP and higher: ... 2^(-RP)
7590 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
7591 // Rounding point ^^^^^^^
7592 // Added above ^^^^^^^^^
7593 rtx xreg
= simplify_gen_subreg (imode
, xop
[0], mode
, 0);
7594 rtx xmask
= immed_wide_int_const (-wi_add
- wi_add
, imode
);
7596 xpattern
= gen_rtx_SET (VOIDmode
, xreg
, gen_rtx_AND (imode
, xreg
, xmask
));
7601 op
[3] = gen_rtx_SCRATCH (QImode
);
7602 avr_out_bitop (xpattern
, op
, plen_and
);
7603 avr_asm_len ("1:", NULL
, plen
, 0);
7606 *plen
= len_add
+ len_and
;
7612 /* Create RTL split patterns for byte sized rotate expressions. This
7613 produces a series of move instructions and considers overlap situations.
7614 Overlapping non-HImode operands need a scratch register. */
7617 avr_rotate_bytes (rtx operands
[])
7620 enum machine_mode mode
= GET_MODE (operands
[0]);
7621 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
7622 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
7623 int num
= INTVAL (operands
[2]);
7624 rtx scratch
= operands
[3];
7625 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
7626 Word move if no scratch is needed, otherwise use size of scratch. */
7627 enum machine_mode move_mode
= QImode
;
7628 int move_size
, offset
, size
;
7632 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
7635 move_mode
= GET_MODE (scratch
);
7637 /* Force DI rotate to use QI moves since other DI moves are currently split
7638 into QI moves so forward propagation works better. */
7641 /* Make scratch smaller if needed. */
7642 if (SCRATCH
!= GET_CODE (scratch
)
7643 && HImode
== GET_MODE (scratch
)
7644 && QImode
== move_mode
)
7645 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
7647 move_size
= GET_MODE_SIZE (move_mode
);
7648 /* Number of bytes/words to rotate. */
7649 offset
= (num
>> 3) / move_size
;
7650 /* Number of moves needed. */
7651 size
= GET_MODE_SIZE (mode
) / move_size
;
7652 /* Himode byte swap is special case to avoid a scratch register. */
7653 if (mode
== HImode
&& same_reg
)
7655 /* HImode byte swap, using xor. This is as quick as using scratch. */
7657 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
7658 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
7659 if (!rtx_equal_p (dst
, src
))
7661 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
7662 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
7663 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
7668 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7669 /* Create linked list of moves to determine move order. */
7673 } move
[MAX_SIZE
+ 8];
7676 gcc_assert (size
<= MAX_SIZE
);
7677 /* Generate list of subreg moves. */
7678 for (i
= 0; i
< size
; i
++)
7681 int to
= (from
+ offset
) % size
;
7682 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
7683 mode
, from
* move_size
);
7684 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
7685 mode
, to
* move_size
);
7688 /* Mark dependence where a dst of one move is the src of another move.
7689 The first move is a conflict as it must wait until second is
7690 performed. We ignore moves to self - we catch this later. */
7692 for (i
= 0; i
< size
; i
++)
7693 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
7694 for (j
= 0; j
< size
; j
++)
7695 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
7697 /* The dst of move i is the src of move j. */
7704 /* Go through move list and perform non-conflicting moves. As each
7705 non-overlapping move is made, it may remove other conflicts
7706 so the process is repeated until no conflicts remain. */
7711 /* Emit move where dst is not also a src or we have used that
7713 for (i
= 0; i
< size
; i
++)
7714 if (move
[i
].src
!= NULL_RTX
)
7716 if (move
[i
].links
== -1
7717 || move
[move
[i
].links
].src
== NULL_RTX
)
7720 /* Ignore NOP moves to self. */
7721 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
7722 emit_move_insn (move
[i
].dst
, move
[i
].src
);
7724 /* Remove conflict from list. */
7725 move
[i
].src
= NULL_RTX
;
7731 /* Check for deadlock. This is when no moves occurred and we have
7732 at least one blocked move. */
7733 if (moves
== 0 && blocked
!= -1)
7735 /* Need to use scratch register to break deadlock.
7736 Add move to put dst of blocked move into scratch.
7737 When this move occurs, it will break chain deadlock.
7738 The scratch register is substituted for real move. */
7740 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
7742 move
[size
].src
= move
[blocked
].dst
;
7743 move
[size
].dst
= scratch
;
7744 /* Scratch move is never blocked. */
7745 move
[size
].links
= -1;
7746 /* Make sure we have valid link. */
7747 gcc_assert (move
[blocked
].links
!= -1);
7748 /* Replace src of blocking move with scratch reg. */
7749 move
[move
[blocked
].links
].src
= scratch
;
7750 /* Make dependent on scratch move occurring. */
7751 move
[blocked
].links
= size
;
7755 while (blocked
!= -1);
7761 /* Worker function for `ADJUST_INSN_LENGTH'. */
7762 /* Modifies the length assigned to instruction INSN
7763 LEN is the initially computed length of the insn. */
7766 avr_adjust_insn_length (rtx insn
, int len
)
7768 rtx
*op
= recog_data
.operand
;
7769 enum attr_adjust_len adjust_len
;
7771 /* Some complex insns don't need length adjustment and therefore
7772 the length need not/must not be adjusted for these insns.
7773 It is easier to state this in an insn attribute "adjust_len" than
7774 to clutter up code here... */
7776 if (-1 == recog_memoized (insn
))
7781 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7783 adjust_len
= get_attr_adjust_len (insn
);
7785 if (adjust_len
== ADJUST_LEN_NO
)
7787 /* Nothing to adjust: The length from attribute "length" is fine.
7788 This is the default. */
7793 /* Extract insn's operands. */
7795 extract_constrain_insn_cached (insn
);
7797 /* Dispatch to right function. */
7801 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
7802 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
7803 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
7805 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
7807 case ADJUST_LEN_PLUS
: avr_out_plus (insn
, op
, &len
); break;
7808 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
7810 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
7811 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
7812 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
7813 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
7814 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
7815 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
7816 case ADJUST_LEN_LPM
: avr_out_lpm (insn
, op
, &len
); break;
7818 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
7819 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
7820 case ADJUST_LEN_ROUND
: avr_out_round (insn
, op
, &len
); break;
7822 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
7823 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
7824 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
7825 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
7826 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
7828 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
7829 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
7830 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
7832 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
7833 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
7834 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
7836 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
7837 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
7838 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
7840 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
7841 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
7842 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
7844 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
7846 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
7855 /* Return nonzero if register REG dead after INSN. */
7858 reg_unused_after (rtx insn
, rtx reg
)
7860 return (dead_or_set_p (insn
, reg
)
7861 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
7864 /* Return nonzero if REG is not used after INSN.
7865 We assume REG is a reload reg, and therefore does
7866 not live past labels. It may live past calls or jumps though. */
7869 _reg_unused_after (rtx insn
, rtx reg
)
7874 /* If the reg is set by this instruction, then it is safe for our
7875 case. Disregard the case where this is a store to memory, since
7876 we are checking a register used in the store address. */
7877 set
= single_set (insn
);
7878 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
7879 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7882 while ((insn
= NEXT_INSN (insn
)))
7885 code
= GET_CODE (insn
);
7888 /* If this is a label that existed before reload, then the register
7889 if dead here. However, if this is a label added by reorg, then
7890 the register may still be live here. We can't tell the difference,
7891 so we just ignore labels completely. */
7892 if (code
== CODE_LABEL
)
7900 if (code
== JUMP_INSN
)
7903 /* If this is a sequence, we must handle them all at once.
7904 We could have for instance a call that sets the target register,
7905 and an insn in a delay slot that uses the register. In this case,
7906 we must return 0. */
7907 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
7912 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
7914 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
7915 rtx set
= single_set (this_insn
);
7917 if (CALL_P (this_insn
))
7919 else if (JUMP_P (this_insn
))
7921 if (INSN_ANNULLED_BRANCH_P (this_insn
))
7926 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7928 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7930 if (GET_CODE (SET_DEST (set
)) != MEM
)
7936 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
7941 else if (code
== JUMP_INSN
)
7945 if (code
== CALL_INSN
)
7948 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
7949 if (GET_CODE (XEXP (tem
, 0)) == USE
7950 && REG_P (XEXP (XEXP (tem
, 0), 0))
7951 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
7953 if (call_used_regs
[REGNO (reg
)])
7957 set
= single_set (insn
);
7959 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7961 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7962 return GET_CODE (SET_DEST (set
)) != MEM
;
7963 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
7970 /* Implement `TARGET_ASM_INTEGER'. */
7971 /* Target hook for assembling integer objects. The AVR version needs
7972 special handling for references to certain labels. */
7975 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
7977 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
7978 && text_segment_operand (x
, VOIDmode
))
7980 fputs ("\t.word\tgs(", asm_out_file
);
7981 output_addr_const (asm_out_file
, x
);
7982 fputs (")\n", asm_out_file
);
7986 else if (GET_MODE (x
) == PSImode
)
7988 /* This needs binutils 2.23+, see PR binutils/13503 */
7990 fputs ("\t.byte\tlo8(", asm_out_file
);
7991 output_addr_const (asm_out_file
, x
);
7992 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7994 fputs ("\t.byte\thi8(", asm_out_file
);
7995 output_addr_const (asm_out_file
, x
);
7996 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7998 fputs ("\t.byte\thh8(", asm_out_file
);
7999 output_addr_const (asm_out_file
, x
);
8000 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8004 else if (CONST_FIXED_P (x
))
8008 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8010 for (n
= 0; n
< size
; n
++)
8012 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
8013 default_assemble_integer (xn
, 1, aligned_p
);
8019 return default_assemble_integer (x
, size
, aligned_p
);
8023 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8024 /* Return value is nonzero if pseudos that have been
8025 assigned to registers of class CLASS would likely be spilled
8026 because registers of CLASS are needed for spill registers. */
8029 avr_class_likely_spilled_p (reg_class_t c
)
8031 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
8035 /* Valid attributes:
8036 progmem - Put data to program memory.
8037 signal - Make a function to be hardware interrupt.
8038 After function prologue interrupts remain disabled.
8039 interrupt - Make a function to be hardware interrupt. Before function
8040 prologue interrupts are enabled by means of SEI.
8041 naked - Don't generate function prologue/epilogue and RET
8044 /* Handle a "progmem" attribute; arguments as in
8045 struct attribute_spec.handler. */
8048 avr_handle_progmem_attribute (tree
*node
, tree name
,
8049 tree args ATTRIBUTE_UNUSED
,
8050 int flags ATTRIBUTE_UNUSED
,
8055 if (TREE_CODE (*node
) == TYPE_DECL
)
8057 /* This is really a decl attribute, not a type attribute,
8058 but try to handle it for GCC 3.0 backwards compatibility. */
8060 tree type
= TREE_TYPE (*node
);
8061 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
8062 tree newtype
= build_type_attribute_variant (type
, attr
);
8064 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
8065 TREE_TYPE (*node
) = newtype
;
8066 *no_add_attrs
= true;
8068 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
8070 *no_add_attrs
= false;
8074 warning (OPT_Wattributes
, "%qE attribute ignored",
8076 *no_add_attrs
= true;
8083 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8084 struct attribute_spec.handler. */
8087 avr_handle_fndecl_attribute (tree
*node
, tree name
,
8088 tree args ATTRIBUTE_UNUSED
,
8089 int flags ATTRIBUTE_UNUSED
,
8092 if (TREE_CODE (*node
) != FUNCTION_DECL
)
8094 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8096 *no_add_attrs
= true;
8103 avr_handle_fntype_attribute (tree
*node
, tree name
,
8104 tree args ATTRIBUTE_UNUSED
,
8105 int flags ATTRIBUTE_UNUSED
,
8108 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
8110 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8112 *no_add_attrs
= true;
8119 /* AVR attributes. */
8120 static const struct attribute_spec
8121 avr_attribute_table
[] =
8123 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8124 affects_type_identity } */
8125 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
8127 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
8129 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
8131 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8133 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8135 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8137 { NULL
, 0, 0, false, false, false, NULL
, false }
8141 /* Look if DECL shall be placed in program memory space by
8142 means of attribute `progmem' or some address-space qualifier.
8143 Return non-zero if DECL is data that must end up in Flash and
8144 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8146 Return 2 if DECL is located in 24-bit flash address-space
8147 Return 1 if DECL is located in 16-bit flash address-space
8148 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8149 Return 0 otherwise */
8152 avr_progmem_p (tree decl
, tree attributes
)
8156 if (TREE_CODE (decl
) != VAR_DECL
)
8159 if (avr_decl_memx_p (decl
))
8162 if (avr_decl_flash_p (decl
))
8166 != lookup_attribute ("progmem", attributes
))
8173 while (TREE_CODE (a
) == ARRAY_TYPE
);
8175 if (a
== error_mark_node
)
8178 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
8185 /* Scan type TYP for pointer references to address space ASn.
8186 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8187 the AS are also declared to be CONST.
8188 Otherwise, return the respective address space, i.e. a value != 0. */
8191 avr_nonconst_pointer_addrspace (tree typ
)
8193 while (ARRAY_TYPE
== TREE_CODE (typ
))
8194 typ
= TREE_TYPE (typ
);
8196 if (POINTER_TYPE_P (typ
))
8199 tree target
= TREE_TYPE (typ
);
8201 /* Pointer to function: Test the function's return type. */
8203 if (FUNCTION_TYPE
== TREE_CODE (target
))
8204 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
8206 /* "Ordinary" pointers... */
8208 while (TREE_CODE (target
) == ARRAY_TYPE
)
8209 target
= TREE_TYPE (target
);
8211 /* Pointers to non-generic address space must be const.
8212 Refuse address spaces outside the device's flash. */
8214 as
= TYPE_ADDR_SPACE (target
);
8216 if (!ADDR_SPACE_GENERIC_P (as
)
8217 && (!TYPE_READONLY (target
)
8218 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
8223 /* Scan pointer's target type. */
8225 return avr_nonconst_pointer_addrspace (target
);
8228 return ADDR_SPACE_GENERIC
;
8232 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8233 go along with CONST qualifier. Writing to these address spaces should
8234 be detected and complained about as early as possible. */
8237 avr_pgm_check_var_decl (tree node
)
8239 const char *reason
= NULL
;
8241 addr_space_t as
= ADDR_SPACE_GENERIC
;
8243 gcc_assert (as
== 0);
8245 if (avr_log
.progmem
)
8246 avr_edump ("%?: %t\n", node
);
8248 switch (TREE_CODE (node
))
8254 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8255 reason
= "variable";
8259 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8260 reason
= "function parameter";
8264 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8265 reason
= "structure field";
8269 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
8271 reason
= "return type of function";
8275 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
8282 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
8285 error ("%qT uses address space %qs beyond flash of %qs",
8286 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
8288 error ("%s %q+D uses address space %qs beyond flash of %qs",
8289 reason
, node
, avr_addrspace
[as
].name
,
8290 avr_current_device
->name
);
8295 error ("pointer targeting address space %qs must be const in %qT",
8296 avr_addrspace
[as
].name
, node
);
8298 error ("pointer targeting address space %qs must be const"
8300 avr_addrspace
[as
].name
, reason
, node
);
8304 return reason
== NULL
;
8308 /* Add the section attribute if the variable is in progmem. */
8311 avr_insert_attributes (tree node
, tree
*attributes
)
8313 avr_pgm_check_var_decl (node
);
8315 if (TREE_CODE (node
) == VAR_DECL
8316 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
8317 && avr_progmem_p (node
, *attributes
))
8322 /* For C++, we have to peel arrays in order to get correct
8323 determination of readonlyness. */
8326 node0
= TREE_TYPE (node0
);
8327 while (TREE_CODE (node0
) == ARRAY_TYPE
);
8329 if (error_mark_node
== node0
)
8332 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
8334 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
8336 error ("variable %q+D located in address space %qs"
8337 " beyond flash of %qs",
8338 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
8341 if (!TYPE_READONLY (node0
)
8342 && !TREE_READONLY (node
))
8344 const char *reason
= "__attribute__((progmem))";
8346 if (!ADDR_SPACE_GENERIC_P (as
))
8347 reason
= avr_addrspace
[as
].name
;
8349 if (avr_log
.progmem
)
8350 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
8352 error ("variable %q+D must be const in order to be put into"
8353 " read-only section by means of %qs", node
, reason
);
8359 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8360 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8361 /* Track need of __do_clear_bss. */
8364 avr_asm_output_aligned_decl_common (FILE * stream
,
8365 const_tree decl ATTRIBUTE_UNUSED
,
8367 unsigned HOST_WIDE_INT size
,
8368 unsigned int align
, bool local_p
)
8370 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8371 There is no need to trigger __do_clear_bss code for them. */
8373 if (!STR_PREFIX_P (name
, "__gnu_lto"))
8374 avr_need_clear_bss_p
= true;
8377 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
8379 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
8383 /* Unnamed section callback for data_section
8384 to track need of __do_copy_data. */
8387 avr_output_data_section_asm_op (const void *data
)
8389 avr_need_copy_data_p
= true;
8391 /* Dispatch to default. */
8392 output_section_asm_op (data
);
8396 /* Unnamed section callback for bss_section
8397 to track need of __do_clear_bss. */
8400 avr_output_bss_section_asm_op (const void *data
)
8402 avr_need_clear_bss_p
= true;
8404 /* Dispatch to default. */
8405 output_section_asm_op (data
);
8409 /* Unnamed section callback for progmem*.data sections. */
8412 avr_output_progmem_section_asm_op (const void *data
)
8414 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
8415 (const char*) data
);
8419 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8422 avr_asm_init_sections (void)
8424 /* Set up a section for jump tables. Alignment is handled by
8425 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8427 if (AVR_HAVE_JMP_CALL
)
8429 progmem_swtable_section
8430 = get_unnamed_section (0, output_section_asm_op
,
8431 "\t.section\t.progmem.gcc_sw_table"
8432 ",\"a\",@progbits");
8436 progmem_swtable_section
8437 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
8438 "\t.section\t.progmem.gcc_sw_table"
8439 ",\"ax\",@progbits");
8442 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8443 resp. `avr_need_copy_data_p'. */
8445 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
8446 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
8447 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
8451 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8454 avr_asm_function_rodata_section (tree decl
)
8456 /* If a function is unused and optimized out by -ffunction-sections
8457 and --gc-sections, ensure that the same will happen for its jump
8458 tables by putting them into individual sections. */
8463 /* Get the frodata section from the default function in varasm.c
8464 but treat function-associated data-like jump tables as code
8465 rather than as user defined data. AVR has no constant pools. */
8467 int fdata
= flag_data_sections
;
8469 flag_data_sections
= flag_function_sections
;
8470 frodata
= default_function_rodata_section (decl
);
8471 flag_data_sections
= fdata
;
8472 flags
= frodata
->common
.flags
;
8475 if (frodata
!= readonly_data_section
8476 && flags
& SECTION_NAMED
)
8478 /* Adjust section flags and replace section name prefix. */
8482 static const char* const prefix
[] =
8484 ".rodata", ".progmem.gcc_sw_table",
8485 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8488 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
8490 const char * old_prefix
= prefix
[i
];
8491 const char * new_prefix
= prefix
[i
+1];
8492 const char * name
= frodata
->named
.name
;
8494 if (STR_PREFIX_P (name
, old_prefix
))
8496 const char *rname
= ACONCAT ((new_prefix
,
8497 name
+ strlen (old_prefix
), NULL
));
8498 flags
&= ~SECTION_CODE
;
8499 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
8501 return get_section (rname
, flags
, frodata
->named
.decl
);
8506 return progmem_swtable_section
;
8510 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8511 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8514 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
8516 if (flags
& AVR_SECTION_PROGMEM
)
8518 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
8519 const char *old_prefix
= ".rodata";
8520 const char *new_prefix
= avr_addrspace
[as
].section_name
;
8522 if (STR_PREFIX_P (name
, old_prefix
))
8524 const char *sname
= ACONCAT ((new_prefix
,
8525 name
+ strlen (old_prefix
), NULL
));
8526 default_elf_asm_named_section (sname
, flags
, decl
);
8530 default_elf_asm_named_section (new_prefix
, flags
, decl
);
8534 if (!avr_need_copy_data_p
)
8535 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
8536 || STR_PREFIX_P (name
, ".rodata")
8537 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
8539 if (!avr_need_clear_bss_p
)
8540 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
8542 default_elf_asm_named_section (name
, flags
, decl
);
8546 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8549 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
8551 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
8553 if (STR_PREFIX_P (name
, ".noinit"))
8555 if (decl
&& TREE_CODE (decl
) == VAR_DECL
8556 && DECL_INITIAL (decl
) == NULL_TREE
)
8557 flags
|= SECTION_BSS
; /* @nobits */
8559 warning (0, "only uninitialized variables can be placed in the "
8563 if (decl
&& DECL_P (decl
)
8564 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8566 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8568 /* Attribute progmem puts data in generic address space.
8569 Set section flags as if it was in __flash to get the right
8570 section prefix in the remainder. */
8572 if (ADDR_SPACE_GENERIC_P (as
))
8573 as
= ADDR_SPACE_FLASH
;
8575 flags
|= as
* SECTION_MACH_DEP
;
8576 flags
&= ~SECTION_WRITE
;
8577 flags
&= ~SECTION_BSS
;
8584 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8587 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
8589 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8590 readily available, see PR34734. So we postpone the warning
8591 about uninitialized data in program memory section until here. */
8594 && decl
&& DECL_P (decl
)
8595 && NULL_TREE
== DECL_INITIAL (decl
)
8596 && !DECL_EXTERNAL (decl
)
8597 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8599 warning (OPT_Wuninitialized
,
8600 "uninitialized variable %q+D put into "
8601 "program memory area", decl
);
8604 default_encode_section_info (decl
, rtl
, new_decl_p
);
8606 if (decl
&& DECL_P (decl
)
8607 && TREE_CODE (decl
) != FUNCTION_DECL
8609 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
8611 rtx sym
= XEXP (rtl
, 0);
8612 tree type
= TREE_TYPE (decl
);
8613 if (type
== error_mark_node
)
8615 addr_space_t as
= TYPE_ADDR_SPACE (type
);
8617 /* PSTR strings are in generic space but located in flash:
8618 patch address space. */
8620 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8621 as
= ADDR_SPACE_FLASH
;
8623 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
8628 /* Implement `TARGET_ASM_SELECT_SECTION' */
8631 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
8633 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
8635 if (decl
&& DECL_P (decl
)
8636 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8638 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8640 /* __progmem__ goes in generic space but shall be allocated to
8643 if (ADDR_SPACE_GENERIC_P (as
))
8644 as
= ADDR_SPACE_FLASH
;
8646 if (sect
->common
.flags
& SECTION_NAMED
)
8648 const char * name
= sect
->named
.name
;
8649 const char * old_prefix
= ".rodata";
8650 const char * new_prefix
= avr_addrspace
[as
].section_name
;
8652 if (STR_PREFIX_P (name
, old_prefix
))
8654 const char *sname
= ACONCAT ((new_prefix
,
8655 name
+ strlen (old_prefix
), NULL
));
8656 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
8660 if (!progmem_section
[as
])
8663 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
8664 avr_addrspace
[as
].section_name
);
8667 return progmem_section
[as
];
8673 /* Implement `TARGET_ASM_FILE_START'. */
8674 /* Outputs some text at the start of each assembler file. */
8677 avr_file_start (void)
8679 int sfr_offset
= avr_current_arch
->sfr_offset
;
8681 if (avr_current_arch
->asm_only
)
8682 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
8684 default_file_start ();
8686 /* Print I/O addresses of some SFRs used with IN and OUT. */
8689 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
8691 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
8692 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
8694 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
8696 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
8698 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
8700 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
8702 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
8703 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
8704 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
8708 /* Implement `TARGET_ASM_FILE_END'. */
8709 /* Outputs to the stdio stream FILE some
8710 appropriate text to go at the end of an assembler file. */
8715 /* Output these only if there is anything in the
8716 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8717 input section(s) - some code size can be saved by not
8718 linking in the initialization code from libgcc if resp.
8719 sections are empty, see PR18145. */
8721 if (avr_need_copy_data_p
)
8722 fputs (".global __do_copy_data\n", asm_out_file
);
8724 if (avr_need_clear_bss_p
)
8725 fputs (".global __do_clear_bss\n", asm_out_file
);
8729 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8730 /* Choose the order in which to allocate hard registers for
8731 pseudo-registers local to a basic block.
8733 Store the desired register order in the array `reg_alloc_order'.
8734 Element 0 should be the register to allocate first; element 1, the
8735 next register; and so on. */
8738 avr_adjust_reg_alloc_order (void)
8741 static const int order_0
[] =
8744 18, 19, 20, 21, 22, 23,
8747 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8751 static const int order_1
[] =
8753 18, 19, 20, 21, 22, 23, 24, 25,
8756 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8760 static const int order_2
[] =
8762 25, 24, 23, 22, 21, 20, 19, 18,
8765 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8770 const int *order
= (TARGET_ORDER_1
? order_1
:
8771 TARGET_ORDER_2
? order_2
:
8773 for (i
= 0; i
< ARRAY_SIZE (order_0
); ++i
)
8774 reg_alloc_order
[i
] = order
[i
];
8778 /* Implement `TARGET_REGISTER_MOVE_COST' */
8781 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
8782 reg_class_t from
, reg_class_t to
)
8784 return (from
== STACK_REG
? 6
8785 : to
== STACK_REG
? 12
8790 /* Implement `TARGET_MEMORY_MOVE_COST' */
8793 avr_memory_move_cost (enum machine_mode mode
,
8794 reg_class_t rclass ATTRIBUTE_UNUSED
,
8795 bool in ATTRIBUTE_UNUSED
)
8797 return (mode
== QImode
? 2
8798 : mode
== HImode
? 4
8799 : mode
== SImode
? 8
8800 : mode
== SFmode
? 8
8805 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8806 cost of an RTX operand given its context. X is the rtx of the
8807 operand, MODE is its mode, and OUTER is the rtx_code of this
8808 operand's parent operator. */
8811 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
8812 int opno
, bool speed
)
8814 enum rtx_code code
= GET_CODE (x
);
8826 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8833 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
8837 /* Worker function for AVR backend's rtx_cost function.
8838 X is rtx expression whose cost is to be calculated.
8839 Return true if the complete cost has been computed.
8840 Return false if subexpressions should be scanned.
8841 In either case, *TOTAL contains the cost result. */
8844 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
8845 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
8847 enum rtx_code code
= (enum rtx_code
) codearg
;
8848 enum machine_mode mode
= GET_MODE (x
);
8859 /* Immediate constants are as cheap as registers. */
8864 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8872 *total
= COSTS_N_INSNS (1);
8878 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
8884 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8892 *total
= COSTS_N_INSNS (1);
8898 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8902 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8903 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8907 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
8908 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8909 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8913 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
8914 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8915 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8923 && MULT
== GET_CODE (XEXP (x
, 0))
8924 && register_operand (XEXP (x
, 1), QImode
))
8927 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8928 /* multiply-add with constant: will be split and load constant. */
8929 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8930 *total
= COSTS_N_INSNS (1) + *total
;
8933 *total
= COSTS_N_INSNS (1);
8934 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8935 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8940 && (MULT
== GET_CODE (XEXP (x
, 0))
8941 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
8942 && register_operand (XEXP (x
, 1), HImode
)
8943 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8944 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
8947 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8948 /* multiply-add with constant: will be split and load constant. */
8949 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8950 *total
= COSTS_N_INSNS (1) + *total
;
8953 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8955 *total
= COSTS_N_INSNS (2);
8956 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8959 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8960 *total
= COSTS_N_INSNS (1);
8962 *total
= COSTS_N_INSNS (2);
8966 if (!CONST_INT_P (XEXP (x
, 1)))
8968 *total
= COSTS_N_INSNS (3);
8969 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8972 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8973 *total
= COSTS_N_INSNS (2);
8975 *total
= COSTS_N_INSNS (3);
8979 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8981 *total
= COSTS_N_INSNS (4);
8982 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8985 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8986 *total
= COSTS_N_INSNS (1);
8988 *total
= COSTS_N_INSNS (4);
8994 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9000 && register_operand (XEXP (x
, 0), QImode
)
9001 && MULT
== GET_CODE (XEXP (x
, 1)))
9004 *total
= COSTS_N_INSNS (speed
? 4 : 3);
9005 /* multiply-sub with constant: will be split and load constant. */
9006 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
9007 *total
= COSTS_N_INSNS (1) + *total
;
9012 && register_operand (XEXP (x
, 0), HImode
)
9013 && (MULT
== GET_CODE (XEXP (x
, 1))
9014 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
9015 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
9016 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
9019 *total
= COSTS_N_INSNS (speed
? 5 : 4);
9020 /* multiply-sub with constant: will be split and load constant. */
9021 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
9022 *total
= COSTS_N_INSNS (1) + *total
;
9028 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9029 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9030 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9031 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9035 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9036 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9037 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9045 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
9047 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9055 rtx op0
= XEXP (x
, 0);
9056 rtx op1
= XEXP (x
, 1);
9057 enum rtx_code code0
= GET_CODE (op0
);
9058 enum rtx_code code1
= GET_CODE (op1
);
9059 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
9060 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
9063 && (u8_operand (op1
, HImode
)
9064 || s8_operand (op1
, HImode
)))
9066 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
9070 && register_operand (op1
, HImode
))
9072 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9075 else if (ex0
|| ex1
)
9077 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
9080 else if (register_operand (op0
, HImode
)
9081 && (u8_operand (op1
, HImode
)
9082 || s8_operand (op1
, HImode
)))
9084 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
9088 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
9091 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9098 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9108 /* Add some additional costs besides CALL like moves etc. */
9110 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
9114 /* Just a rough estimate. Even with -O2 we don't want bulky
9115 code expanded inline. */
9117 *total
= COSTS_N_INSNS (25);
9123 *total
= COSTS_N_INSNS (300);
9125 /* Add some additional costs besides CALL like moves etc. */
9126 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
9134 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9135 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9143 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9145 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
9146 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9147 /* For div/mod with const-int divisor we have at least the cost of
9148 loading the divisor. */
9149 if (CONST_INT_P (XEXP (x
, 1)))
9150 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9151 /* Add some overall penaly for clobbering and moving around registers */
9152 *total
+= COSTS_N_INSNS (2);
9159 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
9160 *total
= COSTS_N_INSNS (1);
9165 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
9166 *total
= COSTS_N_INSNS (3);
9171 if (CONST_INT_P (XEXP (x
, 1)))
9172 switch (INTVAL (XEXP (x
, 1)))
9176 *total
= COSTS_N_INSNS (5);
9179 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
9187 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9194 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9196 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9197 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9202 val
= INTVAL (XEXP (x
, 1));
9204 *total
= COSTS_N_INSNS (3);
9205 else if (val
>= 0 && val
<= 7)
9206 *total
= COSTS_N_INSNS (val
);
9208 *total
= COSTS_N_INSNS (1);
9215 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
9216 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
9217 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
9219 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
9224 if (const1_rtx
== (XEXP (x
, 1))
9225 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
9227 *total
= COSTS_N_INSNS (2);
9231 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9233 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9234 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9238 switch (INTVAL (XEXP (x
, 1)))
9245 *total
= COSTS_N_INSNS (2);
9248 *total
= COSTS_N_INSNS (3);
9254 *total
= COSTS_N_INSNS (4);
9259 *total
= COSTS_N_INSNS (5);
9262 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9265 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9268 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
9271 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9272 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9278 if (!CONST_INT_P (XEXP (x
, 1)))
9280 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9283 switch (INTVAL (XEXP (x
, 1)))
9291 *total
= COSTS_N_INSNS (3);
9294 *total
= COSTS_N_INSNS (5);
9297 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9303 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9305 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9306 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9310 switch (INTVAL (XEXP (x
, 1)))
9316 *total
= COSTS_N_INSNS (3);
9321 *total
= COSTS_N_INSNS (4);
9324 *total
= COSTS_N_INSNS (6);
9327 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9330 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9331 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9339 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9346 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9348 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9349 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9354 val
= INTVAL (XEXP (x
, 1));
9356 *total
= COSTS_N_INSNS (4);
9358 *total
= COSTS_N_INSNS (2);
9359 else if (val
>= 0 && val
<= 7)
9360 *total
= COSTS_N_INSNS (val
);
9362 *total
= COSTS_N_INSNS (1);
9367 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9369 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9370 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9374 switch (INTVAL (XEXP (x
, 1)))
9380 *total
= COSTS_N_INSNS (2);
9383 *total
= COSTS_N_INSNS (3);
9389 *total
= COSTS_N_INSNS (4);
9393 *total
= COSTS_N_INSNS (5);
9396 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9399 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9403 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9406 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9407 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9413 if (!CONST_INT_P (XEXP (x
, 1)))
9415 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9418 switch (INTVAL (XEXP (x
, 1)))
9424 *total
= COSTS_N_INSNS (3);
9428 *total
= COSTS_N_INSNS (5);
9431 *total
= COSTS_N_INSNS (4);
9434 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9440 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9442 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9443 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9447 switch (INTVAL (XEXP (x
, 1)))
9453 *total
= COSTS_N_INSNS (4);
9458 *total
= COSTS_N_INSNS (6);
9461 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9464 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
9467 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9468 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9476 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9483 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9485 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9486 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9491 val
= INTVAL (XEXP (x
, 1));
9493 *total
= COSTS_N_INSNS (3);
9494 else if (val
>= 0 && val
<= 7)
9495 *total
= COSTS_N_INSNS (val
);
9497 *total
= COSTS_N_INSNS (1);
9502 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9504 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9505 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9509 switch (INTVAL (XEXP (x
, 1)))
9516 *total
= COSTS_N_INSNS (2);
9519 *total
= COSTS_N_INSNS (3);
9524 *total
= COSTS_N_INSNS (4);
9528 *total
= COSTS_N_INSNS (5);
9534 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9537 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9541 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9544 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9545 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9551 if (!CONST_INT_P (XEXP (x
, 1)))
9553 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9556 switch (INTVAL (XEXP (x
, 1)))
9564 *total
= COSTS_N_INSNS (3);
9567 *total
= COSTS_N_INSNS (5);
9570 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9576 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9578 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9579 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9583 switch (INTVAL (XEXP (x
, 1)))
9589 *total
= COSTS_N_INSNS (4);
9592 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9597 *total
= COSTS_N_INSNS (4);
9600 *total
= COSTS_N_INSNS (6);
9603 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9604 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9612 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9616 switch (GET_MODE (XEXP (x
, 0)))
9619 *total
= COSTS_N_INSNS (1);
9620 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9621 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9625 *total
= COSTS_N_INSNS (2);
9626 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9627 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9628 else if (INTVAL (XEXP (x
, 1)) != 0)
9629 *total
+= COSTS_N_INSNS (1);
9633 *total
= COSTS_N_INSNS (3);
9634 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
9635 *total
+= COSTS_N_INSNS (2);
9639 *total
= COSTS_N_INSNS (4);
9640 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9641 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9642 else if (INTVAL (XEXP (x
, 1)) != 0)
9643 *total
+= COSTS_N_INSNS (3);
9649 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9654 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
9655 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
9656 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9658 if (QImode
== mode
|| HImode
== mode
)
9660 *total
= COSTS_N_INSNS (2);
9673 /* Implement `TARGET_RTX_COSTS'. */
9676 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
9677 int opno
, int *total
, bool speed
)
9679 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
9680 opno
, total
, speed
);
9682 if (avr_log
.rtx_costs
)
9684 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9685 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
9692 /* Implement `TARGET_ADDRESS_COST'. */
9695 avr_address_cost (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
,
9696 addr_space_t as ATTRIBUTE_UNUSED
,
9697 bool speed ATTRIBUTE_UNUSED
)
9701 if (GET_CODE (x
) == PLUS
9702 && CONST_INT_P (XEXP (x
, 1))
9703 && (REG_P (XEXP (x
, 0))
9704 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
9706 if (INTVAL (XEXP (x
, 1)) >= 61)
9709 else if (CONSTANT_ADDRESS_P (x
))
9712 && io_address_operand (x
, QImode
))
9716 if (avr_log
.address_cost
)
9717 avr_edump ("\n%?: %d = %r\n", cost
, x
);
9722 /* Test for extra memory constraint 'Q'.
9723 It's a memory address based on Y or Z pointer with valid displacement. */
9726 extra_constraint_Q (rtx x
)
9730 if (GET_CODE (XEXP (x
,0)) == PLUS
9731 && REG_P (XEXP (XEXP (x
,0), 0))
9732 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
9733 && (INTVAL (XEXP (XEXP (x
,0), 1))
9734 <= MAX_LD_OFFSET (GET_MODE (x
))))
9736 rtx xx
= XEXP (XEXP (x
,0), 0);
9737 int regno
= REGNO (xx
);
9739 ok
= (/* allocate pseudos */
9740 regno
>= FIRST_PSEUDO_REGISTER
9741 /* strictly check */
9742 || regno
== REG_Z
|| regno
== REG_Y
9743 /* XXX frame & arg pointer checks */
9744 || xx
== frame_pointer_rtx
9745 || xx
== arg_pointer_rtx
);
9747 if (avr_log
.constraints
)
9748 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9749 ok
, reload_completed
, reload_in_progress
, x
);
9755 /* Convert condition code CONDITION to the valid AVR condition code. */
9758 avr_normalize_condition (RTX_CODE condition
)
9775 /* Helper function for `avr_reorg'. */
9778 avr_compare_pattern (rtx insn
)
9780 rtx pattern
= single_set (insn
);
9783 && NONJUMP_INSN_P (insn
)
9784 && SET_DEST (pattern
) == cc0_rtx
9785 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
9787 enum machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
9788 enum machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
9790 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9791 They must not be swapped, thus skip them. */
9793 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
9794 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
9801 /* Helper function for `avr_reorg'. */
9803 /* Expansion of switch/case decision trees leads to code like
9805 cc0 = compare (Reg, Num)
9809 cc0 = compare (Reg, Num)
9813 The second comparison is superfluous and can be deleted.
9814 The second jump condition can be transformed from a
9815 "difficult" one to a "simple" one because "cc0 > 0" and
9816 "cc0 >= 0" will have the same effect here.
9818 This function relies on the way switch/case is being expaned
9819 as binary decision tree. For example code see PR 49903.
9821 Return TRUE if optimization performed.
9822 Return FALSE if nothing changed.
9824 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9826 We don't want to do this in text peephole because it is
9827 tedious to work out jump offsets there and the second comparison
9828 might have been transormed by `avr_reorg'.
9830 RTL peephole won't do because peephole2 does not scan across
9834 avr_reorg_remove_redundant_compare (rtx insn1
)
9836 rtx comp1
, ifelse1
, xcond1
, branch1
;
9837 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
9839 rtx jump
, target
, cond
;
9841 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9843 branch1
= next_nonnote_nondebug_insn (insn1
);
9844 if (!branch1
|| !JUMP_P (branch1
))
9847 insn2
= next_nonnote_nondebug_insn (branch1
);
9848 if (!insn2
|| !avr_compare_pattern (insn2
))
9851 branch2
= next_nonnote_nondebug_insn (insn2
);
9852 if (!branch2
|| !JUMP_P (branch2
))
9855 comp1
= avr_compare_pattern (insn1
);
9856 comp2
= avr_compare_pattern (insn2
);
9857 xcond1
= single_set (branch1
);
9858 xcond2
= single_set (branch2
);
9860 if (!comp1
|| !comp2
9861 || !rtx_equal_p (comp1
, comp2
)
9862 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
9863 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
9864 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
9865 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
9870 comp1
= SET_SRC (comp1
);
9871 ifelse1
= SET_SRC (xcond1
);
9872 ifelse2
= SET_SRC (xcond2
);
9874 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9876 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
9877 || !REG_P (XEXP (comp1
, 0))
9878 || !CONST_INT_P (XEXP (comp1
, 1))
9879 || XEXP (ifelse1
, 2) != pc_rtx
9880 || XEXP (ifelse2
, 2) != pc_rtx
9881 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
9882 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
9883 || !COMPARISON_P (XEXP (ifelse2
, 0))
9884 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
9885 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
9886 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
9887 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
9892 /* We filtered the insn sequence to look like
9898 (if_then_else (eq (cc0)
9907 (if_then_else (CODE (cc0)
9913 code
= GET_CODE (XEXP (ifelse2
, 0));
9915 /* Map GT/GTU to GE/GEU which is easier for AVR.
9916 The first two instructions compare/branch on EQ
9917 so we may replace the difficult
9919 if (x == VAL) goto L1;
9920 if (x > VAL) goto L2;
9924 if (x == VAL) goto L1;
9925 if (x >= VAL) goto L2;
9927 Similarly, replace LE/LEU by LT/LTU. */
9938 code
= avr_normalize_condition (code
);
9945 /* Wrap the branches into UNSPECs so they won't be changed or
9946 optimized in the remainder. */
9948 target
= XEXP (XEXP (ifelse1
, 1), 0);
9949 cond
= XEXP (ifelse1
, 0);
9950 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
9952 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
9954 target
= XEXP (XEXP (ifelse2
, 1), 0);
9955 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9956 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
9958 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
9960 /* The comparisons in insn1 and insn2 are exactly the same;
9961 insn2 is superfluous so delete it. */
9963 delete_insn (insn2
);
9964 delete_insn (branch1
);
9965 delete_insn (branch2
);
9971 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9972 /* Optimize conditional jumps. */
9977 rtx insn
= get_insns();
9979 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
9981 rtx pattern
= avr_compare_pattern (insn
);
9987 && avr_reorg_remove_redundant_compare (insn
))
9992 if (compare_diff_p (insn
))
9994 /* Now we work under compare insn with difficult branch. */
9996 rtx next
= next_real_insn (insn
);
9997 rtx pat
= PATTERN (next
);
9999 pattern
= SET_SRC (pattern
);
10001 if (true_regnum (XEXP (pattern
, 0)) >= 0
10002 && true_regnum (XEXP (pattern
, 1)) >= 0)
10004 rtx x
= XEXP (pattern
, 0);
10005 rtx src
= SET_SRC (pat
);
10006 rtx t
= XEXP (src
,0);
10007 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
10008 XEXP (pattern
, 0) = XEXP (pattern
, 1);
10009 XEXP (pattern
, 1) = x
;
10010 INSN_CODE (next
) = -1;
10012 else if (true_regnum (XEXP (pattern
, 0)) >= 0
10013 && XEXP (pattern
, 1) == const0_rtx
)
10015 /* This is a tst insn, we can reverse it. */
10016 rtx src
= SET_SRC (pat
);
10017 rtx t
= XEXP (src
,0);
10019 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
10020 XEXP (pattern
, 1) = XEXP (pattern
, 0);
10021 XEXP (pattern
, 0) = const0_rtx
;
10022 INSN_CODE (next
) = -1;
10023 INSN_CODE (insn
) = -1;
10025 else if (true_regnum (XEXP (pattern
, 0)) >= 0
10026 && CONST_INT_P (XEXP (pattern
, 1)))
10028 rtx x
= XEXP (pattern
, 1);
10029 rtx src
= SET_SRC (pat
);
10030 rtx t
= XEXP (src
,0);
10031 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
10033 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
10035 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
10036 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
10037 INSN_CODE (next
) = -1;
10038 INSN_CODE (insn
) = -1;
10045 /* Returns register number for function return value.*/
10047 static inline unsigned int
10048 avr_ret_register (void)
10054 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10057 avr_function_value_regno_p (const unsigned int regno
)
10059 return (regno
== avr_ret_register ());
10063 /* Implement `TARGET_LIBCALL_VALUE'. */
10064 /* Create an RTX representing the place where a
10065 library function returns a value of mode MODE. */
10068 avr_libcall_value (enum machine_mode mode
,
10069 const_rtx func ATTRIBUTE_UNUSED
)
10071 int offs
= GET_MODE_SIZE (mode
);
10074 offs
= (offs
+ 1) & ~1;
10076 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
10080 /* Implement `TARGET_FUNCTION_VALUE'. */
10081 /* Create an RTX representing the place where a
10082 function returns a value of data type VALTYPE. */
10085 avr_function_value (const_tree type
,
10086 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
10087 bool outgoing ATTRIBUTE_UNUSED
)
10091 if (TYPE_MODE (type
) != BLKmode
)
10092 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
10094 offs
= int_size_in_bytes (type
);
10097 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
10098 offs
= GET_MODE_SIZE (SImode
);
10099 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
10100 offs
= GET_MODE_SIZE (DImode
);
10102 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
10106 test_hard_reg_class (enum reg_class rclass
, rtx x
)
10108 int regno
= true_regnum (x
);
10112 if (TEST_HARD_REG_CLASS (rclass
, regno
))
10119 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
10120 and thus is suitable to be skipped by CPSE, SBRC, etc. */
10123 avr_2word_insn_p (rtx insn
)
10125 if (avr_current_device
->errata_skip
10127 || 2 != get_attr_length (insn
))
10132 switch (INSN_CODE (insn
))
10137 case CODE_FOR_movqi_insn
:
10138 case CODE_FOR_movuqq_insn
:
10139 case CODE_FOR_movqq_insn
:
10141 rtx set
= single_set (insn
);
10142 rtx src
= SET_SRC (set
);
10143 rtx dest
= SET_DEST (set
);
10145 /* Factor out LDS and STS from movqi_insn. */
10148 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
10150 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
10152 else if (REG_P (dest
)
10155 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
10161 case CODE_FOR_call_insn
:
10162 case CODE_FOR_call_value_insn
:
10169 jump_over_one_insn_p (rtx insn
, rtx dest
)
10171 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
10174 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
10175 int dest_addr
= INSN_ADDRESSES (uid
);
10176 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
10178 return (jump_offset
== 1
10179 || (jump_offset
== 2
10180 && avr_2word_insn_p (next_active_insn (insn
))));
10184 /* Worker function for `HARD_REGNO_MODE_OK'. */
10185 /* Returns 1 if a value of mode MODE can be stored starting with hard
10186 register number REGNO. On the enhanced core, anything larger than
10187 1 byte must start in even numbered register for "movw" to work
10188 (this way we don't have to check for odd registers everywhere). */
10191 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
10193 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10194 Disallowing QI et al. in these regs might lead to code like
10195 (set (subreg:QI (reg:HI 28) n) ...)
10196 which will result in wrong code because reload does not
10197 handle SUBREGs of hard regsisters like this.
10198 This could be fixed in reload. However, it appears
10199 that fixing reload is not wanted by reload people. */
10201 /* Any GENERAL_REGS register can hold 8-bit values. */
10203 if (GET_MODE_SIZE (mode
) == 1)
10206 /* FIXME: Ideally, the following test is not needed.
10207 However, it turned out that it can reduce the number
10208 of spill fails. AVR and it's poor endowment with
10209 address registers is extreme stress test for reload. */
10211 if (GET_MODE_SIZE (mode
) >= 4
10215 /* All modes larger than 8 bits should start in an even register. */
10217 return !(regno
& 1);
10221 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10224 avr_hard_regno_call_part_clobbered (unsigned regno
, enum machine_mode mode
)
10226 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10227 represent valid hard registers like, e.g. HI:29. Returning TRUE
10228 for such registers can lead to performance degradation as mentioned
10229 in PR53595. Thus, report invalid hard registers as FALSE. */
10231 if (!avr_hard_regno_mode_ok (regno
, mode
))
10234 /* Return true if any of the following boundaries is crossed:
10235 17/18, 27/28 and 29/30. */
10237 return ((regno
< 18 && regno
+ GET_MODE_SIZE (mode
) > 18)
10238 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
10239 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
10243 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10246 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
10247 addr_space_t as
, RTX_CODE outer_code
,
10248 RTX_CODE index_code ATTRIBUTE_UNUSED
)
10250 if (!ADDR_SPACE_GENERIC_P (as
))
10252 return POINTER_Z_REGS
;
10256 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
10258 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
10262 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10265 avr_regno_mode_code_ok_for_base_p (int regno
,
10266 enum machine_mode mode ATTRIBUTE_UNUSED
,
10267 addr_space_t as ATTRIBUTE_UNUSED
,
10268 RTX_CODE outer_code
,
10269 RTX_CODE index_code ATTRIBUTE_UNUSED
)
10273 if (!ADDR_SPACE_GENERIC_P (as
))
10275 if (regno
< FIRST_PSEUDO_REGISTER
10283 regno
= reg_renumber
[regno
];
10285 if (regno
== REG_Z
)
10294 if (regno
< FIRST_PSEUDO_REGISTER
10298 || regno
== ARG_POINTER_REGNUM
))
10302 else if (reg_renumber
)
10304 regno
= reg_renumber
[regno
];
10309 || regno
== ARG_POINTER_REGNUM
)
10316 && PLUS
== outer_code
10326 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10327 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10328 CLOBBER_REG is a QI clobber register or NULL_RTX.
10329 LEN == NULL: output instructions.
10330 LEN != NULL: set *LEN to the length of the instruction sequence
10331 (in words) printed with LEN = NULL.
10332 If CLEAR_P is true, OP[0] had been cleard to Zero already.
10333 If CLEAR_P is false, nothing is known about OP[0].
10335 The effect on cc0 is as follows:
10337 Load 0 to any register except ZERO_REG : NONE
10338 Load ld register with any value : NONE
10339 Anything else: : CLOBBER */
10342 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
10346 rtx xval
, xdest
[4];
10348 int clobber_val
= 1234;
10349 bool cooked_clobber_p
= false;
10350 bool set_p
= false;
10351 enum machine_mode mode
= GET_MODE (dest
);
10352 int n
, n_bytes
= GET_MODE_SIZE (mode
);
10354 gcc_assert (REG_P (dest
)
10355 && CONSTANT_P (src
));
10360 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10361 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10363 if (REGNO (dest
) < 16
10364 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
10366 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
10369 /* We might need a clobber reg but don't have one. Look at the value to
10370 be loaded more closely. A clobber is only needed if it is a symbol
10371 or contains a byte that is neither 0, -1 or a power of 2. */
10373 if (NULL_RTX
== clobber_reg
10374 && !test_hard_reg_class (LD_REGS
, dest
)
10375 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
10376 || !avr_popcount_each_byte (src
, n_bytes
,
10377 (1 << 0) | (1 << 1) | (1 << 8))))
10379 /* We have no clobber register but need one. Cook one up.
10380 That's cheaper than loading from constant pool. */
10382 cooked_clobber_p
= true;
10383 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
10384 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
10387 /* Now start filling DEST from LSB to MSB. */
10389 for (n
= 0; n
< n_bytes
; n
++)
10392 bool done_byte
= false;
10396 /* Crop the n-th destination byte. */
10398 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
10399 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
10401 if (!CONST_INT_P (src
)
10402 && !CONST_FIXED_P (src
)
10403 && !CONST_DOUBLE_P (src
))
10405 static const char* const asm_code
[][2] =
10407 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
10408 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
10409 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
10410 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
10415 xop
[2] = clobber_reg
;
10417 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
10422 /* Crop the n-th source byte. */
10424 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
10425 ival
[n
] = INTVAL (xval
);
10427 /* Look if we can reuse the low word by means of MOVW. */
10433 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
10434 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
10436 if (INTVAL (lo16
) == INTVAL (hi16
))
10438 if (0 != INTVAL (lo16
)
10441 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
10448 /* Don't use CLR so that cc0 is set as expected. */
10453 avr_asm_len (ldreg_p
? "ldi %0,0"
10454 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
10455 : "mov %0,__zero_reg__",
10456 &xdest
[n
], len
, 1);
10460 if (clobber_val
== ival
[n
]
10461 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
10466 /* LD_REGS can use LDI to move a constant value */
10472 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
10476 /* Try to reuse value already loaded in some lower byte. */
10478 for (j
= 0; j
< n
; j
++)
10479 if (ival
[j
] == ival
[n
])
10484 avr_asm_len ("mov %0,%1", xop
, len
, 1);
10492 /* Need no clobber reg for -1: Use CLR/DEC */
10497 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10499 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
10502 else if (1 == ival
[n
])
10505 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10507 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
10511 /* Use T flag or INC to manage powers of 2 if we have
10514 if (NULL_RTX
== clobber_reg
10515 && single_one_operand (xval
, QImode
))
10518 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
10520 gcc_assert (constm1_rtx
!= xop
[1]);
10525 avr_asm_len ("set", xop
, len
, 1);
10529 avr_asm_len ("clr %0", xop
, len
, 1);
10531 avr_asm_len ("bld %0,%1", xop
, len
, 1);
10535 /* We actually need the LD_REGS clobber reg. */
10537 gcc_assert (NULL_RTX
!= clobber_reg
);
10541 xop
[2] = clobber_reg
;
10542 clobber_val
= ival
[n
];
10544 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10545 "mov %0,%2", xop
, len
, 2);
10548 /* If we cooked up a clobber reg above, restore it. */
10550 if (cooked_clobber_p
)
10552 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
10557 /* Reload the constant OP[1] into the HI register OP[0].
10558 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10559 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10560 need a clobber reg or have to cook one up.
10562 PLEN == NULL: Output instructions.
10563 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10564 by the insns printed.
10569 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
10571 output_reload_in_const (op
, clobber_reg
, plen
, false);
10576 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10577 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10578 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10579 need a clobber reg or have to cook one up.
10581 LEN == NULL: Output instructions.
10583 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10584 by the insns printed.
10589 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
10592 && !test_hard_reg_class (LD_REGS
, op
[0])
10593 && (CONST_INT_P (op
[1])
10594 || CONST_FIXED_P (op
[1])
10595 || CONST_DOUBLE_P (op
[1])))
10597 int len_clr
, len_noclr
;
10599 /* In some cases it is better to clear the destination beforehand, e.g.
10601 CLR R2 CLR R3 MOVW R4,R2 INC R2
10605 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10607 We find it too tedious to work that out in the print function.
10608 Instead, we call the print function twice to get the lengths of
10609 both methods and use the shortest one. */
10611 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
10612 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
10614 if (len_noclr
- len_clr
== 4)
10616 /* Default needs 4 CLR instructions: clear register beforehand. */
10618 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10619 "mov %B0,__zero_reg__" CR_TAB
10620 "movw %C0,%A0", &op
[0], len
, 3);
10622 output_reload_in_const (op
, clobber_reg
, len
, true);
10631 /* Default: destination not pre-cleared. */
10633 output_reload_in_const (op
, clobber_reg
, len
, false);
10638 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
10640 output_reload_in_const (op
, clobber_reg
, len
, false);
10645 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10648 avr_output_addr_vec_elt (FILE *stream
, int value
)
10650 if (AVR_HAVE_JMP_CALL
)
10651 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
10653 fprintf (stream
, "\trjmp .L%d\n", value
);
10657 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10658 /* Returns true if SCRATCH are safe to be allocated as a scratch
10659 registers (for a define_peephole2) in the current function. */
10662 avr_hard_regno_scratch_ok (unsigned int regno
)
10664 /* Interrupt functions can only use registers that have already been saved
10665 by the prologue, even if they would normally be call-clobbered. */
10667 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10668 && !df_regs_ever_live_p (regno
))
10671 /* Don't allow hard registers that might be part of the frame pointer.
10672 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10673 and don't care for a frame pointer that spans more than one register. */
10675 if ((!reload_completed
|| frame_pointer_needed
)
10676 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
10685 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10686 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10689 avr_hard_regno_rename_ok (unsigned int old_reg
,
10690 unsigned int new_reg
)
10692 /* Interrupt functions can only use registers that have already been
10693 saved by the prologue, even if they would normally be
10696 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10697 && !df_regs_ever_live_p (new_reg
))
10700 /* Don't allow hard registers that might be part of the frame pointer.
10701 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10702 and don't care for a frame pointer that spans more than one register. */
10704 if ((!reload_completed
|| frame_pointer_needed
)
10705 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
10706 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
10714 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10715 or memory location in the I/O space (QImode only).
10717 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10718 Operand 1: register operand to test, or CONST_INT memory address.
10719 Operand 2: bit number.
10720 Operand 3: label to jump to if the test is true. */
10723 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
10725 enum rtx_code comp
= GET_CODE (operands
[0]);
10726 bool long_jump
= get_attr_length (insn
) >= 4;
10727 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
10731 else if (comp
== LT
)
10735 comp
= reverse_condition (comp
);
10737 switch (GET_CODE (operands
[1]))
10744 if (low_io_address_operand (operands
[1], QImode
))
10747 output_asm_insn ("sbis %i1,%2", operands
);
10749 output_asm_insn ("sbic %i1,%2", operands
);
10753 output_asm_insn ("in __tmp_reg__,%i1", operands
);
10755 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
10757 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
10760 break; /* CONST_INT */
10765 output_asm_insn ("sbrs %T1%T2", operands
);
10767 output_asm_insn ("sbrc %T1%T2", operands
);
10773 return ("rjmp .+4" CR_TAB
10782 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10785 avr_asm_out_ctor (rtx symbol
, int priority
)
10787 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
10788 default_ctor_section_asm_out_constructor (symbol
, priority
);
10792 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
10795 avr_asm_out_dtor (rtx symbol
, int priority
)
10797 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
10798 default_dtor_section_asm_out_destructor (symbol
, priority
);
10802 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
10805 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
10807 if (TYPE_MODE (type
) == BLKmode
)
10809 HOST_WIDE_INT size
= int_size_in_bytes (type
);
10810 return (size
== -1 || size
> 8);
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
10833 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10835 static enum machine_mode
10836 avr_addr_space_address_mode (addr_space_t as
)
10838 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
10842 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10844 static enum machine_mode
10845 avr_addr_space_pointer_mode (addr_space_t as
)
10847 return avr_addr_space_address_mode (as
);
10851 /* Helper for following function. */
10854 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
10856 gcc_assert (REG_P (reg
));
10860 return REGNO (reg
) == REG_Z
;
10863 /* Avoid combine to propagate hard regs. */
10865 if (can_create_pseudo_p()
10866 && REGNO (reg
) < REG_Z
)
10875 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10878 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
10879 bool strict
, addr_space_t as
)
10888 case ADDR_SPACE_GENERIC
:
10889 return avr_legitimate_address_p (mode
, x
, strict
);
10891 case ADDR_SPACE_FLASH
:
10892 case ADDR_SPACE_FLASH1
:
10893 case ADDR_SPACE_FLASH2
:
10894 case ADDR_SPACE_FLASH3
:
10895 case ADDR_SPACE_FLASH4
:
10896 case ADDR_SPACE_FLASH5
:
10898 switch (GET_CODE (x
))
10901 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
10905 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
10914 case ADDR_SPACE_MEMX
:
10917 && can_create_pseudo_p());
10919 if (LO_SUM
== GET_CODE (x
))
10921 rtx hi
= XEXP (x
, 0);
10922 rtx lo
= XEXP (x
, 1);
10925 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
10927 && REGNO (lo
) == REG_Z
);
10933 if (avr_log
.legitimate_address_p
)
10935 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10936 "reload_completed=%d reload_in_progress=%d %s:",
10937 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
10938 reg_renumber
? "(reg_renumber)" : "");
10940 if (GET_CODE (x
) == PLUS
10941 && REG_P (XEXP (x
, 0))
10942 && CONST_INT_P (XEXP (x
, 1))
10943 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
10946 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
10947 true_regnum (XEXP (x
, 0)));
10950 avr_edump ("\n%r\n", x
);
10957 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10960 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
10961 enum machine_mode mode
, addr_space_t as
)
10963 if (ADDR_SPACE_GENERIC_P (as
))
10964 return avr_legitimize_address (x
, old_x
, mode
);
10966 if (avr_log
.legitimize_address
)
10968 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
10975 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10978 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
10980 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
10981 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
10983 if (avr_log
.progmem
)
10984 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
10985 src
, type_from
, type_to
);
10987 /* Up-casting from 16-bit to 24-bit pointer. */
10989 if (as_from
!= ADDR_SPACE_MEMX
10990 && as_to
== ADDR_SPACE_MEMX
)
10994 rtx reg
= gen_reg_rtx (PSImode
);
10996 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
10997 sym
= XEXP (sym
, 0);
10999 /* Look at symbol flags: avr_encode_section_info set the flags
11000 also if attribute progmem was seen so that we get the right
11001 promotion for, e.g. PSTR-like strings that reside in generic space
11002 but are located in flash. In that case we patch the incoming
11005 if (SYMBOL_REF
== GET_CODE (sym
)
11006 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
11008 as_from
= ADDR_SPACE_FLASH
;
11011 /* Linearize memory: RAM has bit 23 set. */
11013 msb
= ADDR_SPACE_GENERIC_P (as_from
)
11015 : avr_addrspace
[as_from
].segment
;
11017 src
= force_reg (Pmode
, src
);
11019 emit_insn (msb
== 0
11020 ? gen_zero_extendhipsi2 (reg
, src
)
11021 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
11026 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
11028 if (as_from
== ADDR_SPACE_MEMX
11029 && as_to
!= ADDR_SPACE_MEMX
)
11031 rtx new_src
= gen_reg_rtx (Pmode
);
11033 src
= force_reg (PSImode
, src
);
11035 emit_move_insn (new_src
,
11036 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
11044 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11047 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
11048 addr_space_t superset ATTRIBUTE_UNUSED
)
11050 /* Allow any kind of pointer mess. */
11056 /* Implement `TARGET_CONVERT_TO_TYPE'. */
11059 avr_convert_to_type (tree type
, tree expr
)
11061 /* Print a diagnose for pointer conversion that changes the address
11062 space of the pointer target to a non-enclosing address space,
11063 provided -Waddr-space-convert is on.
11065 FIXME: Filter out cases where the target object is known to
11066 be located in the right memory, like in
11068 (const __flash*) PSTR ("text")
11070 Also try to distinguish between explicit casts requested by
11071 the user and implicit casts like
11073 void f (const __flash char*);
11075 void g (const char *p)
11077 f ((const __flash*) p);
11080 under the assumption that an explicit casts means that the user
11081 knows what he is doing, e.g. interface with PSTR or old style
11082 code with progmem and pgm_read_xxx.
11085 if (avr_warn_addr_space_convert
11086 && expr
!= error_mark_node
11087 && POINTER_TYPE_P (type
)
11088 && POINTER_TYPE_P (TREE_TYPE (expr
)))
11090 addr_space_t as_old
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr
)));
11091 addr_space_t as_new
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
11093 if (avr_log
.progmem
)
11094 avr_edump ("%?: type = %t\nexpr = %t\n\n", type
, expr
);
11096 if (as_new
!= ADDR_SPACE_MEMX
11097 && as_new
!= as_old
)
11099 location_t loc
= EXPR_LOCATION (expr
);
11100 const char *name_old
= avr_addrspace
[as_old
].name
;
11101 const char *name_new
= avr_addrspace
[as_new
].name
;
11103 warning (OPT_Waddr_space_convert
,
11104 "conversion from address space %qs to address space %qs",
11105 ADDR_SPACE_GENERIC_P (as_old
) ? "generic" : name_old
,
11106 ADDR_SPACE_GENERIC_P (as_new
) ? "generic" : name_new
);
11108 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, expr
);
11116 /* Worker function for movmemhi expander.
11117 XOP[0] Destination as MEM:BLK
11119 XOP[2] # Bytes to copy
11121 Return TRUE if the expansion is accomplished.
11122 Return FALSE if the operand compination is not supported. */
11125 avr_emit_movmemhi (rtx
*xop
)
11127 HOST_WIDE_INT count
;
11128 enum machine_mode loop_mode
;
11129 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
11130 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
11131 rtx a_hi8
= NULL_RTX
;
11133 if (avr_mem_flash_p (xop
[0]))
11136 if (!CONST_INT_P (xop
[2]))
11139 count
= INTVAL (xop
[2]);
11143 a_src
= XEXP (xop
[1], 0);
11144 a_dest
= XEXP (xop
[0], 0);
11146 if (PSImode
== GET_MODE (a_src
))
11148 gcc_assert (as
== ADDR_SPACE_MEMX
);
11150 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
11151 loop_reg
= gen_rtx_REG (loop_mode
, 24);
11152 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
11154 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
11155 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
11159 int segment
= avr_addrspace
[as
].segment
;
11162 && avr_current_device
->n_flash
> 1)
11164 a_hi8
= GEN_INT (segment
);
11165 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
11167 else if (!ADDR_SPACE_GENERIC_P (as
))
11169 as
= ADDR_SPACE_FLASH
;
11174 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
11175 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
11178 xas
= GEN_INT (as
);
11180 /* FIXME: Register allocator might come up with spill fails if it is left
11181 on its own. Thus, we allocate the pointer registers by hand:
11183 X = destination address */
11185 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
11186 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
11188 /* FIXME: Register allocator does a bad job and might spill address
11189 register(s) inside the loop leading to additional move instruction
11190 to/from stack which could clobber tmp_reg. Thus, do *not* emit
11191 load and store as separate insns. Instead, we perform the copy
11192 by means of one monolithic insn. */
11194 gcc_assert (TMP_REGNO
== LPM_REGNO
);
11196 if (as
!= ADDR_SPACE_MEMX
)
11198 /* Load instruction ([E]LPM or LD) is known at compile time:
11199 Do the copy-loop inline. */
11201 rtx (*fun
) (rtx
, rtx
, rtx
)
11202 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
11204 insn
= fun (xas
, loop_reg
, loop_reg
);
11208 rtx (*fun
) (rtx
, rtx
)
11209 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
11211 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
11213 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
11216 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
11223 /* Print assembler for movmem_qi, movmem_hi insns...
11225 $1, $2 : Loop register
11227 X : Destination address
11231 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
11233 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
11234 enum machine_mode loop_mode
= GET_MODE (op
[1]);
11235 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
11243 xop
[2] = tmp_reg_rtx
;
11247 avr_asm_len ("0:", xop
, plen
, 0);
11249 /* Load with post-increment */
11256 case ADDR_SPACE_GENERIC
:
11258 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
11261 case ADDR_SPACE_FLASH
:
11264 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
11266 avr_asm_len ("lpm" CR_TAB
11267 "adiw r30,1", xop
, plen
, 2);
11270 case ADDR_SPACE_FLASH1
:
11271 case ADDR_SPACE_FLASH2
:
11272 case ADDR_SPACE_FLASH3
:
11273 case ADDR_SPACE_FLASH4
:
11274 case ADDR_SPACE_FLASH5
:
11276 if (AVR_HAVE_ELPMX
)
11277 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
11279 avr_asm_len ("elpm" CR_TAB
11280 "adiw r30,1", xop
, plen
, 2);
11284 /* Store with post-increment */
11286 avr_asm_len ("st X+,%2", xop
, plen
, 1);
11288 /* Decrement loop-counter and set Z-flag */
11290 if (QImode
== loop_mode
)
11292 avr_asm_len ("dec %1", xop
, plen
, 1);
11296 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
11300 avr_asm_len ("subi %A1,1" CR_TAB
11301 "sbci %B1,0", xop
, plen
, 2);
11304 /* Loop until zero */
11306 return avr_asm_len ("brne 0b", xop
, plen
, 1);
11311 /* Helper for __builtin_avr_delay_cycles */
11314 avr_mem_clobber (void)
11316 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
11317 MEM_VOLATILE_P (mem
) = 1;
11322 avr_expand_delay_cycles (rtx operands0
)
11324 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
11325 unsigned HOST_WIDE_INT cycles_used
;
11326 unsigned HOST_WIDE_INT loop_count
;
11328 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
11330 loop_count
= ((cycles
- 9) / 6) + 1;
11331 cycles_used
= ((loop_count
- 1) * 6) + 9;
11332 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
11333 avr_mem_clobber()));
11334 cycles
-= cycles_used
;
11337 if (IN_RANGE (cycles
, 262145, 83886081))
11339 loop_count
= ((cycles
- 7) / 5) + 1;
11340 if (loop_count
> 0xFFFFFF)
11341 loop_count
= 0xFFFFFF;
11342 cycles_used
= ((loop_count
- 1) * 5) + 7;
11343 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
11344 avr_mem_clobber()));
11345 cycles
-= cycles_used
;
11348 if (IN_RANGE (cycles
, 768, 262144))
11350 loop_count
= ((cycles
- 5) / 4) + 1;
11351 if (loop_count
> 0xFFFF)
11352 loop_count
= 0xFFFF;
11353 cycles_used
= ((loop_count
- 1) * 4) + 5;
11354 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
11355 avr_mem_clobber()));
11356 cycles
-= cycles_used
;
11359 if (IN_RANGE (cycles
, 6, 767))
11361 loop_count
= cycles
/ 3;
11362 if (loop_count
> 255)
11364 cycles_used
= loop_count
* 3;
11365 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
11366 avr_mem_clobber()));
11367 cycles
-= cycles_used
;
11370 while (cycles
>= 2)
11372 emit_insn (gen_nopv (GEN_INT(2)));
11378 emit_insn (gen_nopv (GEN_INT(1)));
/* Compute the image of x under f, i.e. perform   x --> f(x)
   F encodes a nibble-map: nibble X of F is the image of X.
   For X outside 0..7 the result is 0.
   NOTE(review): negative X also takes the X < 8 branch and would shift
   by a negative amount (UB) -- callers only pass 0..7; confirm.  */

static unsigned int
avr_map (unsigned int f, int x)
{
  return x < 8 ? (f >> (4 * x)) & 0xf : 0;
}
11393 /* Return some metrics of map A. */
11397 /* Number of fixed points in { 0 ... 7 } */
11400 /* Size of preimage of non-fixed points in { 0 ... 7 } */
11403 /* Mask representing the fixed points in { 0 ... 7 } */
11404 MAP_MASK_FIXED_0_7
,
11406 /* Size of the preimage of { 0 ... 7 } */
11409 /* Mask that represents the preimage of { f } */
11410 MAP_MASK_PREIMAGE_F
11414 avr_map_metric (unsigned int a
, int mode
)
11416 unsigned i
, metric
= 0;
11418 for (i
= 0; i
< 8; i
++)
11420 unsigned ai
= avr_map (a
, i
);
11422 if (mode
== MAP_FIXED_0_7
)
11424 else if (mode
== MAP_NONFIXED_0_7
)
11425 metric
+= ai
< 8 && ai
!= i
;
11426 else if (mode
== MAP_MASK_FIXED_0_7
)
11427 metric
|= ((unsigned) (ai
== i
)) << i
;
11428 else if (mode
== MAP_PREIMAGE_0_7
)
11430 else if (mode
== MAP_MASK_PREIMAGE_F
)
11431 metric
|= ((unsigned) (ai
== 0xf)) << i
;
11440 /* Return true if IVAL has a 0xf in its hexadecimal representation
11441 and false, otherwise. Only nibbles 0..7 are taken into account.
11442 Used as constraint helper for C0f and Cxf. */
11445 avr_has_nibble_0xf (rtx ival
)
11447 unsigned int map
= UINTVAL (ival
) & GET_MODE_MASK (SImode
);
11448 return 0 != avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
11452 /* We have a set of bits that are mapped by a function F.
11453 Try to decompose F by means of a second function G so that
11459 cost (F o G^-1) + cost (G) < cost (F)
11461 Example: Suppose builtin insert_bits supplies us with the map
11462 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
11463 nibble of the result, we can just as well rotate the bits before inserting
11464 them and use the map 0x7654ffff which is cheaper than the original map.
11465 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
11469 /* tree code of binary function G */
11470 enum tree_code code
;
11472 /* The constant second argument of G */
11475 /* G^-1, the inverse of G (*, arg) */
11478 /* The cost of appplying G (*, arg) */
11481 /* The composition F o G^-1 (*, arg) for some function F */
11484 /* For debug purpose only */
11488 static const avr_map_op_t avr_map_op
[] =
11490 { LROTATE_EXPR
, 0, 0x76543210, 0, 0, "id" },
11491 { LROTATE_EXPR
, 1, 0x07654321, 2, 0, "<<<" },
11492 { LROTATE_EXPR
, 2, 0x10765432, 4, 0, "<<<" },
11493 { LROTATE_EXPR
, 3, 0x21076543, 4, 0, "<<<" },
11494 { LROTATE_EXPR
, 4, 0x32107654, 1, 0, "<<<" },
11495 { LROTATE_EXPR
, 5, 0x43210765, 3, 0, "<<<" },
11496 { LROTATE_EXPR
, 6, 0x54321076, 5, 0, "<<<" },
11497 { LROTATE_EXPR
, 7, 0x65432107, 3, 0, "<<<" },
11498 { RSHIFT_EXPR
, 1, 0x6543210c, 1, 0, ">>" },
11499 { RSHIFT_EXPR
, 1, 0x7543210c, 1, 0, ">>" },
11500 { RSHIFT_EXPR
, 2, 0x543210cc, 2, 0, ">>" },
11501 { RSHIFT_EXPR
, 2, 0x643210cc, 2, 0, ">>" },
11502 { RSHIFT_EXPR
, 2, 0x743210cc, 2, 0, ">>" },
11503 { LSHIFT_EXPR
, 1, 0xc7654321, 1, 0, "<<" },
11504 { LSHIFT_EXPR
, 2, 0xcc765432, 2, 0, "<<" }
11508 /* Try to decompose F as F = (F o G^-1) o G as described above.
11509 The result is a struct representing F o G^-1 and G.
11510 If result.cost < 0 then such a decomposition does not exist. */
11512 static avr_map_op_t
11513 avr_map_decompose (unsigned int f
, const avr_map_op_t
*g
, bool val_const_p
)
11516 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
11517 avr_map_op_t f_ginv
= *g
;
11518 unsigned int ginv
= g
->ginv
;
11522 /* Step 1: Computing F o G^-1 */
11524 for (i
= 7; i
>= 0; i
--)
11526 int x
= avr_map (f
, i
);
11530 x
= avr_map (ginv
, x
);
11532 /* The bit is no element of the image of G: no avail (cost = -1) */
11538 f_ginv
.map
= (f_ginv
.map
<< 4) + x
;
11541 /* Step 2: Compute the cost of the operations.
11542 The overall cost of doing an operation prior to the insertion is
11543 the cost of the insertion plus the cost of the operation. */
11545 /* Step 2a: Compute cost of F o G^-1 */
11547 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
11549 /* The mapping consists only of fixed points and can be folded
11550 to AND/OR logic in the remainder. Reasonable cost is 3. */
11552 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
11558 /* Get the cost of the insn by calling the output worker with some
11559 fake values. Mimic effect of reloading xop[3]: Unused operands
11560 are mapped to 0 and used operands are reloaded to xop[0]. */
11562 xop
[0] = all_regs_rtx
[24];
11563 xop
[1] = gen_int_mode (f_ginv
.map
, SImode
);
11564 xop
[2] = all_regs_rtx
[25];
11565 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
11567 avr_out_insert_bits (xop
, &f_ginv
.cost
);
11569 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
11572 /* Step 2b: Add cost of G */
11574 f_ginv
.cost
+= g
->cost
;
11576 if (avr_log
.builtin
)
11577 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
11583 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11584 XOP[0] and XOP[1] don't overlap.
11585 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11586 If FIXP_P = false: Just move the bit if its position in the destination
11587 is different to its source position. */
11590 avr_move_bits (rtx
*xop
, unsigned int map
, bool fixp_p
, int *plen
)
11594 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11595 int t_bit_src
= -1;
11597 /* We order the operations according to the requested source bit b. */
11599 for (b
= 0; b
< 8; b
++)
11600 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
11602 int bit_src
= avr_map (map
, bit_dest
);
11606 /* Same position: No need to copy as requested by FIXP_P. */
11607 || (bit_dest
== bit_src
&& !fixp_p
))
11610 if (t_bit_src
!= bit_src
)
11612 /* Source bit is not yet in T: Store it to T. */
11614 t_bit_src
= bit_src
;
11616 xop
[3] = GEN_INT (bit_src
);
11617 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
11620 /* Load destination bit with T. */
11622 xop
[3] = GEN_INT (bit_dest
);
11623 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
11628 /* PLEN == 0: Print assembler code for `insert_bits'.
11629 PLEN != 0: Compute code length in bytes.
11632 OP[1]: The mapping composed of nibbles. If nibble no. N is
11633 0: Bit N of result is copied from bit OP[2].0
11635 7: Bit N of result is copied from bit OP[2].7
11636 0xf: Bit N of result is copied from bit OP[3].N
11637 OP[2]: Bits to be inserted
11638 OP[3]: Target value */
11641 avr_out_insert_bits (rtx
*op
, int *plen
)
11643 unsigned int map
= UINTVAL (op
[1]) & GET_MODE_MASK (SImode
);
11644 unsigned mask_fixed
;
11645 bool fixp_p
= true;
11652 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
11656 else if (flag_print_asm_name
)
11657 fprintf (asm_out_file
, ASM_COMMENT_START
"map = 0x%08x\n", map
);
11659 /* If MAP has fixed points it might be better to initialize the result
11660 with the bits to be inserted instead of moving all bits by hand. */
11662 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
11664 if (REGNO (xop
[0]) == REGNO (xop
[1]))
11666 /* Avoid early-clobber conflicts */
11668 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
11669 xop
[1] = tmp_reg_rtx
;
11673 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
11675 /* XOP[2] is used and reloaded to XOP[0] already */
11677 int n_fix
= 0, n_nofix
= 0;
11679 gcc_assert (REG_P (xop
[2]));
11681 /* Get the code size of the bit insertions; once with all bits
11682 moved and once with fixed points omitted. */
11684 avr_move_bits (xop
, map
, true, &n_fix
);
11685 avr_move_bits (xop
, map
, false, &n_nofix
);
11687 if (fixp_p
&& n_fix
- n_nofix
> 3)
11689 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
11691 avr_asm_len ("eor %0,%1" CR_TAB
11692 "andi %0,%3" CR_TAB
11693 "eor %0,%1", xop
, plen
, 3);
11699 /* XOP[2] is unused */
11701 if (fixp_p
&& mask_fixed
)
11703 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
11708 /* Move/insert remaining bits. */
11710 avr_move_bits (xop
, map
, fixp_p
, plen
);
11716 /* IDs for all the AVR builtins. */
11718 enum avr_builtin_id
11720 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11721 AVR_BUILTIN_ ## NAME,
11722 #include "builtins.def"
11728 struct GTY(()) avr_builtin_description
11730 enum insn_code icode
;
11736 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11737 that a built-in's ID can be used to access the built-in by means of
11740 static GTY(()) struct avr_builtin_description
11741 avr_bdesc
[AVR_BUILTIN_COUNT
] =
11743 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
11744 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11745 #include "builtins.def"
11750 /* Implement `TARGET_BUILTIN_DECL'. */
11753 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
11755 if (id
< AVR_BUILTIN_COUNT
)
11756 return avr_bdesc
[id
].fndecl
;
11758 return error_mark_node
;
11763 avr_init_builtin_int24 (void)
11765 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
11766 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
11768 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
11769 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
11773 /* Implement `TARGET_INIT_BUILTINS' */
11774 /* Set up all builtin functions for this target. */
11777 avr_init_builtins (void)
11779 tree void_ftype_void
11780 = build_function_type_list (void_type_node
, NULL_TREE
);
11781 tree uchar_ftype_uchar
11782 = build_function_type_list (unsigned_char_type_node
,
11783 unsigned_char_type_node
,
11785 tree uint_ftype_uchar_uchar
11786 = build_function_type_list (unsigned_type_node
,
11787 unsigned_char_type_node
,
11788 unsigned_char_type_node
,
11790 tree int_ftype_char_char
11791 = build_function_type_list (integer_type_node
,
11795 tree int_ftype_char_uchar
11796 = build_function_type_list (integer_type_node
,
11798 unsigned_char_type_node
,
11800 tree void_ftype_ulong
11801 = build_function_type_list (void_type_node
,
11802 long_unsigned_type_node
,
11805 tree uchar_ftype_ulong_uchar_uchar
11806 = build_function_type_list (unsigned_char_type_node
,
11807 long_unsigned_type_node
,
11808 unsigned_char_type_node
,
11809 unsigned_char_type_node
,
11812 tree const_memx_void_node
11813 = build_qualified_type (void_type_node
,
11815 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
11817 tree const_memx_ptr_type_node
11818 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
11820 tree char_ftype_const_memx_ptr
11821 = build_function_type_list (char_type_node
,
11822 const_memx_ptr_type_node
,
11826 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
11828 #define FX_FTYPE_FX(fx) \
11829 tree fx##r_ftype_##fx##r \
11830 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
11831 tree fx##k_ftype_##fx##k \
11832 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
11834 #define FX_FTYPE_FX_INT(fx) \
11835 tree fx##r_ftype_##fx##r_int \
11836 = build_function_type_list (node_##fx##r, node_##fx##r, \
11837 integer_type_node, NULL); \
11838 tree fx##k_ftype_##fx##k_int \
11839 = build_function_type_list (node_##fx##k, node_##fx##k, \
11840 integer_type_node, NULL)
11842 #define INT_FTYPE_FX(fx) \
11843 tree int_ftype_##fx##r \
11844 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
11845 tree int_ftype_##fx##k \
11846 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
11848 #define INTX_FTYPE_FX(fx) \
11849 tree int##fx##r_ftype_##fx##r \
11850 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
11851 tree int##fx##k_ftype_##fx##k \
11852 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
11854 #define FX_FTYPE_INTX(fx) \
11855 tree fx##r_ftype_int##fx##r \
11856 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
11857 tree fx##k_ftype_int##fx##k \
11858 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
11860 tree node_hr
= short_fract_type_node
;
11861 tree node_nr
= fract_type_node
;
11862 tree node_lr
= long_fract_type_node
;
11863 tree node_llr
= long_long_fract_type_node
;
11865 tree node_uhr
= unsigned_short_fract_type_node
;
11866 tree node_unr
= unsigned_fract_type_node
;
11867 tree node_ulr
= unsigned_long_fract_type_node
;
11868 tree node_ullr
= unsigned_long_long_fract_type_node
;
11870 tree node_hk
= short_accum_type_node
;
11871 tree node_nk
= accum_type_node
;
11872 tree node_lk
= long_accum_type_node
;
11873 tree node_llk
= long_long_accum_type_node
;
11875 tree node_uhk
= unsigned_short_accum_type_node
;
11876 tree node_unk
= unsigned_accum_type_node
;
11877 tree node_ulk
= unsigned_long_accum_type_node
;
11878 tree node_ullk
= unsigned_long_long_accum_type_node
;
11881 /* For absfx builtins. */
11888 /* For roundfx builtins. */
11890 FX_FTYPE_FX_INT (h
);
11891 FX_FTYPE_FX_INT (n
);
11892 FX_FTYPE_FX_INT (l
);
11893 FX_FTYPE_FX_INT (ll
);
11895 FX_FTYPE_FX_INT (uh
);
11896 FX_FTYPE_FX_INT (un
);
11897 FX_FTYPE_FX_INT (ul
);
11898 FX_FTYPE_FX_INT (ull
);
11900 /* For countlsfx builtins. */
11910 INT_FTYPE_FX (ull
);
11912 /* For bitsfx builtins. */
11917 INTX_FTYPE_FX (ll
);
11919 INTX_FTYPE_FX (uh
);
11920 INTX_FTYPE_FX (un
);
11921 INTX_FTYPE_FX (ul
);
11922 INTX_FTYPE_FX (ull
);
11924 /* For fxbits builtins. */
11929 FX_FTYPE_INTX (ll
);
11931 FX_FTYPE_INTX (uh
);
11932 FX_FTYPE_INTX (un
);
11933 FX_FTYPE_INTX (ul
);
11934 FX_FTYPE_INTX (ull
);
11937 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11939 int id = AVR_BUILTIN_ ## NAME; \
11940 const char *Name = "__builtin_avr_" #NAME; \
11941 char *name = (char*) alloca (1 + strlen (Name)); \
11943 gcc_assert (id < AVR_BUILTIN_COUNT); \
11944 avr_bdesc[id].fndecl \
11945 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
11946 BUILT_IN_MD, LIBNAME, NULL_TREE); \
11948 #include "builtins.def"
11951 avr_init_builtin_int24 ();
11955 /* Subroutine of avr_expand_builtin to expand vanilla builtins
11956 with non-void result and 1 ... 3 arguments. */
11959 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
11962 int n
, n_args
= call_expr_nargs (exp
);
11963 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
11965 gcc_assert (n_args
>= 1 && n_args
<= 3);
11967 if (target
== NULL_RTX
11968 || GET_MODE (target
) != tmode
11969 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
11971 target
= gen_reg_rtx (tmode
);
11974 for (n
= 0; n
< n_args
; n
++)
11976 tree arg
= CALL_EXPR_ARG (exp
, n
);
11977 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11978 enum machine_mode opmode
= GET_MODE (op
);
11979 enum machine_mode mode
= insn_data
[icode
].operand
[n
+1].mode
;
11981 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
11984 op
= gen_lowpart (HImode
, op
);
11987 /* In case the insn wants input operands in modes different from
11988 the result, abort. */
11990 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
11992 if (!insn_data
[icode
].operand
[n
+1].predicate (op
, mode
))
11993 op
= copy_to_mode_reg (mode
, op
);
12000 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
12001 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
12002 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
12008 if (pat
== NULL_RTX
)
12017 /* Implement `TARGET_EXPAND_BUILTIN'. */
12018 /* Expand an expression EXP that calls a built-in function,
12019 with result going to TARGET if that's convenient
12020 (and in mode MODE if that's convenient).
12021 SUBTARGET may be used as the target for computing one of EXP's operands.
12022 IGNORE is nonzero if the value is to be ignored. */
12025 avr_expand_builtin (tree exp
, rtx target
,
12026 rtx subtarget ATTRIBUTE_UNUSED
,
12027 enum machine_mode mode ATTRIBUTE_UNUSED
,
12030 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
12031 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
12032 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
12033 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
12037 gcc_assert (id
< AVR_BUILTIN_COUNT
);
12041 case AVR_BUILTIN_NOP
:
12042 emit_insn (gen_nopv (GEN_INT(1)));
12045 case AVR_BUILTIN_DELAY_CYCLES
:
12047 arg0
= CALL_EXPR_ARG (exp
, 0);
12048 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12050 if (!CONST_INT_P (op0
))
12051 error ("%s expects a compile time integer constant", bname
);
12053 avr_expand_delay_cycles (op0
);
12058 case AVR_BUILTIN_INSERT_BITS
:
12060 arg0
= CALL_EXPR_ARG (exp
, 0);
12061 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12063 if (!CONST_INT_P (op0
))
12065 error ("%s expects a compile time long integer constant"
12066 " as first argument", bname
);
12073 case AVR_BUILTIN_ROUNDHR
: case AVR_BUILTIN_ROUNDUHR
:
12074 case AVR_BUILTIN_ROUNDR
: case AVR_BUILTIN_ROUNDUR
:
12075 case AVR_BUILTIN_ROUNDLR
: case AVR_BUILTIN_ROUNDULR
:
12076 case AVR_BUILTIN_ROUNDLLR
: case AVR_BUILTIN_ROUNDULLR
:
12078 case AVR_BUILTIN_ROUNDHK
: case AVR_BUILTIN_ROUNDUHK
:
12079 case AVR_BUILTIN_ROUNDK
: case AVR_BUILTIN_ROUNDUK
:
12080 case AVR_BUILTIN_ROUNDLK
: case AVR_BUILTIN_ROUNDULK
:
12081 case AVR_BUILTIN_ROUNDLLK
: case AVR_BUILTIN_ROUNDULLK
:
12083 /* Warn about odd rounding. Rounding points >= FBIT will have
12086 if (TREE_CODE (CALL_EXPR_ARG (exp
, 1)) != INTEGER_CST
)
12089 int rbit
= (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1));
12091 if (rbit
>= (int) GET_MODE_FBIT (mode
))
12093 warning (OPT_Wextra
, "rounding to %d bits has no effect for "
12094 "fixed-point value with %d fractional bits",
12095 rbit
, GET_MODE_FBIT (mode
));
12097 return expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
, mode
,
12100 else if (rbit
<= - (int) GET_MODE_IBIT (mode
))
12102 warning (0, "rounding result will always be 0");
12103 return CONST0_RTX (mode
);
12106 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
12108 TR 18037 only specifies results for RP > 0. However, the
12109 remaining cases of -IBIT < RP <= 0 can easily be supported
12110 without any additional overhead. */
12115 /* No fold found and no insn: Call support function from libgcc. */
12117 if (d
->icode
== CODE_FOR_nothing
12118 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp
)) != NULL_TREE
)
12120 return expand_call (exp
, target
, ignore
);
12123 /* No special treatment needed: vanilla expand. */
12125 gcc_assert (d
->icode
!= CODE_FOR_nothing
);
12126 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
12128 if (d
->n_args
== 0)
12130 emit_insn ((GEN_FCN (d
->icode
)) (target
));
12134 return avr_default_expand_builtin (d
->icode
, exp
, target
);
12138 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
12141 avr_fold_absfx (tree tval
)
12143 if (FIXED_CST
!= TREE_CODE (tval
))
12146 /* Our fixed-points have no padding: Use double_int payload directly. */
12148 FIXED_VALUE_TYPE fval
= TREE_FIXED_CST (tval
);
12149 unsigned int bits
= GET_MODE_BITSIZE (fval
.mode
);
12150 double_int ival
= fval
.data
.sext (bits
);
12152 if (!ival
.is_negative())
12155 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
12157 fval
.data
= (ival
== double_int::min_value (bits
, false).sext (bits
))
12158 ? double_int::max_value (bits
, false)
12161 return build_fixed (TREE_TYPE (tval
), fval
);
12165 /* Implement `TARGET_FOLD_BUILTIN'. */
12168 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
12169 bool ignore ATTRIBUTE_UNUSED
)
12171 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
12172 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
12182 case AVR_BUILTIN_SWAP
:
12184 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
12185 build_int_cst (val_type
, 4));
12188 case AVR_BUILTIN_ABSHR
:
12189 case AVR_BUILTIN_ABSR
:
12190 case AVR_BUILTIN_ABSLR
:
12191 case AVR_BUILTIN_ABSLLR
:
12193 case AVR_BUILTIN_ABSHK
:
12194 case AVR_BUILTIN_ABSK
:
12195 case AVR_BUILTIN_ABSLK
:
12196 case AVR_BUILTIN_ABSLLK
:
12197 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
12199 return avr_fold_absfx (arg
[0]);
12201 case AVR_BUILTIN_BITSHR
: case AVR_BUILTIN_HRBITS
:
12202 case AVR_BUILTIN_BITSHK
: case AVR_BUILTIN_HKBITS
:
12203 case AVR_BUILTIN_BITSUHR
: case AVR_BUILTIN_UHRBITS
:
12204 case AVR_BUILTIN_BITSUHK
: case AVR_BUILTIN_UHKBITS
:
12206 case AVR_BUILTIN_BITSR
: case AVR_BUILTIN_RBITS
:
12207 case AVR_BUILTIN_BITSK
: case AVR_BUILTIN_KBITS
:
12208 case AVR_BUILTIN_BITSUR
: case AVR_BUILTIN_URBITS
:
12209 case AVR_BUILTIN_BITSUK
: case AVR_BUILTIN_UKBITS
:
12211 case AVR_BUILTIN_BITSLR
: case AVR_BUILTIN_LRBITS
:
12212 case AVR_BUILTIN_BITSLK
: case AVR_BUILTIN_LKBITS
:
12213 case AVR_BUILTIN_BITSULR
: case AVR_BUILTIN_ULRBITS
:
12214 case AVR_BUILTIN_BITSULK
: case AVR_BUILTIN_ULKBITS
:
12216 case AVR_BUILTIN_BITSLLR
: case AVR_BUILTIN_LLRBITS
:
12217 case AVR_BUILTIN_BITSLLK
: case AVR_BUILTIN_LLKBITS
:
12218 case AVR_BUILTIN_BITSULLR
: case AVR_BUILTIN_ULLRBITS
:
12219 case AVR_BUILTIN_BITSULLK
: case AVR_BUILTIN_ULLKBITS
:
12221 gcc_assert (TYPE_PRECISION (val_type
)
12222 == TYPE_PRECISION (TREE_TYPE (arg
[0])));
12224 return build1 (VIEW_CONVERT_EXPR
, val_type
, arg
[0]);
12226 case AVR_BUILTIN_INSERT_BITS
:
12228 tree tbits
= arg
[1];
12229 tree tval
= arg
[2];
12231 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
12233 bool changed
= false;
12235 avr_map_op_t best_g
;
12237 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
12239 /* No constant as first argument: Don't fold this and run into
12240 error in avr_expand_builtin. */
12245 tmap
= wide_int_to_tree (map_type
, arg
[0]);
12246 map
= TREE_INT_CST_LOW (tmap
);
12248 if (TREE_CODE (tval
) != INTEGER_CST
12249 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
12251 /* There are no F in the map, i.e. 3rd operand is unused.
12252 Replace that argument with some constant to render
12253 respective input unused. */
12255 tval
= build_int_cst (val_type
, 0);
12259 if (TREE_CODE (tbits
) != INTEGER_CST
12260 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
12262 /* Similar for the bits to be inserted. If they are unused,
12263 we can just as well pass 0. */
12265 tbits
= build_int_cst (val_type
, 0);
12268 if (TREE_CODE (tbits
) == INTEGER_CST
)
12270 /* Inserting bits known at compile time is easy and can be
12271 performed by AND and OR with appropriate masks. */
12273 int bits
= TREE_INT_CST_LOW (tbits
);
12274 int mask_ior
= 0, mask_and
= 0xff;
12276 for (i
= 0; i
< 8; i
++)
12278 int mi
= avr_map (map
, i
);
12282 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
12283 else mask_and
&= ~(1 << i
);
12287 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
12288 build_int_cst (val_type
, mask_ior
));
12289 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
12290 build_int_cst (val_type
, mask_and
));
12294 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
12296 /* If bits don't change their position we can use vanilla logic
12297 to merge the two arguments. */
12299 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
12301 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
12302 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
12304 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
12305 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
12306 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
12309 /* Try to decomposing map to reduce overall cost. */
12311 if (avr_log
.builtin
)
12312 avr_edump ("\n%?: %x\n%?: ROL cost: ", map
);
12314 best_g
= avr_map_op
[0];
12315 best_g
.cost
= 1000;
12317 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
12320 = avr_map_decompose (map
, avr_map_op
+ i
,
12321 TREE_CODE (tval
) == INTEGER_CST
);
12323 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
12327 if (avr_log
.builtin
)
12330 if (best_g
.arg
== 0)
12331 /* No optimization found */
12334 /* Apply operation G to the 2nd argument. */
12336 if (avr_log
.builtin
)
12337 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
12338 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
12340 /* Do right-shifts arithmetically: They copy the MSB instead of
12341 shifting in a non-usable value (0) as with logic right-shift. */
12343 tbits
= fold_convert (signed_char_type_node
, tbits
);
12344 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
12345 build_int_cst (val_type
, best_g
.arg
));
12346 tbits
= fold_convert (val_type
, tbits
);
12348 /* Use map o G^-1 instead of original map to undo the effect of G. */
12350 tmap
= wide_int_to_tree (map_type
, best_g
.map
);
12352 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
12353 } /* AVR_BUILTIN_INSERT_BITS */
12361 /* Initialize the GCC target structure. */
12363 #undef TARGET_ASM_ALIGNED_HI_OP
12364 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
12365 #undef TARGET_ASM_ALIGNED_SI_OP
12366 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
12367 #undef TARGET_ASM_UNALIGNED_HI_OP
12368 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
12369 #undef TARGET_ASM_UNALIGNED_SI_OP
12370 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
12371 #undef TARGET_ASM_INTEGER
12372 #define TARGET_ASM_INTEGER avr_assemble_integer
12373 #undef TARGET_ASM_FILE_START
12374 #define TARGET_ASM_FILE_START avr_file_start
12375 #undef TARGET_ASM_FILE_END
12376 #define TARGET_ASM_FILE_END avr_file_end
12378 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
12379 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
12380 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
12381 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
12383 #undef TARGET_FUNCTION_VALUE
12384 #define TARGET_FUNCTION_VALUE avr_function_value
12385 #undef TARGET_LIBCALL_VALUE
12386 #define TARGET_LIBCALL_VALUE avr_libcall_value
12387 #undef TARGET_FUNCTION_VALUE_REGNO_P
12388 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
12390 #undef TARGET_ATTRIBUTE_TABLE
12391 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
12392 #undef TARGET_INSERT_ATTRIBUTES
12393 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
12394 #undef TARGET_SECTION_TYPE_FLAGS
12395 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
12397 #undef TARGET_ASM_NAMED_SECTION
12398 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
12399 #undef TARGET_ASM_INIT_SECTIONS
12400 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
12401 #undef TARGET_ENCODE_SECTION_INFO
12402 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
12403 #undef TARGET_ASM_SELECT_SECTION
12404 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
12406 #undef TARGET_REGISTER_MOVE_COST
12407 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
12408 #undef TARGET_MEMORY_MOVE_COST
12409 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
12410 #undef TARGET_RTX_COSTS
12411 #define TARGET_RTX_COSTS avr_rtx_costs
12412 #undef TARGET_ADDRESS_COST
12413 #define TARGET_ADDRESS_COST avr_address_cost
12414 #undef TARGET_MACHINE_DEPENDENT_REORG
12415 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
12416 #undef TARGET_FUNCTION_ARG
12417 #define TARGET_FUNCTION_ARG avr_function_arg
12418 #undef TARGET_FUNCTION_ARG_ADVANCE
12419 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
12421 #undef TARGET_SET_CURRENT_FUNCTION
12422 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
12424 #undef TARGET_RETURN_IN_MEMORY
12425 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
12427 #undef TARGET_STRICT_ARGUMENT_NAMING
12428 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
12430 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
12431 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
12433 #undef TARGET_HARD_REGNO_SCRATCH_OK
12434 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
12435 #undef TARGET_CASE_VALUES_THRESHOLD
12436 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
12438 #undef TARGET_FRAME_POINTER_REQUIRED
12439 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
12440 #undef TARGET_CAN_ELIMINATE
12441 #define TARGET_CAN_ELIMINATE avr_can_eliminate
12443 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
12444 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
12446 #undef TARGET_WARN_FUNC_RETURN
12447 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
12449 #undef TARGET_CLASS_LIKELY_SPILLED_P
12450 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
12452 #undef TARGET_OPTION_OVERRIDE
12453 #define TARGET_OPTION_OVERRIDE avr_option_override
12455 #undef TARGET_CANNOT_MODIFY_JUMPS_P
12456 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
12458 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
12459 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
12461 #undef TARGET_INIT_BUILTINS
12462 #define TARGET_INIT_BUILTINS avr_init_builtins
12464 #undef TARGET_BUILTIN_DECL
12465 #define TARGET_BUILTIN_DECL avr_builtin_decl
12467 #undef TARGET_EXPAND_BUILTIN
12468 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
12470 #undef TARGET_FOLD_BUILTIN
12471 #define TARGET_FOLD_BUILTIN avr_fold_builtin
12473 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
12474 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
12476 #undef TARGET_SCALAR_MODE_SUPPORTED_P
12477 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
12479 #undef TARGET_BUILD_BUILTIN_VA_LIST
12480 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
12482 #undef TARGET_FIXED_POINT_SUPPORTED_P
12483 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
12485 #undef TARGET_CONVERT_TO_TYPE
12486 #define TARGET_CONVERT_TO_TYPE avr_convert_to_type
12488 #undef TARGET_ADDR_SPACE_SUBSET_P
12489 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
12491 #undef TARGET_ADDR_SPACE_CONVERT
12492 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
12494 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
12495 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
12497 #undef TARGET_ADDR_SPACE_POINTER_MODE
12498 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
12500 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
12501 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
12502 avr_addr_space_legitimate_address_p
12504 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
12505 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
12507 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
12508 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
12510 #undef TARGET_SECONDARY_RELOAD
12511 #define TARGET_SECONDARY_RELOAD avr_secondary_reload
12513 #undef TARGET_PRINT_OPERAND
12514 #define TARGET_PRINT_OPERAND avr_print_operand
12515 #undef TARGET_PRINT_OPERAND_ADDRESS
12516 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
12517 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
12518 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
12520 struct gcc_target targetm
= TARGET_INITIALIZER
;
12523 #include "gt-avr.h"