1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998-2013 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov (chertykov@gmail.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
35 #include "print-tree.h"
37 #include "stor-layout.h"
38 #include "stringpool.h"
41 #include "c-family/c-common.h"
42 #include "diagnostic-core.h"
48 #include "langhooks.h"
51 #include "target-def.h"
/* NOTE(review): this file looks like a damaged extraction of a GCC AVR
   back end source.  The leading numbers on each line are the original
   line numbers; gaps in that numbering mean lines were dropped by the
   extraction.  Comments below are additions only; surviving tokens are
   untouched.  */
55 /* Maximal allowed offset for an address in the LD command */
56 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
58 /* Return true if STR starts with PREFIX and false, otherwise. */
59 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
61 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
62 address space where data is to be located.
63 As the only non-generic address spaces are all located in flash,
64 this can be used to test if data shall go into some .progmem* section.
65 This must be the rightmost field of machine dependent section flags. */
66 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
68 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
69 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
71 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
72 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
/* NOTE(review): the statement-wrapper lines of this macro (presumably a
   do { ... } while (0)) appear to have been dropped by the extraction;
   the body also references lowercase `sym' while the parameter is SYM --
   confirm against the original source before relying on it.  */
73 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
75 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
76 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
79 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
80 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
81 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
82 / SYMBOL_FLAG_MACH_DEP)
84 /* Known address spaces. The order must be the same as in the respective
85 enum from avr.h (or designated initializers must be used). */
/* NOTE(review): judging from the rows, each entry presumably reads
   { address-space id, located-in-flash flag, pointer size in bytes,
     keyword name, flash segment number, section name } -- confirm
   against the avr_addrspace_t definition in avr.h.  The outer braces of
   the initializer appear to have been dropped by the extraction.  */
86 const avr_addrspace_t avr_addrspace
[ADDR_SPACE_COUNT
] =
88 { ADDR_SPACE_RAM
, 0, 2, "", 0, NULL
},
89 { ADDR_SPACE_FLASH
, 1, 2, "__flash", 0, ".progmem.data" },
90 { ADDR_SPACE_FLASH1
, 1, 2, "__flash1", 1, ".progmem1.data" },
91 { ADDR_SPACE_FLASH2
, 1, 2, "__flash2", 2, ".progmem2.data" },
92 { ADDR_SPACE_FLASH3
, 1, 2, "__flash3", 3, ".progmem3.data" },
93 { ADDR_SPACE_FLASH4
, 1, 2, "__flash4", 4, ".progmem4.data" },
94 { ADDR_SPACE_FLASH5
, 1, 2, "__flash5", 5, ".progmem5.data" },
95 { ADDR_SPACE_MEMX
, 1, 3, "__memx", 0, ".progmemx.data" },
99 /* Holding RAM addresses of some SFRs used by the compiler and that
100 are unique over all devices in an architecture like 'avr4'. */
104 /* SREG: The processor status */
107 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
113 /* RAMPZ: The high byte of 24-bit address used with ELPM */
116 /* SP: The stack pointer and its low and high byte */
121 static avr_addr_t avr_addr
;
124 /* Prototypes for local helper functions. */
126 static const char* out_movqi_r_mr (rtx
, rtx
[], int*);
127 static const char* out_movhi_r_mr (rtx
, rtx
[], int*);
128 static const char* out_movsi_r_mr (rtx
, rtx
[], int*);
129 static const char* out_movqi_mr_r (rtx
, rtx
[], int*);
130 static const char* out_movhi_mr_r (rtx
, rtx
[], int*);
131 static const char* out_movsi_mr_r (rtx
, rtx
[], int*);
133 static int get_sequence_length (rtx insns
);
134 static int sequent_regs_live (void);
135 static const char *ptrreg_to_str (int);
136 static const char *cond_string (enum rtx_code
);
137 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
138 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
,
140 static void output_reload_in_const (rtx
*, rtx
, int*, bool);
141 static struct machine_function
* avr_init_machine_status (void);
144 /* Prototypes for hook implementors if needed before their implementation. */
146 static bool avr_rtx_costs (rtx
, int, int, int, int*, bool);
149 /* Allocate registers from r25 to r8 for parameters for function calls. */
150 #define FIRST_CUM_REG 26
152 /* Implicit target register of LPM instruction (R0) */
153 extern GTY(()) rtx lpm_reg_rtx
;
156 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
157 extern GTY(()) rtx lpm_addr_reg_rtx
;
158 rtx lpm_addr_reg_rtx
;
160 /* Temporary register RTX (reg:QI TMP_REGNO) */
161 extern GTY(()) rtx tmp_reg_rtx
;
164 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
165 extern GTY(()) rtx zero_reg_rtx
;
168 /* RTXs for all general purpose registers as QImode */
169 extern GTY(()) rtx all_regs_rtx
[32];
170 rtx all_regs_rtx
[32];
172 /* SREG, the processor status */
173 extern GTY(()) rtx sreg_rtx
;
176 /* RAMP* special function registers */
177 extern GTY(()) rtx rampd_rtx
;
178 extern GTY(()) rtx rampx_rtx
;
179 extern GTY(()) rtx rampy_rtx
;
180 extern GTY(()) rtx rampz_rtx
;
186 /* RTX containing the strings "" and "e", respectively */
187 static GTY(()) rtx xstring_empty
;
188 static GTY(()) rtx xstring_e
;
190 /* Current architecture. */
191 const avr_arch_t
*avr_current_arch
;
193 /* Current device. */
194 const avr_mcu_t
*avr_current_device
;
196 /* Section to put switch tables in. */
197 static GTY(()) section
*progmem_swtable_section
;
199 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
200 or to address space __flash* or __memx. Only used as singletons inside
201 avr_asm_select_section, but it must not be local there because of GTY. */
202 static GTY(()) section
*progmem_section
[ADDR_SPACE_COUNT
];
204 /* Condition for insns/expanders from avr-dimode.md. */
205 bool avr_have_dimode
= true;
207 /* To track if code will use .bss and/or .data. */
208 bool avr_need_clear_bss_p
= false;
209 bool avr_need_copy_data_p
= false;
212 /* Transform UP into lowercase and write the result to LO.
213 You must provide enough space for LO. Return LO. */
/* NOTE(review): the return-type line, braces, loop body and the return
   statement of this function were dropped by the extraction; only the
   signature and loop header survive.  */
216 avr_tolower (char *lo
, const char *up
)
220 for (; *up
; up
++, lo
++)
229 /* Custom function to count number of set bits. */
/* NOTE(review): only the signature survives here; the counting loop and
   return statement were dropped by the extraction.  */
232 avr_popcount (unsigned int val
)
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
252 avr_popcount_each_byte (rtx xval
, int n_bytes
, int pop_mask
)
256 enum machine_mode mode
= GET_MODE (xval
);
/* NOTE(review): the consequence of this VOIDmode test was dropped by the
   extraction -- presumably it substitutes a concrete integer mode for
   mode-less constants; confirm against the original source.  */
258 if (VOIDmode
== mode
)
/* Per byte: extract byte I of XVAL and test that its popcount is a
   member of POP_MASK.  The early-return and final-return lines were
   dropped by the extraction.  */
261 for (i
= 0; i
< n_bytes
; i
++)
263 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
264 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
266 if (0 == (pop_mask
& (1 << avr_popcount (val8
))))
274 /* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
275 the bit representation of X by "casting" it to CONST_INT. */
/* NOTE(review): the return-type line and the middle operand of the
   conditional expression (presumably `? x') were dropped by the
   extraction.  */
278 avr_to_int_mode (rtx x
)
280 enum machine_mode mode
= GET_MODE (x
);
282 return VOIDmode
== mode
284 : simplify_gen_subreg (int_mode_for_mode (mode
), x
, mode
, 0);
288 /* Implement `TARGET_OPTION_OVERRIDE'. */
291 avr_option_override (void)
/* NOTE(review): disabled here -- presumably because address 0 can be a
   valid data address on AVR; confirm against the original source.  */
293 flag_delete_null_pointer_checks
= 0;
295 /* caller-save.c looks for call-clobbered hard registers that are assigned
296 to pseudos that cross calls and tries to save-restore them around calls
297 in order to reduce the number of stack slots needed.
299 This might lead to situations where reload is no longer able to cope
300 with the challenge of AVR's very few address registers and fails to
301 perform the requested spills. */
304 flag_caller_saves
= 0;
306 /* Unwind tables currently require a frame pointer for correctness,
307 see toplev.c:process_options(). */
309 if ((flag_unwind_tables
310 || flag_non_call_exceptions
311 || flag_asynchronous_unwind_tables
)
312 && !ACCUMULATE_OUTGOING_ARGS
)
314 flag_omit_frame_pointer
= 0;
/* NOTE(review): the guard conditions for the four PIC/PIE warnings below
   (presumably `if (flag_pic == 1)' etc.) were dropped by the
   extraction.  */
318 warning (OPT_fpic
, "-fpic is not supported");
320 warning (OPT_fPIC
, "-fPIC is not supported");
322 warning (OPT_fpie
, "-fpie is not supported");
324 warning (OPT_fPIE
, "-fPIE is not supported");
/* Select the device/architecture records for the -mmcu= option.  */
326 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
327 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
329 /* RAM addresses of some SFRs common to all devices in respective arch. */
331 /* SREG: Status Register containing flags like I (global IRQ) */
332 avr_addr
.sreg
= 0x3F + avr_current_arch
->sfr_offset
;
334 /* RAMPZ: Address' high part when loading via ELPM */
335 avr_addr
.rampz
= 0x3B + avr_current_arch
->sfr_offset
;
337 avr_addr
.rampy
= 0x3A + avr_current_arch
->sfr_offset
;
338 avr_addr
.rampx
= 0x39 + avr_current_arch
->sfr_offset
;
339 avr_addr
.rampd
= 0x38 + avr_current_arch
->sfr_offset
;
340 avr_addr
.ccp
= 0x34 + avr_current_arch
->sfr_offset
;
342 /* SP: Stack Pointer (SP_H:SP_L) */
343 avr_addr
.sp_l
= 0x3D + avr_current_arch
->sfr_offset
;
344 avr_addr
.sp_h
= avr_addr
.sp_l
+ 1;
/* Install the per-function machine_function allocator and initialize
   the avr_log debug facility.  */
346 init_machine_status
= avr_init_machine_status
;
348 avr_log_set_avr_log();
351 /* Function to set up the backend function structure. */
/* Allocates a zeroed machine_function record via GGC.  NOTE(review): the
   function's braces were dropped by the extraction.  */
353 static struct machine_function
*
354 avr_init_machine_status (void)
356 return ggc_alloc_cleared_machine_function ();
360 /* Implement `INIT_EXPANDERS'. */
361 /* The function works like a singleton. */
364 avr_init_expanders (void)
/* Build one QImode REG rtx per general purpose register and cache the
   frequently used ones (LPM implicit target, tmp and zero regs).  */
368 for (regno
= 0; regno
< 32; regno
++)
369 all_regs_rtx
[regno
] = gen_rtx_REG (QImode
, regno
);
371 lpm_reg_rtx
= all_regs_rtx
[LPM_REGNO
];
372 tmp_reg_rtx
= all_regs_rtx
[TMP_REGNO
];
373 zero_reg_rtx
= all_regs_rtx
[ZERO_REGNO
];
375 lpm_addr_reg_rtx
= gen_rtx_REG (HImode
, REG_Z
);
/* MEM rtxes for the special function registers at the RAM addresses
   computed in avr_option_override for the current architecture.  */
377 sreg_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.sreg
));
378 rampd_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampd
));
379 rampx_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampx
));
380 rampy_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampy
));
381 rampz_rtx
= gen_rtx_MEM (QImode
, GEN_INT (avr_addr
.rampz
));
/* The "" and "e" CONST_STRING singletons declared above.  */
383 xstring_empty
= gen_rtx_CONST_STRING (VOIDmode
, "");
384 xstring_e
= gen_rtx_CONST_STRING (VOIDmode
, "e");
388 /* Implement `REGNO_REG_CLASS'. */
389 /* Return register class for register R. */
392 avr_regno_reg_class (int r
)
394 static const enum reg_class reg_class_tab
[] =
/* One entry per hard register, indexed by register number: NO_LD_REGS
   entries first, then SIMPLE_LD_REGS, the ADDW pair and the X/Y/Z
   pointer pairs.  NOTE(review): the table's braces, the group comments
   and presumably a bounds check before the lookup were dropped by the
   extraction -- confirm against the original source.  */
398 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
399 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
400 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
401 NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
, NO_LD_REGS
,
403 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
404 SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
, SIMPLE_LD_REGS
,
406 ADDW_REGS
, ADDW_REGS
,
408 POINTER_X_REGS
, POINTER_X_REGS
,
410 POINTER_Y_REGS
, POINTER_Y_REGS
,
412 POINTER_Z_REGS
, POINTER_Z_REGS
,
418 return reg_class_tab
[r
];
424 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
/* NOTE(review): the body of the fixed-point branch (presumably
   `return true;') and possibly further mode checks were dropped by the
   extraction; only the fallback to the default hook survives.  */
427 avr_scalar_mode_supported_p (enum machine_mode mode
)
429 if (ALL_FIXED_POINT_MODE_P (mode
))
435 return default_scalar_mode_supported_p (mode
);
439 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
/* NOTE(review): the early `return false;' for the non-VAR_DECL /
   error_mark_node case was dropped by the extraction.  */
442 avr_decl_flash_p (tree decl
)
444 if (TREE_CODE (decl
) != VAR_DECL
445 || TREE_TYPE (decl
) == error_mark_node
)
450 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
454 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
455 address space and FALSE, otherwise. */
/* NOTE(review): the early `return false;' for the non-VAR_DECL /
   error_mark_node case was dropped by the extraction.  */
458 avr_decl_memx_p (tree decl
)
460 if (TREE_CODE (decl
) != VAR_DECL
461 || TREE_TYPE (decl
) == error_mark_node
)
466 return (ADDR_SPACE_MEMX
== TYPE_ADDR_SPACE (TREE_TYPE (decl
)));
470 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
/* NOTE(review): the opening of the return expression (presumably
   `return (MEM_P (x)') was dropped by the extraction; only the
   address-space test survives.  */
473 avr_mem_flash_p (rtx x
)
476 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
)));
480 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
481 address space and FALSE, otherwise. */
/* NOTE(review): the opening of the return expression (presumably
   `return (MEM_P (x)') was dropped by the extraction; only the
   address-space test survives.  */
484 avr_mem_memx_p (rtx x
)
487 && ADDR_SPACE_MEMX
== MEM_ADDR_SPACE (x
));
491 /* A helper for the subsequent function attribute used to dig for
492 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
495 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
/* For a FUNCTION_DECL, first consult the decl's own attribute list
   (NOTE(review): the `return true;' on a hit was dropped by the
   extraction), then fall through to the function's type.  */
497 if (FUNCTION_DECL
== TREE_CODE (func
))
499 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
504 func
= TREE_TYPE (func
);
507 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
508 || TREE_CODE (func
) == METHOD_TYPE
);
510 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
513 /* Return nonzero if FUNC is a naked function. */
/* Thin predicate wrapper over avr_lookup_function_attribute1.  */
516 avr_naked_function_p (tree func
)
518 return avr_lookup_function_attribute1 (func
, "naked");
521 /* Return nonzero if FUNC is an interrupt function as specified
522 by the "interrupt" attribute. */
/* Thin predicate wrapper over avr_lookup_function_attribute1.  */
525 avr_interrupt_function_p (tree func
)
527 return avr_lookup_function_attribute1 (func
, "interrupt");
530 /* Return nonzero if FUNC is a signal function as specified
531 by the "signal" attribute. */
/* Thin predicate wrapper over avr_lookup_function_attribute1.  */
534 avr_signal_function_p (tree func
)
536 return avr_lookup_function_attribute1 (func
, "signal");
539 /* Return nonzero if FUNC is an OS_task function. */
/* Thin predicate wrapper over avr_lookup_function_attribute1.  */
542 avr_OS_task_function_p (tree func
)
544 return avr_lookup_function_attribute1 (func
, "OS_task");
547 /* Return nonzero if FUNC is an OS_main function. */
/* Thin predicate wrapper over avr_lookup_function_attribute1.  */
550 avr_OS_main_function_p (tree func
)
552 return avr_lookup_function_attribute1 (func
, "OS_main");
556 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
557 /* Sanity checking for above function attributes. */
560 avr_set_current_function (tree decl
)
565 if (decl
== NULL_TREE
566 || current_function_decl
== NULL_TREE
567 || current_function_decl
== error_mark_node
569 || cfun
->machine
->attributes_checked_p
)
572 loc
= DECL_SOURCE_LOCATION (decl
);
574 cfun
->machine
->is_naked
= avr_naked_function_p (decl
);
575 cfun
->machine
->is_signal
= avr_signal_function_p (decl
);
576 cfun
->machine
->is_interrupt
= avr_interrupt_function_p (decl
);
577 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (decl
);
578 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (decl
);
580 isr
= cfun
->machine
->is_interrupt
? "interrupt" : "signal";
582 /* Too many attributes make no sense as they request conflicting features. */
584 if (cfun
->machine
->is_OS_task
+ cfun
->machine
->is_OS_main
585 + (cfun
->machine
->is_signal
|| cfun
->machine
->is_interrupt
) > 1)
586 error_at (loc
, "function attributes %qs, %qs and %qs are mutually"
587 " exclusive", "OS_task", "OS_main", isr
);
589 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
591 if (cfun
->machine
->is_naked
592 && (cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
593 warning_at (loc
, OPT_Wattributes
, "function attributes %qs and %qs have"
594 " no effect on %qs function", "OS_task", "OS_main", "naked");
596 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
598 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
599 tree ret
= TREE_TYPE (TREE_TYPE (decl
));
602 name
= DECL_ASSEMBLER_NAME_SET_P (decl
)
603 /* Remove the leading '*' added in set_user_assembler_name. */
604 ? 1 + IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))
605 : IDENTIFIER_POINTER (DECL_NAME (decl
));
607 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC started
608 using this when it switched from SIGNAL and INTERRUPT to ISR. */
610 if (cfun
->machine
->is_interrupt
)
611 cfun
->machine
->is_signal
= 0;
613 /* Interrupt handlers must be void __vector (void) functions. */
615 if (args
&& TREE_CODE (TREE_VALUE (args
)) != VOID_TYPE
)
616 error_at (loc
, "%qs function cannot have arguments", isr
);
618 if (TREE_CODE (ret
) != VOID_TYPE
)
619 error_at (loc
, "%qs function cannot return a value", isr
);
621 /* If the function has the 'signal' or 'interrupt' attribute, ensure
622 that the name of the function is "__vector_NN" so as to catch
623 when the user misspells the vector name. */
625 if (!STR_PREFIX_P (name
, "__vector"))
626 warning_at (loc
, 0, "%qs appears to be a misspelled %s handler",
630 /* Don't print the above diagnostics more than once. */
632 cfun
->machine
->attributes_checked_p
= 1;
636 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
639 avr_accumulate_outgoing_args (void)
/* NOTE(review): the guard before this first return (presumably
   `if (!cfun)') was dropped by the extraction.  */
642 return TARGET_ACCUMULATE_OUTGOING_ARGS
;
644 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
645 what offset is correct. In some cases it is relative to
646 virtual_outgoing_args_rtx and in others it is relative to
647 virtual_stack_vars_rtx. For example code see
648 gcc.c-torture/execute/built-in-setjmp.c
649 gcc.c-torture/execute/builtins/sprintf-chk.c */
651 return (TARGET_ACCUMULATE_OUTGOING_ARGS
652 && !(cfun
->calls_setjmp
653 || cfun
->has_nonlocal_label
));
657 /* Report contribution of accumulated outgoing arguments to stack size. */
/* Returns crtl->outgoing_args_size when outgoing args are accumulated,
   else 0.  */
660 avr_outgoing_args_size (void)
662 return ACCUMULATE_OUTGOING_ARGS
? crtl
->outgoing_args_size
: 0;
666 /* Implement `STARTING_FRAME_OFFSET'. */
667 /* This is the offset from the frame pointer register to the first stack slot
668 that contains a variable living in the frame. */
/* NOTE(review): the `1 +' presumably skips the byte the frame pointer
   itself points at (SP points below the last pushed byte on AVR) --
   confirm against the original source.  */
671 avr_starting_frame_offset (void)
673 return 1 + avr_outgoing_args_size ();
677 /* Return the number of hard registers to push/pop in the prologue/epilogue
678 of the current function, and optionally store these registers in SET. */
681 avr_regs_to_save (HARD_REG_SET
*set
)
684 int int_or_sig_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
/* NOTE(review): the count variable, its increment, the guard on SET
   being non-NULL before this clear, and the final return were dropped
   by the extraction.  */
687 CLEAR_HARD_REG_SET (*set
);
690 /* No need to save any registers if the function never returns or
691 has the "OS_task" or "OS_main" attribute. */
693 if (TREE_THIS_VOLATILE (current_function_decl
)
694 || cfun
->machine
->is_OS_task
695 || cfun
->machine
->is_OS_main
)
698 for (reg
= 0; reg
< 32; reg
++)
700 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
701 any global register variables. */
706 if ((int_or_sig_p
&& !crtl
->is_leaf
&& call_used_regs
[reg
])
707 || (df_regs_ever_live_p (reg
)
708 && (int_or_sig_p
|| !call_used_regs
[reg
])
709 /* Don't record frame pointer registers here. They are treated
710 individually in prologue. */
711 && !(frame_pointer_needed
712 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
715 SET_HARD_REG_BIT (*set
, reg
);
723 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
/* Naked functions get no stack slots for arguments -- their bodies are
   written entirely in assembly.  */
726 avr_allocate_stack_slots_for_args (void)
728 return !cfun
->machine
->is_naked
;
732 /* Return true if register FROM can be eliminated via register TO. */
/* Always allow arg-pointer -> frame-pointer; allow elimination to the
   frame pointer whenever one is needed; the frame-pointer registers
   themselves may only be eliminated when no frame pointer is needed.  */
735 avr_can_eliminate (const int from
, const int to
)
737 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
738 || (frame_pointer_needed
&& to
== FRAME_POINTER_REGNUM
)
739 || ((from
== FRAME_POINTER_REGNUM
740 || from
== FRAME_POINTER_REGNUM
+ 1)
741 && !frame_pointer_needed
));
745 /* Implement `TARGET_WARN_FUNC_RETURN'. */
748 avr_warn_func_return (tree decl
)
750 /* Naked functions are implemented entirely in assembly, including the
751 return sequence, so suppress warnings about this. */
753 return !avr_naked_function_p (decl
);
756 /* Compute offset between arg_pointer and frame_pointer. */
759 avr_initial_elimination_offset (int from
, int to
)
/* NOTE(review): the body of this branch and the else-structure around
   the code below were dropped by the extraction -- confirm the branch
   layout against the original source.  */
761 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
765 int offset
= frame_pointer_needed
? 2 : 0;
766 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
/* Account for every register pushed in the prologue plus the return
   address (2- or 3-byte PC).  The extra `+ 1' presumably mirrors
   avr_starting_frame_offset -- confirm.  */
768 offset
+= avr_regs_to_save (NULL
);
769 return (get_frame_size () + avr_outgoing_args_size()
770 + avr_pc_size
+ 1 + offset
);
775 /* Helper for the function below. */
/* Build a fresh FIXED_POINT_TYPE node for machine mode MODE and store
   it in *NODE; SAT_P selects the saturating variant.  NOTE(review): the
   return-type line and the trailing layout/return lines appear to have
   been dropped by the extraction -- confirm against the original
   source.  */
778 avr_adjust_type_node (tree
*node
, enum machine_mode mode
, int sat_p
)
780 *node
= make_node (FIXED_POINT_TYPE
);
781 TYPE_SATURATING (*node
) = sat_p
;
782 TYPE_UNSIGNED (*node
) = UNSIGNED_FIXED_POINT_MODE_P (mode
);
783 TYPE_IBIT (*node
) = GET_MODE_IBIT (mode
);
784 TYPE_FBIT (*node
) = GET_MODE_FBIT (mode
);
785 TYPE_PRECISION (*node
) = GET_MODE_BITSIZE (mode
);
786 TYPE_ALIGN (*node
) = 8;
787 SET_TYPE_MODE (*node
, mode
);
793 /* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
796 avr_build_builtin_va_list (void)
798 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
799 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
800 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
801 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
802 to the long long accum modes instead of the desired [U]TAmode.
804 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
805 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
806 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
807 libgcc to detect IBIT and FBIT. */
/* Rebuild the plain and saturating [U]TA type nodes ...  */
809 avr_adjust_type_node (&ta_type_node
, TAmode
, 0);
810 avr_adjust_type_node (&uta_type_node
, UTAmode
, 0);
811 avr_adjust_type_node (&sat_ta_type_node
, TAmode
, 1);
812 avr_adjust_type_node (&sat_uta_type_node
, UTAmode
, 1);
/* ... and make the long long accum type nodes point at them.  */
814 unsigned_long_long_accum_type_node
= uta_type_node
;
815 long_long_accum_type_node
= ta_type_node
;
816 sat_unsigned_long_long_accum_type_node
= sat_uta_type_node
;
817 sat_long_long_accum_type_node
= sat_ta_type_node
;
819 /* Dispatch to the default handler. */
821 return std_build_builtin_va_list ();
825 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
826 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
827 frame pointer by +STARTING_FRAME_OFFSET.
828 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
829 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* NOTE(review): the trailing `return xval;' appears to have been
   dropped by the extraction.  */
832 avr_builtin_setjmp_frame_value (void)
834 rtx xval
= gen_reg_rtx (Pmode
);
835 emit_insn (gen_subhi3 (xval
, virtual_stack_vars_rtx
,
836 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
)));
841 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
842 This is return address of function. */
/* NOTE(review): several lines were dropped by the extraction: the COUNT
   check, the declaration of `r', the continuation of the warning string
   below, the branch structure choosing between the two symbol offsets,
   and the final `return r;'.  Confirm against the original source.  */
845 avr_return_addr_rtx (int count
, rtx tem
)
849 /* Can only return this function's return address. Others not supported. */
855 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
856 warning (0, "%<builtin_return_address%> contains only 2 bytes"
860 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
862 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
863 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
864 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
868 /* Return 1 if the function epilogue is just a single "ret". */
/* True when nothing was pushed, no frame or outgoing-args space exists
   and no ISR/naked/noreturn special handling applies.  */
871 avr_simple_epilogue (void)
873 return (! frame_pointer_needed
874 && get_frame_size () == 0
875 && avr_outgoing_args_size() == 0
876 && avr_regs_to_save (NULL
) == 0
877 && ! cfun
->machine
->is_interrupt
878 && ! cfun
->machine
->is_signal
879 && ! cfun
->machine
->is_naked
880 && ! TREE_THIS_VOLATILE (current_function_decl
));
883 /* This function checks sequence of live registers. */
/* NOTE(review): the declarations of the `live_seq'/`cur_seq' counters
   and most of the counting logic were dropped by the extraction; only
   loop headers, guards and the final return survive.  */
886 sequent_regs_live (void)
892 for (reg
= 0; reg
< 18; ++reg
)
896 /* Don't recognize sequences that contain global register
variables.  NOTE(review): this comment was cut mid-sentence by the
extraction; closed here so it does not swallow the code below.  */
905 if (!call_used_regs
[reg
])
907 if (df_regs_ever_live_p (reg
))
917 if (!frame_pointer_needed
)
919 if (df_regs_ever_live_p (REG_Y
))
927 if (df_regs_ever_live_p (REG_Y
+1))
940 return (cur_seq
== live_seq
) ? live_seq
: 0;
943 /* Obtain the length sequence of insns. */
/* Sums get_attr_length over every insn in the sequence INSNS.
   NOTE(review): the declarations of `insn'/`length' and the final
   `return length;' were dropped by the extraction.  */
946 get_sequence_length (rtx insns
)
951 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
952 length
+= get_attr_length (insn
);
958 /* Implement `INCOMING_RETURN_ADDR_RTX'. */
/* MEM for the 2-byte return address sitting just above SP on entry.  */
961 avr_incoming_return_addr_rtx (void)
963 /* The return address is at the top of the stack. Note that the push
964 was via post-decrement, which means the actual address is off by one. */
965 return gen_frame_mem (HImode
, plus_constant (Pmode
, stack_pointer_rtx
, 1));
968 /* Helper for expand_prologue. Emit a push of a byte register. */
/* Pushes hard register REGNO (one byte) via a post-decrement store
   through SP and bumps the recorded stack usage.  NOTE(review): the
   local declarations and the `if (frame_related_p)' guard before the
   RTX_FRAME_RELATED_P marking were dropped by the extraction.  */
971 emit_push_byte (unsigned regno
, bool frame_related_p
)
975 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
976 mem
= gen_frame_mem (QImode
, mem
);
977 reg
= gen_rtx_REG (QImode
, regno
);
979 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
981 RTX_FRAME_RELATED_P (insn
) = 1;
983 cfun
->machine
->stack_usage
++;
987 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
988 SFR is a MEM representing the memory location of the SFR.
989 If CLR_P then clear the SFR after the push using zero_reg. */
992 emit_push_sfr (rtx sfr
, bool frame_related_p
, bool clr_p
)
996 gcc_assert (MEM_P (sfr
));
998 /* IN __tmp_reg__, IO(SFR) */
999 insn
= emit_move_insn (tmp_reg_rtx
, sfr
);
1000 if (frame_related_p
)
1001 RTX_FRAME_RELATED_P (insn
) = 1;
1003 /* PUSH __tmp_reg__ */
1004 emit_push_byte (TMP_REGNO
, frame_related_p
);
/* NOTE(review): the `if (clr_p)' guard and braces around the clearing
   store below were dropped by the extraction.  */
1008 /* OUT IO(SFR), __zero_reg__ */
1009 insn
= emit_move_insn (sfr
, const0_rtx
);
1010 if (frame_related_p
)
1011 RTX_FRAME_RELATED_P (insn
) = 1;
1016 avr_prologue_setup_frame (HOST_WIDE_INT size
, HARD_REG_SET set
)
1019 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1020 int live_seq
= sequent_regs_live ();
1022 HOST_WIDE_INT size_max
1023 = (HOST_WIDE_INT
) GET_MODE_MASK (AVR_HAVE_8BIT_SP
? QImode
: Pmode
);
1025 bool minimize
= (TARGET_CALL_PROLOGUES
1029 && !cfun
->machine
->is_OS_task
1030 && !cfun
->machine
->is_OS_main
);
1033 && (frame_pointer_needed
1034 || avr_outgoing_args_size() > 8
1035 || (AVR_2_BYTE_PC
&& live_seq
> 6)
1039 int first_reg
, reg
, offset
;
1041 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
1042 gen_int_mode (size
, HImode
));
1044 pattern
= gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
1045 gen_int_mode (live_seq
+size
, HImode
));
1046 insn
= emit_insn (pattern
);
1047 RTX_FRAME_RELATED_P (insn
) = 1;
1049 /* Describe the effect of the unspec_volatile call to prologue_saves.
1050 Note that this formulation assumes that add_reg_note pushes the
1051 notes to the front. Thus we build them in the reverse order of
1052 how we want dwarf2out to process them. */
1054 /* The function does always set frame_pointer_rtx, but whether that
1055 is going to be permanent in the function is frame_pointer_needed. */
1057 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1058 gen_rtx_SET (VOIDmode
, (frame_pointer_needed
1060 : stack_pointer_rtx
),
1061 plus_constant (Pmode
, stack_pointer_rtx
,
1062 -(size
+ live_seq
))));
1064 /* Note that live_seq always contains r28+r29, but the other
1065 registers to be saved are all below 18. */
1067 first_reg
= 18 - (live_seq
- 2);
1069 for (reg
= 29, offset
= -live_seq
+ 1;
1071 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
1075 m
= gen_rtx_MEM (QImode
, plus_constant (Pmode
, stack_pointer_rtx
,
1077 r
= gen_rtx_REG (QImode
, reg
);
1078 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
1081 cfun
->machine
->stack_usage
+= size
+ live_seq
;
1083 else /* !minimize */
1087 for (reg
= 0; reg
< 32; ++reg
)
1088 if (TEST_HARD_REG_BIT (set
, reg
))
1089 emit_push_byte (reg
, true);
1091 if (frame_pointer_needed
1092 && (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
)))
1094 /* Push frame pointer. Always be consistent about the
1095 ordering of pushes -- epilogue_restores expects the
1096 register pair to be pushed low byte first. */
1098 emit_push_byte (REG_Y
, true);
1099 emit_push_byte (REG_Y
+ 1, true);
1102 if (frame_pointer_needed
1105 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1106 RTX_FRAME_RELATED_P (insn
) = 1;
1111 /* Creating a frame can be done by direct manipulation of the
1112 stack or via the frame pointer. These two methods are:
1119 the optimum method depends on function type, stack and
1120 frame size. To avoid a complex logic, both methods are
1121 tested and shortest is selected.
1123 There is also the case where SIZE != 0 and no frame pointer is
1124 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1125 In that case, insn (*) is not needed.
1126 We use the X register as scratch. This is safe because in X
1128 In an interrupt routine, the case of SIZE != 0 together with
1129 !frame_pointer_needed can only occur if the function is not a
1130 leaf function and thus X has already been saved. */
1133 HOST_WIDE_INT size_cfa
= size
, neg_size
;
1134 rtx fp_plus_insns
, fp
, my_fp
;
1136 gcc_assert (frame_pointer_needed
1140 fp
= my_fp
= (frame_pointer_needed
1142 : gen_rtx_REG (Pmode
, REG_X
));
1144 if (AVR_HAVE_8BIT_SP
)
1146 /* The high byte (r29) does not change:
1147 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1149 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1152 /* Cut down size and avoid size = 0 so that we don't run
1153 into ICE like PR52488 in the remainder. */
1155 if (size
> size_max
)
1157 /* Don't error so that insane code from newlib still compiles
1158 and does not break building newlib. As PR51345 is implemented
1159 now, there are multilib variants with -msp8.
1161 If user wants sanity checks he can use -Wstack-usage=
1164 For CFA we emit the original, non-saturated size so that
1165 the generic machinery is aware of the real stack usage and
1166 will print the above diagnostic as expected. */
1171 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1172 neg_size
= trunc_int_for_mode (-size
, GET_MODE (my_fp
));
1174 /************ Method 1: Adjust frame pointer ************/
1178 /* Normally, the dwarf2out frame-related-expr interpreter does
1179 not expect to have the CFA change once the frame pointer is
1180 set up. Thus, we avoid marking the move insn below and
1181 instead indicate that the entire operation is complete after
1182 the frame pointer subtraction is done. */
1184 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1185 if (frame_pointer_needed
)
1187 RTX_FRAME_RELATED_P (insn
) = 1;
1188 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1189 gen_rtx_SET (VOIDmode
, fp
, stack_pointer_rtx
));
1192 insn
= emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
),
1195 if (frame_pointer_needed
)
1197 RTX_FRAME_RELATED_P (insn
) = 1;
1198 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1199 gen_rtx_SET (VOIDmode
, fp
,
1200 plus_constant (Pmode
, fp
,
1204 /* Copy to stack pointer. Note that since we've already
1205 changed the CFA to the frame pointer this operation
1206 need not be annotated if frame pointer is needed.
1207 Always move through unspec, see PR50063.
1208 For meaning of irq_state see movhi_sp_r insn. */
1210 if (cfun
->machine
->is_interrupt
)
1213 if (TARGET_NO_INTERRUPTS
1214 || cfun
->machine
->is_signal
1215 || cfun
->machine
->is_OS_main
)
1218 if (AVR_HAVE_8BIT_SP
)
1221 insn
= emit_insn (gen_movhi_sp_r (stack_pointer_rtx
,
1222 fp
, GEN_INT (irq_state
)));
1223 if (!frame_pointer_needed
)
1225 RTX_FRAME_RELATED_P (insn
) = 1;
1226 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1227 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1228 plus_constant (Pmode
,
1233 fp_plus_insns
= get_insns ();
1236 /************ Method 2: Adjust Stack pointer ************/
1238 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1239 can only handle specific offsets. */
1241 if (avr_sp_immediate_operand (gen_int_mode (-size
, HImode
), HImode
))
1247 insn
= emit_move_insn (stack_pointer_rtx
,
1248 plus_constant (Pmode
, stack_pointer_rtx
,
1250 RTX_FRAME_RELATED_P (insn
) = 1;
1251 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
1252 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1253 plus_constant (Pmode
,
1256 if (frame_pointer_needed
)
1258 insn
= emit_move_insn (fp
, stack_pointer_rtx
);
1259 RTX_FRAME_RELATED_P (insn
) = 1;
1262 sp_plus_insns
= get_insns ();
1265 /************ Use shortest method ************/
1267 emit_insn (get_sequence_length (sp_plus_insns
)
1268 < get_sequence_length (fp_plus_insns
)
1274 emit_insn (fp_plus_insns
);
1277 cfun
->machine
->stack_usage
+= size_cfa
;
1278 } /* !minimize && size != 0 */
1283 /* Output function prologue. */
1286 avr_expand_prologue (void)
1291 size
= get_frame_size() + avr_outgoing_args_size();
1293 cfun
->machine
->stack_usage
= 0;
1295 /* Prologue: naked. */
1296 if (cfun
->machine
->is_naked
)
1301 avr_regs_to_save (&set
);
1303 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1305 /* Enable interrupts. */
1306 if (cfun
->machine
->is_interrupt
)
1307 emit_insn (gen_enable_interrupt ());
1309 /* Push zero reg. */
1310 emit_push_byte (ZERO_REGNO
, true);
1313 emit_push_byte (TMP_REGNO
, true);
1316 /* ??? There's no dwarf2 column reserved for SREG. */
1317 emit_push_sfr (sreg_rtx
, false, false /* clr */);
1319 /* Clear zero reg. */
1320 emit_move_insn (zero_reg_rtx
, const0_rtx
);
1322 /* Prevent any attempt to delete the setting of ZERO_REG! */
1323 emit_use (zero_reg_rtx
);
1325 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1326 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1329 emit_push_sfr (rampd_rtx
, false /* frame-related */, true /* clr */);
1332 && TEST_HARD_REG_BIT (set
, REG_X
)
1333 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1335 emit_push_sfr (rampx_rtx
, false /* frame-related */, true /* clr */);
1339 && (frame_pointer_needed
1340 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1341 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1343 emit_push_sfr (rampy_rtx
, false /* frame-related */, true /* clr */);
1347 && TEST_HARD_REG_BIT (set
, REG_Z
)
1348 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1350 emit_push_sfr (rampz_rtx
, false /* frame-related */, AVR_HAVE_RAMPD
);
1352 } /* is_interrupt is_signal */
1354 avr_prologue_setup_frame (size
, set
);
1356 if (flag_stack_usage_info
)
1357 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
1361 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
1362 /* Output summary at end of function prologue. */
1365 avr_asm_function_end_prologue (FILE *file
)
1367 if (cfun
->machine
->is_naked
)
1369 fputs ("/* prologue: naked */\n", file
);
1373 if (cfun
->machine
->is_interrupt
)
1375 fputs ("/* prologue: Interrupt */\n", file
);
1377 else if (cfun
->machine
->is_signal
)
1379 fputs ("/* prologue: Signal */\n", file
);
1382 fputs ("/* prologue: function */\n", file
);
1385 if (ACCUMULATE_OUTGOING_ARGS
)
1386 fprintf (file
, "/* outgoing args size = %d */\n",
1387 avr_outgoing_args_size());
1389 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
1391 fprintf (file
, "/* stack size = %d */\n",
1392 cfun
->machine
->stack_usage
);
1393 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1394 usage for offset so that SP + .L__stack_offset = return address. */
1395 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
1399 /* Implement `EPILOGUE_USES'. */
1402 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
1404 if (reload_completed
1406 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
1411 /* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
1414 emit_pop_byte (unsigned regno
)
1418 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
1419 mem
= gen_frame_mem (QImode
, mem
);
1420 reg
= gen_rtx_REG (QImode
, regno
);
1422 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
1425 /* Output RTL epilogue. */
1428 avr_expand_epilogue (bool sibcall_p
)
1435 bool isr_p
= cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
;
1437 size
= get_frame_size() + avr_outgoing_args_size();
1439 /* epilogue: naked */
1440 if (cfun
->machine
->is_naked
)
1442 gcc_assert (!sibcall_p
);
1444 emit_jump_insn (gen_return ());
1448 avr_regs_to_save (&set
);
1449 live_seq
= sequent_regs_live ();
1451 minimize
= (TARGET_CALL_PROLOGUES
1454 && !cfun
->machine
->is_OS_task
1455 && !cfun
->machine
->is_OS_main
);
1459 || frame_pointer_needed
1462 /* Get rid of frame. */
1464 if (!frame_pointer_needed
)
1466 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1471 emit_move_insn (frame_pointer_rtx
,
1472 plus_constant (Pmode
, frame_pointer_rtx
, size
));
1475 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1481 /* Try two methods to adjust stack and select shortest. */
1486 HOST_WIDE_INT size_max
;
1488 gcc_assert (frame_pointer_needed
1492 fp
= my_fp
= (frame_pointer_needed
1494 : gen_rtx_REG (Pmode
, REG_X
));
1496 if (AVR_HAVE_8BIT_SP
)
1498 /* The high byte (r29) does not change:
1499 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1501 my_fp
= all_regs_rtx
[FRAME_POINTER_REGNUM
];
1504 /* For rationale see comment in prologue generation. */
1506 size_max
= (HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (my_fp
));
1507 if (size
> size_max
)
1509 size
= trunc_int_for_mode (size
, GET_MODE (my_fp
));
1511 /********** Method 1: Adjust fp register **********/
1515 if (!frame_pointer_needed
)
1516 emit_move_insn (fp
, stack_pointer_rtx
);
1518 emit_move_insn (my_fp
, plus_constant (GET_MODE (my_fp
), my_fp
, size
));
1520 /* Copy to stack pointer. */
1522 if (TARGET_NO_INTERRUPTS
)
1525 if (AVR_HAVE_8BIT_SP
)
1528 emit_insn (gen_movhi_sp_r (stack_pointer_rtx
, fp
,
1529 GEN_INT (irq_state
)));
1531 fp_plus_insns
= get_insns ();
1534 /********** Method 2: Adjust Stack pointer **********/
1536 if (avr_sp_immediate_operand (gen_int_mode (size
, HImode
), HImode
))
1542 emit_move_insn (stack_pointer_rtx
,
1543 plus_constant (Pmode
, stack_pointer_rtx
, size
));
1545 sp_plus_insns
= get_insns ();
1548 /************ Use shortest method ************/
1550 emit_insn (get_sequence_length (sp_plus_insns
)
1551 < get_sequence_length (fp_plus_insns
)
1556 emit_insn (fp_plus_insns
);
1559 if (frame_pointer_needed
1560 && !(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1562 /* Restore previous frame_pointer. See avr_expand_prologue for
1563 rationale for not using pophi. */
1565 emit_pop_byte (REG_Y
+ 1);
1566 emit_pop_byte (REG_Y
);
1569 /* Restore used registers. */
1571 for (reg
= 31; reg
>= 0; --reg
)
1572 if (TEST_HARD_REG_BIT (set
, reg
))
1573 emit_pop_byte (reg
);
1577 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1578 The conditions to restore them must be tha same as in prologue. */
1581 && TEST_HARD_REG_BIT (set
, REG_Z
)
1582 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1584 emit_pop_byte (TMP_REGNO
);
1585 emit_move_insn (rampz_rtx
, tmp_reg_rtx
);
1589 && (frame_pointer_needed
1590 || (TEST_HARD_REG_BIT (set
, REG_Y
)
1591 && TEST_HARD_REG_BIT (set
, REG_Y
+ 1))))
1593 emit_pop_byte (TMP_REGNO
);
1594 emit_move_insn (rampy_rtx
, tmp_reg_rtx
);
1598 && TEST_HARD_REG_BIT (set
, REG_X
)
1599 && TEST_HARD_REG_BIT (set
, REG_X
+ 1))
1601 emit_pop_byte (TMP_REGNO
);
1602 emit_move_insn (rampx_rtx
, tmp_reg_rtx
);
1607 emit_pop_byte (TMP_REGNO
);
1608 emit_move_insn (rampd_rtx
, tmp_reg_rtx
);
1611 /* Restore SREG using tmp_reg as scratch. */
1613 emit_pop_byte (TMP_REGNO
);
1614 emit_move_insn (sreg_rtx
, tmp_reg_rtx
);
1616 /* Restore tmp REG. */
1617 emit_pop_byte (TMP_REGNO
);
1619 /* Restore zero REG. */
1620 emit_pop_byte (ZERO_REGNO
);
1624 emit_jump_insn (gen_return ());
1628 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'. */
1631 avr_asm_function_begin_epilogue (FILE *file
)
1633 fprintf (file
, "/* epilogue start */\n");
1637 /* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1640 avr_cannot_modify_jumps_p (void)
1643 /* Naked Functions must not have any instructions after
1644 their epilogue, see PR42240 */
1646 if (reload_completed
1648 && cfun
->machine
->is_naked
)
1657 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1660 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
, addr_space_t as
)
1662 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1663 This hook just serves to hack around PR rtl-optimization/52543 by
1664 claiming that non-generic addresses were mode-dependent so that
1665 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1666 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1667 generic address space which is not true. */
1669 return !ADDR_SPACE_GENERIC_P (as
);
1673 /* Helper function for `avr_legitimate_address_p'. */
1676 avr_reg_ok_for_addr_p (rtx reg
, addr_space_t as
,
1677 RTX_CODE outer_code
, bool strict
)
1680 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg
), QImode
,
1681 as
, outer_code
, UNKNOWN
)
1683 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)));
1687 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1688 machine for a memory operand of mode MODE. */
1691 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1693 bool ok
= CONSTANT_ADDRESS_P (x
);
1695 switch (GET_CODE (x
))
1698 ok
= avr_reg_ok_for_addr_p (x
, ADDR_SPACE_GENERIC
,
1702 && GET_MODE_SIZE (mode
) > 4
1703 && REG_X
== REGNO (x
))
1711 ok
= avr_reg_ok_for_addr_p (XEXP (x
, 0), ADDR_SPACE_GENERIC
,
1712 GET_CODE (x
), strict
);
1717 rtx reg
= XEXP (x
, 0);
1718 rtx op1
= XEXP (x
, 1);
1721 && CONST_INT_P (op1
)
1722 && INTVAL (op1
) >= 0)
1724 bool fit
= IN_RANGE (INTVAL (op1
), 0, MAX_LD_OFFSET (mode
));
1729 || avr_reg_ok_for_addr_p (reg
, ADDR_SPACE_GENERIC
,
1732 if (reg
== frame_pointer_rtx
1733 || reg
== arg_pointer_rtx
)
1738 else if (frame_pointer_needed
1739 && reg
== frame_pointer_rtx
)
1751 if (avr_log
.legitimate_address_p
)
1753 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1754 "reload_completed=%d reload_in_progress=%d %s:",
1755 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
1756 reg_renumber
? "(reg_renumber)" : "");
1758 if (GET_CODE (x
) == PLUS
1759 && REG_P (XEXP (x
, 0))
1760 && CONST_INT_P (XEXP (x
, 1))
1761 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
1764 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1765 true_regnum (XEXP (x
, 0)));
1768 avr_edump ("\n%r\n", x
);
1775 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1776 now only a helper for avr_addr_space_legitimize_address. */
1777 /* Attempts to replace X with a valid
1778 memory address for an operand of mode MODE */
1781 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1783 bool big_offset_p
= false;
1787 if (GET_CODE (oldx
) == PLUS
1788 && REG_P (XEXP (oldx
, 0)))
1790 if (REG_P (XEXP (oldx
, 1)))
1791 x
= force_reg (GET_MODE (oldx
), oldx
);
1792 else if (CONST_INT_P (XEXP (oldx
, 1)))
1794 int offs
= INTVAL (XEXP (oldx
, 1));
1795 if (frame_pointer_rtx
!= XEXP (oldx
, 0)
1796 && offs
> MAX_LD_OFFSET (mode
))
1798 big_offset_p
= true;
1799 x
= force_reg (GET_MODE (oldx
), oldx
);
1804 if (avr_log
.legitimize_address
)
1806 avr_edump ("\n%?: mode=%m\n %r\n", mode
, oldx
);
1809 avr_edump (" %s --> %r\n", big_offset_p
? "(big offset)" : "", x
);
1816 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1817 /* This will allow register R26/27 to be used where it is no worse than normal
1818 base pointers R28/29 or R30/31. For example, if base offset is greater
1819 than 63 bytes or for R++ or --R addressing. */
1822 avr_legitimize_reload_address (rtx
*px
, enum machine_mode mode
,
1823 int opnum
, int type
, int addr_type
,
1824 int ind_levels ATTRIBUTE_UNUSED
,
1825 rtx (*mk_memloc
)(rtx
,int))
1829 if (avr_log
.legitimize_reload_address
)
1830 avr_edump ("\n%?:%m %r\n", mode
, x
);
1832 if (1 && (GET_CODE (x
) == POST_INC
1833 || GET_CODE (x
) == PRE_DEC
))
1835 push_reload (XEXP (x
, 0), XEXP (x
, 0), &XEXP (x
, 0), &XEXP (x
, 0),
1836 POINTER_REGS
, GET_MODE (x
), GET_MODE (x
), 0, 0,
1837 opnum
, RELOAD_OTHER
);
1839 if (avr_log
.legitimize_reload_address
)
1840 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1841 POINTER_REGS
, XEXP (x
, 0), XEXP (x
, 0));
1846 if (GET_CODE (x
) == PLUS
1847 && REG_P (XEXP (x
, 0))
1848 && 0 == reg_equiv_constant (REGNO (XEXP (x
, 0)))
1849 && CONST_INT_P (XEXP (x
, 1))
1850 && INTVAL (XEXP (x
, 1)) >= 1)
1852 bool fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1856 if (reg_equiv_address (REGNO (XEXP (x
, 0))) != 0)
1858 int regno
= REGNO (XEXP (x
, 0));
1859 rtx mem
= mk_memloc (x
, regno
);
1861 push_reload (XEXP (mem
, 0), NULL_RTX
, &XEXP (mem
, 0), NULL
,
1862 POINTER_REGS
, Pmode
, VOIDmode
, 0, 0,
1863 1, (enum reload_type
) addr_type
);
1865 if (avr_log
.legitimize_reload_address
)
1866 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1867 POINTER_REGS
, XEXP (mem
, 0), NULL_RTX
);
1869 push_reload (mem
, NULL_RTX
, &XEXP (x
, 0), NULL
,
1870 BASE_POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1871 opnum
, (enum reload_type
) type
);
1873 if (avr_log
.legitimize_reload_address
)
1874 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1875 BASE_POINTER_REGS
, mem
, NULL_RTX
);
1880 else if (! (frame_pointer_needed
1881 && XEXP (x
, 0) == frame_pointer_rtx
))
1883 push_reload (x
, NULL_RTX
, px
, NULL
,
1884 POINTER_REGS
, GET_MODE (x
), VOIDmode
, 0, 0,
1885 opnum
, (enum reload_type
) type
);
1887 if (avr_log
.legitimize_reload_address
)
1888 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1889 POINTER_REGS
, x
, NULL_RTX
);
1899 /* Implement `TARGET_SECONDARY_RELOAD' */
1902 avr_secondary_reload (bool in_p
, rtx x
,
1903 reg_class_t reload_class ATTRIBUTE_UNUSED
,
1904 enum machine_mode mode
, secondary_reload_info
*sri
)
1908 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1909 && ADDR_SPACE_MEMX
!= MEM_ADDR_SPACE (x
))
1911 /* For the non-generic 16-bit spaces we need a d-class scratch. */
1918 case QImode
: sri
->icode
= CODE_FOR_reload_inqi
; break;
1919 case QQmode
: sri
->icode
= CODE_FOR_reload_inqq
; break;
1920 case UQQmode
: sri
->icode
= CODE_FOR_reload_inuqq
; break;
1922 case HImode
: sri
->icode
= CODE_FOR_reload_inhi
; break;
1923 case HQmode
: sri
->icode
= CODE_FOR_reload_inhq
; break;
1924 case HAmode
: sri
->icode
= CODE_FOR_reload_inha
; break;
1925 case UHQmode
: sri
->icode
= CODE_FOR_reload_inuhq
; break;
1926 case UHAmode
: sri
->icode
= CODE_FOR_reload_inuha
; break;
1928 case PSImode
: sri
->icode
= CODE_FOR_reload_inpsi
; break;
1930 case SImode
: sri
->icode
= CODE_FOR_reload_insi
; break;
1931 case SFmode
: sri
->icode
= CODE_FOR_reload_insf
; break;
1932 case SQmode
: sri
->icode
= CODE_FOR_reload_insq
; break;
1933 case SAmode
: sri
->icode
= CODE_FOR_reload_insa
; break;
1934 case USQmode
: sri
->icode
= CODE_FOR_reload_inusq
; break;
1935 case USAmode
: sri
->icode
= CODE_FOR_reload_inusa
; break;
1943 /* Helper function to print assembler resp. track instruction
1944 sequence lengths. Always return "".
1947 Output assembler code from template TPL with operands supplied
1948 by OPERANDS. This is just forwarding to output_asm_insn.
1951 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1952 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1953 Don't output anything.
1957 avr_asm_len (const char* tpl
, rtx
* operands
, int* plen
, int n_words
)
1961 output_asm_insn (tpl
, operands
);
1975 /* Return a pointer register name as a string. */
1978 ptrreg_to_str (int regno
)
1982 case REG_X
: return "X";
1983 case REG_Y
: return "Y";
1984 case REG_Z
: return "Z";
1986 output_operand_lossage ("address operand requires constraint for"
1987 " X, Y, or Z register");
1992 /* Return the condition name as a string.
1993 Used in conditional jump constructing */
1996 cond_string (enum rtx_code code
)
2005 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2010 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2026 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2027 /* Output ADDR to FILE as address. */
2030 avr_print_operand_address (FILE *file
, rtx addr
)
2032 switch (GET_CODE (addr
))
2035 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
2039 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2043 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
2047 if (CONSTANT_ADDRESS_P (addr
)
2048 && text_segment_operand (addr
, VOIDmode
))
2051 if (GET_CODE (x
) == CONST
)
2053 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
2055 /* Assembler gs() will implant word address. Make offset
2056 a byte offset inside gs() for assembler. This is
2057 needed because the more logical (constant+gs(sym)) is not
2058 accepted by gas. For 128K and smaller devices this is ok.
2059 For large devices it will create a trampoline to offset
2060 from symbol which may not be what the user really wanted. */
2062 fprintf (file
, "gs(");
2063 output_addr_const (file
, XEXP (x
,0));
2064 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
")",
2065 2 * INTVAL (XEXP (x
, 1)));
2067 if (warning (0, "pointer offset from symbol maybe incorrect"))
2069 output_addr_const (stderr
, addr
);
2070 fprintf(stderr
,"\n");
2075 fprintf (file
, "gs(");
2076 output_addr_const (file
, addr
);
2077 fprintf (file
, ")");
2081 output_addr_const (file
, addr
);
2086 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2089 avr_print_operand_punct_valid_p (unsigned char code
)
2091 return code
== '~' || code
== '!';
2095 /* Implement `TARGET_PRINT_OPERAND'. */
2096 /* Output X as assembler operand to file FILE.
2097 For a description of supported %-codes, see top of avr.md. */
2100 avr_print_operand (FILE *file
, rtx x
, int code
)
2104 if (code
>= 'A' && code
<= 'D')
2109 if (!AVR_HAVE_JMP_CALL
)
2112 else if (code
== '!')
2114 if (AVR_HAVE_EIJMP_EICALL
)
2117 else if (code
== 't'
2120 static int t_regno
= -1;
2121 static int t_nbits
= -1;
2123 if (REG_P (x
) && t_regno
< 0 && code
== 'T')
2125 t_regno
= REGNO (x
);
2126 t_nbits
= GET_MODE_BITSIZE (GET_MODE (x
));
2128 else if (CONST_INT_P (x
) && t_regno
>= 0
2129 && IN_RANGE (INTVAL (x
), 0, t_nbits
- 1))
2131 int bpos
= INTVAL (x
);
2133 fprintf (file
, "%s", reg_names
[t_regno
+ bpos
/ 8]);
2135 fprintf (file
, ",%d", bpos
% 8);
2140 fatal_insn ("operands to %T/%t must be reg + const_int:", x
);
2144 if (x
== zero_reg_rtx
)
2145 fprintf (file
, "__zero_reg__");
2146 else if (code
== 'r' && REGNO (x
) < 32)
2147 fprintf (file
, "%d", (int) REGNO (x
));
2149 fprintf (file
, reg_names
[REGNO (x
) + abcd
]);
2151 else if (CONST_INT_P (x
))
2153 HOST_WIDE_INT ival
= INTVAL (x
);
2156 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
+ abcd
);
2157 else if (low_io_address_operand (x
, VOIDmode
)
2158 || high_io_address_operand (x
, VOIDmode
))
2160 if (AVR_HAVE_RAMPZ
&& ival
== avr_addr
.rampz
)
2161 fprintf (file
, "__RAMPZ__");
2162 else if (AVR_HAVE_RAMPY
&& ival
== avr_addr
.rampy
)
2163 fprintf (file
, "__RAMPY__");
2164 else if (AVR_HAVE_RAMPX
&& ival
== avr_addr
.rampx
)
2165 fprintf (file
, "__RAMPX__");
2166 else if (AVR_HAVE_RAMPD
&& ival
== avr_addr
.rampd
)
2167 fprintf (file
, "__RAMPD__");
2168 else if (AVR_XMEGA
&& ival
== avr_addr
.ccp
)
2169 fprintf (file
, "__CCP__");
2170 else if (ival
== avr_addr
.sreg
) fprintf (file
, "__SREG__");
2171 else if (ival
== avr_addr
.sp_l
) fprintf (file
, "__SP_L__");
2172 else if (ival
== avr_addr
.sp_h
) fprintf (file
, "__SP_H__");
2175 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
2176 ival
- avr_current_arch
->sfr_offset
);
2180 fatal_insn ("bad address, not an I/O address:", x
);
2184 rtx addr
= XEXP (x
, 0);
2188 if (!CONSTANT_P (addr
))
2189 fatal_insn ("bad address, not a constant:", addr
);
2190 /* Assembler template with m-code is data - not progmem section */
2191 if (text_segment_operand (addr
, VOIDmode
))
2192 if (warning (0, "accessing data memory with"
2193 " program memory address"))
2195 output_addr_const (stderr
, addr
);
2196 fprintf(stderr
,"\n");
2198 output_addr_const (file
, addr
);
2200 else if (code
== 'i')
2202 avr_print_operand (file
, addr
, 'i');
2204 else if (code
== 'o')
2206 if (GET_CODE (addr
) != PLUS
)
2207 fatal_insn ("bad address, not (reg+disp):", addr
);
2209 avr_print_operand (file
, XEXP (addr
, 1), 0);
2211 else if (code
== 'p' || code
== 'r')
2213 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
2214 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
2217 avr_print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
2219 avr_print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
2221 else if (GET_CODE (addr
) == PLUS
)
2223 avr_print_operand_address (file
, XEXP (addr
,0));
2224 if (REGNO (XEXP (addr
, 0)) == REG_X
)
2225 fatal_insn ("internal compiler error. Bad address:"
2228 avr_print_operand (file
, XEXP (addr
,1), code
);
2231 avr_print_operand_address (file
, addr
);
2233 else if (code
== 'i')
2235 fatal_insn ("bad address, not an I/O address:", x
);
2237 else if (code
== 'x')
2239 /* Constant progmem address - like used in jmp or call */
2240 if (0 == text_segment_operand (x
, VOIDmode
))
2241 if (warning (0, "accessing program memory"
2242 " with data memory address"))
2244 output_addr_const (stderr
, x
);
2245 fprintf(stderr
,"\n");
2247 /* Use normal symbol for direct address no linker trampoline needed */
2248 output_addr_const (file
, x
);
2250 else if (CONST_FIXED_P (x
))
2252 HOST_WIDE_INT ival
= INTVAL (avr_to_int_mode (x
));
2254 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2256 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ival
);
2258 else if (GET_CODE (x
) == CONST_DOUBLE
)
2262 if (GET_MODE (x
) != SFmode
)
2263 fatal_insn ("internal compiler error. Unknown mode:", x
);
2264 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
2265 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
2266 fprintf (file
, "0x%lx", val
);
2268 else if (GET_CODE (x
) == CONST_STRING
)
2269 fputs (XSTR (x
, 0), file
);
2270 else if (code
== 'j')
2271 fputs (cond_string (GET_CODE (x
)), file
);
2272 else if (code
== 'k')
2273 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
2275 avr_print_operand_address (file
, x
);
2279 /* Worker function for `NOTICE_UPDATE_CC'. */
2280 /* Update the condition code in the INSN. */
2283 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
2286 enum attr_cc cc
= get_attr_cc (insn
);
2296 rtx
*op
= recog_data
.operand
;
2299 /* Extract insn's operands. */
2300 extract_constrain_insn_cached (insn
);
2308 avr_out_plus (insn
, op
, &len_dummy
, &icc
);
2309 cc
= (enum attr_cc
) icc
;
2314 cc
= (op
[1] == CONST0_RTX (GET_MODE (op
[0]))
2315 && reg_overlap_mentioned_p (op
[0], zero_reg_rtx
))
2316 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2318 /* Any other "r,rL" combination does not alter cc0. */
2322 } /* inner switch */
2326 } /* outer swicth */
2331 /* Special values like CC_OUT_PLUS from above have been
2332 mapped to "standard" CC_* values so we never come here. */
2338 /* Insn does not affect CC at all. */
2346 set
= single_set (insn
);
2350 cc_status
.flags
|= CC_NO_OVERFLOW
;
2351 cc_status
.value1
= SET_DEST (set
);
2356 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2357 The V flag may or may not be known but that's ok because
2358 alter_cond will change tests to use EQ/NE. */
2359 set
= single_set (insn
);
2363 cc_status
.value1
= SET_DEST (set
);
2364 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
2369 set
= single_set (insn
);
2372 cc_status
.value1
= SET_SRC (set
);
2376 /* Insn doesn't leave CC in a usable state. */
2382 /* Choose mode for jump insn:
2383 1 - relative jump in range -63 <= x <= 62 ;
2384 2 - relative jump in range -2046 <= x <= 2045 ;
2385 3 - absolute jump (only for ATmega[16]03). */
2388 avr_jump_mode (rtx x
, rtx insn
)
2390 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
2391 ? XEXP (x
, 0) : x
));
2392 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2393 int jump_distance
= cur_addr
- dest_addr
;
2395 if (-63 <= jump_distance
&& jump_distance
<= 62)
2397 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
2399 else if (AVR_HAVE_JMP_CALL
)
2405 /* Return an AVR condition jump commands.
2406 X is a comparison RTX.
2407 LEN is a number returned by avr_jump_mode function.
2408 If REVERSE nonzero then condition code in X must be reversed. */
2411 ret_cond_branch (rtx x
, int len
, int reverse
)
2413 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
2418 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2419 return (len
== 1 ? ("breq .+2" CR_TAB
2421 len
== 2 ? ("breq .+4" CR_TAB
2429 return (len
== 1 ? ("breq .+2" CR_TAB
2431 len
== 2 ? ("breq .+4" CR_TAB
2438 return (len
== 1 ? ("breq .+2" CR_TAB
2440 len
== 2 ? ("breq .+4" CR_TAB
2447 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
2448 return (len
== 1 ? ("breq %0" CR_TAB
2450 len
== 2 ? ("breq .+2" CR_TAB
2457 return (len
== 1 ? ("breq %0" CR_TAB
2459 len
== 2 ? ("breq .+2" CR_TAB
2466 return (len
== 1 ? ("breq %0" CR_TAB
2468 len
== 2 ? ("breq .+2" CR_TAB
2482 return ("br%j1 .+2" CR_TAB
2485 return ("br%j1 .+4" CR_TAB
2496 return ("br%k1 .+2" CR_TAB
2499 return ("br%k1 .+4" CR_TAB
2508 /* Worker function for `FINAL_PRESCAN_INSN'. */
2509 /* Output insn cost for next insn. */
2512 avr_final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
2513 int num_operands ATTRIBUTE_UNUSED
)
2515 if (avr_log
.rtx_costs
)
2517 rtx set
= single_set (insn
);
2520 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
2521 set_src_cost (SET_SRC (set
), optimize_insn_for_speed_p ()));
2523 fprintf (asm_out_file
, "/* DEBUG: pattern-cost = %d. */\n",
2524 rtx_cost (PATTERN (insn
), INSN
, 0,
2525 optimize_insn_for_speed_p()));
2529 /* Return 0 if undefined, 1 if always true or always false. */
2532 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
2534 unsigned int max
= (mode
== QImode
? 0xff :
2535 mode
== HImode
? 0xffff :
2536 mode
== PSImode
? 0xffffff :
2537 mode
== SImode
? 0xffffffff : 0);
2538 if (max
&& op
&& CONST_INT_P (x
))
2540 if (unsigned_condition (op
) != op
)
2543 if (max
!= (INTVAL (x
) & max
)
2544 && INTVAL (x
) != 0xff)
2551 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2552 /* Returns nonzero if REGNO is the number of a hard
2553 register in which function arguments are sometimes passed. */
2556 avr_function_arg_regno_p(int r
)
2558 return (r
>= 8 && r
<= 25);
2562 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2563 /* Initializing the variable cum for the state at the beginning
2564 of the argument list. */
2567 avr_init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
2568 tree fndecl ATTRIBUTE_UNUSED
)
2571 cum
->regno
= FIRST_CUM_REG
;
2572 if (!libname
&& stdarg_p (fntype
))
2575 /* Assume the calle may be tail called */
2577 cfun
->machine
->sibcall_fails
= 0;
2580 /* Returns the number of registers to allocate for a function argument. */
2583 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
2587 if (mode
== BLKmode
)
2588 size
= int_size_in_bytes (type
);
2590 size
= GET_MODE_SIZE (mode
);
2592 /* Align all function arguments to start in even-numbered registers.
2593 Odd-sized arguments leave holes above them. */
2595 return (size
+ 1) & ~1;
2599 /* Implement `TARGET_FUNCTION_ARG'. */
2600 /* Controls whether a function argument is passed
2601 in a register, and which register. */
2604 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
2605 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2607 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2608 int bytes
= avr_num_arg_regs (mode
, type
);
2610 if (cum
->nregs
&& bytes
<= cum
->nregs
)
2611 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
2617 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2618 /* Update the summarizer variable CUM to advance past an argument
2619 in the argument list. */
2622 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
2623 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2625 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2626 int bytes
= avr_num_arg_regs (mode
, type
);
2628 cum
->nregs
-= bytes
;
2629 cum
->regno
-= bytes
;
2631 /* A parameter is being passed in a call-saved register. As the original
2632 contents of these regs has to be restored before leaving the function,
2633 a function must not pass arguments in call-saved regs in order to get
2638 && !call_used_regs
[cum
->regno
])
2640 /* FIXME: We ship info on failing tail-call in struct machine_function.
2641 This uses internals of calls.c:expand_call() and the way args_so_far
2642 is used. targetm.function_ok_for_sibcall() needs to be extended to
2643 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2644 dependent so that such an extension is not wanted. */
2646 cfun
->machine
->sibcall_fails
= 1;
2649 /* Test if all registers needed by the ABI are actually available. If the
2650 user has fixed a GPR needed to pass an argument, an (implicit) function
2651 call will clobber that fixed register. See PR45099 for an example. */
2658 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
2659 if (fixed_regs
[regno
])
2660 warning (0, "fixed register %s used to pass parameter to function",
2664 if (cum
->nregs
<= 0)
2667 cum
->regno
= FIRST_CUM_REG
;
2671 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2672 /* Decide whether we can make a sibling call to a function. DECL is the
2673 declaration of the function being targeted by the call and EXP is the
2674 CALL_EXPR representing the call. */
2677 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
2681 /* Tail-calling must fail if callee-saved regs are used to pass
2682 function args. We must not tail-call when `epilogue_restores'
2683 is used. Unfortunately, we cannot tell at this point if that
2684 actually will happen or not, and we cannot step back from
2685 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2687 if (cfun
->machine
->sibcall_fails
2688 || TARGET_CALL_PROLOGUES
)
2693 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
2697 decl_callee
= TREE_TYPE (decl_callee
);
2701 decl_callee
= fntype_callee
;
2703 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
2704 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
2706 decl_callee
= TREE_TYPE (decl_callee
);
2710 /* Ensure that caller and callee have compatible epilogues */
2712 if (cfun
->machine
->is_interrupt
2713 || cfun
->machine
->is_signal
2714 || cfun
->machine
->is_naked
2715 || avr_naked_function_p (decl_callee
)
2716 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2717 || (avr_OS_task_function_p (decl_callee
)
2718 != cfun
->machine
->is_OS_task
)
2719 || (avr_OS_main_function_p (decl_callee
)
2720 != cfun
->machine
->is_OS_main
))
2728 /***********************************************************************
2729 Functions for outputting various mov's for a various modes
2730 ************************************************************************/
2732 /* Return true if a value of mode MODE is read from flash by
2733 __load_* function from libgcc. */
2736 avr_load_libgcc_p (rtx op
)
2738 enum machine_mode mode
= GET_MODE (op
);
2739 int n_bytes
= GET_MODE_SIZE (mode
);
2743 && avr_mem_flash_p (op
));
2746 /* Return true if a value of mode MODE is read by __xload_* function. */
2749 avr_xload_libgcc_p (enum machine_mode mode
)
2751 int n_bytes
= GET_MODE_SIZE (mode
);
2754 || avr_current_device
->n_flash
> 1);
2758 /* Fixme: This is a hack because secondary reloads don't works as expected.
2760 Find an unused d-register to be used as scratch in INSN.
2761 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2762 is a register, skip all possible return values that overlap EXCLUDE.
2763 The policy for the returned register is similar to that of
2764 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2767 Return a QImode d-register or NULL_RTX if nothing found. */
2770 avr_find_unused_d_reg (rtx insn
, rtx exclude
)
2773 bool isr_p
= (avr_interrupt_function_p (current_function_decl
)
2774 || avr_signal_function_p (current_function_decl
));
2776 for (regno
= 16; regno
< 32; regno
++)
2778 rtx reg
= all_regs_rtx
[regno
];
2781 && reg_overlap_mentioned_p (exclude
, reg
))
2782 || fixed_regs
[regno
])
2787 /* Try non-live register */
2789 if (!df_regs_ever_live_p (regno
)
2790 && (TREE_THIS_VOLATILE (current_function_decl
)
2791 || cfun
->machine
->is_OS_task
2792 || cfun
->machine
->is_OS_main
2793 || (!isr_p
&& call_used_regs
[regno
])))
2798 /* Any live register can be used if it is unused after.
2799 Prologue/epilogue will care for it as needed. */
2801 if (df_regs_ever_live_p (regno
)
2802 && reg_unused_after (insn
, reg
))
2812 /* Helper function for the next function in the case where only restricted
2813 version of LPM instruction is available. */
2816 avr_out_lpm_no_lpmx (rtx insn
, rtx
*xop
, int *plen
)
2820 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2823 regno_dest
= REGNO (dest
);
2825 /* The implicit target register of LPM. */
2826 xop
[3] = lpm_reg_rtx
;
2828 switch (GET_CODE (addr
))
2835 gcc_assert (REG_Z
== REGNO (addr
));
2843 avr_asm_len ("%4lpm", xop
, plen
, 1);
2845 if (regno_dest
!= LPM_REGNO
)
2846 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
2851 if (REGNO (dest
) == REG_Z
)
2852 return avr_asm_len ("%4lpm" CR_TAB
2857 "pop %A0", xop
, plen
, 6);
2859 avr_asm_len ("%4lpm" CR_TAB
2863 "mov %B0,%3", xop
, plen
, 5);
2865 if (!reg_unused_after (insn
, addr
))
2866 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
2875 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
2878 if (regno_dest
== LPM_REGNO
)
2879 avr_asm_len ("%4lpm" CR_TAB
2880 "adiw %2,1", xop
, plen
, 2);
2882 avr_asm_len ("%4lpm" CR_TAB
2884 "adiw %2,1", xop
, plen
, 3);
2887 avr_asm_len ("%4lpm" CR_TAB
2889 "adiw %2,1", xop
, plen
, 3);
2892 avr_asm_len ("%4lpm" CR_TAB
2894 "adiw %2,1", xop
, plen
, 3);
2897 avr_asm_len ("%4lpm" CR_TAB
2899 "adiw %2,1", xop
, plen
, 3);
2901 break; /* POST_INC */
2903 } /* switch CODE (addr) */
2909 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2910 OP[1] in AS1 to register OP[0].
2911 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2915 avr_out_lpm (rtx insn
, rtx
*op
, int *plen
)
2919 rtx src
= SET_SRC (single_set (insn
));
2921 int n_bytes
= GET_MODE_SIZE (GET_MODE (dest
));
2924 addr_space_t as
= MEM_ADDR_SPACE (src
);
2931 warning (0, "writing to address space %qs not supported",
2932 avr_addrspace
[MEM_ADDR_SPACE (dest
)].name
);
2937 addr
= XEXP (src
, 0);
2938 code
= GET_CODE (addr
);
2940 gcc_assert (REG_P (dest
));
2941 gcc_assert (REG
== code
|| POST_INC
== code
);
2945 xop
[2] = lpm_addr_reg_rtx
;
2946 xop
[4] = xstring_empty
;
2947 xop
[5] = tmp_reg_rtx
;
2948 xop
[6] = XEXP (rampz_rtx
, 0);
2950 segment
= avr_addrspace
[as
].segment
;
2952 /* Set RAMPZ as needed. */
2956 xop
[4] = GEN_INT (segment
);
2957 xop
[3] = avr_find_unused_d_reg (insn
, lpm_addr_reg_rtx
);
2959 if (xop
[3] != NULL_RTX
)
2961 avr_asm_len ("ldi %3,%4" CR_TAB
2962 "out %i6,%3", xop
, plen
, 2);
2964 else if (segment
== 1)
2966 avr_asm_len ("clr %5" CR_TAB
2968 "out %i6,%5", xop
, plen
, 3);
2972 avr_asm_len ("mov %5,%2" CR_TAB
2975 "mov %2,%5", xop
, plen
, 4);
2980 if (!AVR_HAVE_ELPMX
)
2981 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2983 else if (!AVR_HAVE_LPMX
)
2985 return avr_out_lpm_no_lpmx (insn
, xop
, plen
);
2988 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2990 switch (GET_CODE (addr
))
2997 gcc_assert (REG_Z
== REGNO (addr
));
3005 return avr_asm_len ("%4lpm %0,%a2", xop
, plen
, 1);
3008 if (REGNO (dest
) == REG_Z
)
3009 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3010 "%4lpm %B0,%a2" CR_TAB
3011 "mov %A0,%5", xop
, plen
, 3);
3014 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3015 "%4lpm %B0,%a2", xop
, plen
, 2);
3017 if (!reg_unused_after (insn
, addr
))
3018 avr_asm_len ("sbiw %2,1", xop
, plen
, 1);
3025 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3026 "%4lpm %B0,%a2+" CR_TAB
3027 "%4lpm %C0,%a2", xop
, plen
, 3);
3029 if (!reg_unused_after (insn
, addr
))
3030 avr_asm_len ("sbiw %2,2", xop
, plen
, 1);
3036 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3037 "%4lpm %B0,%a2+", xop
, plen
, 2);
3039 if (REGNO (dest
) == REG_Z
- 2)
3040 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3041 "%4lpm %C0,%a2" CR_TAB
3042 "mov %D0,%5", xop
, plen
, 3);
3045 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3046 "%4lpm %D0,%a2", xop
, plen
, 2);
3048 if (!reg_unused_after (insn
, addr
))
3049 avr_asm_len ("sbiw %2,3", xop
, plen
, 1);
3059 gcc_assert (REG_Z
== REGNO (XEXP (addr
, 0))
3062 avr_asm_len ("%4lpm %A0,%a2+", xop
, plen
, 1);
3063 if (n_bytes
>= 2) avr_asm_len ("%4lpm %B0,%a2+", xop
, plen
, 1);
3064 if (n_bytes
>= 3) avr_asm_len ("%4lpm %C0,%a2+", xop
, plen
, 1);
3065 if (n_bytes
>= 4) avr_asm_len ("%4lpm %D0,%a2+", xop
, plen
, 1);
3067 break; /* POST_INC */
3069 } /* switch CODE (addr) */
3071 if (xop
[4] == xstring_e
&& AVR_HAVE_RAMPD
)
3073 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3075 xop
[0] = zero_reg_rtx
;
3076 avr_asm_len ("out %i6,%0", xop
, plen
, 1);
3083 /* Worker function for xload_8 insn. */
3086 avr_out_xload (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
3092 xop
[2] = lpm_addr_reg_rtx
;
3093 xop
[3] = AVR_HAVE_LPMX
? op
[0] : lpm_reg_rtx
;
3095 avr_asm_len (AVR_HAVE_LPMX
? "lpm %3,%a2" : "lpm", xop
, plen
, -1);
3097 avr_asm_len ("sbrc %1,7" CR_TAB
3098 "ld %3,%a2", xop
, plen
, 2);
3100 if (REGNO (xop
[0]) != REGNO (xop
[3]))
3101 avr_asm_len ("mov %0,%3", xop
, plen
, 1);
3108 output_movqi (rtx insn
, rtx operands
[], int *plen
)
3110 rtx dest
= operands
[0];
3111 rtx src
= operands
[1];
3113 if (avr_mem_flash_p (src
)
3114 || avr_mem_flash_p (dest
))
3116 return avr_out_lpm (insn
, operands
, plen
);
3119 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest
)));
3123 if (REG_P (src
)) /* mov r,r */
3125 if (test_hard_reg_class (STACK_REG
, dest
))
3126 return avr_asm_len ("out %0,%1", operands
, plen
, -1);
3127 else if (test_hard_reg_class (STACK_REG
, src
))
3128 return avr_asm_len ("in %0,%1", operands
, plen
, -1);
3130 return avr_asm_len ("mov %0,%1", operands
, plen
, -1);
3132 else if (CONSTANT_P (src
))
3134 output_reload_in_const (operands
, NULL_RTX
, plen
, false);
3137 else if (MEM_P (src
))
3138 return out_movqi_r_mr (insn
, operands
, plen
); /* mov r,m */
3140 else if (MEM_P (dest
))
3145 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3147 return out_movqi_mr_r (insn
, xop
, plen
);
3155 output_movhi (rtx insn
, rtx xop
[], int *plen
)
3160 gcc_assert (GET_MODE_SIZE (GET_MODE (dest
)) == 2);
3162 if (avr_mem_flash_p (src
)
3163 || avr_mem_flash_p (dest
))
3165 return avr_out_lpm (insn
, xop
, plen
);
3168 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest
)));
3172 if (REG_P (src
)) /* mov r,r */
3174 if (test_hard_reg_class (STACK_REG
, dest
))
3176 if (AVR_HAVE_8BIT_SP
)
3177 return avr_asm_len ("out __SP_L__,%A1", xop
, plen
, -1);
3180 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3181 "out __SP_H__,%B1", xop
, plen
, -2);
3183 /* Use simple load of SP if no interrupts are used. */
3185 return TARGET_NO_INTERRUPTS
3186 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3187 "out __SP_L__,%A1", xop
, plen
, -2)
3188 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3190 "out __SP_H__,%B1" CR_TAB
3191 "out __SREG__,__tmp_reg__" CR_TAB
3192 "out __SP_L__,%A1", xop
, plen
, -5);
3194 else if (test_hard_reg_class (STACK_REG
, src
))
3196 return !AVR_HAVE_SPH
3197 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3198 "clr %B0", xop
, plen
, -2)
3200 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3201 "in %B0,__SP_H__", xop
, plen
, -2);
3204 return AVR_HAVE_MOVW
3205 ? avr_asm_len ("movw %0,%1", xop
, plen
, -1)
3207 : avr_asm_len ("mov %A0,%A1" CR_TAB
3208 "mov %B0,%B1", xop
, plen
, -2);
3210 else if (CONSTANT_P (src
))
3212 return output_reload_inhi (xop
, NULL
, plen
);
3214 else if (MEM_P (src
))
3216 return out_movhi_r_mr (insn
, xop
, plen
); /* mov r,m */
3219 else if (MEM_P (dest
))
3224 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
3226 return out_movhi_mr_r (insn
, xop
, plen
);
3229 fatal_insn ("invalid insn:", insn
);
3235 out_movqi_r_mr (rtx insn
, rtx op
[], int *plen
)
3239 rtx x
= XEXP (src
, 0);
3241 if (CONSTANT_ADDRESS_P (x
))
3243 return optimize
> 0 && io_address_operand (x
, QImode
)
3244 ? avr_asm_len ("in %0,%i1", op
, plen
, -1)
3245 : avr_asm_len ("lds %0,%m1", op
, plen
, -2);
3247 else if (GET_CODE (x
) == PLUS
3248 && REG_P (XEXP (x
, 0))
3249 && CONST_INT_P (XEXP (x
, 1)))
3251 /* memory access by reg+disp */
3253 int disp
= INTVAL (XEXP (x
, 1));
3255 if (disp
- GET_MODE_SIZE (GET_MODE (src
)) >= 63)
3257 if (REGNO (XEXP (x
, 0)) != REG_Y
)
3258 fatal_insn ("incorrect insn:",insn
);
3260 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3261 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3262 "ldd %0,Y+63" CR_TAB
3263 "sbiw r28,%o1-63", op
, plen
, -3);
3265 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3266 "sbci r29,hi8(-%o1)" CR_TAB
3268 "subi r28,lo8(%o1)" CR_TAB
3269 "sbci r29,hi8(%o1)", op
, plen
, -5);
3271 else if (REGNO (XEXP (x
, 0)) == REG_X
)
3273 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3274 it but I have this situation with extremal optimizing options. */
3276 avr_asm_len ("adiw r26,%o1" CR_TAB
3277 "ld %0,X", op
, plen
, -2);
3279 if (!reg_overlap_mentioned_p (dest
, XEXP (x
,0))
3280 && !reg_unused_after (insn
, XEXP (x
,0)))
3282 avr_asm_len ("sbiw r26,%o1", op
, plen
, 1);
3288 return avr_asm_len ("ldd %0,%1", op
, plen
, -1);
3291 return avr_asm_len ("ld %0,%1", op
, plen
, -1);
3295 out_movhi_r_mr (rtx insn
, rtx op
[], int *plen
)
3299 rtx base
= XEXP (src
, 0);
3300 int reg_dest
= true_regnum (dest
);
3301 int reg_base
= true_regnum (base
);
3302 /* "volatile" forces reading low byte first, even if less efficient,
3303 for correct operation with 16-bit I/O registers. */
3304 int mem_volatile_p
= MEM_VOLATILE_P (src
);
3308 if (reg_dest
== reg_base
) /* R = (R) */
3309 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3311 "mov %A0,__tmp_reg__", op
, plen
, -3);
3313 if (reg_base
!= REG_X
)
3314 return avr_asm_len ("ld %A0,%1" CR_TAB
3315 "ldd %B0,%1+1", op
, plen
, -2);
3317 avr_asm_len ("ld %A0,X+" CR_TAB
3318 "ld %B0,X", op
, plen
, -2);
3320 if (!reg_unused_after (insn
, base
))
3321 avr_asm_len ("sbiw r26,1", op
, plen
, 1);
3325 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3327 int disp
= INTVAL (XEXP (base
, 1));
3328 int reg_base
= true_regnum (XEXP (base
, 0));
3330 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3332 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3333 fatal_insn ("incorrect insn:",insn
);
3335 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
))
3336 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3337 "ldd %A0,Y+62" CR_TAB
3338 "ldd %B0,Y+63" CR_TAB
3339 "sbiw r28,%o1-62", op
, plen
, -4)
3341 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3342 "sbci r29,hi8(-%o1)" CR_TAB
3344 "ldd %B0,Y+1" CR_TAB
3345 "subi r28,lo8(%o1)" CR_TAB
3346 "sbci r29,hi8(%o1)", op
, plen
, -6);
3349 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3350 it but I have this situation with extremal
3351 optimization options. */
3353 if (reg_base
== REG_X
)
3354 return reg_base
== reg_dest
3355 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3356 "ld __tmp_reg__,X+" CR_TAB
3358 "mov %A0,__tmp_reg__", op
, plen
, -4)
3360 : avr_asm_len ("adiw r26,%o1" CR_TAB
3363 "sbiw r26,%o1+1", op
, plen
, -4);
3365 return reg_base
== reg_dest
3366 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3367 "ldd %B0,%B1" CR_TAB
3368 "mov %A0,__tmp_reg__", op
, plen
, -3)
3370 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3371 "ldd %B0,%B1", op
, plen
, -2);
3373 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3375 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3376 fatal_insn ("incorrect insn:", insn
);
3378 if (!mem_volatile_p
)
3379 return avr_asm_len ("ld %B0,%1" CR_TAB
3380 "ld %A0,%1", op
, plen
, -2);
3382 return REGNO (XEXP (base
, 0)) == REG_X
3383 ? avr_asm_len ("sbiw r26,2" CR_TAB
3386 "sbiw r26,1", op
, plen
, -4)
3388 : avr_asm_len ("sbiw %r1,2" CR_TAB
3390 "ldd %B0,%p1+1", op
, plen
, -3);
3392 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3394 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
3395 fatal_insn ("incorrect insn:", insn
);
3397 return avr_asm_len ("ld %A0,%1" CR_TAB
3398 "ld %B0,%1", op
, plen
, -2);
3400 else if (CONSTANT_ADDRESS_P (base
))
3402 return optimize
> 0 && io_address_operand (base
, HImode
)
3403 ? avr_asm_len ("in %A0,%i1" CR_TAB
3404 "in %B0,%i1+1", op
, plen
, -2)
3406 : avr_asm_len ("lds %A0,%m1" CR_TAB
3407 "lds %B0,%m1+1", op
, plen
, -4);
3410 fatal_insn ("unknown move insn:",insn
);
3415 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
3419 rtx base
= XEXP (src
, 0);
3420 int reg_dest
= true_regnum (dest
);
3421 int reg_base
= true_regnum (base
);
3429 if (reg_base
== REG_X
) /* (R26) */
3431 if (reg_dest
== REG_X
)
3432 /* "ld r26,-X" is undefined */
3433 return *l
=7, ("adiw r26,3" CR_TAB
3436 "ld __tmp_reg__,-X" CR_TAB
3439 "mov r27,__tmp_reg__");
3440 else if (reg_dest
== REG_X
- 2)
3441 return *l
=5, ("ld %A0,X+" CR_TAB
3443 "ld __tmp_reg__,X+" CR_TAB
3445 "mov %C0,__tmp_reg__");
3446 else if (reg_unused_after (insn
, base
))
3447 return *l
=4, ("ld %A0,X+" CR_TAB
3452 return *l
=5, ("ld %A0,X+" CR_TAB
3460 if (reg_dest
== reg_base
)
3461 return *l
=5, ("ldd %D0,%1+3" CR_TAB
3462 "ldd %C0,%1+2" CR_TAB
3463 "ldd __tmp_reg__,%1+1" CR_TAB
3465 "mov %B0,__tmp_reg__");
3466 else if (reg_base
== reg_dest
+ 2)
3467 return *l
=5, ("ld %A0,%1" CR_TAB
3468 "ldd %B0,%1+1" CR_TAB
3469 "ldd __tmp_reg__,%1+2" CR_TAB
3470 "ldd %D0,%1+3" CR_TAB
3471 "mov %C0,__tmp_reg__");
3473 return *l
=4, ("ld %A0,%1" CR_TAB
3474 "ldd %B0,%1+1" CR_TAB
3475 "ldd %C0,%1+2" CR_TAB
3479 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3481 int disp
= INTVAL (XEXP (base
, 1));
3483 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3485 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3486 fatal_insn ("incorrect insn:",insn
);
3488 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3489 return *l
= 6, ("adiw r28,%o1-60" CR_TAB
3490 "ldd %A0,Y+60" CR_TAB
3491 "ldd %B0,Y+61" CR_TAB
3492 "ldd %C0,Y+62" CR_TAB
3493 "ldd %D0,Y+63" CR_TAB
3496 return *l
= 8, ("subi r28,lo8(-%o1)" CR_TAB
3497 "sbci r29,hi8(-%o1)" CR_TAB
3499 "ldd %B0,Y+1" CR_TAB
3500 "ldd %C0,Y+2" CR_TAB
3501 "ldd %D0,Y+3" CR_TAB
3502 "subi r28,lo8(%o1)" CR_TAB
3503 "sbci r29,hi8(%o1)");
3506 reg_base
= true_regnum (XEXP (base
, 0));
3507 if (reg_base
== REG_X
)
3510 if (reg_dest
== REG_X
)
3513 /* "ld r26,-X" is undefined */
3514 return ("adiw r26,%o1+3" CR_TAB
3517 "ld __tmp_reg__,-X" CR_TAB
3520 "mov r27,__tmp_reg__");
3523 if (reg_dest
== REG_X
- 2)
3524 return ("adiw r26,%o1" CR_TAB
3527 "ld __tmp_reg__,X+" CR_TAB
3529 "mov r26,__tmp_reg__");
3531 return ("adiw r26,%o1" CR_TAB
3538 if (reg_dest
== reg_base
)
3539 return *l
=5, ("ldd %D0,%D1" CR_TAB
3540 "ldd %C0,%C1" CR_TAB
3541 "ldd __tmp_reg__,%B1" CR_TAB
3542 "ldd %A0,%A1" CR_TAB
3543 "mov %B0,__tmp_reg__");
3544 else if (reg_dest
== reg_base
- 2)
3545 return *l
=5, ("ldd %A0,%A1" CR_TAB
3546 "ldd %B0,%B1" CR_TAB
3547 "ldd __tmp_reg__,%C1" CR_TAB
3548 "ldd %D0,%D1" CR_TAB
3549 "mov %C0,__tmp_reg__");
3550 return *l
=4, ("ldd %A0,%A1" CR_TAB
3551 "ldd %B0,%B1" CR_TAB
3552 "ldd %C0,%C1" CR_TAB
3555 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3556 return *l
=4, ("ld %D0,%1" CR_TAB
3560 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3561 return *l
=4, ("ld %A0,%1" CR_TAB
3565 else if (CONSTANT_ADDRESS_P (base
))
3566 return *l
=8, ("lds %A0,%m1" CR_TAB
3567 "lds %B0,%m1+1" CR_TAB
3568 "lds %C0,%m1+2" CR_TAB
3571 fatal_insn ("unknown move insn:",insn
);
3576 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
3580 rtx base
= XEXP (dest
, 0);
3581 int reg_base
= true_regnum (base
);
3582 int reg_src
= true_regnum (src
);
3588 if (CONSTANT_ADDRESS_P (base
))
3589 return *l
=8,("sts %m0,%A1" CR_TAB
3590 "sts %m0+1,%B1" CR_TAB
3591 "sts %m0+2,%C1" CR_TAB
3593 if (reg_base
> 0) /* (r) */
3595 if (reg_base
== REG_X
) /* (R26) */
3597 if (reg_src
== REG_X
)
3599 /* "st X+,r26" is undefined */
3600 if (reg_unused_after (insn
, base
))
3601 return *l
=6, ("mov __tmp_reg__,r27" CR_TAB
3604 "st X+,__tmp_reg__" CR_TAB
3608 return *l
=7, ("mov __tmp_reg__,r27" CR_TAB
3611 "st X+,__tmp_reg__" CR_TAB
3616 else if (reg_base
== reg_src
+ 2)
3618 if (reg_unused_after (insn
, base
))
3619 return *l
=7, ("mov __zero_reg__,%C1" CR_TAB
3620 "mov __tmp_reg__,%D1" CR_TAB
3623 "st %0+,__zero_reg__" CR_TAB
3624 "st %0,__tmp_reg__" CR_TAB
3625 "clr __zero_reg__");
3627 return *l
=8, ("mov __zero_reg__,%C1" CR_TAB
3628 "mov __tmp_reg__,%D1" CR_TAB
3631 "st %0+,__zero_reg__" CR_TAB
3632 "st %0,__tmp_reg__" CR_TAB
3633 "clr __zero_reg__" CR_TAB
3636 return *l
=5, ("st %0+,%A1" CR_TAB
3643 return *l
=4, ("st %0,%A1" CR_TAB
3644 "std %0+1,%B1" CR_TAB
3645 "std %0+2,%C1" CR_TAB
3648 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3650 int disp
= INTVAL (XEXP (base
, 1));
3651 reg_base
= REGNO (XEXP (base
, 0));
3652 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3654 if (reg_base
!= REG_Y
)
3655 fatal_insn ("incorrect insn:",insn
);
3657 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3658 return *l
= 6, ("adiw r28,%o0-60" CR_TAB
3659 "std Y+60,%A1" CR_TAB
3660 "std Y+61,%B1" CR_TAB
3661 "std Y+62,%C1" CR_TAB
3662 "std Y+63,%D1" CR_TAB
3665 return *l
= 8, ("subi r28,lo8(-%o0)" CR_TAB
3666 "sbci r29,hi8(-%o0)" CR_TAB
3668 "std Y+1,%B1" CR_TAB
3669 "std Y+2,%C1" CR_TAB
3670 "std Y+3,%D1" CR_TAB
3671 "subi r28,lo8(%o0)" CR_TAB
3672 "sbci r29,hi8(%o0)");
3674 if (reg_base
== REG_X
)
3677 if (reg_src
== REG_X
)
3680 return ("mov __tmp_reg__,r26" CR_TAB
3681 "mov __zero_reg__,r27" CR_TAB
3682 "adiw r26,%o0" CR_TAB
3683 "st X+,__tmp_reg__" CR_TAB
3684 "st X+,__zero_reg__" CR_TAB
3687 "clr __zero_reg__" CR_TAB
3690 else if (reg_src
== REG_X
- 2)
3693 return ("mov __tmp_reg__,r26" CR_TAB
3694 "mov __zero_reg__,r27" CR_TAB
3695 "adiw r26,%o0" CR_TAB
3698 "st X+,__tmp_reg__" CR_TAB
3699 "st X,__zero_reg__" CR_TAB
3700 "clr __zero_reg__" CR_TAB
3704 return ("adiw r26,%o0" CR_TAB
3711 return *l
=4, ("std %A0,%A1" CR_TAB
3712 "std %B0,%B1" CR_TAB
3713 "std %C0,%C1" CR_TAB
3716 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3717 return *l
=4, ("st %0,%D1" CR_TAB
3721 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3722 return *l
=4, ("st %0,%A1" CR_TAB
3726 fatal_insn ("unknown move insn:",insn
);
3731 output_movsisf (rtx insn
, rtx operands
[], int *l
)
3734 rtx dest
= operands
[0];
3735 rtx src
= operands
[1];
3738 if (avr_mem_flash_p (src
)
3739 || avr_mem_flash_p (dest
))
3741 return avr_out_lpm (insn
, operands
, real_l
);
3747 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest
)));
3750 if (REG_P (src
)) /* mov r,r */
3752 if (true_regnum (dest
) > true_regnum (src
))
3757 return ("movw %C0,%C1" CR_TAB
3761 return ("mov %D0,%D1" CR_TAB
3762 "mov %C0,%C1" CR_TAB
3763 "mov %B0,%B1" CR_TAB
3771 return ("movw %A0,%A1" CR_TAB
3775 return ("mov %A0,%A1" CR_TAB
3776 "mov %B0,%B1" CR_TAB
3777 "mov %C0,%C1" CR_TAB
3781 else if (CONSTANT_P (src
))
3783 return output_reload_insisf (operands
, NULL_RTX
, real_l
);
3785 else if (MEM_P (src
))
3786 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
3788 else if (MEM_P (dest
))
3792 if (src
== CONST0_RTX (GET_MODE (dest
)))
3793 operands
[1] = zero_reg_rtx
;
3795 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
3798 output_asm_insn (templ
, operands
);
3803 fatal_insn ("invalid insn:", insn
);
3808 /* Handle loads of 24-bit types from memory to register. */
3811 avr_out_load_psi (rtx insn
, rtx
*op
, int *plen
)
3815 rtx base
= XEXP (src
, 0);
3816 int reg_dest
= true_regnum (dest
);
3817 int reg_base
= true_regnum (base
);
3821 if (reg_base
== REG_X
) /* (R26) */
3823 if (reg_dest
== REG_X
)
3824 /* "ld r26,-X" is undefined */
3825 return avr_asm_len ("adiw r26,2" CR_TAB
3827 "ld __tmp_reg__,-X" CR_TAB
3830 "mov r27,__tmp_reg__", op
, plen
, -6);
3833 avr_asm_len ("ld %A0,X+" CR_TAB
3835 "ld %C0,X", op
, plen
, -3);
3837 if (reg_dest
!= REG_X
- 2
3838 && !reg_unused_after (insn
, base
))
3840 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3846 else /* reg_base != REG_X */
3848 if (reg_dest
== reg_base
)
3849 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3850 "ldd __tmp_reg__,%1+1" CR_TAB
3852 "mov %B0,__tmp_reg__", op
, plen
, -4);
3854 return avr_asm_len ("ld %A0,%1" CR_TAB
3855 "ldd %B0,%1+1" CR_TAB
3856 "ldd %C0,%1+2", op
, plen
, -3);
3859 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3861 int disp
= INTVAL (XEXP (base
, 1));
3863 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
3865 if (REGNO (XEXP (base
, 0)) != REG_Y
)
3866 fatal_insn ("incorrect insn:",insn
);
3868 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
3869 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3870 "ldd %A0,Y+61" CR_TAB
3871 "ldd %B0,Y+62" CR_TAB
3872 "ldd %C0,Y+63" CR_TAB
3873 "sbiw r28,%o1-61", op
, plen
, -5);
3875 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3876 "sbci r29,hi8(-%o1)" CR_TAB
3878 "ldd %B0,Y+1" CR_TAB
3879 "ldd %C0,Y+2" CR_TAB
3880 "subi r28,lo8(%o1)" CR_TAB
3881 "sbci r29,hi8(%o1)", op
, plen
, -7);
3884 reg_base
= true_regnum (XEXP (base
, 0));
3885 if (reg_base
== REG_X
)
3888 if (reg_dest
== REG_X
)
3890 /* "ld r26,-X" is undefined */
3891 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3893 "ld __tmp_reg__,-X" CR_TAB
3896 "mov r27,__tmp_reg__", op
, plen
, -6);
3899 avr_asm_len ("adiw r26,%o1" CR_TAB
3902 "ld %C0,X", op
, plen
, -4);
3904 if (reg_dest
!= REG_W
3905 && !reg_unused_after (insn
, XEXP (base
, 0)))
3906 avr_asm_len ("sbiw r26,%o1+2", op
, plen
, 1);
3911 if (reg_dest
== reg_base
)
3912 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3913 "ldd __tmp_reg__,%B1" CR_TAB
3914 "ldd %A0,%A1" CR_TAB
3915 "mov %B0,__tmp_reg__", op
, plen
, -4);
3917 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3918 "ldd %B0,%B1" CR_TAB
3919 "ldd %C0,%C1", op
, plen
, -3);
3921 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3922 return avr_asm_len ("ld %C0,%1" CR_TAB
3924 "ld %A0,%1", op
, plen
, -3);
3925 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3926 return avr_asm_len ("ld %A0,%1" CR_TAB
3928 "ld %C0,%1", op
, plen
, -3);
3930 else if (CONSTANT_ADDRESS_P (base
))
3931 return avr_asm_len ("lds %A0,%m1" CR_TAB
3932 "lds %B0,%m1+1" CR_TAB
3933 "lds %C0,%m1+2", op
, plen
, -6);
3935 fatal_insn ("unknown move insn:",insn
);
3939 /* Handle store of 24-bit type from register or zero to memory. */
3942 avr_out_store_psi (rtx insn
, rtx
*op
, int *plen
)
3946 rtx base
= XEXP (dest
, 0);
3947 int reg_base
= true_regnum (base
);
3949 if (CONSTANT_ADDRESS_P (base
))
3950 return avr_asm_len ("sts %m0,%A1" CR_TAB
3951 "sts %m0+1,%B1" CR_TAB
3952 "sts %m0+2,%C1", op
, plen
, -6);
3954 if (reg_base
> 0) /* (r) */
3956 if (reg_base
== REG_X
) /* (R26) */
3958 gcc_assert (!reg_overlap_mentioned_p (base
, src
));
3960 avr_asm_len ("st %0+,%A1" CR_TAB
3962 "st %0,%C1", op
, plen
, -3);
3964 if (!reg_unused_after (insn
, base
))
3965 avr_asm_len ("sbiw r26,2", op
, plen
, 1);
3970 return avr_asm_len ("st %0,%A1" CR_TAB
3971 "std %0+1,%B1" CR_TAB
3972 "std %0+2,%C1", op
, plen
, -3);
3974 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
3976 int disp
= INTVAL (XEXP (base
, 1));
3977 reg_base
= REGNO (XEXP (base
, 0));
3979 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
3981 if (reg_base
!= REG_Y
)
3982 fatal_insn ("incorrect insn:",insn
);
3984 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
3985 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3986 "std Y+61,%A1" CR_TAB
3987 "std Y+62,%B1" CR_TAB
3988 "std Y+63,%C1" CR_TAB
3989 "sbiw r28,%o0-60", op
, plen
, -5);
3991 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3992 "sbci r29,hi8(-%o0)" CR_TAB
3994 "std Y+1,%B1" CR_TAB
3995 "std Y+2,%C1" CR_TAB
3996 "subi r28,lo8(%o0)" CR_TAB
3997 "sbci r29,hi8(%o0)", op
, plen
, -7);
3999 if (reg_base
== REG_X
)
4002 gcc_assert (!reg_overlap_mentioned_p (XEXP (base
, 0), src
));
4004 avr_asm_len ("adiw r26,%o0" CR_TAB
4007 "st X,%C1", op
, plen
, -4);
4009 if (!reg_unused_after (insn
, XEXP (base
, 0)))
4010 avr_asm_len ("sbiw r26,%o0+2", op
, plen
, 1);
4015 return avr_asm_len ("std %A0,%A1" CR_TAB
4016 "std %B0,%B1" CR_TAB
4017 "std %C0,%C1", op
, plen
, -3);
4019 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4020 return avr_asm_len ("st %0,%C1" CR_TAB
4022 "st %0,%A1", op
, plen
, -3);
4023 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4024 return avr_asm_len ("st %0,%A1" CR_TAB
4026 "st %0,%C1", op
, plen
, -3);
4028 fatal_insn ("unknown move insn:",insn
);
4033 /* Move around 24-bit stuff. */
4036 avr_out_movpsi (rtx insn
, rtx
*op
, int *plen
)
4041 if (avr_mem_flash_p (src
)
4042 || avr_mem_flash_p (dest
))
4044 return avr_out_lpm (insn
, op
, plen
);
4047 if (register_operand (dest
, VOIDmode
))
4049 if (register_operand (src
, VOIDmode
)) /* mov r,r */
4051 if (true_regnum (dest
) > true_regnum (src
))
4053 avr_asm_len ("mov %C0,%C1", op
, plen
, -1);
4056 return avr_asm_len ("movw %A0,%A1", op
, plen
, 1);
4058 return avr_asm_len ("mov %B0,%B1" CR_TAB
4059 "mov %A0,%A1", op
, plen
, 2);
4064 avr_asm_len ("movw %A0,%A1", op
, plen
, -1);
4066 avr_asm_len ("mov %A0,%A1" CR_TAB
4067 "mov %B0,%B1", op
, plen
, -2);
4069 return avr_asm_len ("mov %C0,%C1", op
, plen
, 1);
4072 else if (CONSTANT_P (src
))
4074 return avr_out_reload_inpsi (op
, NULL_RTX
, plen
);
4076 else if (MEM_P (src
))
4077 return avr_out_load_psi (insn
, op
, plen
); /* mov r,m */
4079 else if (MEM_P (dest
))
4084 xop
[1] = src
== CONST0_RTX (GET_MODE (dest
)) ? zero_reg_rtx
: src
;
4086 return avr_out_store_psi (insn
, xop
, plen
);
4089 fatal_insn ("invalid insn:", insn
);
4095 out_movqi_mr_r (rtx insn
, rtx op
[], int *plen
)
4099 rtx x
= XEXP (dest
, 0);
4101 if (CONSTANT_ADDRESS_P (x
))
4103 return optimize
> 0 && io_address_operand (x
, QImode
)
4104 ? avr_asm_len ("out %i0,%1", op
, plen
, -1)
4105 : avr_asm_len ("sts %m0,%1", op
, plen
, -2);
4107 else if (GET_CODE (x
) == PLUS
4108 && REG_P (XEXP (x
, 0))
4109 && CONST_INT_P (XEXP (x
, 1)))
4111 /* memory access by reg+disp */
4113 int disp
= INTVAL (XEXP (x
, 1));
4115 if (disp
- GET_MODE_SIZE (GET_MODE (dest
)) >= 63)
4117 if (REGNO (XEXP (x
, 0)) != REG_Y
)
4118 fatal_insn ("incorrect insn:",insn
);
4120 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
4121 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4122 "std Y+63,%1" CR_TAB
4123 "sbiw r28,%o0-63", op
, plen
, -3);
4125 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4126 "sbci r29,hi8(-%o0)" CR_TAB
4128 "subi r28,lo8(%o0)" CR_TAB
4129 "sbci r29,hi8(%o0)", op
, plen
, -5);
4131 else if (REGNO (XEXP (x
,0)) == REG_X
)
4133 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
4135 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4136 "adiw r26,%o0" CR_TAB
4137 "st X,__tmp_reg__", op
, plen
, -3);
4141 avr_asm_len ("adiw r26,%o0" CR_TAB
4142 "st X,%1", op
, plen
, -2);
4145 if (!reg_unused_after (insn
, XEXP (x
,0)))
4146 avr_asm_len ("sbiw r26,%o0", op
, plen
, 1);
4151 return avr_asm_len ("std %0,%1", op
, plen
, -1);
4154 return avr_asm_len ("st %0,%1", op
, plen
, -1);
4158 /* Helper for the next function for XMEGA. It does the same
4159 but with low byte first. */
4162 avr_out_movhi_mr_r_xmega (rtx insn
, rtx op
[], int *plen
)
4166 rtx base
= XEXP (dest
, 0);
4167 int reg_base
= true_regnum (base
);
4168 int reg_src
= true_regnum (src
);
4170 /* "volatile" forces writing low byte first, even if less efficient,
4171 for correct operation with 16-bit I/O registers like SP. */
4172 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
4174 if (CONSTANT_ADDRESS_P (base
))
4175 return optimize
> 0 && io_address_operand (base
, HImode
)
4176 ? avr_asm_len ("out %i0,%A1" CR_TAB
4177 "out %i0+1,%B1", op
, plen
, -2)
4179 : avr_asm_len ("sts %m0,%A1" CR_TAB
4180 "sts %m0+1,%B1", op
, plen
, -4);
4184 if (reg_base
!= REG_X
)
4185 return avr_asm_len ("st %0,%A1" CR_TAB
4186 "std %0+1,%B1", op
, plen
, -2);
4188 if (reg_src
== REG_X
)
4189 /* "st X+,r26" and "st -X,r26" are undefined. */
4190 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4193 "st X,__tmp_reg__", op
, plen
, -4);
4195 avr_asm_len ("st X+,%A1" CR_TAB
4196 "st X,%B1", op
, plen
, -2);
4198 return reg_unused_after (insn
, base
)
4200 : avr_asm_len ("sbiw r26,1", op
, plen
, 1);
4202 else if (GET_CODE (base
) == PLUS
)
4204 int disp
= INTVAL (XEXP (base
, 1));
4205 reg_base
= REGNO (XEXP (base
, 0));
4206 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4208 if (reg_base
!= REG_Y
)
4209 fatal_insn ("incorrect insn:",insn
);
4211 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4212 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4213 "std Y+62,%A1" CR_TAB
4214 "std Y+63,%B1" CR_TAB
4215 "sbiw r28,%o0-62", op
, plen
, -4)
4217 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4218 "sbci r29,hi8(-%o0)" CR_TAB
4220 "std Y+1,%B1" CR_TAB
4221 "subi r28,lo8(%o0)" CR_TAB
4222 "sbci r29,hi8(%o0)", op
, plen
, -6);
4225 if (reg_base
!= REG_X
)
4226 return avr_asm_len ("std %A0,%A1" CR_TAB
4227 "std %B0,%B1", op
, plen
, -2);
4229 return reg_src
== REG_X
4230 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4231 "mov __zero_reg__,r27" CR_TAB
4232 "adiw r26,%o0" CR_TAB
4233 "st X+,__tmp_reg__" CR_TAB
4234 "st X,__zero_reg__" CR_TAB
4235 "clr __zero_reg__" CR_TAB
4236 "sbiw r26,%o0+1", op
, plen
, -7)
4238 : avr_asm_len ("adiw r26,%o0" CR_TAB
4241 "sbiw r26,%o0+1", op
, plen
, -4);
4243 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4245 if (!mem_volatile_p
)
4246 return avr_asm_len ("st %0,%B1" CR_TAB
4247 "st %0,%A1", op
, plen
, -2);
4249 return REGNO (XEXP (base
, 0)) == REG_X
4250 ? avr_asm_len ("sbiw r26,2" CR_TAB
4253 "sbiw r26,1", op
, plen
, -4)
4255 : avr_asm_len ("sbiw %r0,2" CR_TAB
4257 "std %p0+1,%B1", op
, plen
, -3);
4259 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4261 return avr_asm_len ("st %0,%A1" CR_TAB
4262 "st %0,%B1", op
, plen
, -2);
4265 fatal_insn ("unknown move insn:",insn
);
4271 out_movhi_mr_r (rtx insn
, rtx op
[], int *plen
)
4275 rtx base
= XEXP (dest
, 0);
4276 int reg_base
= true_regnum (base
);
4277 int reg_src
= true_regnum (src
);
4280 /* "volatile" forces writing high-byte first (no-xmega) resp.
4281 low-byte first (xmega) even if less efficient, for correct
4282 operation with 16-bit I/O registers like. */
4285 return avr_out_movhi_mr_r_xmega (insn
, op
, plen
);
4287 mem_volatile_p
= MEM_VOLATILE_P (dest
);
4289 if (CONSTANT_ADDRESS_P (base
))
4290 return optimize
> 0 && io_address_operand (base
, HImode
)
4291 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4292 "out %i0,%A1", op
, plen
, -2)
4294 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4295 "sts %m0,%A1", op
, plen
, -4);
4299 if (reg_base
!= REG_X
)
4300 return avr_asm_len ("std %0+1,%B1" CR_TAB
4301 "st %0,%A1", op
, plen
, -2);
4303 if (reg_src
== REG_X
)
4304 /* "st X+,r26" and "st -X,r26" are undefined. */
4305 return !mem_volatile_p
&& reg_unused_after (insn
, src
)
4306 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4309 "st X,__tmp_reg__", op
, plen
, -4)
4311 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4313 "st X,__tmp_reg__" CR_TAB
4315 "st X,r26", op
, plen
, -5);
4317 return !mem_volatile_p
&& reg_unused_after (insn
, base
)
4318 ? avr_asm_len ("st X+,%A1" CR_TAB
4319 "st X,%B1", op
, plen
, -2)
4320 : avr_asm_len ("adiw r26,1" CR_TAB
4322 "st -X,%A1", op
, plen
, -3);
4324 else if (GET_CODE (base
) == PLUS
)
4326 int disp
= INTVAL (XEXP (base
, 1));
4327 reg_base
= REGNO (XEXP (base
, 0));
4328 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
4330 if (reg_base
!= REG_Y
)
4331 fatal_insn ("incorrect insn:",insn
);
4333 return disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
))
4334 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4335 "std Y+63,%B1" CR_TAB
4336 "std Y+62,%A1" CR_TAB
4337 "sbiw r28,%o0-62", op
, plen
, -4)
4339 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4340 "sbci r29,hi8(-%o0)" CR_TAB
4341 "std Y+1,%B1" CR_TAB
4343 "subi r28,lo8(%o0)" CR_TAB
4344 "sbci r29,hi8(%o0)", op
, plen
, -6);
4347 if (reg_base
!= REG_X
)
4348 return avr_asm_len ("std %B0,%B1" CR_TAB
4349 "std %A0,%A1", op
, plen
, -2);
4351 return reg_src
== REG_X
4352 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4353 "mov __zero_reg__,r27" CR_TAB
4354 "adiw r26,%o0+1" CR_TAB
4355 "st X,__zero_reg__" CR_TAB
4356 "st -X,__tmp_reg__" CR_TAB
4357 "clr __zero_reg__" CR_TAB
4358 "sbiw r26,%o0", op
, plen
, -7)
4360 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4363 "sbiw r26,%o0", op
, plen
, -4);
4365 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
4367 return avr_asm_len ("st %0,%B1" CR_TAB
4368 "st %0,%A1", op
, plen
, -2);
4370 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
4372 if (!mem_volatile_p
)
4373 return avr_asm_len ("st %0,%A1" CR_TAB
4374 "st %0,%B1", op
, plen
, -2);
4376 return REGNO (XEXP (base
, 0)) == REG_X
4377 ? avr_asm_len ("adiw r26,1" CR_TAB
4380 "adiw r26,2", op
, plen
, -4)
4382 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4384 "adiw %r0,2", op
, plen
, -3);
4386 fatal_insn ("unknown move insn:",insn
);
4390 /* Return 1 if frame pointer for current function required. */
4393 avr_frame_pointer_required_p (void)
4395 return (cfun
->calls_alloca
4396 || cfun
->calls_setjmp
4397 || cfun
->has_nonlocal_label
4398 || crtl
->args
.info
.nregs
== 0
4399 || get_frame_size () > 0);
4402 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4405 compare_condition (rtx insn
)
4407 rtx next
= next_real_insn (insn
);
4409 if (next
&& JUMP_P (next
))
4411 rtx pat
= PATTERN (next
);
4412 rtx src
= SET_SRC (pat
);
4414 if (IF_THEN_ELSE
== GET_CODE (src
))
4415 return GET_CODE (XEXP (src
, 0));
4422 /* Returns true iff INSN is a tst insn that only tests the sign. */
4425 compare_sign_p (rtx insn
)
4427 RTX_CODE cond
= compare_condition (insn
);
4428 return (cond
== GE
|| cond
== LT
);
4432 /* Returns true iff the next insn is a JUMP_INSN with a condition
4433 that needs to be swapped (GT, GTU, LE, LEU). */
4436 compare_diff_p (rtx insn
)
4438 RTX_CODE cond
= compare_condition (insn
);
4439 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
4442 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4445 compare_eq_p (rtx insn
)
4447 RTX_CODE cond
= compare_condition (insn
);
4448 return (cond
== EQ
|| cond
== NE
);
4452 /* Output compare instruction
4454 compare (XOP[0], XOP[1])
4456 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
4457 XOP[2] is an 8-bit scratch register as needed.
4459 PLEN == NULL: Output instructions.
4460 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4461 Don't output anything. */
4464 avr_out_compare (rtx insn
, rtx
*xop
, int *plen
)
4466 /* Register to compare and value to compare against. */
4470 /* MODE of the comparison. */
4471 enum machine_mode mode
;
4473 /* Number of bytes to operate on. */
4474 int i
, n_bytes
= GET_MODE_SIZE (GET_MODE (xreg
));
4476 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4477 int clobber_val
= -1;
4479 /* Map fixed mode operands to integer operands with the same binary
4480 representation. They are easier to handle in the remainder. */
4482 if (CONST_FIXED_P (xval
))
4484 xreg
= avr_to_int_mode (xop
[0]);
4485 xval
= avr_to_int_mode (xop
[1]);
4488 mode
= GET_MODE (xreg
);
4490 gcc_assert (REG_P (xreg
));
4491 gcc_assert ((CONST_INT_P (xval
) && n_bytes
<= 4)
4492 || (const_double_operand (xval
, VOIDmode
) && n_bytes
== 8));
4497 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4498 against 0 by ORing the bytes. This is one instruction shorter.
4499 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4500 and therefore don't use this. */
4502 if (!test_hard_reg_class (LD_REGS
, xreg
)
4503 && compare_eq_p (insn
)
4504 && reg_unused_after (insn
, xreg
))
4506 if (xval
== const1_rtx
)
4508 avr_asm_len ("dec %A0" CR_TAB
4509 "or %A0,%B0", xop
, plen
, 2);
4512 avr_asm_len ("or %A0,%C0", xop
, plen
, 1);
4515 avr_asm_len ("or %A0,%D0", xop
, plen
, 1);
4519 else if (xval
== constm1_rtx
)
4522 avr_asm_len ("and %A0,%D0", xop
, plen
, 1);
4525 avr_asm_len ("and %A0,%C0", xop
, plen
, 1);
4527 return avr_asm_len ("and %A0,%B0" CR_TAB
4528 "com %A0", xop
, plen
, 2);
4532 for (i
= 0; i
< n_bytes
; i
++)
4534 /* We compare byte-wise. */
4535 rtx reg8
= simplify_gen_subreg (QImode
, xreg
, mode
, i
);
4536 rtx xval8
= simplify_gen_subreg (QImode
, xval
, mode
, i
);
4538 /* 8-bit value to compare with this byte. */
4539 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
4541 /* Registers R16..R31 can operate with immediate. */
4542 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
4545 xop
[1] = gen_int_mode (val8
, QImode
);
4547 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4550 && test_hard_reg_class (ADDW_REGS
, reg8
))
4552 int val16
= trunc_int_for_mode (INTVAL (xval
), HImode
);
4554 if (IN_RANGE (val16
, 0, 63)
4556 || reg_unused_after (insn
, xreg
)))
4558 avr_asm_len ("sbiw %0,%1", xop
, plen
, 1);
4564 && IN_RANGE (val16
, -63, -1)
4565 && compare_eq_p (insn
)
4566 && reg_unused_after (insn
, xreg
))
4568 return avr_asm_len ("adiw %0,%n1", xop
, plen
, 1);
4572 /* Comparing against 0 is easy. */
4577 ? "cp %0,__zero_reg__"
4578 : "cpc %0,__zero_reg__", xop
, plen
, 1);
4582 /* Upper registers can compare and subtract-with-carry immediates.
4583 Notice that compare instructions do the same as respective subtract
4584 instruction; the only difference is that comparisons don't write
4585 the result back to the target register. */
4591 avr_asm_len ("cpi %0,%1", xop
, plen
, 1);
4594 else if (reg_unused_after (insn
, xreg
))
4596 avr_asm_len ("sbci %0,%1", xop
, plen
, 1);
4601 /* Must load the value into the scratch register. */
4603 gcc_assert (REG_P (xop
[2]));
4605 if (clobber_val
!= (int) val8
)
4606 avr_asm_len ("ldi %2,%1", xop
, plen
, 1);
4607 clobber_val
= (int) val8
;
4611 : "cpc %0,%2", xop
, plen
, 1);
4618 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4621 avr_out_compare64 (rtx insn
, rtx
*op
, int *plen
)
4625 xop
[0] = gen_rtx_REG (DImode
, 18);
4629 return avr_out_compare (insn
, xop
, plen
);
4632 /* Output test instruction for HImode. */
4635 avr_out_tsthi (rtx insn
, rtx
*op
, int *plen
)
4637 if (compare_sign_p (insn
))
4639 avr_asm_len ("tst %B0", op
, plen
, -1);
4641 else if (reg_unused_after (insn
, op
[0])
4642 && compare_eq_p (insn
))
4644 /* Faster than sbiw if we can clobber the operand. */
4645 avr_asm_len ("or %A0,%B0", op
, plen
, -1);
4649 avr_out_compare (insn
, op
, plen
);
4656 /* Output test instruction for PSImode. */
4659 avr_out_tstpsi (rtx insn
, rtx
*op
, int *plen
)
4661 if (compare_sign_p (insn
))
4663 avr_asm_len ("tst %C0", op
, plen
, -1);
4665 else if (reg_unused_after (insn
, op
[0])
4666 && compare_eq_p (insn
))
4668 /* Faster than sbiw if we can clobber the operand. */
4669 avr_asm_len ("or %A0,%B0" CR_TAB
4670 "or %A0,%C0", op
, plen
, -2);
4674 avr_out_compare (insn
, op
, plen
);
4681 /* Output test instruction for SImode. */
4684 avr_out_tstsi (rtx insn
, rtx
*op
, int *plen
)
4686 if (compare_sign_p (insn
))
4688 avr_asm_len ("tst %D0", op
, plen
, -1);
4690 else if (reg_unused_after (insn
, op
[0])
4691 && compare_eq_p (insn
))
4693 /* Faster than sbiw if we can clobber the operand. */
4694 avr_asm_len ("or %A0,%B0" CR_TAB
4696 "or %A0,%D0", op
, plen
, -3);
4700 avr_out_compare (insn
, op
, plen
);
4707 /* Generate asm equivalent for various shifts. This only handles cases
4708 that are not already carefully hand-optimized in ?sh??i3_out.
4710 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4711 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4712 OPERANDS[3] is a QImode scratch register from LD regs if
4713 available and SCRATCH, otherwise (no scratch available)
4715 TEMPL is an assembler template that shifts by one position.
4716 T_LEN is the length of this template. */
4719 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
4720 int *plen
, int t_len
)
4722 bool second_label
= true;
4723 bool saved_in_tmp
= false;
4724 bool use_zero_reg
= false;
4727 op
[0] = operands
[0];
4728 op
[1] = operands
[1];
4729 op
[2] = operands
[2];
4730 op
[3] = operands
[3];
4735 if (CONST_INT_P (operands
[2]))
4737 bool scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
4738 && REG_P (operands
[3]));
4739 int count
= INTVAL (operands
[2]);
4740 int max_len
= 10; /* If larger than this, always use a loop. */
4745 if (count
< 8 && !scratch
)
4746 use_zero_reg
= true;
4749 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
4751 if (t_len
* count
<= max_len
)
4753 /* Output shifts inline with no loop - faster. */
4756 avr_asm_len (templ
, op
, plen
, t_len
);
4763 avr_asm_len ("ldi %3,%2", op
, plen
, 1);
4765 else if (use_zero_reg
)
4767 /* Hack to save one word: use __zero_reg__ as loop counter.
4768 Set one bit, then shift in a loop until it is 0 again. */
4770 op
[3] = zero_reg_rtx
;
4772 avr_asm_len ("set" CR_TAB
4773 "bld %3,%2-1", op
, plen
, 2);
4777 /* No scratch register available, use one from LD_REGS (saved in
4778 __tmp_reg__) that doesn't overlap with registers to shift. */
4780 op
[3] = all_regs_rtx
[((REGNO (op
[0]) - 1) & 15) + 16];
4781 op
[4] = tmp_reg_rtx
;
4782 saved_in_tmp
= true;
4784 avr_asm_len ("mov %4,%3" CR_TAB
4785 "ldi %3,%2", op
, plen
, 2);
4788 second_label
= false;
4790 else if (MEM_P (op
[2]))
4794 op_mov
[0] = op
[3] = tmp_reg_rtx
;
4797 out_movqi_r_mr (insn
, op_mov
, plen
);
4799 else if (register_operand (op
[2], QImode
))
4803 if (!reg_unused_after (insn
, op
[2])
4804 || reg_overlap_mentioned_p (op
[0], op
[2]))
4806 op
[3] = tmp_reg_rtx
;
4807 avr_asm_len ("mov %3,%2", op
, plen
, 1);
4811 fatal_insn ("bad shift insn:", insn
);
4814 avr_asm_len ("rjmp 2f", op
, plen
, 1);
4816 avr_asm_len ("1:", op
, plen
, 0);
4817 avr_asm_len (templ
, op
, plen
, t_len
);
4820 avr_asm_len ("2:", op
, plen
, 0);
4822 avr_asm_len (use_zero_reg
? "lsr %3" : "dec %3", op
, plen
, 1);
4823 avr_asm_len (second_label
? "brpl 1b" : "brne 1b", op
, plen
, 1);
4826 avr_asm_len ("mov %3,%4", op
, plen
, 1);
4830 /* 8bit shift left ((char)x << i) */
4833 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
4835 if (GET_CODE (operands
[2]) == CONST_INT
)
4842 switch (INTVAL (operands
[2]))
4845 if (INTVAL (operands
[2]) < 8)
4857 return ("lsl %0" CR_TAB
4862 return ("lsl %0" CR_TAB
4867 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4870 return ("swap %0" CR_TAB
4874 return ("lsl %0" CR_TAB
4880 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4883 return ("swap %0" CR_TAB
4888 return ("lsl %0" CR_TAB
4895 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4898 return ("swap %0" CR_TAB
4904 return ("lsl %0" CR_TAB
4913 return ("ror %0" CR_TAB
4918 else if (CONSTANT_P (operands
[2]))
4919 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4921 out_shift_with_cnt ("lsl %0",
4922 insn
, operands
, len
, 1);
4927 /* 16bit shift left ((short)x << i) */
4930 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
4932 if (GET_CODE (operands
[2]) == CONST_INT
)
4934 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4935 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4942 switch (INTVAL (operands
[2]))
4945 if (INTVAL (operands
[2]) < 16)
4949 return ("clr %B0" CR_TAB
4953 if (optimize_size
&& scratch
)
4958 return ("swap %A0" CR_TAB
4960 "andi %B0,0xf0" CR_TAB
4961 "eor %B0,%A0" CR_TAB
4962 "andi %A0,0xf0" CR_TAB
4968 return ("swap %A0" CR_TAB
4970 "ldi %3,0xf0" CR_TAB
4972 "eor %B0,%A0" CR_TAB
4976 break; /* optimize_size ? 6 : 8 */
4980 break; /* scratch ? 5 : 6 */
4984 return ("lsl %A0" CR_TAB
4988 "andi %B0,0xf0" CR_TAB
4989 "eor %B0,%A0" CR_TAB
4990 "andi %A0,0xf0" CR_TAB
4996 return ("lsl %A0" CR_TAB
5000 "ldi %3,0xf0" CR_TAB
5002 "eor %B0,%A0" CR_TAB
5010 break; /* scratch ? 5 : 6 */
5012 return ("clr __tmp_reg__" CR_TAB
5015 "ror __tmp_reg__" CR_TAB
5018 "ror __tmp_reg__" CR_TAB
5019 "mov %B0,%A0" CR_TAB
5020 "mov %A0,__tmp_reg__");
5024 return ("lsr %B0" CR_TAB
5025 "mov %B0,%A0" CR_TAB
5031 return *len
= 2, ("mov %B0,%A1" CR_TAB
5036 return ("mov %B0,%A0" CR_TAB
5042 return ("mov %B0,%A0" CR_TAB
5049 return ("mov %B0,%A0" CR_TAB
5059 return ("mov %B0,%A0" CR_TAB
5067 return ("mov %B0,%A0" CR_TAB
5070 "ldi %3,0xf0" CR_TAB
5074 return ("mov %B0,%A0" CR_TAB
5085 return ("mov %B0,%A0" CR_TAB
5091 if (AVR_HAVE_MUL
&& scratch
)
5094 return ("ldi %3,0x20" CR_TAB
5098 "clr __zero_reg__");
5100 if (optimize_size
&& scratch
)
5105 return ("mov %B0,%A0" CR_TAB
5109 "ldi %3,0xe0" CR_TAB
5115 return ("set" CR_TAB
5120 "clr __zero_reg__");
5123 return ("mov %B0,%A0" CR_TAB
5132 if (AVR_HAVE_MUL
&& ldi_ok
)
5135 return ("ldi %B0,0x40" CR_TAB
5136 "mul %A0,%B0" CR_TAB
5139 "clr __zero_reg__");
5141 if (AVR_HAVE_MUL
&& scratch
)
5144 return ("ldi %3,0x40" CR_TAB
5148 "clr __zero_reg__");
5150 if (optimize_size
&& ldi_ok
)
5153 return ("mov %B0,%A0" CR_TAB
5154 "ldi %A0,6" "\n1:\t"
5159 if (optimize_size
&& scratch
)
5162 return ("clr %B0" CR_TAB
5171 return ("clr %B0" CR_TAB
5178 out_shift_with_cnt ("lsl %A0" CR_TAB
5179 "rol %B0", insn
, operands
, len
, 2);
5184 /* 24-bit shift left */
5187 avr_out_ashlpsi3 (rtx insn
, rtx
*op
, int *plen
)
5192 if (CONST_INT_P (op
[2]))
5194 switch (INTVAL (op
[2]))
5197 if (INTVAL (op
[2]) < 24)
5200 return avr_asm_len ("clr %A0" CR_TAB
5202 "clr %C0", op
, plen
, 3);
5206 int reg0
= REGNO (op
[0]);
5207 int reg1
= REGNO (op
[1]);
5210 return avr_asm_len ("mov %C0,%B1" CR_TAB
5211 "mov %B0,%A1" CR_TAB
5212 "clr %A0", op
, plen
, 3);
5214 return avr_asm_len ("clr %A0" CR_TAB
5215 "mov %B0,%A1" CR_TAB
5216 "mov %C0,%B1", op
, plen
, 3);
5221 int reg0
= REGNO (op
[0]);
5222 int reg1
= REGNO (op
[1]);
5224 if (reg0
+ 2 != reg1
)
5225 avr_asm_len ("mov %C0,%A0", op
, plen
, 1);
5227 return avr_asm_len ("clr %B0" CR_TAB
5228 "clr %A0", op
, plen
, 2);
5232 return avr_asm_len ("clr %C0" CR_TAB
5236 "clr %A0", op
, plen
, 5);
5240 out_shift_with_cnt ("lsl %A0" CR_TAB
5242 "rol %C0", insn
, op
, plen
, 3);
5247 /* 32bit shift left ((long)x << i) */
5250 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
5252 if (GET_CODE (operands
[2]) == CONST_INT
)
5260 switch (INTVAL (operands
[2]))
5263 if (INTVAL (operands
[2]) < 32)
5267 return *len
= 3, ("clr %D0" CR_TAB
5271 return ("clr %D0" CR_TAB
5278 int reg0
= true_regnum (operands
[0]);
5279 int reg1
= true_regnum (operands
[1]);
5282 return ("mov %D0,%C1" CR_TAB
5283 "mov %C0,%B1" CR_TAB
5284 "mov %B0,%A1" CR_TAB
5287 return ("clr %A0" CR_TAB
5288 "mov %B0,%A1" CR_TAB
5289 "mov %C0,%B1" CR_TAB
5295 int reg0
= true_regnum (operands
[0]);
5296 int reg1
= true_regnum (operands
[1]);
5297 if (reg0
+ 2 == reg1
)
5298 return *len
= 2, ("clr %B0" CR_TAB
5301 return *len
= 3, ("movw %C0,%A1" CR_TAB
5305 return *len
= 4, ("mov %C0,%A1" CR_TAB
5306 "mov %D0,%B1" CR_TAB
5313 return ("mov %D0,%A1" CR_TAB
5320 return ("clr %D0" CR_TAB
5329 out_shift_with_cnt ("lsl %A0" CR_TAB
5332 "rol %D0", insn
, operands
, len
, 4);
5336 /* 8bit arithmetic shift right ((signed char)x >> i) */
5339 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
5341 if (GET_CODE (operands
[2]) == CONST_INT
)
5348 switch (INTVAL (operands
[2]))
5356 return ("asr %0" CR_TAB
5361 return ("asr %0" CR_TAB
5367 return ("asr %0" CR_TAB
5374 return ("asr %0" CR_TAB
5382 return ("bst %0,6" CR_TAB
5388 if (INTVAL (operands
[2]) < 8)
5395 return ("lsl %0" CR_TAB
5399 else if (CONSTANT_P (operands
[2]))
5400 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5402 out_shift_with_cnt ("asr %0",
5403 insn
, operands
, len
, 1);
5408 /* 16bit arithmetic shift right ((signed short)x >> i) */
5411 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
5413 if (GET_CODE (operands
[2]) == CONST_INT
)
5415 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5416 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5423 switch (INTVAL (operands
[2]))
5427 /* XXX try to optimize this too? */
5432 break; /* scratch ? 5 : 6 */
5434 return ("mov __tmp_reg__,%A0" CR_TAB
5435 "mov %A0,%B0" CR_TAB
5436 "lsl __tmp_reg__" CR_TAB
5438 "sbc %B0,%B0" CR_TAB
5439 "lsl __tmp_reg__" CR_TAB
5445 return ("lsl %A0" CR_TAB
5446 "mov %A0,%B0" CR_TAB
5452 int reg0
= true_regnum (operands
[0]);
5453 int reg1
= true_regnum (operands
[1]);
5456 return *len
= 3, ("mov %A0,%B0" CR_TAB
5460 return *len
= 4, ("mov %A0,%B1" CR_TAB
5468 return ("mov %A0,%B0" CR_TAB
5470 "sbc %B0,%B0" CR_TAB
5475 return ("mov %A0,%B0" CR_TAB
5477 "sbc %B0,%B0" CR_TAB
5482 if (AVR_HAVE_MUL
&& ldi_ok
)
5485 return ("ldi %A0,0x20" CR_TAB
5486 "muls %B0,%A0" CR_TAB
5488 "sbc %B0,%B0" CR_TAB
5489 "clr __zero_reg__");
5491 if (optimize_size
&& scratch
)
5494 return ("mov %A0,%B0" CR_TAB
5496 "sbc %B0,%B0" CR_TAB
5502 if (AVR_HAVE_MUL
&& ldi_ok
)
5505 return ("ldi %A0,0x10" CR_TAB
5506 "muls %B0,%A0" CR_TAB
5508 "sbc %B0,%B0" CR_TAB
5509 "clr __zero_reg__");
5511 if (optimize_size
&& scratch
)
5514 return ("mov %A0,%B0" CR_TAB
5516 "sbc %B0,%B0" CR_TAB
5523 if (AVR_HAVE_MUL
&& ldi_ok
)
5526 return ("ldi %A0,0x08" CR_TAB
5527 "muls %B0,%A0" CR_TAB
5529 "sbc %B0,%B0" CR_TAB
5530 "clr __zero_reg__");
5533 break; /* scratch ? 5 : 7 */
5535 return ("mov %A0,%B0" CR_TAB
5537 "sbc %B0,%B0" CR_TAB
5546 return ("lsl %B0" CR_TAB
5547 "sbc %A0,%A0" CR_TAB
5549 "mov %B0,%A0" CR_TAB
5553 if (INTVAL (operands
[2]) < 16)
5559 return *len
= 3, ("lsl %B0" CR_TAB
5560 "sbc %A0,%A0" CR_TAB
5565 out_shift_with_cnt ("asr %B0" CR_TAB
5566 "ror %A0", insn
, operands
, len
, 2);
5571 /* 24-bit arithmetic shift right */
5574 avr_out_ashrpsi3 (rtx insn
, rtx
*op
, int *plen
)
5576 int dest
= REGNO (op
[0]);
5577 int src
= REGNO (op
[1]);
5579 if (CONST_INT_P (op
[2]))
5584 switch (INTVAL (op
[2]))
5588 return avr_asm_len ("mov %A0,%B1" CR_TAB
5589 "mov %B0,%C1" CR_TAB
5592 "dec %C0", op
, plen
, 5);
5594 return avr_asm_len ("clr %C0" CR_TAB
5597 "mov %B0,%C1" CR_TAB
5598 "mov %A0,%B1", op
, plen
, 5);
5601 if (dest
!= src
+ 2)
5602 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
5604 return avr_asm_len ("clr %B0" CR_TAB
5607 "mov %C0,%B0", op
, plen
, 4);
5610 if (INTVAL (op
[2]) < 24)
5616 return avr_asm_len ("lsl %C0" CR_TAB
5617 "sbc %A0,%A0" CR_TAB
5618 "mov %B0,%A0" CR_TAB
5619 "mov %C0,%A0", op
, plen
, 4);
5623 out_shift_with_cnt ("asr %C0" CR_TAB
5625 "ror %A0", insn
, op
, plen
, 3);
5630 /* 32-bit arithmetic shift right ((signed long)x >> i) */
5633 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
5635 if (GET_CODE (operands
[2]) == CONST_INT
)
5643 switch (INTVAL (operands
[2]))
5647 int reg0
= true_regnum (operands
[0]);
5648 int reg1
= true_regnum (operands
[1]);
5651 return ("mov %A0,%B1" CR_TAB
5652 "mov %B0,%C1" CR_TAB
5653 "mov %C0,%D1" CR_TAB
5658 return ("clr %D0" CR_TAB
5661 "mov %C0,%D1" CR_TAB
5662 "mov %B0,%C1" CR_TAB
5668 int reg0
= true_regnum (operands
[0]);
5669 int reg1
= true_regnum (operands
[1]);
5671 if (reg0
== reg1
+ 2)
5672 return *len
= 4, ("clr %D0" CR_TAB
5677 return *len
= 5, ("movw %A0,%C1" CR_TAB
5683 return *len
= 6, ("mov %B0,%D1" CR_TAB
5684 "mov %A0,%C1" CR_TAB
5692 return *len
= 6, ("mov %A0,%D1" CR_TAB
5696 "mov %B0,%D0" CR_TAB
5700 if (INTVAL (operands
[2]) < 32)
5707 return *len
= 4, ("lsl %D0" CR_TAB
5708 "sbc %A0,%A0" CR_TAB
5709 "mov %B0,%A0" CR_TAB
5712 return *len
= 5, ("lsl %D0" CR_TAB
5713 "sbc %A0,%A0" CR_TAB
5714 "mov %B0,%A0" CR_TAB
5715 "mov %C0,%A0" CR_TAB
5720 out_shift_with_cnt ("asr %D0" CR_TAB
5723 "ror %A0", insn
, operands
, len
, 4);
5727 /* 8-bit logic shift right ((unsigned char)x >> i) */
5730 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
5732 if (GET_CODE (operands
[2]) == CONST_INT
)
5739 switch (INTVAL (operands
[2]))
5742 if (INTVAL (operands
[2]) < 8)
5754 return ("lsr %0" CR_TAB
5758 return ("lsr %0" CR_TAB
5763 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5766 return ("swap %0" CR_TAB
5770 return ("lsr %0" CR_TAB
5776 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5779 return ("swap %0" CR_TAB
5784 return ("lsr %0" CR_TAB
5791 if (test_hard_reg_class (LD_REGS
, operands
[0]))
5794 return ("swap %0" CR_TAB
5800 return ("lsr %0" CR_TAB
5809 return ("rol %0" CR_TAB
5814 else if (CONSTANT_P (operands
[2]))
5815 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
5817 out_shift_with_cnt ("lsr %0",
5818 insn
, operands
, len
, 1);
5822 /* 16-bit logic shift right ((unsigned short)x >> i) */
5825 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
5827 if (GET_CODE (operands
[2]) == CONST_INT
)
5829 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
5830 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
5837 switch (INTVAL (operands
[2]))
5840 if (INTVAL (operands
[2]) < 16)
5844 return ("clr %B0" CR_TAB
5848 if (optimize_size
&& scratch
)
5853 return ("swap %B0" CR_TAB
5855 "andi %A0,0x0f" CR_TAB
5856 "eor %A0,%B0" CR_TAB
5857 "andi %B0,0x0f" CR_TAB
5863 return ("swap %B0" CR_TAB
5865 "ldi %3,0x0f" CR_TAB
5867 "eor %A0,%B0" CR_TAB
5871 break; /* optimize_size ? 6 : 8 */
5875 break; /* scratch ? 5 : 6 */
5879 return ("lsr %B0" CR_TAB
5883 "andi %A0,0x0f" CR_TAB
5884 "eor %A0,%B0" CR_TAB
5885 "andi %B0,0x0f" CR_TAB
5891 return ("lsr %B0" CR_TAB
5895 "ldi %3,0x0f" CR_TAB
5897 "eor %A0,%B0" CR_TAB
5905 break; /* scratch ? 5 : 6 */
5907 return ("clr __tmp_reg__" CR_TAB
5910 "rol __tmp_reg__" CR_TAB
5913 "rol __tmp_reg__" CR_TAB
5914 "mov %A0,%B0" CR_TAB
5915 "mov %B0,__tmp_reg__");
5919 return ("lsl %A0" CR_TAB
5920 "mov %A0,%B0" CR_TAB
5922 "sbc %B0,%B0" CR_TAB
5926 return *len
= 2, ("mov %A0,%B1" CR_TAB
5931 return ("mov %A0,%B0" CR_TAB
5937 return ("mov %A0,%B0" CR_TAB
5944 return ("mov %A0,%B0" CR_TAB
5954 return ("mov %A0,%B0" CR_TAB
5962 return ("mov %A0,%B0" CR_TAB
5965 "ldi %3,0x0f" CR_TAB
5969 return ("mov %A0,%B0" CR_TAB
5980 return ("mov %A0,%B0" CR_TAB
5986 if (AVR_HAVE_MUL
&& scratch
)
5989 return ("ldi %3,0x08" CR_TAB
5993 "clr __zero_reg__");
5995 if (optimize_size
&& scratch
)
6000 return ("mov %A0,%B0" CR_TAB
6004 "ldi %3,0x07" CR_TAB
6010 return ("set" CR_TAB
6015 "clr __zero_reg__");
6018 return ("mov %A0,%B0" CR_TAB
6027 if (AVR_HAVE_MUL
&& ldi_ok
)
6030 return ("ldi %A0,0x04" CR_TAB
6031 "mul %B0,%A0" CR_TAB
6034 "clr __zero_reg__");
6036 if (AVR_HAVE_MUL
&& scratch
)
6039 return ("ldi %3,0x04" CR_TAB
6043 "clr __zero_reg__");
6045 if (optimize_size
&& ldi_ok
)
6048 return ("mov %A0,%B0" CR_TAB
6049 "ldi %B0,6" "\n1:\t"
6054 if (optimize_size
&& scratch
)
6057 return ("clr %A0" CR_TAB
6066 return ("clr %A0" CR_TAB
6073 out_shift_with_cnt ("lsr %B0" CR_TAB
6074 "ror %A0", insn
, operands
, len
, 2);
6079 /* 24-bit logic shift right */
6082 avr_out_lshrpsi3 (rtx insn
, rtx
*op
, int *plen
)
6084 int dest
= REGNO (op
[0]);
6085 int src
= REGNO (op
[1]);
6087 if (CONST_INT_P (op
[2]))
6092 switch (INTVAL (op
[2]))
6096 return avr_asm_len ("mov %A0,%B1" CR_TAB
6097 "mov %B0,%C1" CR_TAB
6098 "clr %C0", op
, plen
, 3);
6100 return avr_asm_len ("clr %C0" CR_TAB
6101 "mov %B0,%C1" CR_TAB
6102 "mov %A0,%B1", op
, plen
, 3);
6105 if (dest
!= src
+ 2)
6106 avr_asm_len ("mov %A0,%C1", op
, plen
, 1);
6108 return avr_asm_len ("clr %B0" CR_TAB
6109 "clr %C0", op
, plen
, 2);
6112 if (INTVAL (op
[2]) < 24)
6118 return avr_asm_len ("clr %A0" CR_TAB
6122 "clr %C0", op
, plen
, 5);
6126 out_shift_with_cnt ("lsr %C0" CR_TAB
6128 "ror %A0", insn
, op
, plen
, 3);
6133 /* 32-bit logic shift right ((unsigned int)x >> i) */
6136 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
6138 if (GET_CODE (operands
[2]) == CONST_INT
)
6146 switch (INTVAL (operands
[2]))
6149 if (INTVAL (operands
[2]) < 32)
6153 return *len
= 3, ("clr %D0" CR_TAB
6157 return ("clr %D0" CR_TAB
6164 int reg0
= true_regnum (operands
[0]);
6165 int reg1
= true_regnum (operands
[1]);
6168 return ("mov %A0,%B1" CR_TAB
6169 "mov %B0,%C1" CR_TAB
6170 "mov %C0,%D1" CR_TAB
6173 return ("clr %D0" CR_TAB
6174 "mov %C0,%D1" CR_TAB
6175 "mov %B0,%C1" CR_TAB
6181 int reg0
= true_regnum (operands
[0]);
6182 int reg1
= true_regnum (operands
[1]);
6184 if (reg0
== reg1
+ 2)
6185 return *len
= 2, ("clr %C0" CR_TAB
6188 return *len
= 3, ("movw %A0,%C1" CR_TAB
6192 return *len
= 4, ("mov %B0,%D1" CR_TAB
6193 "mov %A0,%C1" CR_TAB
6199 return *len
= 4, ("mov %A0,%D1" CR_TAB
6206 return ("clr %A0" CR_TAB
6215 out_shift_with_cnt ("lsr %D0" CR_TAB
6218 "ror %A0", insn
, operands
, len
, 4);
6223 /* Output addition of register XOP[0] and compile time constant XOP[2].
6224 CODE == PLUS: perform addition by using ADD instructions or
6225 CODE == MINUS: perform addition by using SUB instructions:
6227 XOP[0] = XOP[0] + XOP[2]
6229 Or perform addition/subtraction with register XOP[2] depending on CODE:
6231 XOP[0] = XOP[0] +/- XOP[2]
6233 If PLEN == NULL, print assembler instructions to perform the operation;
6234 otherwise, set *PLEN to the length of the instruction sequence (in words)
6235 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6236 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
6238 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6239 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6240 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6241 the subtrahend in the original insn, provided it is a compile time constant.
6242 In all other cases, SIGN is 0.
6244 If OUT_LABEL is true, print the final 0: label which is needed for
6245 saturated addition / subtraction. The only case where OUT_LABEL = false
6246 is useful is for saturated addition / subtraction performed during
6247 fixed-point rounding, cf. `avr_out_round'. */
6250 avr_out_plus_1 (rtx
*xop
, int *plen
, enum rtx_code code
, int *pcc
,
6251 enum rtx_code code_sat
, int sign
, bool out_label
)
6253 /* MODE of the operation. */
6254 enum machine_mode mode
= GET_MODE (xop
[0]);
6256 /* INT_MODE of the same size. */
6257 enum machine_mode imode
= int_mode_for_mode (mode
);
6259 /* Number of bytes to operate on. */
6260 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6262 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6263 int clobber_val
= -1;
6265 /* op[0]: 8-bit destination register
6266 op[1]: 8-bit const int
6267 op[2]: 8-bit scratch register */
6270 /* Started the operation? Before starting the operation we may skip
6271 adding 0. This is no more true after the operation started because
6272 carry must be taken into account. */
6273 bool started
= false;
6275 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6278 /* Output a BRVC instruction. Only needed with saturation. */
6279 bool out_brvc
= true;
6286 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6288 for (i
= 0; i
< n_bytes
; i
++)
6290 /* We operate byte-wise on the destination. */
6291 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6292 op
[1] = simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6295 avr_asm_len (code
== PLUS
? "add %0,%1" : "sub %0,%1",
6298 avr_asm_len (code
== PLUS
? "adc %0,%1" : "sbc %0,%1",
6302 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6304 gcc_assert (REGNO (xop
[0]) == REGNO (xop
[2]));
6313 /* Except in the case of ADIW with 16-bit register (see below)
6314 addition does not set cc0 in a usable way. */
6316 *pcc
= (MINUS
== code
) ? CC_SET_CZN
: CC_CLOBBER
;
6318 if (CONST_FIXED_P (xval
))
6319 xval
= avr_to_int_mode (xval
);
6321 /* Adding/Subtracting zero is a no-op. */
6323 if (xval
== const0_rtx
)
6330 xval
= simplify_unary_operation (NEG
, imode
, xval
, imode
);
6334 if (SS_PLUS
== code_sat
&& MINUS
== code
6336 && 0x80 == (INTVAL (simplify_gen_subreg (QImode
, xval
, imode
, n_bytes
-1))
6337 & GET_MODE_MASK (QImode
)))
6339 /* We compute x + 0x80 by means of SUB instructions. We negated the
6340 constant subtrahend above and are left with x - (-128) so that we
6341 need something like SUBI r,128 which does not exist because SUBI sets
6342 V according to the sign of the subtrahend. Notice the only case
6343 where this must be done is when NEG overflowed in case [2s] because
6344 the V computation needs the right sign of the subtrahend. */
6346 rtx msb
= simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6348 avr_asm_len ("subi %0,128" CR_TAB
6349 "brmi 0f", &msb
, plen
, 2);
6355 for (i
= 0; i
< n_bytes
; i
++)
6357 /* We operate byte-wise on the destination. */
6358 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6359 rtx xval8
= simplify_gen_subreg (QImode
, xval
, imode
, i
);
6361 /* 8-bit value to operate with this byte. */
6362 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6364 /* Registers R16..R31 can operate with immediate. */
6365 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6368 op
[1] = gen_int_mode (val8
, QImode
);
6370 /* To get usable cc0 no low-bytes must have been skipped. */
6378 && test_hard_reg_class (ADDW_REGS
, reg8
))
6380 rtx xval16
= simplify_gen_subreg (HImode
, xval
, imode
, i
);
6381 unsigned int val16
= UINTVAL (xval16
) & GET_MODE_MASK (HImode
);
6383 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6384 i.e. operate word-wise. */
6391 avr_asm_len (code
== PLUS
? "adiw %0,%1" : "sbiw %0,%1",
6394 if (n_bytes
== 2 && PLUS
== code
)
6406 avr_asm_len (code
== PLUS
6407 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6411 else if ((val8
== 1 || val8
== 0xff)
6412 && UNKNOWN
== code_sat
6414 && i
== n_bytes
- 1)
6416 avr_asm_len ((code
== PLUS
) ^ (val8
== 1) ? "dec %0" : "inc %0",
6425 gcc_assert (plen
!= NULL
|| (op
[2] && REG_P (op
[2])));
6427 if (plen
!= NULL
&& UNKNOWN
!= code_sat
)
6429 /* This belongs to the x + 0x80 corner case. The code with
6430 ADD instruction is not smaller, thus make this case
6431 expensive so that the caller won't pick it. */
6437 if (clobber_val
!= (int) val8
)
6438 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6439 clobber_val
= (int) val8
;
6441 avr_asm_len (started
? "adc %0,%2" : "add %0,%2", op
, plen
, 1);
6448 avr_asm_len (started
? "sbci %0,%1" : "subi %0,%1", op
, plen
, 1);
6451 gcc_assert (plen
!= NULL
|| REG_P (op
[2]));
6453 if (clobber_val
!= (int) val8
)
6454 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6455 clobber_val
= (int) val8
;
6457 avr_asm_len (started
? "sbc %0,%2" : "sub %0,%2", op
, plen
, 1);
6469 } /* for all sub-bytes */
6473 if (UNKNOWN
== code_sat
)
6476 *pcc
= (int) CC_CLOBBER
;
6478 /* Vanilla addition/subtraction is done. We are left with saturation.
6480 We have to compute A = A <op> B where A is a register and
6481 B is a register or a non-zero compile time constant CONST.
6482 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6483 B stands for the original operand $2 in INSN. In the case of B = CONST,
6484 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6486 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6490 operation | code | sat if | b is | sat value | case
6491 -----------------+-------+----------+--------------+-----------+-------
6492 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6493 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6494 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6495 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6499 operation | code | sat if | b is | sat value | case
6500 -----------------+-------+----------+--------------+-----------+-------
6501 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6502 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6503 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6504 - as a + (-b) | add | V == 1 | const | s- | [4s]
6506 s+ = b < 0 ? -0x80 : 0x7f
6507 s- = b < 0 ? 0x7f : -0x80
6509 The cases a - b actually perform a - (-(-b)) if B is CONST.
6512 op
[0] = simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-1);
6514 ? simplify_gen_subreg (QImode
, xop
[0], mode
, n_bytes
-2)
6517 bool need_copy
= true;
6518 int len_call
= 1 + AVR_HAVE_JMP_CALL
;
6529 avr_asm_len ("brvc 0f", op
, plen
, 1);
6531 if (reg_overlap_mentioned_p (xop
[0], xop
[2]))
6536 avr_asm_len ("ldi %0,0x7f" CR_TAB
6537 "adc %0,__zero_reg__", op
, plen
, 2);
6539 avr_asm_len ("ldi %0,0x7f" CR_TAB
6540 "ldi %1,0xff" CR_TAB
6541 "adc %1,__zero_reg__" CR_TAB
6542 "adc %0,__zero_reg__", op
, plen
, 4);
6544 else if (sign
== 0 && PLUS
== code
)
6548 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6551 avr_asm_len ("ldi %0,0x80" CR_TAB
6553 "dec %0", op
, plen
, 3);
6555 avr_asm_len ("ldi %0,0x80" CR_TAB
6558 "sbci %0,0", op
, plen
, 4);
6560 else if (sign
== 0 && MINUS
== code
)
6564 op
[2] = simplify_gen_subreg (QImode
, xop
[2], mode
, n_bytes
-1);
6567 avr_asm_len ("ldi %0,0x7f" CR_TAB
6569 "inc %0", op
, plen
, 3);
6571 avr_asm_len ("ldi %0,0x7f" CR_TAB
6574 "sbci %0,-1", op
, plen
, 4);
6576 else if ((sign
< 0) ^ (SS_MINUS
== code_sat
))
6578 /* [1s,const,B < 0] [2s,B < 0] */
6579 /* [3s,const,B > 0] [4s,B > 0] */
6583 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6587 avr_asm_len ("ldi %0,0x80", op
, plen
, 1);
6588 if (n_bytes
> 1 && need_copy
)
6589 avr_asm_len ("clr %1", op
, plen
, 1);
6591 else if ((sign
> 0) ^ (SS_MINUS
== code_sat
))
6593 /* [1s,const,B > 0] [2s,B > 0] */
6594 /* [3s,const,B < 0] [4s,B < 0] */
6598 avr_asm_len ("sec" CR_TAB
6599 "%~call __sbc_8", op
, plen
, 1 + len_call
);
6603 avr_asm_len ("ldi %0,0x7f", op
, plen
, 1);
6604 if (n_bytes
> 1 && need_copy
)
6605 avr_asm_len ("ldi %1,0xff", op
, plen
, 1);
6615 avr_asm_len (PLUS
== code
? "brcc 0f" : "brcs 0f", op
, plen
, 1);
6620 avr_asm_len ("sec", op
, plen
, 1);
6621 avr_asm_len ("%~call __sbc_8", op
, plen
, len_call
);
6627 if (MINUS
== code
&& !test_hard_reg_class (LD_REGS
, op
[0]))
6628 avr_asm_len ("sec" CR_TAB
"sbc %0,%0", op
, plen
, 2);
6630 avr_asm_len (PLUS
== code
? "sbc %0,%0" : "ldi %0,0xff",
6633 break; /* US_PLUS */
6638 avr_asm_len (PLUS
== code
? "brcs 0f" : "brcc 0f", op
, plen
, 1);
6642 avr_asm_len ("%~call __clr_8", op
, plen
, len_call
);
6646 avr_asm_len ("clr %0", op
, plen
, 1);
6651 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6652 Now copy the right value to the LSBs. */
6654 if (need_copy
&& n_bytes
> 1)
6656 if (US_MINUS
== code_sat
|| US_PLUS
== code_sat
)
6658 avr_asm_len ("mov %1,%0", op
, plen
, 1);
6664 avr_asm_len ("movw %0,%1", op
, plen
, 1);
6666 avr_asm_len ("mov %A0,%1" CR_TAB
6667 "mov %B0,%1", op
, plen
, 2);
6670 else if (n_bytes
> 2)
6673 avr_asm_len ("mov %A0,%1" CR_TAB
6674 "mov %B0,%1", op
, plen
, 2);
6678 if (need_copy
&& n_bytes
== 8)
6681 avr_asm_len ("movw %r0+2,%0" CR_TAB
6682 "movw %r0+4,%0", xop
, plen
, 2);
6684 avr_asm_len ("mov %r0+2,%0" CR_TAB
6685 "mov %r0+3,%0" CR_TAB
6686 "mov %r0+4,%0" CR_TAB
6687 "mov %r0+5,%0", xop
, plen
, 4);
6691 avr_asm_len ("0:", op
, plen
, 0);
6695 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6696 is ont a compile-time constant:
6698 XOP[0] = XOP[0] +/- XOP[2]
6700 This is a helper for the function below. The only insns that need this
6701 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
6704 avr_out_plus_symbol (rtx
*xop
, enum rtx_code code
, int *plen
, int *pcc
)
6706 enum machine_mode mode
= GET_MODE (xop
[0]);
6708 /* Only pointer modes want to add symbols. */
6710 gcc_assert (mode
== HImode
|| mode
== PSImode
);
6712 *pcc
= MINUS
== code
? (int) CC_SET_CZN
: (int) CC_SET_N
;
6714 avr_asm_len (PLUS
== code
6715 ? "subi %A0,lo8(-(%2))" CR_TAB
"sbci %B0,hi8(-(%2))"
6716 : "subi %A0,lo8(%2)" CR_TAB
"sbci %B0,hi8(%2)",
6719 if (PSImode
== mode
)
6720 avr_asm_len (PLUS
== code
6721 ? "sbci %C0,hlo8(-(%2))"
6722 : "sbci %C0,hlo8(%2)", xop
, plen
, 1);
6727 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6729 INSN is a single_set insn or an insn pattern with a binary operation as
6730 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6732 XOP are the operands of INSN. In the case of 64-bit operations with
6733 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6734 The non-saturating insns up to 32 bits may or may not supply a "d" class
6737 If PLEN == NULL output the instructions.
6738 If PLEN != NULL set *PLEN to the length of the sequence in words.
6740 PCC is a pointer to store the instructions' effect on cc0.
6743 PLEN and PCC default to NULL.
6745 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
6750 avr_out_plus (rtx insn
, rtx
*xop
, int *plen
, int *pcc
, bool out_label
)
6752 int cc_plus
, cc_minus
, cc_dummy
;
6753 int len_plus
, len_minus
;
6755 rtx xpattern
= INSN_P (insn
) ? single_set (insn
) : insn
;
6756 rtx xdest
= SET_DEST (xpattern
);
6757 enum machine_mode mode
= GET_MODE (xdest
);
6758 enum machine_mode imode
= int_mode_for_mode (mode
);
6759 int n_bytes
= GET_MODE_SIZE (mode
);
6760 enum rtx_code code_sat
= GET_CODE (SET_SRC (xpattern
));
6762 = (PLUS
== code_sat
|| SS_PLUS
== code_sat
|| US_PLUS
== code_sat
6768 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6770 if (PLUS
== code_sat
|| MINUS
== code_sat
)
6773 if (n_bytes
<= 4 && REG_P (xop
[2]))
6775 avr_out_plus_1 (xop
, plen
, code
, pcc
, code_sat
, 0, out_label
);
6781 op
[0] = gen_rtx_REG (DImode
, ACC_A
);
6782 op
[1] = gen_rtx_REG (DImode
, ACC_A
);
6783 op
[2] = avr_to_int_mode (xop
[0]);
6788 && !CONST_INT_P (xop
[2])
6789 && !CONST_FIXED_P (xop
[2]))
6791 return avr_out_plus_symbol (xop
, code
, plen
, pcc
);
6794 op
[0] = avr_to_int_mode (xop
[0]);
6795 op
[1] = avr_to_int_mode (xop
[1]);
6796 op
[2] = avr_to_int_mode (xop
[2]);
6799 /* Saturations and 64-bit operations don't have a clobber operand.
6800 For the other cases, the caller will provide a proper XOP[3]. */
6802 xpattern
= INSN_P (insn
) ? PATTERN (insn
) : insn
;
6803 op
[3] = PARALLEL
== GET_CODE (xpattern
) ? xop
[3] : NULL_RTX
;
6805 /* Saturation will need the sign of the original operand. */
6807 rtx xmsb
= simplify_gen_subreg (QImode
, op
[2], imode
, n_bytes
-1);
6808 int sign
= INTVAL (xmsb
) < 0 ? -1 : 1;
6810 /* If we subtract and the subtrahend is a constant, then negate it
6811 so that avr_out_plus_1 can be used. */
6814 op
[2] = simplify_unary_operation (NEG
, imode
, op
[2], imode
);
6816 /* Work out the shortest sequence. */
6818 avr_out_plus_1 (op
, &len_minus
, MINUS
, &cc_plus
, code_sat
, sign
, out_label
);
6819 avr_out_plus_1 (op
, &len_plus
, PLUS
, &cc_minus
, code_sat
, sign
, out_label
);
6823 *plen
= (len_minus
<= len_plus
) ? len_minus
: len_plus
;
6824 *pcc
= (len_minus
<= len_plus
) ? cc_minus
: cc_plus
;
6826 else if (len_minus
<= len_plus
)
6827 avr_out_plus_1 (op
, NULL
, MINUS
, pcc
, code_sat
, sign
, out_label
);
6829 avr_out_plus_1 (op
, NULL
, PLUS
, pcc
, code_sat
, sign
, out_label
);
/* NOTE(review): This region is a garbled extraction -- each logical source
   line is split across several physical lines, prefixed with its original
   line number, and some lines are missing entirely.  The code text below is
   kept byte-identical; only comment lines were added.  Restore from a clean
   copy of the file before compiling.

   Purpose (from the visible code): emit byte-wise IOR/AND/XOR of register
   XOP[0] with constant XOP[2], choosing per byte between immediate forms
   (ORI/ANDI/SUBI on "ld" registers), bit insertion via SET/CLT + BLD, COM
   for full inversion, CLR for zero bytes, or an LDI into the clobber
   register op[2] (cached via clobber_val) followed by OR/AND/EOR.  */
6835 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6836 time constant XOP[2]:
6838 XOP[0] = XOP[0] <op> XOP[2]
6840 and return "". If PLEN == NULL, print assembler instructions to perform the
6841 operation; otherwise, set *PLEN to the length of the instruction sequence
6842 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6843 register or SCRATCH if no clobber register is needed for the operation.
6844 INSN is an INSN_P or a pattern of an insn. */
6847 avr_out_bitop (rtx insn
, rtx
*xop
, int *plen
)
6849 /* CODE and MODE of the operation. */
6850 rtx xpattern
= INSN_P (insn
) ? single_set (insn
) : insn
;
6851 enum rtx_code code
= GET_CODE (SET_SRC (xpattern
));
6852 enum machine_mode mode
= GET_MODE (xop
[0]);
6854 /* Number of bytes to operate on. */
6855 int i
, n_bytes
= GET_MODE_SIZE (mode
);
6857 /* Value of T-flag (0 or 1) or -1 if unknow. */
6860 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6861 int clobber_val
= -1;
6863 /* op[0]: 8-bit destination register
6864 op[1]: 8-bit const int
6865 op[2]: 8-bit clobber register or SCRATCH
6866 op[3]: 8-bit register containing 0xff or NULL_RTX */
/* Main loop: handle each byte of the operand independently.  */
6875 for (i
= 0; i
< n_bytes
; i
++)
6877 /* We operate byte-wise on the destination. */
6878 rtx reg8
= simplify_gen_subreg (QImode
, xop
[0], mode
, i
);
6879 rtx xval8
= simplify_gen_subreg (QImode
, xop
[2], mode
, i
);
6881 /* 8-bit value to operate with this byte. */
6882 unsigned int val8
= UINTVAL (xval8
) & GET_MODE_MASK (QImode
);
6884 /* Number of bits set in the current byte of the constant. */
6885 int pop8
= avr_popcount (val8
);
6887 /* Registers R16..R31 can operate with immediate. */
6888 bool ld_reg_p
= test_hard_reg_class (LD_REGS
, reg8
);
6891 op
[1] = GEN_INT (val8
);
/* IOR cases below: immediate ORI, single-bit SET+BLD, or 0xff fill.  */
6900 avr_asm_len ("ori %0,%1", op
, plen
, 1);
6904 avr_asm_len ("set", op
, plen
, 1);
6907 op
[1] = GEN_INT (exact_log2 (val8
));
6908 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6912 if (op
[3] != NULL_RTX
)
6913 avr_asm_len ("mov %0,%3", op
, plen
, 1);
6915 avr_asm_len ("clr %0" CR_TAB
6916 "dec %0", op
, plen
, 2);
/* Fallback: load the byte into the clobber register once (cached in
   clobber_val) and OR it in.  */
6922 if (clobber_val
!= (int) val8
)
6923 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6924 clobber_val
= (int) val8
;
6926 avr_asm_len ("or %0,%2", op
, plen
, 1);
/* AND cases: CLR for zero, immediate ANDI, single-zero-bit CLT+BLD,
   or clobber-register AND.  */
6936 avr_asm_len ("clr %0", op
, plen
, 1);
6938 avr_asm_len ("andi %0,%1", op
, plen
, 1);
6942 avr_asm_len ("clt", op
, plen
, 1);
6945 op
[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode
) & ~val8
));
6946 avr_asm_len ("bld %0,%1", op
, plen
, 1);
6950 if (clobber_val
!= (int) val8
)
6951 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6952 clobber_val
= (int) val8
;
6954 avr_asm_len ("and %0,%2", op
, plen
, 1);
/* XOR cases: COM for 0xff, SUBI for 0x80 on an "ld" register (mod-256
   subtraction of 0x80 flips exactly bit 7), else clobber-register EOR.  */
6964 avr_asm_len ("com %0", op
, plen
, 1);
6965 else if (ld_reg_p
&& val8
== (1 << 7))
6966 avr_asm_len ("subi %0,%1", op
, plen
, 1);
6969 if (clobber_val
!= (int) val8
)
6970 avr_asm_len ("ldi %2,%1", op
, plen
, 1);
6971 clobber_val
= (int) val8
;
6973 avr_asm_len ("eor %0,%2", op
, plen
, 1);
6979 /* Unknown rtx_code */
6982 } /* for all sub-bytes */
6988 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6989 PLEN != NULL: Set *PLEN to the length of that sequence.
6993 avr_out_addto_sp (rtx
*op
, int *plen
)
6995 int pc_len
= AVR_2_BYTE_PC
? 2 : 3;
6996 int addend
= INTVAL (op
[0]);
7003 if (flag_verbose_asm
|| flag_print_asm_name
)
7004 avr_asm_len (ASM_COMMENT_START
"SP -= %n0", op
, plen
, 0);
7006 while (addend
<= -pc_len
)
7009 avr_asm_len ("rcall .", op
, plen
, 1);
7012 while (addend
++ < 0)
7013 avr_asm_len ("push __zero_reg__", op
, plen
, 1);
7015 else if (addend
> 0)
7017 if (flag_verbose_asm
|| flag_print_asm_name
)
7018 avr_asm_len (ASM_COMMENT_START
"SP += %0", op
, plen
, 0);
7020 while (addend
-- > 0)
7021 avr_asm_len ("pop __tmp_reg__", op
, plen
, 1);
/* NOTE(review): This region is a garbled extraction -- each logical source
   line is split across several physical lines, prefixed with its original
   line number, and some lines are missing entirely.  The code text below is
   kept byte-identical; only comment lines were added.  Restore from a clean
   copy of the file before compiling.  */
7028 /* Outputs instructions needed for fixed point type conversion.
7029 This includes converting between any fixed point type, as well
7030 as converting to any integer type. Conversion between integer
7031 types is not supported.
7033 Converting signed fractional types requires a bit shift if converting
7034 to or from any unsigned fractional type because the decimal place is
7035 shifted by 1 bit. When the destination is a signed fractional, the sign
7036 is stored in either the carry or T bit. */
7039 avr_out_fract (rtx insn
, rtx operands
[], bool intsigned
, int *plen
)
7043 RTX_CODE shift
= UNKNOWN
;
7044 bool sign_in_carry
= false;
7045 bool msb_in_carry
= false;
7046 bool lsb_in_tmp_reg
= false;
7047 bool lsb_in_carry
= false;
7048 bool frac_rounded
= false;
7049 const char *code_ashift
= "lsl %0";
7052 #define MAY_CLOBBER(RR) \
7053 /* Shorthand used below. */ \
7055 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7056 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7057 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7058 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7062 /* bytes : Length of operand in bytes.
7063 ibyte : Length of integral part in bytes.
7064 fbyte, fbit : Length of fractional part in bytes, bits. */
7067 unsigned fbit
, bytes
, ibyte
, fbyte
;
7068 unsigned regno
, regno_msb
;
7069 } dest
, src
, *val
[2] = { &dest
, &src
};
7074 /* Step 0: Determine information on source and destination operand we
7075 ====== will need in the remainder. */
7077 for (i
= 0; i
< sizeof (val
) / sizeof (*val
); i
++)
7079 enum machine_mode mode
;
7081 xop
[i
] = operands
[i
];
7083 mode
= GET_MODE (xop
[i
]);
7085 val
[i
]->bytes
= GET_MODE_SIZE (mode
);
7086 val
[i
]->regno
= REGNO (xop
[i
]);
7087 val
[i
]->regno_msb
= REGNO (xop
[i
]) + val
[i
]->bytes
- 1;
7089 if (SCALAR_INT_MODE_P (mode
))
7091 val
[i
]->sbit
= intsigned
;
7094 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
7096 val
[i
]->sbit
= SIGNED_SCALAR_FIXED_POINT_MODE_P (mode
);
7097 val
[i
]->fbit
= GET_MODE_FBIT (mode
);
7100 fatal_insn ("unsupported fixed-point conversion", insn
);
7102 val
[i
]->fbyte
= (1 + val
[i
]->fbit
) / BITS_PER_UNIT
;
7103 val
[i
]->ibyte
= val
[i
]->bytes
- val
[i
]->fbyte
;
7106 // Byte offset of the decimal point taking into account different place
7107 // of the decimal point in input and output and different register numbers
7108 // of input and output.
7109 int offset
= dest
.regno
- src
.regno
+ dest
.fbyte
- src
.fbyte
;
7111 // Number of destination bytes that will come from sign / zero extension.
7112 int sign_bytes
= (dest
.ibyte
- src
.ibyte
) * (dest
.ibyte
> src
.ibyte
);
7114 // Number of bytes at the low end to be filled with zeros.
7115 int zero_bytes
= (dest
.fbyte
- src
.fbyte
) * (dest
.fbyte
> src
.fbyte
);
7117 // Do we have a 16-Bit register that is cleared?
7118 rtx clrw
= NULL_RTX
;
7120 bool sign_extend
= src
.sbit
&& sign_bytes
;
/* Decide whether a 1-bit left or right shift is needed to move the
   binary point between signed and unsigned fractional layouts.  */
7122 if (0 == dest
.fbit
% 8 && 7 == src
.fbit
% 8)
7124 else if (7 == dest
.fbit
% 8 && 0 == src
.fbit
% 8)
7126 else if (dest
.fbit
% 8 == src
.fbit
% 8)
7131 /* If we need to round the fraction part, we might need to save/round it
7132 before clobbering any of it in Step 1. Also, we might to want to do
7133 the rounding now to make use of LD_REGS. */
7134 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7135 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7136 && !TARGET_FRACT_CONV_TRUNC
)
7140 (offset
? dest
.regno_msb
- sign_bytes
: dest
.regno
+ zero_bytes
- 1)
7141 && dest
.regno
- offset
-1 >= dest
.regno
);
7142 unsigned s0
= dest
.regno
- offset
-1;
7143 bool use_src
= true;
7145 unsigned copied_msb
= src
.regno_msb
;
7146 bool have_carry
= false;
7148 if (src
.ibyte
> dest
.ibyte
)
7149 copied_msb
-= src
.ibyte
- dest
.ibyte
;
7151 for (sn
= s0
; sn
<= copied_msb
; sn
++)
7152 if (!IN_RANGE (sn
, dest
.regno
, dest
.regno_msb
)
7153 && !reg_unused_after (insn
, all_regs_rtx
[sn
]))
7155 if (use_src
&& TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
))
7157 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7158 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7162 if (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], sn
))
7163 avr_asm_len ("cpi %0,1", &all_regs_rtx
[sn
], plen
, 1);
7165 avr_asm_len ("sec" CR_TAB
"cpc %0,__zero_reg__",
7166 &all_regs_rtx
[sn
], plen
, 2);
7170 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7171 avr_asm_len (have_carry
? "sbci %0,128" : "subi %0,129",
7172 &all_regs_rtx
[s0
], plen
, 1);
7173 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
7174 avr_asm_len ("sbci %0,255", &all_regs_rtx
[sn
], plen
, 1);
7175 avr_asm_len ("\n0:", NULL
, plen
, 0);
7176 frac_rounded
= true;
7178 else if (use_src
&& overlap
)
7180 avr_asm_len ("clr __tmp_reg__" CR_TAB
7181 "sbrc %1,0" CR_TAB
"dec __tmp_reg__", xop
, plen
, 1);
7185 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
7189 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx
[sn
], plen
, 1);
7191 avr_asm_len ("clt" CR_TAB
"bld __tmp_reg__,7" CR_TAB
7192 "adc %0,__tmp_reg__",
7193 &all_regs_rtx
[s0
], plen
, 1);
/* NOTE(review): "lsr __tmp_reg" below looks like it is missing the
   trailing "__" -- everywhere else the temporary is spelled
   "__tmp_reg__".  Verify against the upstream file.  */
7195 avr_asm_len ("lsr __tmp_reg" CR_TAB
"add %0,__tmp_reg__",
7196 &all_regs_rtx
[s0
], plen
, 2);
7197 for (sn
= src
.regno
+ src
.fbyte
; sn
<= copied_msb
; sn
++)
7198 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7199 frac_rounded
= true;
7204 = (TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
], s0
)
7205 && (IN_RANGE (s0
, dest
.regno
, dest
.regno_msb
)
7206 || reg_unused_after (insn
, all_regs_rtx
[s0
])));
7207 xop
[2] = all_regs_rtx
[s0
];
7208 unsigned sn
= src
.regno
;
7209 if (!use_src
|| sn
== s0
)
7210 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
7211 /* We need to consider to-be-discarded bits
7212 if the value is negative. */
7215 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7216 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7217 /* Test to-be-discarded bytes for any nozero bits.
7218 ??? Could use OR or SBIW to test two registers at once. */
7220 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7222 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx
[sn
], plen
, 1);
7223 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7225 avr_asm_len ("breq 0f" CR_TAB
7226 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7229 avr_asm_len ("breq 0f" CR_TAB
7230 "set" CR_TAB
"bld __tmp_reg__,0\n0:",
7233 lsb_in_tmp_reg
= true;
7237 /* Step 1: Clear bytes at the low end and copy payload bits from source
7238 ====== to destination. */
7240 int step
= offset
< 0 ? 1 : -1;
7241 unsigned d0
= offset
< 0 ? dest
.regno
: dest
.regno_msb
;
7243 // We cleared at least that number of registers.
7246 for (; d0
>= dest
.regno
&& d0
<= dest
.regno_msb
; d0
+= step
)
7248 // Next regno of destination is needed for MOVW
7249 unsigned d1
= d0
+ step
;
7251 // Current and next regno of source
7252 signed s0
= d0
- offset
;
7253 signed s1
= s0
+ step
;
7255 // Must current resp. next regno be CLRed? This applies to the low
7256 // bytes of the destination that have no associated source bytes.
7257 bool clr0
= s0
< (signed) src
.regno
;
7258 bool clr1
= s1
< (signed) src
.regno
&& d1
>= dest
.regno
;
7260 // First gather what code to emit (if any) and additional step to
7261 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7262 // is the source rtx for the current loop iteration.
7263 const char *code
= NULL
;
7268 if (AVR_HAVE_MOVW
&& clr1
&& clrw
)
7270 xop
[2] = all_regs_rtx
[d0
& ~1];
7272 code
= "movw %2,%3";
7277 xop
[2] = all_regs_rtx
[d0
];
7282 && d0
% 2 == (step
> 0))
7284 clrw
= all_regs_rtx
[d0
& ~1];
7288 else if (offset
&& s0
<= (signed) src
.regno_msb
)
7290 int movw
= AVR_HAVE_MOVW
&& offset
% 2 == 0
7291 && d0
% 2 == (offset
> 0)
7292 && d1
<= dest
.regno_msb
&& d1
>= dest
.regno
7293 && s1
<= (signed) src
.regno_msb
&& s1
>= (signed) src
.regno
;
7295 xop
[2] = all_regs_rtx
[d0
& ~movw
];
7296 xop
[3] = all_regs_rtx
[s0
& ~movw
];
7297 code
= movw
? "movw %2,%3" : "mov %2,%3";
7298 stepw
= step
* movw
;
7303 if (sign_extend
&& shift
!= ASHIFT
&& !sign_in_carry
7304 && (d0
== src
.regno_msb
|| d0
+ stepw
== src
.regno_msb
))
7306 /* We are going to override the sign bit. If we sign-extend,
7307 store the sign in the Carry flag. This is not needed if
7308 the destination will be ASHIFT is the remainder because
7309 the ASHIFT will set Carry without extra instruction. */
7311 avr_asm_len ("lsl %0", &all_regs_rtx
[src
.regno_msb
], plen
, 1);
7312 sign_in_carry
= true;
7315 unsigned src_msb
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
7317 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
7318 && src
.ibyte
> dest
.ibyte
7319 && (d0
== src_msb
|| d0
+ stepw
== src_msb
))
7321 /* We are going to override the MSB. If we shift right,
7322 store the MSB in the Carry flag. This is only needed if
7323 we don't sign-extend becaue with sign-extension the MSB
7324 (the sign) will be produced by the sign extension. */
7326 avr_asm_len ("lsr %0", &all_regs_rtx
[src_msb
], plen
, 1);
7327 msb_in_carry
= true;
7330 unsigned src_lsb
= dest
.regno
- offset
-1;
7332 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
7334 && (d0
== src_lsb
|| d0
+ stepw
== src_lsb
))
7336 /* We are going to override the new LSB; store it into carry. */
7338 avr_asm_len ("lsl %0", &all_regs_rtx
[src_lsb
], plen
, 1);
7339 code_ashift
= "rol %0";
7340 lsb_in_carry
= true;
7343 avr_asm_len (code
, xop
, plen
, 1);
7348 /* Step 2: Shift destination left by 1 bit position. This might be needed
7349 ====== for signed input and unsigned output. */
7351 if (shift
== ASHIFT
&& src
.fbyte
> dest
.fbyte
&& !lsb_in_carry
)
7353 unsigned s0
= dest
.regno
- offset
-1;
7355 /* n1169 4.1.4 says:
7356 "Conversions from a fixed-point to an integer type round toward zero."
7357 Hence, converting a fract type to integer only gives a non-zero result
7359 if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7360 && SCALAR_FRACT_MODE_P (GET_MODE (xop
[1]))
7361 && !TARGET_FRACT_CONV_TRUNC
)
7363 gcc_assert (s0
== src
.regno_msb
);
7364 /* Check if the input is -1. We do that by checking if negating
7365 the input causes an integer overflow. */
7366 unsigned sn
= src
.regno
;
7367 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
7369 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
++], plen
, 1);
7371 /* Overflow goes with set carry. Clear carry otherwise. */
7372 avr_asm_len ("brvs 0f" CR_TAB
"clc\n0:", NULL
, plen
, 2);
7374 /* Likewise, when converting from accumulator types to integer, we
7375 need to round up negative values. */
7376 else if (SCALAR_INT_MODE_P (GET_MODE (xop
[0]))
7377 && SCALAR_ACCUM_MODE_P (GET_MODE (xop
[1]))
7378 && !TARGET_FRACT_CONV_TRUNC
7381 bool have_carry
= false;
7383 xop
[2] = all_regs_rtx
[s0
];
7384 if (!lsb_in_tmp_reg
&& !MAY_CLOBBER (s0
))
7385 avr_asm_len ("mov __tmp_reg__,%2", xop
, plen
, 1);
7386 avr_asm_len ("tst %0" CR_TAB
"brpl 0f",
7387 &all_regs_rtx
[src
.regno_msb
], plen
, 2);
7388 if (!lsb_in_tmp_reg
)
7390 unsigned sn
= src
.regno
;
7393 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx
[sn
],
7398 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx
[sn
], plen
, 1);
7399 lsb_in_tmp_reg
= !MAY_CLOBBER (s0
);
7401 /* Add in C and the rounding value 127. */
7402 /* If the destination msb is a sign byte, and in LD_REGS,
7403 grab it as a temporary. */
7405 && TEST_HARD_REG_BIT (reg_class_contents
[LD_REGS
],
7408 xop
[3] = all_regs_rtx
[dest
.regno_msb
];
7409 avr_asm_len ("ldi %3,127", xop
, plen
, 1);
7410 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
? "adc __tmp_reg__,%3"
7411 : have_carry
? "adc %2,%3"
7412 : lsb_in_tmp_reg
? "add __tmp_reg__,%3"
7418 /* Fall back to use __zero_reg__ as a temporary. */
7419 avr_asm_len ("dec __zero_reg__", NULL
, plen
, 1);
7421 avr_asm_len ("clt" CR_TAB
"bld __zero_reg__,7", NULL
, plen
, 2);
7423 avr_asm_len ("lsr __zero_reg__", NULL
, plen
, 1);
7424 avr_asm_len ((have_carry
&& lsb_in_tmp_reg
7425 ? "adc __tmp_reg__,__zero_reg__"
7426 : have_carry
? "adc %2,__zero_reg__"
7427 : lsb_in_tmp_reg
? "add __tmp_reg__,__zero_reg__"
7428 : "add %2,__zero_reg__"),
7430 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL
, plen
, 1);
7432 for (d0
= dest
.regno
+ zero_bytes
;
7433 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
7434 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx
[d0
], plen
, 1);
7435 avr_asm_len (lsb_in_tmp_reg
7436 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7439 else if (MAY_CLOBBER (s0
))
7440 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
7442 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7443 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7445 code_ashift
= "rol %0";
7446 lsb_in_carry
= true;
7449 if (shift
== ASHIFT
)
7451 for (d0
= dest
.regno
+ zero_bytes
;
7452 d0
<= dest
.regno_msb
- sign_bytes
; d0
++)
7454 avr_asm_len (code_ashift
, &all_regs_rtx
[d0
], plen
, 1);
7455 code_ashift
= "rol %0";
7458 lsb_in_carry
= false;
7459 sign_in_carry
= true;
7462 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7463 ======= it in sign-extension below. */
7465 if (!sign_extend
&& shift
== ASHIFTRT
&& !msb_in_carry
7466 && src
.ibyte
> dest
.ibyte
)
7468 unsigned s0
= dest
.regno_msb
- sign_bytes
- offset
+ 1;
7470 if (MAY_CLOBBER (s0
))
7471 avr_asm_len ("lsr %0", &all_regs_rtx
[s0
], plen
, 1);
7473 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7474 "lsr __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7476 msb_in_carry
= true;
7479 /* Step 3: Sign-extend or zero-extend the destination as needed.
7482 if (sign_extend
&& !sign_in_carry
)
7484 unsigned s0
= src
.regno_msb
;
7486 if (MAY_CLOBBER (s0
))
7487 avr_asm_len ("lsl %0", &all_regs_rtx
[s0
], plen
, 1);
7489 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7490 "lsl __tmp_reg__", &all_regs_rtx
[s0
], plen
, 2);
7492 sign_in_carry
= true;
/* At most one of the three "in carry" conditions may hold at a time.  */
7495 gcc_assert (sign_in_carry
+ msb_in_carry
+ lsb_in_carry
<= 1);
7497 unsigned copies
= 0;
7498 rtx movw
= sign_extend
? NULL_RTX
: clrw
;
7500 for (d0
= dest
.regno_msb
- sign_bytes
+ 1; d0
<= dest
.regno_msb
; d0
++)
7502 if (AVR_HAVE_MOVW
&& movw
7503 && d0
% 2 == 0 && d0
+ 1 <= dest
.regno_msb
)
7505 xop
[2] = all_regs_rtx
[d0
];
7507 avr_asm_len ("movw %2,%3", xop
, plen
, 1);
7512 avr_asm_len (sign_extend
? "sbc %0,%0" : "clr %0",
7513 &all_regs_rtx
[d0
], plen
, 1);
7515 if (++copies
>= 2 && !movw
&& d0
% 2 == 1)
7516 movw
= all_regs_rtx
[d0
-1];
7521 /* Step 4: Right shift the destination. This might be needed for
7522 ====== conversions from unsigned to signed. */
7524 if (shift
== ASHIFTRT
)
7526 const char *code_ashiftrt
= "lsr %0";
7528 if (sign_extend
|| msb_in_carry
)
7529 code_ashiftrt
= "ror %0";
7531 if (src
.sbit
&& src
.ibyte
== dest
.ibyte
)
7532 code_ashiftrt
= "asr %0";
7534 for (d0
= dest
.regno_msb
- sign_bytes
;
7535 d0
>= dest
.regno
+ zero_bytes
- 1 && d0
>= dest
.regno
; d0
--)
7537 avr_asm_len (code_ashiftrt
, &all_regs_rtx
[d0
], plen
, 1);
7538 code_ashiftrt
= "ror %0";
/* NOTE(review): This region is a garbled extraction -- each logical source
   line is split across several physical lines, prefixed with its original
   line number, and some lines are missing entirely.  The code text below is
   kept byte-identical; only comment lines were added.

   Purpose (from the visible code): round XOP[0] = XOP[1] at rounding point
   XOP[2] by (1) saturating-adding 1/2 ulp of the rounding point via
   avr_out_plus, jumping over the saturation label with "rjmp 1f", and then
   (2) masking away the low fractional bits with avr_out_bitop.  The mask
   -wi_add - wi_add == -2*wi_add keeps bits from 2^(-RP) upward.
   NOTE(review): this snapshot mixes double_int (for the CONST_FIXED
   addend) and wide_int (for the AND mask) -- apparently a transitional
   wide-int-era revision; confirm against the upstream file.  */
7548 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
7549 XOP[2] is the rounding point, a CONST_INT. The function prints the
7550 instruction sequence if PLEN = NULL and computes the length in words
7551 of the sequence if PLEN != NULL. Most of this function deals with
7552 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
7555 avr_out_round (rtx insn ATTRIBUTE_UNUSED
, rtx
*xop
, int *plen
)
7557 enum machine_mode mode
= GET_MODE (xop
[0]);
7558 enum machine_mode imode
= int_mode_for_mode (mode
);
7559 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
7560 int fbit
= (int) GET_MODE_FBIT (mode
);
7561 double_int i_add
= double_int_zero
.set_bit (fbit
-1 - INTVAL (xop
[2]));
7562 wide_int wi_add
= wi::set_bit_in_zero (fbit
-1 - INTVAL (xop
[2]),
7563 GET_MODE_PRECISION (imode
));
7564 // Lengths of PLUS and AND parts.
7565 int len_add
= 0, *plen_add
= plen
? &len_add
: NULL
;
7566 int len_and
= 0, *plen_and
= plen
? &len_and
: NULL
;
7568 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
7569 // the saturated addition so that we can emit the "rjmp 1f" before the
7572 rtx xadd
= const_fixed_from_double_int (i_add
, mode
);
7573 rtx xpattern
, xsrc
, op
[4];
7575 xsrc
= SIGNED_FIXED_POINT_MODE_P (mode
)
7576 ? gen_rtx_SS_PLUS (mode
, xop
[1], xadd
)
7577 : gen_rtx_US_PLUS (mode
, xop
[1], xadd
);
7578 xpattern
= gen_rtx_SET (VOIDmode
, xop
[0], xsrc
);
7583 avr_out_plus (xpattern
, op
, plen_add
, NULL
, false /* Don't print "0:" */);
7585 avr_asm_len ("rjmp 1f" CR_TAB
7586 "0:", NULL
, plen_add
, 1);
7588 // Keep all bits from RP and higher: ... 2^(-RP)
7589 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
7590 // Rounding point ^^^^^^^
7591 // Added above ^^^^^^^^^
7592 rtx xreg
= simplify_gen_subreg (imode
, xop
[0], mode
, 0);
7593 rtx xmask
= immed_wide_int_const (-wi_add
- wi_add
, imode
);
7595 xpattern
= gen_rtx_SET (VOIDmode
, xreg
, gen_rtx_AND (imode
, xreg
, xmask
));
7600 op
[3] = gen_rtx_SCRATCH (QImode
);
7601 avr_out_bitop (xpattern
, op
, plen_and
);
7602 avr_asm_len ("1:", NULL
, plen
, 0);
7605 *plen
= len_add
+ len_and
;
/* NOTE(review): This region is a garbled extraction -- each logical source
   line is split across several physical lines, prefixed with its original
   line number, and some lines are missing entirely.  The code text below is
   kept byte-identical; only comment lines were added.

   Purpose (from the visible code): expand a byte-granular rotate into a
   series of subreg moves.  A dependency list ("move[].links") records when
   one move's destination is another's source; non-conflicting moves are
   emitted repeatedly, and a remaining cycle (deadlock) is broken by routing
   the blocked destination through the scratch register.  */
7611 /* Create RTL split patterns for byte sized rotate expressions. This
7612 produces a series of move instructions and considers overlap situations.
7613 Overlapping non-HImode operands need a scratch register. */
7616 avr_rotate_bytes (rtx operands
[])
7619 enum machine_mode mode
= GET_MODE (operands
[0]);
7620 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
7621 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
7622 int num
= INTVAL (operands
[2]);
7623 rtx scratch
= operands
[3];
7624 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
7625 Word move if no scratch is needed, otherwise use size of scratch. */
7626 enum machine_mode move_mode
= QImode
;
7627 int move_size
, offset
, size
;
7631 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
7634 move_mode
= GET_MODE (scratch
);
7636 /* Force DI rotate to use QI moves since other DI moves are currently split
7637 into QI moves so forward propagation works better. */
7640 /* Make scratch smaller if needed. */
7641 if (SCRATCH
!= GET_CODE (scratch
)
7642 && HImode
== GET_MODE (scratch
)
7643 && QImode
== move_mode
)
7644 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
7646 move_size
= GET_MODE_SIZE (move_mode
);
7647 /* Number of bytes/words to rotate. */
7648 offset
= (num
>> 3) / move_size
;
7649 /* Number of moves needed. */
7650 size
= GET_MODE_SIZE (mode
) / move_size
;
7651 /* Himode byte swap is special case to avoid a scratch register. */
7652 if (mode
== HImode
&& same_reg
)
7654 /* HImode byte swap, using xor. This is as quick as using scratch. */
7656 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
7657 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
7658 if (!rtx_equal_p (dst
, src
))
/* Classic three-XOR swap of the two bytes, no temporary needed.  */
7660 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
7661 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
7662 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
7667 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7668 /* Create linked list of moves to determine move order. */
7672 } move
[MAX_SIZE
+ 8];
7675 gcc_assert (size
<= MAX_SIZE
);
7676 /* Generate list of subreg moves. */
7677 for (i
= 0; i
< size
; i
++)
7680 int to
= (from
+ offset
) % size
;
7681 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
7682 mode
, from
* move_size
);
7683 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
7684 mode
, to
* move_size
);
7687 /* Mark dependence where a dst of one move is the src of another move.
7688 The first move is a conflict as it must wait until second is
7689 performed. We ignore moves to self - we catch this later. */
7691 for (i
= 0; i
< size
; i
++)
7692 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
7693 for (j
= 0; j
< size
; j
++)
7694 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
7696 /* The dst of move i is the src of move j. */
7703 /* Go through move list and perform non-conflicting moves. As each
7704 non-overlapping move is made, it may remove other conflicts
7705 so the process is repeated until no conflicts remain. */
7710 /* Emit move where dst is not also a src or we have used that
7712 for (i
= 0; i
< size
; i
++)
7713 if (move
[i
].src
!= NULL_RTX
)
7715 if (move
[i
].links
== -1
7716 || move
[move
[i
].links
].src
== NULL_RTX
)
7719 /* Ignore NOP moves to self. */
7720 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
7721 emit_move_insn (move
[i
].dst
, move
[i
].src
);
7723 /* Remove conflict from list. */
7724 move
[i
].src
= NULL_RTX
;
7730 /* Check for deadlock. This is when no moves occurred and we have
7731 at least one blocked move. */
7732 if (moves
== 0 && blocked
!= -1)
7734 /* Need to use scratch register to break deadlock.
7735 Add move to put dst of blocked move into scratch.
7736 When this move occurs, it will break chain deadlock.
7737 The scratch register is substituted for real move. */
7739 gcc_assert (SCRATCH
!= GET_CODE (scratch
));
7741 move
[size
].src
= move
[blocked
].dst
;
7742 move
[size
].dst
= scratch
;
7743 /* Scratch move is never blocked. */
7744 move
[size
].links
= -1;
7745 /* Make sure we have valid link. */
7746 gcc_assert (move
[blocked
].links
!= -1);
7747 /* Replace src of blocking move with scratch reg. */
7748 move
[move
[blocked
].links
].src
= scratch
;
7749 /* Make dependent on scratch move occurring. */
7750 move
[blocked
].links
= size
;
7754 while (blocked
!= -1);
7760 /* Worker function for `ADJUST_INSN_LENGTH'. */
7761 /* Modifies the length assigned to instruction INSN
7762 LEN is the initially computed length of the insn. */
7765 avr_adjust_insn_length (rtx insn
, int len
)
7767 rtx
*op
= recog_data
.operand
;
7768 enum attr_adjust_len adjust_len
;
7770 /* Some complex insns don't need length adjustment and therefore
7771 the length need not/must not be adjusted for these insns.
7772 It is easier to state this in an insn attribute "adjust_len" than
7773 to clutter up code here... */
7775 if (-1 == recog_memoized (insn
))
7780 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7782 adjust_len
= get_attr_adjust_len (insn
);
7784 if (adjust_len
== ADJUST_LEN_NO
)
7786 /* Nothing to adjust: The length from attribute "length" is fine.
7787 This is the default. */
7792 /* Extract insn's operands. */
7794 extract_constrain_insn_cached (insn
);
7796 /* Dispatch to right function. */
7800 case ADJUST_LEN_RELOAD_IN16
: output_reload_inhi (op
, op
[2], &len
); break;
7801 case ADJUST_LEN_RELOAD_IN24
: avr_out_reload_inpsi (op
, op
[2], &len
); break;
7802 case ADJUST_LEN_RELOAD_IN32
: output_reload_insisf (op
, op
[2], &len
); break;
7804 case ADJUST_LEN_OUT_BITOP
: avr_out_bitop (insn
, op
, &len
); break;
7806 case ADJUST_LEN_PLUS
: avr_out_plus (insn
, op
, &len
); break;
7807 case ADJUST_LEN_ADDTO_SP
: avr_out_addto_sp (op
, &len
); break;
7809 case ADJUST_LEN_MOV8
: output_movqi (insn
, op
, &len
); break;
7810 case ADJUST_LEN_MOV16
: output_movhi (insn
, op
, &len
); break;
7811 case ADJUST_LEN_MOV24
: avr_out_movpsi (insn
, op
, &len
); break;
7812 case ADJUST_LEN_MOV32
: output_movsisf (insn
, op
, &len
); break;
7813 case ADJUST_LEN_MOVMEM
: avr_out_movmem (insn
, op
, &len
); break;
7814 case ADJUST_LEN_XLOAD
: avr_out_xload (insn
, op
, &len
); break;
7815 case ADJUST_LEN_LPM
: avr_out_lpm (insn
, op
, &len
); break;
7817 case ADJUST_LEN_SFRACT
: avr_out_fract (insn
, op
, true, &len
); break;
7818 case ADJUST_LEN_UFRACT
: avr_out_fract (insn
, op
, false, &len
); break;
7819 case ADJUST_LEN_ROUND
: avr_out_round (insn
, op
, &len
); break;
7821 case ADJUST_LEN_TSTHI
: avr_out_tsthi (insn
, op
, &len
); break;
7822 case ADJUST_LEN_TSTPSI
: avr_out_tstpsi (insn
, op
, &len
); break;
7823 case ADJUST_LEN_TSTSI
: avr_out_tstsi (insn
, op
, &len
); break;
7824 case ADJUST_LEN_COMPARE
: avr_out_compare (insn
, op
, &len
); break;
7825 case ADJUST_LEN_COMPARE64
: avr_out_compare64 (insn
, op
, &len
); break;
7827 case ADJUST_LEN_LSHRQI
: lshrqi3_out (insn
, op
, &len
); break;
7828 case ADJUST_LEN_LSHRHI
: lshrhi3_out (insn
, op
, &len
); break;
7829 case ADJUST_LEN_LSHRSI
: lshrsi3_out (insn
, op
, &len
); break;
7831 case ADJUST_LEN_ASHRQI
: ashrqi3_out (insn
, op
, &len
); break;
7832 case ADJUST_LEN_ASHRHI
: ashrhi3_out (insn
, op
, &len
); break;
7833 case ADJUST_LEN_ASHRSI
: ashrsi3_out (insn
, op
, &len
); break;
7835 case ADJUST_LEN_ASHLQI
: ashlqi3_out (insn
, op
, &len
); break;
7836 case ADJUST_LEN_ASHLHI
: ashlhi3_out (insn
, op
, &len
); break;
7837 case ADJUST_LEN_ASHLSI
: ashlsi3_out (insn
, op
, &len
); break;
7839 case ADJUST_LEN_ASHLPSI
: avr_out_ashlpsi3 (insn
, op
, &len
); break;
7840 case ADJUST_LEN_ASHRPSI
: avr_out_ashrpsi3 (insn
, op
, &len
); break;
7841 case ADJUST_LEN_LSHRPSI
: avr_out_lshrpsi3 (insn
, op
, &len
); break;
7843 case ADJUST_LEN_CALL
: len
= AVR_HAVE_JMP_CALL
? 2 : 1; break;
7845 case ADJUST_LEN_INSERT_BITS
: avr_out_insert_bits (op
, &len
); break;
7854 /* Return nonzero if register REG dead after INSN. */
7857 reg_unused_after (rtx insn
, rtx reg
)
7859 return (dead_or_set_p (insn
, reg
)
7860 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
7863 /* Return nonzero if REG is not used after INSN.
7864 We assume REG is a reload reg, and therefore does
7865 not live past labels. It may live past calls or jumps though. */
7868 _reg_unused_after (rtx insn
, rtx reg
)
7873 /* If the reg is set by this instruction, then it is safe for our
7874 case. Disregard the case where this is a store to memory, since
7875 we are checking a register used in the store address. */
7876 set
= single_set (insn
);
7877 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
7878 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7881 while ((insn
= NEXT_INSN (insn
)))
7884 code
= GET_CODE (insn
);
7887 /* If this is a label that existed before reload, then the register
7888 if dead here. However, if this is a label added by reorg, then
7889 the register may still be live here. We can't tell the difference,
7890 so we just ignore labels completely. */
7891 if (code
== CODE_LABEL
)
7899 if (code
== JUMP_INSN
)
7902 /* If this is a sequence, we must handle them all at once.
7903 We could have for instance a call that sets the target register,
7904 and an insn in a delay slot that uses the register. In this case,
7905 we must return 0. */
7906 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
7911 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
7913 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
7914 rtx set
= single_set (this_insn
);
7916 if (CALL_P (this_insn
))
7918 else if (JUMP_P (this_insn
))
7920 if (INSN_ANNULLED_BRANCH_P (this_insn
))
7925 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7927 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7929 if (GET_CODE (SET_DEST (set
)) != MEM
)
7935 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
7940 else if (code
== JUMP_INSN
)
7944 if (code
== CALL_INSN
)
7947 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
7948 if (GET_CODE (XEXP (tem
, 0)) == USE
7949 && REG_P (XEXP (XEXP (tem
, 0), 0))
7950 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
7952 if (call_used_regs
[REGNO (reg
)])
7956 set
= single_set (insn
);
7958 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
7960 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
7961 return GET_CODE (SET_DEST (set
)) != MEM
;
7962 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
7969 /* Implement `TARGET_ASM_INTEGER'. */
7970 /* Target hook for assembling integer objects. The AVR version needs
7971 special handling for references to certain labels. */
7974 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
7976 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
7977 && text_segment_operand (x
, VOIDmode
))
7979 fputs ("\t.word\tgs(", asm_out_file
);
7980 output_addr_const (asm_out_file
, x
);
7981 fputs (")\n", asm_out_file
);
7985 else if (GET_MODE (x
) == PSImode
)
7987 /* This needs binutils 2.23+, see PR binutils/13503 */
7989 fputs ("\t.byte\tlo8(", asm_out_file
);
7990 output_addr_const (asm_out_file
, x
);
7991 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7993 fputs ("\t.byte\thi8(", asm_out_file
);
7994 output_addr_const (asm_out_file
, x
);
7995 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
7997 fputs ("\t.byte\thh8(", asm_out_file
);
7998 output_addr_const (asm_out_file
, x
);
7999 fputs (")" ASM_COMMENT_START
"need binutils PR13503\n", asm_out_file
);
8003 else if (CONST_FIXED_P (x
))
8007 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8009 for (n
= 0; n
< size
; n
++)
8011 rtx xn
= simplify_gen_subreg (QImode
, x
, GET_MODE (x
), n
);
8012 default_assemble_integer (xn
, 1, aligned_p
);
8018 return default_assemble_integer (x
, size
, aligned_p
);
8022 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8023 /* Return value is nonzero if pseudos that have been
8024 assigned to registers of class CLASS would likely be spilled
8025 because registers of CLASS are needed for spill registers. */
8028 avr_class_likely_spilled_p (reg_class_t c
)
8030 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
/* Valid attributes:
   progmem   - Put data into program (flash) memory.
   signal    - Make the function a hardware interrupt handler.
               After the function prologue, interrupts remain disabled.
   interrupt - Make the function a hardware interrupt handler.  Before the
               function prologue, interrupts are enabled by means of SEI.
   naked     - Don't generate function prologue/epilogue and RET
               instruction.  */
8043 /* Handle a "progmem" attribute; arguments as in
8044 struct attribute_spec.handler. */
8047 avr_handle_progmem_attribute (tree
*node
, tree name
,
8048 tree args ATTRIBUTE_UNUSED
,
8049 int flags ATTRIBUTE_UNUSED
,
8054 if (TREE_CODE (*node
) == TYPE_DECL
)
8056 /* This is really a decl attribute, not a type attribute,
8057 but try to handle it for GCC 3.0 backwards compatibility. */
8059 tree type
= TREE_TYPE (*node
);
8060 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
8061 tree newtype
= build_type_attribute_variant (type
, attr
);
8063 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
8064 TREE_TYPE (*node
) = newtype
;
8065 *no_add_attrs
= true;
8067 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
8069 *no_add_attrs
= false;
8073 warning (OPT_Wattributes
, "%qE attribute ignored",
8075 *no_add_attrs
= true;
8082 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8083 struct attribute_spec.handler. */
8086 avr_handle_fndecl_attribute (tree
*node
, tree name
,
8087 tree args ATTRIBUTE_UNUSED
,
8088 int flags ATTRIBUTE_UNUSED
,
8091 if (TREE_CODE (*node
) != FUNCTION_DECL
)
8093 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8095 *no_add_attrs
= true;
8102 avr_handle_fntype_attribute (tree
*node
, tree name
,
8103 tree args ATTRIBUTE_UNUSED
,
8104 int flags ATTRIBUTE_UNUSED
,
8107 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
8109 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
8111 *no_add_attrs
= true;
8118 /* AVR attributes. */
8119 static const struct attribute_spec
8120 avr_attribute_table
[] =
8122 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8123 affects_type_identity } */
8124 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
8126 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
8128 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
8130 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8132 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8134 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
8136 { NULL
, 0, 0, false, false, false, NULL
, false }
8140 /* Look if DECL shall be placed in program memory space by
8141 means of attribute `progmem' or some address-space qualifier.
8142 Return non-zero if DECL is data that must end up in Flash and
8143 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8145 Return 2 if DECL is located in 24-bit flash address-space
8146 Return 1 if DECL is located in 16-bit flash address-space
8147 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8148 Return 0 otherwise */
8151 avr_progmem_p (tree decl
, tree attributes
)
8155 if (TREE_CODE (decl
) != VAR_DECL
)
8158 if (avr_decl_memx_p (decl
))
8161 if (avr_decl_flash_p (decl
))
8165 != lookup_attribute ("progmem", attributes
))
8172 while (TREE_CODE (a
) == ARRAY_TYPE
);
8174 if (a
== error_mark_node
)
8177 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
8184 /* Scan type TYP for pointer references to address space ASn.
8185 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8186 the AS are also declared to be CONST.
8187 Otherwise, return the respective address space, i.e. a value != 0. */
8190 avr_nonconst_pointer_addrspace (tree typ
)
8192 while (ARRAY_TYPE
== TREE_CODE (typ
))
8193 typ
= TREE_TYPE (typ
);
8195 if (POINTER_TYPE_P (typ
))
8198 tree target
= TREE_TYPE (typ
);
8200 /* Pointer to function: Test the function's return type. */
8202 if (FUNCTION_TYPE
== TREE_CODE (target
))
8203 return avr_nonconst_pointer_addrspace (TREE_TYPE (target
));
8205 /* "Ordinary" pointers... */
8207 while (TREE_CODE (target
) == ARRAY_TYPE
)
8208 target
= TREE_TYPE (target
);
8210 /* Pointers to non-generic address space must be const.
8211 Refuse address spaces outside the device's flash. */
8213 as
= TYPE_ADDR_SPACE (target
);
8215 if (!ADDR_SPACE_GENERIC_P (as
)
8216 && (!TYPE_READONLY (target
)
8217 || avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
))
8222 /* Scan pointer's target type. */
8224 return avr_nonconst_pointer_addrspace (target
);
8227 return ADDR_SPACE_GENERIC
;
8231 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8232 go along with CONST qualifier. Writing to these address spaces should
8233 be detected and complained about as early as possible. */
8236 avr_pgm_check_var_decl (tree node
)
8238 const char *reason
= NULL
;
8240 addr_space_t as
= ADDR_SPACE_GENERIC
;
8242 gcc_assert (as
== 0);
8244 if (avr_log
.progmem
)
8245 avr_edump ("%?: %t\n", node
);
8247 switch (TREE_CODE (node
))
8253 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8254 reason
= "variable";
8258 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8259 reason
= "function parameter";
8263 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (node
)), as
)
8264 reason
= "structure field";
8268 if (as
= avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node
))),
8270 reason
= "return type of function";
8274 if (as
= avr_nonconst_pointer_addrspace (node
), as
)
8281 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
8284 error ("%qT uses address space %qs beyond flash of %qs",
8285 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
8287 error ("%s %q+D uses address space %qs beyond flash of %qs",
8288 reason
, node
, avr_addrspace
[as
].name
,
8289 avr_current_device
->name
);
8294 error ("pointer targeting address space %qs must be const in %qT",
8295 avr_addrspace
[as
].name
, node
);
8297 error ("pointer targeting address space %qs must be const"
8299 avr_addrspace
[as
].name
, reason
, node
);
8303 return reason
== NULL
;
8307 /* Add the section attribute if the variable is in progmem. */
8310 avr_insert_attributes (tree node
, tree
*attributes
)
8312 avr_pgm_check_var_decl (node
);
8314 if (TREE_CODE (node
) == VAR_DECL
8315 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
8316 && avr_progmem_p (node
, *attributes
))
8321 /* For C++, we have to peel arrays in order to get correct
8322 determination of readonlyness. */
8325 node0
= TREE_TYPE (node0
);
8326 while (TREE_CODE (node0
) == ARRAY_TYPE
);
8328 if (error_mark_node
== node0
)
8331 as
= TYPE_ADDR_SPACE (TREE_TYPE (node
));
8333 if (avr_addrspace
[as
].segment
>= avr_current_device
->n_flash
)
8335 error ("variable %q+D located in address space %qs"
8336 " beyond flash of %qs",
8337 node
, avr_addrspace
[as
].name
, avr_current_device
->name
);
8340 if (!TYPE_READONLY (node0
)
8341 && !TREE_READONLY (node
))
8343 const char *reason
= "__attribute__((progmem))";
8345 if (!ADDR_SPACE_GENERIC_P (as
))
8346 reason
= avr_addrspace
[as
].name
;
8348 if (avr_log
.progmem
)
8349 avr_edump ("\n%?: %t\n%t\n", node
, node0
);
8351 error ("variable %q+D must be const in order to be put into"
8352 " read-only section by means of %qs", node
, reason
);
8358 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8359 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8360 /* Track need of __do_clear_bss. */
8363 avr_asm_output_aligned_decl_common (FILE * stream
,
8364 const_tree decl ATTRIBUTE_UNUSED
,
8366 unsigned HOST_WIDE_INT size
,
8367 unsigned int align
, bool local_p
)
8369 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8370 There is no need to trigger __do_clear_bss code for them. */
8372 if (!STR_PREFIX_P (name
, "__gnu_lto"))
8373 avr_need_clear_bss_p
= true;
8376 ASM_OUTPUT_ALIGNED_LOCAL (stream
, name
, size
, align
);
8378 ASM_OUTPUT_ALIGNED_COMMON (stream
, name
, size
, align
);
8382 /* Unnamed section callback for data_section
8383 to track need of __do_copy_data. */
8386 avr_output_data_section_asm_op (const void *data
)
8388 avr_need_copy_data_p
= true;
8390 /* Dispatch to default. */
8391 output_section_asm_op (data
);
8395 /* Unnamed section callback for bss_section
8396 to track need of __do_clear_bss. */
8399 avr_output_bss_section_asm_op (const void *data
)
8401 avr_need_clear_bss_p
= true;
8403 /* Dispatch to default. */
8404 output_section_asm_op (data
);
8408 /* Unnamed section callback for progmem*.data sections. */
8411 avr_output_progmem_section_asm_op (const void *data
)
8413 fprintf (asm_out_file
, "\t.section\t%s,\"a\",@progbits\n",
8414 (const char*) data
);
8418 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8421 avr_asm_init_sections (void)
8423 /* Set up a section for jump tables. Alignment is handled by
8424 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8426 if (AVR_HAVE_JMP_CALL
)
8428 progmem_swtable_section
8429 = get_unnamed_section (0, output_section_asm_op
,
8430 "\t.section\t.progmem.gcc_sw_table"
8431 ",\"a\",@progbits");
8435 progmem_swtable_section
8436 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
8437 "\t.section\t.progmem.gcc_sw_table"
8438 ",\"ax\",@progbits");
8441 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8442 resp. `avr_need_copy_data_p'. */
8444 readonly_data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
8445 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
8446 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
8450 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8453 avr_asm_function_rodata_section (tree decl
)
8455 /* If a function is unused and optimized out by -ffunction-sections
8456 and --gc-sections, ensure that the same will happen for its jump
8457 tables by putting them into individual sections. */
8462 /* Get the frodata section from the default function in varasm.c
8463 but treat function-associated data-like jump tables as code
8464 rather than as user defined data. AVR has no constant pools. */
8466 int fdata
= flag_data_sections
;
8468 flag_data_sections
= flag_function_sections
;
8469 frodata
= default_function_rodata_section (decl
);
8470 flag_data_sections
= fdata
;
8471 flags
= frodata
->common
.flags
;
8474 if (frodata
!= readonly_data_section
8475 && flags
& SECTION_NAMED
)
8477 /* Adjust section flags and replace section name prefix. */
8481 static const char* const prefix
[] =
8483 ".rodata", ".progmem.gcc_sw_table",
8484 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8487 for (i
= 0; i
< sizeof (prefix
) / sizeof (*prefix
); i
+= 2)
8489 const char * old_prefix
= prefix
[i
];
8490 const char * new_prefix
= prefix
[i
+1];
8491 const char * name
= frodata
->named
.name
;
8493 if (STR_PREFIX_P (name
, old_prefix
))
8495 const char *rname
= ACONCAT ((new_prefix
,
8496 name
+ strlen (old_prefix
), NULL
));
8497 flags
&= ~SECTION_CODE
;
8498 flags
|= AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
;
8500 return get_section (rname
, flags
, frodata
->named
.decl
);
8505 return progmem_swtable_section
;
8509 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8510 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8513 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
8515 if (flags
& AVR_SECTION_PROGMEM
)
8517 addr_space_t as
= (flags
& AVR_SECTION_PROGMEM
) / SECTION_MACH_DEP
;
8518 const char *old_prefix
= ".rodata";
8519 const char *new_prefix
= avr_addrspace
[as
].section_name
;
8521 if (STR_PREFIX_P (name
, old_prefix
))
8523 const char *sname
= ACONCAT ((new_prefix
,
8524 name
+ strlen (old_prefix
), NULL
));
8525 default_elf_asm_named_section (sname
, flags
, decl
);
8529 default_elf_asm_named_section (new_prefix
, flags
, decl
);
8533 if (!avr_need_copy_data_p
)
8534 avr_need_copy_data_p
= (STR_PREFIX_P (name
, ".data")
8535 || STR_PREFIX_P (name
, ".rodata")
8536 || STR_PREFIX_P (name
, ".gnu.linkonce.d"));
8538 if (!avr_need_clear_bss_p
)
8539 avr_need_clear_bss_p
= STR_PREFIX_P (name
, ".bss");
8541 default_elf_asm_named_section (name
, flags
, decl
);
8545 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8548 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
8550 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
8552 if (STR_PREFIX_P (name
, ".noinit"))
8554 if (decl
&& TREE_CODE (decl
) == VAR_DECL
8555 && DECL_INITIAL (decl
) == NULL_TREE
)
8556 flags
|= SECTION_BSS
; /* @nobits */
8558 warning (0, "only uninitialized variables can be placed in the "
8562 if (decl
&& DECL_P (decl
)
8563 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8565 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8567 /* Attribute progmem puts data in generic address space.
8568 Set section flags as if it was in __flash to get the right
8569 section prefix in the remainder. */
8571 if (ADDR_SPACE_GENERIC_P (as
))
8572 as
= ADDR_SPACE_FLASH
;
8574 flags
|= as
* SECTION_MACH_DEP
;
8575 flags
&= ~SECTION_WRITE
;
8576 flags
&= ~SECTION_BSS
;
8583 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8586 avr_encode_section_info (tree decl
, rtx rtl
, int new_decl_p
)
8588 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8589 readily available, see PR34734. So we postpone the warning
8590 about uninitialized data in program memory section until here. */
8593 && decl
&& DECL_P (decl
)
8594 && NULL_TREE
== DECL_INITIAL (decl
)
8595 && !DECL_EXTERNAL (decl
)
8596 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8598 warning (OPT_Wuninitialized
,
8599 "uninitialized variable %q+D put into "
8600 "program memory area", decl
);
8603 default_encode_section_info (decl
, rtl
, new_decl_p
);
8605 if (decl
&& DECL_P (decl
)
8606 && TREE_CODE (decl
) != FUNCTION_DECL
8608 && SYMBOL_REF
== GET_CODE (XEXP (rtl
, 0)))
8610 rtx sym
= XEXP (rtl
, 0);
8611 tree type
= TREE_TYPE (decl
);
8612 if (type
== error_mark_node
)
8614 addr_space_t as
= TYPE_ADDR_SPACE (type
);
8616 /* PSTR strings are in generic space but located in flash:
8617 patch address space. */
8619 if (-1 == avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8620 as
= ADDR_SPACE_FLASH
;
8622 AVR_SYMBOL_SET_ADDR_SPACE (sym
, as
);
8627 /* Implement `TARGET_ASM_SELECT_SECTION' */
8630 avr_asm_select_section (tree decl
, int reloc
, unsigned HOST_WIDE_INT align
)
8632 section
* sect
= default_elf_select_section (decl
, reloc
, align
);
8634 if (decl
&& DECL_P (decl
)
8635 && avr_progmem_p (decl
, DECL_ATTRIBUTES (decl
)))
8637 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (decl
));
8639 /* __progmem__ goes in generic space but shall be allocated to
8642 if (ADDR_SPACE_GENERIC_P (as
))
8643 as
= ADDR_SPACE_FLASH
;
8645 if (sect
->common
.flags
& SECTION_NAMED
)
8647 const char * name
= sect
->named
.name
;
8648 const char * old_prefix
= ".rodata";
8649 const char * new_prefix
= avr_addrspace
[as
].section_name
;
8651 if (STR_PREFIX_P (name
, old_prefix
))
8653 const char *sname
= ACONCAT ((new_prefix
,
8654 name
+ strlen (old_prefix
), NULL
));
8655 return get_section (sname
, sect
->common
.flags
, sect
->named
.decl
);
8659 if (!progmem_section
[as
])
8662 = get_unnamed_section (0, avr_output_progmem_section_asm_op
,
8663 avr_addrspace
[as
].section_name
);
8666 return progmem_section
[as
];
8672 /* Implement `TARGET_ASM_FILE_START'. */
8673 /* Outputs some text at the start of each assembler file. */
8676 avr_file_start (void)
8678 int sfr_offset
= avr_current_arch
->sfr_offset
;
8680 if (avr_current_arch
->asm_only
)
8681 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
8683 default_file_start ();
8685 /* Print I/O addresses of some SFRs used with IN and OUT. */
8688 fprintf (asm_out_file
, "__SP_H__ = 0x%02x\n", avr_addr
.sp_h
- sfr_offset
);
8690 fprintf (asm_out_file
, "__SP_L__ = 0x%02x\n", avr_addr
.sp_l
- sfr_offset
);
8691 fprintf (asm_out_file
, "__SREG__ = 0x%02x\n", avr_addr
.sreg
- sfr_offset
);
8693 fprintf (asm_out_file
, "__RAMPZ__ = 0x%02x\n", avr_addr
.rampz
- sfr_offset
);
8695 fprintf (asm_out_file
, "__RAMPY__ = 0x%02x\n", avr_addr
.rampy
- sfr_offset
);
8697 fprintf (asm_out_file
, "__RAMPX__ = 0x%02x\n", avr_addr
.rampx
- sfr_offset
);
8699 fprintf (asm_out_file
, "__RAMPD__ = 0x%02x\n", avr_addr
.rampd
- sfr_offset
);
8701 fprintf (asm_out_file
, "__CCP__ = 0x%02x\n", avr_addr
.ccp
- sfr_offset
);
8702 fprintf (asm_out_file
, "__tmp_reg__ = %d\n", TMP_REGNO
);
8703 fprintf (asm_out_file
, "__zero_reg__ = %d\n", ZERO_REGNO
);
8707 /* Implement `TARGET_ASM_FILE_END'. */
8708 /* Outputs to the stdio stream FILE some
8709 appropriate text to go at the end of an assembler file. */
8714 /* Output these only if there is anything in the
8715 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8716 input section(s) - some code size can be saved by not
8717 linking in the initialization code from libgcc if resp.
8718 sections are empty, see PR18145. */
8720 if (avr_need_copy_data_p
)
8721 fputs (".global __do_copy_data\n", asm_out_file
);
8723 if (avr_need_clear_bss_p
)
8724 fputs (".global __do_clear_bss\n", asm_out_file
);
8728 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8729 /* Choose the order in which to allocate hard registers for
8730 pseudo-registers local to a basic block.
8732 Store the desired register order in the array `reg_alloc_order'.
8733 Element 0 should be the register to allocate first; element 1, the
8734 next register; and so on. */
8737 avr_adjust_reg_alloc_order (void)
8740 static const int order_0
[] =
8743 18, 19, 20, 21, 22, 23,
8746 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8750 static const int order_1
[] =
8752 18, 19, 20, 21, 22, 23, 24, 25,
8755 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8759 static const int order_2
[] =
8761 25, 24, 23, 22, 21, 20, 19, 18,
8764 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8769 const int *order
= (TARGET_ORDER_1
? order_1
:
8770 TARGET_ORDER_2
? order_2
:
8772 for (i
= 0; i
< ARRAY_SIZE (order_0
); ++i
)
8773 reg_alloc_order
[i
] = order
[i
];
8777 /* Implement `TARGET_REGISTER_MOVE_COST' */
8780 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
8781 reg_class_t from
, reg_class_t to
)
8783 return (from
== STACK_REG
? 6
8784 : to
== STACK_REG
? 12
8789 /* Implement `TARGET_MEMORY_MOVE_COST' */
8792 avr_memory_move_cost (enum machine_mode mode
,
8793 reg_class_t rclass ATTRIBUTE_UNUSED
,
8794 bool in ATTRIBUTE_UNUSED
)
8796 return (mode
== QImode
? 2
8797 : mode
== HImode
? 4
8798 : mode
== SImode
? 8
8799 : mode
== SFmode
? 8
8804 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
8805 cost of an RTX operand given its context. X is the rtx of the
8806 operand, MODE is its mode, and OUTER is the rtx_code of this
8807 operand's parent operator. */
8810 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
8811 int opno
, bool speed
)
8813 enum rtx_code code
= GET_CODE (x
);
8825 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8832 avr_rtx_costs (x
, code
, outer
, opno
, &total
, speed
);
8836 /* Worker function for AVR backend's rtx_cost function.
8837 X is rtx expression whose cost is to be calculated.
8838 Return true if the complete cost has been computed.
8839 Return false if subexpressions should be scanned.
8840 In either case, *TOTAL contains the cost result. */
8843 avr_rtx_costs_1 (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
,
8844 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
8846 enum rtx_code code
= (enum rtx_code
) codearg
;
8847 enum machine_mode mode
= GET_MODE (x
);
8858 /* Immediate constants are as cheap as registers. */
8863 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8871 *total
= COSTS_N_INSNS (1);
8877 *total
= COSTS_N_INSNS (2 * GET_MODE_SIZE (mode
) - 1);
8883 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8891 *total
= COSTS_N_INSNS (1);
8897 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8901 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
8902 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8906 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
8907 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8908 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8912 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
8913 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
8914 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8922 && MULT
== GET_CODE (XEXP (x
, 0))
8923 && register_operand (XEXP (x
, 1), QImode
))
8926 *total
= COSTS_N_INSNS (speed
? 4 : 3);
8927 /* multiply-add with constant: will be split and load constant. */
8928 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8929 *total
= COSTS_N_INSNS (1) + *total
;
8932 *total
= COSTS_N_INSNS (1);
8933 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8934 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
8939 && (MULT
== GET_CODE (XEXP (x
, 0))
8940 || ASHIFT
== GET_CODE (XEXP (x
, 0)))
8941 && register_operand (XEXP (x
, 1), HImode
)
8942 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))
8943 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 0), 0))))
8946 *total
= COSTS_N_INSNS (speed
? 5 : 4);
8947 /* multiply-add with constant: will be split and load constant. */
8948 if (CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
8949 *total
= COSTS_N_INSNS (1) + *total
;
8952 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8954 *total
= COSTS_N_INSNS (2);
8955 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8958 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8959 *total
= COSTS_N_INSNS (1);
8961 *total
= COSTS_N_INSNS (2);
8965 if (!CONST_INT_P (XEXP (x
, 1)))
8967 *total
= COSTS_N_INSNS (3);
8968 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8971 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8972 *total
= COSTS_N_INSNS (2);
8974 *total
= COSTS_N_INSNS (3);
8978 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
8980 *total
= COSTS_N_INSNS (4);
8981 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
8984 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
8985 *total
= COSTS_N_INSNS (1);
8987 *total
= COSTS_N_INSNS (4);
8993 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
8999 && register_operand (XEXP (x
, 0), QImode
)
9000 && MULT
== GET_CODE (XEXP (x
, 1)))
9003 *total
= COSTS_N_INSNS (speed
? 4 : 3);
9004 /* multiply-sub with constant: will be split and load constant. */
9005 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
9006 *total
= COSTS_N_INSNS (1) + *total
;
9011 && register_operand (XEXP (x
, 0), HImode
)
9012 && (MULT
== GET_CODE (XEXP (x
, 1))
9013 || ASHIFT
== GET_CODE (XEXP (x
, 1)))
9014 && (ZERO_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))
9015 || SIGN_EXTEND
== GET_CODE (XEXP (XEXP (x
, 1), 0))))
9018 *total
= COSTS_N_INSNS (speed
? 5 : 4);
9019 /* multiply-sub with constant: will be split and load constant. */
9020 if (CONST_INT_P (XEXP (XEXP (x
, 1), 1)))
9021 *total
= COSTS_N_INSNS (1) + *total
;
9027 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9028 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9029 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9030 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9034 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9035 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9036 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9044 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
9046 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9054 rtx op0
= XEXP (x
, 0);
9055 rtx op1
= XEXP (x
, 1);
9056 enum rtx_code code0
= GET_CODE (op0
);
9057 enum rtx_code code1
= GET_CODE (op1
);
9058 bool ex0
= SIGN_EXTEND
== code0
|| ZERO_EXTEND
== code0
;
9059 bool ex1
= SIGN_EXTEND
== code1
|| ZERO_EXTEND
== code1
;
9062 && (u8_operand (op1
, HImode
)
9063 || s8_operand (op1
, HImode
)))
9065 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
9069 && register_operand (op1
, HImode
))
9071 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9074 else if (ex0
|| ex1
)
9076 *total
= COSTS_N_INSNS (!speed
? 3 : 5);
9079 else if (register_operand (op0
, HImode
)
9080 && (u8_operand (op1
, HImode
)
9081 || s8_operand (op1
, HImode
)))
9083 *total
= COSTS_N_INSNS (!speed
? 6 : 9);
9087 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
9090 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9097 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9107 /* Add some additional costs besides CALL like moves etc. */
9109 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
9113 /* Just a rough estimate. Even with -O2 we don't want bulky
9114 code expanded inline. */
9116 *total
= COSTS_N_INSNS (25);
9122 *total
= COSTS_N_INSNS (300);
9124 /* Add some additional costs besides CALL like moves etc. */
9125 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 5 : 4);
9133 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9134 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9142 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
9144 *total
= COSTS_N_INSNS (15 * GET_MODE_SIZE (mode
));
9145 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9146 /* For div/mod with const-int divisor we have at least the cost of
9147 loading the divisor. */
9148 if (CONST_INT_P (XEXP (x
, 1)))
9149 *total
+= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
9150 /* Add some overall penaly for clobbering and moving around registers */
9151 *total
+= COSTS_N_INSNS (2);
9158 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
9159 *total
= COSTS_N_INSNS (1);
9164 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
9165 *total
= COSTS_N_INSNS (3);
9170 if (CONST_INT_P (XEXP (x
, 1)))
9171 switch (INTVAL (XEXP (x
, 1)))
9175 *total
= COSTS_N_INSNS (5);
9178 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
9186 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9193 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9195 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9196 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9201 val
= INTVAL (XEXP (x
, 1));
9203 *total
= COSTS_N_INSNS (3);
9204 else if (val
>= 0 && val
<= 7)
9205 *total
= COSTS_N_INSNS (val
);
9207 *total
= COSTS_N_INSNS (1);
9214 if (const_2_to_7_operand (XEXP (x
, 1), HImode
)
9215 && (SIGN_EXTEND
== GET_CODE (XEXP (x
, 0))
9216 || ZERO_EXTEND
== GET_CODE (XEXP (x
, 0))))
9218 *total
= COSTS_N_INSNS (!speed
? 4 : 6);
9223 if (const1_rtx
== (XEXP (x
, 1))
9224 && SIGN_EXTEND
== GET_CODE (XEXP (x
, 0)))
9226 *total
= COSTS_N_INSNS (2);
9230 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9232 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9233 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9237 switch (INTVAL (XEXP (x
, 1)))
9244 *total
= COSTS_N_INSNS (2);
9247 *total
= COSTS_N_INSNS (3);
9253 *total
= COSTS_N_INSNS (4);
9258 *total
= COSTS_N_INSNS (5);
9261 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9264 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9267 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
9270 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9271 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9277 if (!CONST_INT_P (XEXP (x
, 1)))
9279 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9282 switch (INTVAL (XEXP (x
, 1)))
9290 *total
= COSTS_N_INSNS (3);
9293 *total
= COSTS_N_INSNS (5);
9296 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9302 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9304 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9305 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9309 switch (INTVAL (XEXP (x
, 1)))
9315 *total
= COSTS_N_INSNS (3);
9320 *total
= COSTS_N_INSNS (4);
9323 *total
= COSTS_N_INSNS (6);
9326 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9329 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9330 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9338 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9345 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9347 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9348 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9353 val
= INTVAL (XEXP (x
, 1));
9355 *total
= COSTS_N_INSNS (4);
9357 *total
= COSTS_N_INSNS (2);
9358 else if (val
>= 0 && val
<= 7)
9359 *total
= COSTS_N_INSNS (val
);
9361 *total
= COSTS_N_INSNS (1);
9366 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9368 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9369 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9373 switch (INTVAL (XEXP (x
, 1)))
9379 *total
= COSTS_N_INSNS (2);
9382 *total
= COSTS_N_INSNS (3);
9388 *total
= COSTS_N_INSNS (4);
9392 *total
= COSTS_N_INSNS (5);
9395 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9398 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9402 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
9405 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9406 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9412 if (!CONST_INT_P (XEXP (x
, 1)))
9414 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9417 switch (INTVAL (XEXP (x
, 1)))
9423 *total
= COSTS_N_INSNS (3);
9427 *total
= COSTS_N_INSNS (5);
9430 *total
= COSTS_N_INSNS (4);
9433 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9439 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9441 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9442 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9446 switch (INTVAL (XEXP (x
, 1)))
9452 *total
= COSTS_N_INSNS (4);
9457 *total
= COSTS_N_INSNS (6);
9460 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9463 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
9466 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9467 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9475 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9482 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9484 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
9485 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9490 val
= INTVAL (XEXP (x
, 1));
9492 *total
= COSTS_N_INSNS (3);
9493 else if (val
>= 0 && val
<= 7)
9494 *total
= COSTS_N_INSNS (val
);
9496 *total
= COSTS_N_INSNS (1);
9501 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9503 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9504 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9508 switch (INTVAL (XEXP (x
, 1)))
9515 *total
= COSTS_N_INSNS (2);
9518 *total
= COSTS_N_INSNS (3);
9523 *total
= COSTS_N_INSNS (4);
9527 *total
= COSTS_N_INSNS (5);
9533 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
9536 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
9540 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
9543 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
9544 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9550 if (!CONST_INT_P (XEXP (x
, 1)))
9552 *total
= COSTS_N_INSNS (!speed
? 6 : 73);
9555 switch (INTVAL (XEXP (x
, 1)))
9563 *total
= COSTS_N_INSNS (3);
9566 *total
= COSTS_N_INSNS (5);
9569 *total
= COSTS_N_INSNS (!speed
? 5 : 3 * INTVAL (XEXP (x
, 1)));
9575 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9577 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9578 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9582 switch (INTVAL (XEXP (x
, 1)))
9588 *total
= COSTS_N_INSNS (4);
9591 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
9596 *total
= COSTS_N_INSNS (4);
9599 *total
= COSTS_N_INSNS (6);
9602 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
9603 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1,
9611 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9615 switch (GET_MODE (XEXP (x
, 0)))
9618 *total
= COSTS_N_INSNS (1);
9619 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9620 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9624 *total
= COSTS_N_INSNS (2);
9625 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9626 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9627 else if (INTVAL (XEXP (x
, 1)) != 0)
9628 *total
+= COSTS_N_INSNS (1);
9632 *total
= COSTS_N_INSNS (3);
9633 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) != 0)
9634 *total
+= COSTS_N_INSNS (2);
9638 *total
= COSTS_N_INSNS (4);
9639 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9640 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, 1, speed
);
9641 else if (INTVAL (XEXP (x
, 1)) != 0)
9642 *total
+= COSTS_N_INSNS (3);
9648 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, 0, speed
);
9653 && LSHIFTRT
== GET_CODE (XEXP (x
, 0))
9654 && MULT
== GET_CODE (XEXP (XEXP (x
, 0), 0))
9655 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
9657 if (QImode
== mode
|| HImode
== mode
)
9659 *total
= COSTS_N_INSNS (2);
9672 /* Implement `TARGET_RTX_COSTS'. */
9675 avr_rtx_costs (rtx x
, int codearg
, int outer_code
,
9676 int opno
, int *total
, bool speed
)
9678 bool done
= avr_rtx_costs_1 (x
, codearg
, outer_code
,
9679 opno
, total
, speed
);
9681 if (avr_log
.rtx_costs
)
9683 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9684 done
, speed
? "speed" : "size", *total
, outer_code
, x
);
9691 /* Implement `TARGET_ADDRESS_COST'. */
9694 avr_address_cost (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
,
9695 addr_space_t as ATTRIBUTE_UNUSED
,
9696 bool speed ATTRIBUTE_UNUSED
)
9700 if (GET_CODE (x
) == PLUS
9701 && CONST_INT_P (XEXP (x
, 1))
9702 && (REG_P (XEXP (x
, 0))
9703 || GET_CODE (XEXP (x
, 0)) == SUBREG
))
9705 if (INTVAL (XEXP (x
, 1)) >= 61)
9708 else if (CONSTANT_ADDRESS_P (x
))
9711 && io_address_operand (x
, QImode
))
9715 if (avr_log
.address_cost
)
9716 avr_edump ("\n%?: %d = %r\n", cost
, x
);
9721 /* Test for extra memory constraint 'Q'.
9722 It's a memory address based on Y or Z pointer with valid displacement. */
9725 extra_constraint_Q (rtx x
)
9729 if (GET_CODE (XEXP (x
,0)) == PLUS
9730 && REG_P (XEXP (XEXP (x
,0), 0))
9731 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
9732 && (INTVAL (XEXP (XEXP (x
,0), 1))
9733 <= MAX_LD_OFFSET (GET_MODE (x
))))
9735 rtx xx
= XEXP (XEXP (x
,0), 0);
9736 int regno
= REGNO (xx
);
9738 ok
= (/* allocate pseudos */
9739 regno
>= FIRST_PSEUDO_REGISTER
9740 /* strictly check */
9741 || regno
== REG_Z
|| regno
== REG_Y
9742 /* XXX frame & arg pointer checks */
9743 || xx
== frame_pointer_rtx
9744 || xx
== arg_pointer_rtx
);
9746 if (avr_log
.constraints
)
9747 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9748 ok
, reload_completed
, reload_in_progress
, x
);
9754 /* Convert condition code CONDITION to the valid AVR condition code. */
9757 avr_normalize_condition (RTX_CODE condition
)
9774 /* Helper function for `avr_reorg'. */
9777 avr_compare_pattern (rtx insn
)
9779 rtx pattern
= single_set (insn
);
9782 && NONJUMP_INSN_P (insn
)
9783 && SET_DEST (pattern
) == cc0_rtx
9784 && GET_CODE (SET_SRC (pattern
)) == COMPARE
)
9786 enum machine_mode mode0
= GET_MODE (XEXP (SET_SRC (pattern
), 0));
9787 enum machine_mode mode1
= GET_MODE (XEXP (SET_SRC (pattern
), 1));
9789 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9790 They must not be swapped, thus skip them. */
9792 if ((mode0
== VOIDmode
|| GET_MODE_SIZE (mode0
) <= 4)
9793 && (mode1
== VOIDmode
|| GET_MODE_SIZE (mode1
) <= 4))
9800 /* Helper function for `avr_reorg'. */
9802 /* Expansion of switch/case decision trees leads to code like
9804 cc0 = compare (Reg, Num)
9808 cc0 = compare (Reg, Num)
9812 The second comparison is superfluous and can be deleted.
9813 The second jump condition can be transformed from a
9814 "difficult" one to a "simple" one because "cc0 > 0" and
9815 "cc0 >= 0" will have the same effect here.
9817 This function relies on the way switch/case is being expaned
9818 as binary decision tree. For example code see PR 49903.
9820 Return TRUE if optimization performed.
9821 Return FALSE if nothing changed.
9823 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9825 We don't want to do this in text peephole because it is
9826 tedious to work out jump offsets there and the second comparison
9827 might have been transormed by `avr_reorg'.
9829 RTL peephole won't do because peephole2 does not scan across
9833 avr_reorg_remove_redundant_compare (rtx insn1
)
9835 rtx comp1
, ifelse1
, xcond1
, branch1
;
9836 rtx comp2
, ifelse2
, xcond2
, branch2
, insn2
;
9838 rtx jump
, target
, cond
;
9840 /* Look out for: compare1 - branch1 - compare2 - branch2 */
9842 branch1
= next_nonnote_nondebug_insn (insn1
);
9843 if (!branch1
|| !JUMP_P (branch1
))
9846 insn2
= next_nonnote_nondebug_insn (branch1
);
9847 if (!insn2
|| !avr_compare_pattern (insn2
))
9850 branch2
= next_nonnote_nondebug_insn (insn2
);
9851 if (!branch2
|| !JUMP_P (branch2
))
9854 comp1
= avr_compare_pattern (insn1
);
9855 comp2
= avr_compare_pattern (insn2
);
9856 xcond1
= single_set (branch1
);
9857 xcond2
= single_set (branch2
);
9859 if (!comp1
|| !comp2
9860 || !rtx_equal_p (comp1
, comp2
)
9861 || !xcond1
|| SET_DEST (xcond1
) != pc_rtx
9862 || !xcond2
|| SET_DEST (xcond2
) != pc_rtx
9863 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond1
))
9864 || IF_THEN_ELSE
!= GET_CODE (SET_SRC (xcond2
)))
9869 comp1
= SET_SRC (comp1
);
9870 ifelse1
= SET_SRC (xcond1
);
9871 ifelse2
= SET_SRC (xcond2
);
9873 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
9875 if (EQ
!= GET_CODE (XEXP (ifelse1
, 0))
9876 || !REG_P (XEXP (comp1
, 0))
9877 || !CONST_INT_P (XEXP (comp1
, 1))
9878 || XEXP (ifelse1
, 2) != pc_rtx
9879 || XEXP (ifelse2
, 2) != pc_rtx
9880 || LABEL_REF
!= GET_CODE (XEXP (ifelse1
, 1))
9881 || LABEL_REF
!= GET_CODE (XEXP (ifelse2
, 1))
9882 || !COMPARISON_P (XEXP (ifelse2
, 0))
9883 || cc0_rtx
!= XEXP (XEXP (ifelse1
, 0), 0)
9884 || cc0_rtx
!= XEXP (XEXP (ifelse2
, 0), 0)
9885 || const0_rtx
!= XEXP (XEXP (ifelse1
, 0), 1)
9886 || const0_rtx
!= XEXP (XEXP (ifelse2
, 0), 1))
9891 /* We filtered the insn sequence to look like
9897 (if_then_else (eq (cc0)
9906 (if_then_else (CODE (cc0)
9912 code
= GET_CODE (XEXP (ifelse2
, 0));
9914 /* Map GT/GTU to GE/GEU which is easier for AVR.
9915 The first two instructions compare/branch on EQ
9916 so we may replace the difficult
9918 if (x == VAL) goto L1;
9919 if (x > VAL) goto L2;
9923 if (x == VAL) goto L1;
9924 if (x >= VAL) goto L2;
9926 Similarly, replace LE/LEU by LT/LTU. */
9937 code
= avr_normalize_condition (code
);
9944 /* Wrap the branches into UNSPECs so they won't be changed or
9945 optimized in the remainder. */
9947 target
= XEXP (XEXP (ifelse1
, 1), 0);
9948 cond
= XEXP (ifelse1
, 0);
9949 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn1
);
9951 JUMP_LABEL (jump
) = JUMP_LABEL (branch1
);
9953 target
= XEXP (XEXP (ifelse2
, 1), 0);
9954 cond
= gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9955 jump
= emit_jump_insn_after (gen_branch_unspec (target
, cond
), insn2
);
9957 JUMP_LABEL (jump
) = JUMP_LABEL (branch2
);
9959 /* The comparisons in insn1 and insn2 are exactly the same;
9960 insn2 is superfluous so delete it. */
9962 delete_insn (insn2
);
9963 delete_insn (branch1
);
9964 delete_insn (branch2
);
9970 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9971 /* Optimize conditional jumps. */
9976 rtx insn
= get_insns();
9978 for (insn
= next_real_insn (insn
); insn
; insn
= next_real_insn (insn
))
9980 rtx pattern
= avr_compare_pattern (insn
);
9986 && avr_reorg_remove_redundant_compare (insn
))
9991 if (compare_diff_p (insn
))
9993 /* Now we work under compare insn with difficult branch. */
9995 rtx next
= next_real_insn (insn
);
9996 rtx pat
= PATTERN (next
);
9998 pattern
= SET_SRC (pattern
);
10000 if (true_regnum (XEXP (pattern
, 0)) >= 0
10001 && true_regnum (XEXP (pattern
, 1)) >= 0)
10003 rtx x
= XEXP (pattern
, 0);
10004 rtx src
= SET_SRC (pat
);
10005 rtx t
= XEXP (src
,0);
10006 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
10007 XEXP (pattern
, 0) = XEXP (pattern
, 1);
10008 XEXP (pattern
, 1) = x
;
10009 INSN_CODE (next
) = -1;
10011 else if (true_regnum (XEXP (pattern
, 0)) >= 0
10012 && XEXP (pattern
, 1) == const0_rtx
)
10014 /* This is a tst insn, we can reverse it. */
10015 rtx src
= SET_SRC (pat
);
10016 rtx t
= XEXP (src
,0);
10018 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
10019 XEXP (pattern
, 1) = XEXP (pattern
, 0);
10020 XEXP (pattern
, 0) = const0_rtx
;
10021 INSN_CODE (next
) = -1;
10022 INSN_CODE (insn
) = -1;
10024 else if (true_regnum (XEXP (pattern
, 0)) >= 0
10025 && CONST_INT_P (XEXP (pattern
, 1)))
10027 rtx x
= XEXP (pattern
, 1);
10028 rtx src
= SET_SRC (pat
);
10029 rtx t
= XEXP (src
,0);
10030 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
10032 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
10034 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
10035 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
10036 INSN_CODE (next
) = -1;
10037 INSN_CODE (insn
) = -1;
10044 /* Returns register number for function return value.*/
10046 static inline unsigned int
10047 avr_ret_register (void)
10053 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10056 avr_function_value_regno_p (const unsigned int regno
)
10058 return (regno
== avr_ret_register ());
10062 /* Implement `TARGET_LIBCALL_VALUE'. */
10063 /* Create an RTX representing the place where a
10064 library function returns a value of mode MODE. */
10067 avr_libcall_value (enum machine_mode mode
,
10068 const_rtx func ATTRIBUTE_UNUSED
)
10070 int offs
= GET_MODE_SIZE (mode
);
10073 offs
= (offs
+ 1) & ~1;
10075 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
10079 /* Implement `TARGET_FUNCTION_VALUE'. */
10080 /* Create an RTX representing the place where a
10081 function returns a value of data type VALTYPE. */
10084 avr_function_value (const_tree type
,
10085 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
10086 bool outgoing ATTRIBUTE_UNUSED
)
10090 if (TYPE_MODE (type
) != BLKmode
)
10091 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
10093 offs
= int_size_in_bytes (type
);
10096 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
10097 offs
= GET_MODE_SIZE (SImode
);
10098 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
10099 offs
= GET_MODE_SIZE (DImode
);
10101 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
10105 test_hard_reg_class (enum reg_class rclass
, rtx x
)
10107 int regno
= true_regnum (x
);
10111 if (TEST_HARD_REG_CLASS (rclass
, regno
))
10118 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
10119 and thus is suitable to be skipped by CPSE, SBRC, etc. */
10122 avr_2word_insn_p (rtx insn
)
10124 if (avr_current_device
->errata_skip
10126 || 2 != get_attr_length (insn
))
10131 switch (INSN_CODE (insn
))
10136 case CODE_FOR_movqi_insn
:
10137 case CODE_FOR_movuqq_insn
:
10138 case CODE_FOR_movqq_insn
:
10140 rtx set
= single_set (insn
);
10141 rtx src
= SET_SRC (set
);
10142 rtx dest
= SET_DEST (set
);
10144 /* Factor out LDS and STS from movqi_insn. */
10147 && (REG_P (src
) || src
== CONST0_RTX (GET_MODE (dest
))))
10149 return CONSTANT_ADDRESS_P (XEXP (dest
, 0));
10151 else if (REG_P (dest
)
10154 return CONSTANT_ADDRESS_P (XEXP (src
, 0));
10160 case CODE_FOR_call_insn
:
10161 case CODE_FOR_call_value_insn
:
10168 jump_over_one_insn_p (rtx insn
, rtx dest
)
10170 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
10173 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
10174 int dest_addr
= INSN_ADDRESSES (uid
);
10175 int jump_offset
= dest_addr
- jump_addr
- get_attr_length (insn
);
10177 return (jump_offset
== 1
10178 || (jump_offset
== 2
10179 && avr_2word_insn_p (next_active_insn (insn
))));
10183 /* Worker function for `HARD_REGNO_MODE_OK'. */
10184 /* Returns 1 if a value of mode MODE can be stored starting with hard
10185 register number REGNO. On the enhanced core, anything larger than
10186 1 byte must start in even numbered register for "movw" to work
10187 (this way we don't have to check for odd registers everywhere). */
10190 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
10192 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10193 Disallowing QI et al. in these regs might lead to code like
10194 (set (subreg:QI (reg:HI 28) n) ...)
10195 which will result in wrong code because reload does not
10196 handle SUBREGs of hard regsisters like this.
10197 This could be fixed in reload. However, it appears
10198 that fixing reload is not wanted by reload people. */
10200 /* Any GENERAL_REGS register can hold 8-bit values. */
10202 if (GET_MODE_SIZE (mode
) == 1)
10205 /* FIXME: Ideally, the following test is not needed.
10206 However, it turned out that it can reduce the number
10207 of spill fails. AVR and it's poor endowment with
10208 address registers is extreme stress test for reload. */
10210 if (GET_MODE_SIZE (mode
) >= 4
10214 /* All modes larger than 8 bits should start in an even register. */
10216 return !(regno
& 1);
10220 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10223 avr_hard_regno_call_part_clobbered (unsigned regno
, enum machine_mode mode
)
10225 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10226 represent valid hard registers like, e.g. HI:29. Returning TRUE
10227 for such registers can lead to performance degradation as mentioned
10228 in PR53595. Thus, report invalid hard registers as FALSE. */
10230 if (!avr_hard_regno_mode_ok (regno
, mode
))
10233 /* Return true if any of the following boundaries is crossed:
10234 17/18, 27/28 and 29/30. */
10236 return ((regno
< 18 && regno
+ GET_MODE_SIZE (mode
) > 18)
10237 || (regno
< REG_Y
&& regno
+ GET_MODE_SIZE (mode
) > REG_Y
)
10238 || (regno
< REG_Z
&& regno
+ GET_MODE_SIZE (mode
) > REG_Z
));
10242 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10245 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED
,
10246 addr_space_t as
, RTX_CODE outer_code
,
10247 RTX_CODE index_code ATTRIBUTE_UNUSED
)
10249 if (!ADDR_SPACE_GENERIC_P (as
))
10251 return POINTER_Z_REGS
;
10255 return reload_completed
? BASE_POINTER_REGS
: POINTER_REGS
;
10257 return PLUS
== outer_code
? BASE_POINTER_REGS
: POINTER_REGS
;
10261 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10264 avr_regno_mode_code_ok_for_base_p (int regno
,
10265 enum machine_mode mode ATTRIBUTE_UNUSED
,
10266 addr_space_t as ATTRIBUTE_UNUSED
,
10267 RTX_CODE outer_code
,
10268 RTX_CODE index_code ATTRIBUTE_UNUSED
)
10272 if (!ADDR_SPACE_GENERIC_P (as
))
10274 if (regno
< FIRST_PSEUDO_REGISTER
10282 regno
= reg_renumber
[regno
];
10284 if (regno
== REG_Z
)
10293 if (regno
< FIRST_PSEUDO_REGISTER
10297 || regno
== ARG_POINTER_REGNUM
))
10301 else if (reg_renumber
)
10303 regno
= reg_renumber
[regno
];
10308 || regno
== ARG_POINTER_REGNUM
)
10315 && PLUS
== outer_code
10325 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10326 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10327 CLOBBER_REG is a QI clobber register or NULL_RTX.
10328 LEN == NULL: output instructions.
10329 LEN != NULL: set *LEN to the length of the instruction sequence
10330 (in words) printed with LEN = NULL.
10331 If CLEAR_P is true, OP[0] had been cleard to Zero already.
10332 If CLEAR_P is false, nothing is known about OP[0].
10334 The effect on cc0 is as follows:
10336 Load 0 to any register except ZERO_REG : NONE
10337 Load ld register with any value : NONE
10338 Anything else: : CLOBBER */
10341 output_reload_in_const (rtx
*op
, rtx clobber_reg
, int *len
, bool clear_p
)
10345 rtx xval
, xdest
[4];
10347 int clobber_val
= 1234;
10348 bool cooked_clobber_p
= false;
10349 bool set_p
= false;
10350 enum machine_mode mode
= GET_MODE (dest
);
10351 int n
, n_bytes
= GET_MODE_SIZE (mode
);
10353 gcc_assert (REG_P (dest
)
10354 && CONSTANT_P (src
));
10359 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10360 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10362 if (REGNO (dest
) < 16
10363 && REGNO (dest
) + GET_MODE_SIZE (mode
) > 16)
10365 clobber_reg
= all_regs_rtx
[REGNO (dest
) + n_bytes
- 1];
10368 /* We might need a clobber reg but don't have one. Look at the value to
10369 be loaded more closely. A clobber is only needed if it is a symbol
10370 or contains a byte that is neither 0, -1 or a power of 2. */
10372 if (NULL_RTX
== clobber_reg
10373 && !test_hard_reg_class (LD_REGS
, dest
)
10374 && (! (CONST_INT_P (src
) || CONST_FIXED_P (src
) || CONST_DOUBLE_P (src
))
10375 || !avr_popcount_each_byte (src
, n_bytes
,
10376 (1 << 0) | (1 << 1) | (1 << 8))))
10378 /* We have no clobber register but need one. Cook one up.
10379 That's cheaper than loading from constant pool. */
10381 cooked_clobber_p
= true;
10382 clobber_reg
= all_regs_rtx
[REG_Z
+ 1];
10383 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg
, len
, 1);
10386 /* Now start filling DEST from LSB to MSB. */
10388 for (n
= 0; n
< n_bytes
; n
++)
10391 bool done_byte
= false;
10395 /* Crop the n-th destination byte. */
10397 xdest
[n
] = simplify_gen_subreg (QImode
, dest
, mode
, n
);
10398 ldreg_p
= test_hard_reg_class (LD_REGS
, xdest
[n
]);
10400 if (!CONST_INT_P (src
)
10401 && !CONST_FIXED_P (src
)
10402 && !CONST_DOUBLE_P (src
))
10404 static const char* const asm_code
[][2] =
10406 { "ldi %2,lo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,lo8(%1)" },
10407 { "ldi %2,hi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hi8(%1)" },
10408 { "ldi %2,hlo8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hlo8(%1)" },
10409 { "ldi %2,hhi8(%1)" CR_TAB
"mov %0,%2", "ldi %0,hhi8(%1)" }
10414 xop
[2] = clobber_reg
;
10416 avr_asm_len (asm_code
[n
][ldreg_p
], xop
, len
, ldreg_p
? 1 : 2);
10421 /* Crop the n-th source byte. */
10423 xval
= simplify_gen_subreg (QImode
, src
, mode
, n
);
10424 ival
[n
] = INTVAL (xval
);
10426 /* Look if we can reuse the low word by means of MOVW. */
10432 rtx lo16
= simplify_gen_subreg (HImode
, src
, mode
, 0);
10433 rtx hi16
= simplify_gen_subreg (HImode
, src
, mode
, 2);
10435 if (INTVAL (lo16
) == INTVAL (hi16
))
10437 if (0 != INTVAL (lo16
)
10440 avr_asm_len ("movw %C0,%A0", &op
[0], len
, 1);
10447 /* Don't use CLR so that cc0 is set as expected. */
10452 avr_asm_len (ldreg_p
? "ldi %0,0"
10453 : ZERO_REGNO
== REGNO (xdest
[n
]) ? "clr %0"
10454 : "mov %0,__zero_reg__",
10455 &xdest
[n
], len
, 1);
10459 if (clobber_val
== ival
[n
]
10460 && REGNO (clobber_reg
) == REGNO (xdest
[n
]))
10465 /* LD_REGS can use LDI to move a constant value */
10471 avr_asm_len ("ldi %0,lo8(%1)", xop
, len
, 1);
10475 /* Try to reuse value already loaded in some lower byte. */
10477 for (j
= 0; j
< n
; j
++)
10478 if (ival
[j
] == ival
[n
])
10483 avr_asm_len ("mov %0,%1", xop
, len
, 1);
10491 /* Need no clobber reg for -1: Use CLR/DEC */
10496 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10498 avr_asm_len ("dec %0", &xdest
[n
], len
, 1);
10501 else if (1 == ival
[n
])
10504 avr_asm_len ("clr %0", &xdest
[n
], len
, 1);
10506 avr_asm_len ("inc %0", &xdest
[n
], len
, 1);
10510 /* Use T flag or INC to manage powers of 2 if we have
10513 if (NULL_RTX
== clobber_reg
10514 && single_one_operand (xval
, QImode
))
10517 xop
[1] = GEN_INT (exact_log2 (ival
[n
] & GET_MODE_MASK (QImode
)));
10519 gcc_assert (constm1_rtx
!= xop
[1]);
10524 avr_asm_len ("set", xop
, len
, 1);
10528 avr_asm_len ("clr %0", xop
, len
, 1);
10530 avr_asm_len ("bld %0,%1", xop
, len
, 1);
10534 /* We actually need the LD_REGS clobber reg. */
10536 gcc_assert (NULL_RTX
!= clobber_reg
);
10540 xop
[2] = clobber_reg
;
10541 clobber_val
= ival
[n
];
10543 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10544 "mov %0,%2", xop
, len
, 2);
10547 /* If we cooked up a clobber reg above, restore it. */
10549 if (cooked_clobber_p
)
10551 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg
, len
, 1);
10556 /* Reload the constant OP[1] into the HI register OP[0].
10557 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10558 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10559 need a clobber reg or have to cook one up.
10561 PLEN == NULL: Output instructions.
10562 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10563 by the insns printed.
10568 output_reload_inhi (rtx
*op
, rtx clobber_reg
, int *plen
)
10570 output_reload_in_const (op
, clobber_reg
, plen
, false);
10575 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10576 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10577 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10578 need a clobber reg or have to cook one up.
10580 LEN == NULL: Output instructions.
10582 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10583 by the insns printed.
10588 output_reload_insisf (rtx
*op
, rtx clobber_reg
, int *len
)
10591 && !test_hard_reg_class (LD_REGS
, op
[0])
10592 && (CONST_INT_P (op
[1])
10593 || CONST_FIXED_P (op
[1])
10594 || CONST_DOUBLE_P (op
[1])))
10596 int len_clr
, len_noclr
;
10598 /* In some cases it is better to clear the destination beforehand, e.g.
10600 CLR R2 CLR R3 MOVW R4,R2 INC R2
10604 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10606 We find it too tedious to work that out in the print function.
10607 Instead, we call the print function twice to get the lengths of
10608 both methods and use the shortest one. */
10610 output_reload_in_const (op
, clobber_reg
, &len_clr
, true);
10611 output_reload_in_const (op
, clobber_reg
, &len_noclr
, false);
10613 if (len_noclr
- len_clr
== 4)
10615 /* Default needs 4 CLR instructions: clear register beforehand. */
10617 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10618 "mov %B0,__zero_reg__" CR_TAB
10619 "movw %C0,%A0", &op
[0], len
, 3);
10621 output_reload_in_const (op
, clobber_reg
, len
, true);
10630 /* Default: destination not pre-cleared. */
10632 output_reload_in_const (op
, clobber_reg
, len
, false);
10637 avr_out_reload_inpsi (rtx
*op
, rtx clobber_reg
, int *len
)
10639 output_reload_in_const (op
, clobber_reg
, len
, false);
10644 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10647 avr_output_addr_vec_elt (FILE *stream
, int value
)
10649 if (AVR_HAVE_JMP_CALL
)
10650 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
10652 fprintf (stream
, "\trjmp .L%d\n", value
);
10656 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10657 /* Returns true if SCRATCH are safe to be allocated as a scratch
10658 registers (for a define_peephole2) in the current function. */
10661 avr_hard_regno_scratch_ok (unsigned int regno
)
10663 /* Interrupt functions can only use registers that have already been saved
10664 by the prologue, even if they would normally be call-clobbered. */
10666 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10667 && !df_regs_ever_live_p (regno
))
10670 /* Don't allow hard registers that might be part of the frame pointer.
10671 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10672 and don't care for a frame pointer that spans more than one register. */
10674 if ((!reload_completed
|| frame_pointer_needed
)
10675 && (regno
== REG_Y
|| regno
== REG_Y
+ 1))
10684 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10685 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10688 avr_hard_regno_rename_ok (unsigned int old_reg
,
10689 unsigned int new_reg
)
10691 /* Interrupt functions can only use registers that have already been
10692 saved by the prologue, even if they would normally be
10695 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
10696 && !df_regs_ever_live_p (new_reg
))
10699 /* Don't allow hard registers that might be part of the frame pointer.
10700 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10701 and don't care for a frame pointer that spans more than one register. */
10703 if ((!reload_completed
|| frame_pointer_needed
)
10704 && (old_reg
== REG_Y
|| old_reg
== REG_Y
+ 1
10705 || new_reg
== REG_Y
|| new_reg
== REG_Y
+ 1))
10713 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10714 or memory location in the I/O space (QImode only).
10716 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10717 Operand 1: register operand to test, or CONST_INT memory address.
10718 Operand 2: bit number.
10719 Operand 3: label to jump to if the test is true. */
10722 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
10724 enum rtx_code comp
= GET_CODE (operands
[0]);
10725 bool long_jump
= get_attr_length (insn
) >= 4;
10726 bool reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
10730 else if (comp
== LT
)
10734 comp
= reverse_condition (comp
);
10736 switch (GET_CODE (operands
[1]))
10743 if (low_io_address_operand (operands
[1], QImode
))
10746 output_asm_insn ("sbis %i1,%2", operands
);
10748 output_asm_insn ("sbic %i1,%2", operands
);
10752 output_asm_insn ("in __tmp_reg__,%i1", operands
);
10754 output_asm_insn ("sbrs __tmp_reg__,%2", operands
);
10756 output_asm_insn ("sbrc __tmp_reg__,%2", operands
);
10759 break; /* CONST_INT */
10764 output_asm_insn ("sbrs %T1%T2", operands
);
10766 output_asm_insn ("sbrc %T1%T2", operands
);
10772 return ("rjmp .+4" CR_TAB
10781 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10784 avr_asm_out_ctor (rtx symbol
, int priority
)
10786 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
10787 default_ctor_section_asm_out_constructor (symbol
, priority
);
10791 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
10794 avr_asm_out_dtor (rtx symbol
, int priority
)
10796 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
10797 default_dtor_section_asm_out_destructor (symbol
, priority
);
10801 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
10804 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
10806 if (TYPE_MODE (type
) == BLKmode
)
10808 HOST_WIDE_INT size
= int_size_in_bytes (type
);
10809 return (size
== -1 || size
> 8);
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
10832 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10834 static enum machine_mode
10835 avr_addr_space_address_mode (addr_space_t as
)
10837 return avr_addrspace
[as
].pointer_size
== 3 ? PSImode
: HImode
;
10841 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10843 static enum machine_mode
10844 avr_addr_space_pointer_mode (addr_space_t as
)
10846 return avr_addr_space_address_mode (as
);
10850 /* Helper for following function. */
10853 avr_reg_ok_for_pgm_addr (rtx reg
, bool strict
)
10855 gcc_assert (REG_P (reg
));
10859 return REGNO (reg
) == REG_Z
;
10862 /* Avoid combine to propagate hard regs. */
10864 if (can_create_pseudo_p()
10865 && REGNO (reg
) < REG_Z
)
10874 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
10877 avr_addr_space_legitimate_address_p (enum machine_mode mode
, rtx x
,
10878 bool strict
, addr_space_t as
)
10887 case ADDR_SPACE_GENERIC
:
10888 return avr_legitimate_address_p (mode
, x
, strict
);
10890 case ADDR_SPACE_FLASH
:
10891 case ADDR_SPACE_FLASH1
:
10892 case ADDR_SPACE_FLASH2
:
10893 case ADDR_SPACE_FLASH3
:
10894 case ADDR_SPACE_FLASH4
:
10895 case ADDR_SPACE_FLASH5
:
10897 switch (GET_CODE (x
))
10900 ok
= avr_reg_ok_for_pgm_addr (x
, strict
);
10904 ok
= avr_reg_ok_for_pgm_addr (XEXP (x
, 0), strict
);
10913 case ADDR_SPACE_MEMX
:
10916 && can_create_pseudo_p());
10918 if (LO_SUM
== GET_CODE (x
))
10920 rtx hi
= XEXP (x
, 0);
10921 rtx lo
= XEXP (x
, 1);
10924 && (!strict
|| REGNO (hi
) < FIRST_PSEUDO_REGISTER
)
10926 && REGNO (lo
) == REG_Z
);
10932 if (avr_log
.legitimate_address_p
)
10934 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
10935 "reload_completed=%d reload_in_progress=%d %s:",
10936 ok
, mode
, strict
, reload_completed
, reload_in_progress
,
10937 reg_renumber
? "(reg_renumber)" : "");
10939 if (GET_CODE (x
) == PLUS
10940 && REG_P (XEXP (x
, 0))
10941 && CONST_INT_P (XEXP (x
, 1))
10942 && IN_RANGE (INTVAL (XEXP (x
, 1)), 0, MAX_LD_OFFSET (mode
))
10945 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
10946 true_regnum (XEXP (x
, 0)));
10949 avr_edump ("\n%r\n", x
);
10956 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10959 avr_addr_space_legitimize_address (rtx x
, rtx old_x
,
10960 enum machine_mode mode
, addr_space_t as
)
10962 if (ADDR_SPACE_GENERIC_P (as
))
10963 return avr_legitimize_address (x
, old_x
, mode
);
10965 if (avr_log
.legitimize_address
)
10967 avr_edump ("\n%?: mode=%m\n %r\n", mode
, old_x
);
10974 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
10977 avr_addr_space_convert (rtx src
, tree type_from
, tree type_to
)
10979 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (type_from
));
10980 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type_to
));
10982 if (avr_log
.progmem
)
10983 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
10984 src
, type_from
, type_to
);
10986 /* Up-casting from 16-bit to 24-bit pointer. */
10988 if (as_from
!= ADDR_SPACE_MEMX
10989 && as_to
== ADDR_SPACE_MEMX
)
10993 rtx reg
= gen_reg_rtx (PSImode
);
10995 while (CONST
== GET_CODE (sym
) || PLUS
== GET_CODE (sym
))
10996 sym
= XEXP (sym
, 0);
10998 /* Look at symbol flags: avr_encode_section_info set the flags
10999 also if attribute progmem was seen so that we get the right
11000 promotion for, e.g. PSTR-like strings that reside in generic space
11001 but are located in flash. In that case we patch the incoming
11004 if (SYMBOL_REF
== GET_CODE (sym
)
11005 && ADDR_SPACE_FLASH
== AVR_SYMBOL_GET_ADDR_SPACE (sym
))
11007 as_from
= ADDR_SPACE_FLASH
;
11010 /* Linearize memory: RAM has bit 23 set. */
11012 msb
= ADDR_SPACE_GENERIC_P (as_from
)
11014 : avr_addrspace
[as_from
].segment
;
11016 src
= force_reg (Pmode
, src
);
11018 emit_insn (msb
== 0
11019 ? gen_zero_extendhipsi2 (reg
, src
)
11020 : gen_n_extendhipsi2 (reg
, gen_int_mode (msb
, QImode
), src
));
11025 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
11027 if (as_from
== ADDR_SPACE_MEMX
11028 && as_to
!= ADDR_SPACE_MEMX
)
11030 rtx new_src
= gen_reg_rtx (Pmode
);
11032 src
= force_reg (PSImode
, src
);
11034 emit_move_insn (new_src
,
11035 simplify_gen_subreg (Pmode
, src
, PSImode
, 0));
11043 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11046 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED
,
11047 addr_space_t superset ATTRIBUTE_UNUSED
)
11049 /* Allow any kind of pointer mess. */
11055 /* Implement `TARGET_CONVERT_TO_TYPE'. */
11058 avr_convert_to_type (tree type
, tree expr
)
11060 /* Print a diagnose for pointer conversion that changes the address
11061 space of the pointer target to a non-enclosing address space,
11062 provided -Waddr-space-convert is on.
11064 FIXME: Filter out cases where the target object is known to
11065 be located in the right memory, like in
11067 (const __flash*) PSTR ("text")
11069 Also try to distinguish between explicit casts requested by
11070 the user and implicit casts like
11072 void f (const __flash char*);
11074 void g (const char *p)
11076 f ((const __flash*) p);
11079 under the assumption that an explicit casts means that the user
11080 knows what he is doing, e.g. interface with PSTR or old style
11081 code with progmem and pgm_read_xxx.
11084 if (avr_warn_addr_space_convert
11085 && expr
!= error_mark_node
11086 && POINTER_TYPE_P (type
)
11087 && POINTER_TYPE_P (TREE_TYPE (expr
)))
11089 addr_space_t as_old
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr
)));
11090 addr_space_t as_new
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
11092 if (avr_log
.progmem
)
11093 avr_edump ("%?: type = %t\nexpr = %t\n\n", type
, expr
);
11095 if (as_new
!= ADDR_SPACE_MEMX
11096 && as_new
!= as_old
)
11098 location_t loc
= EXPR_LOCATION (expr
);
11099 const char *name_old
= avr_addrspace
[as_old
].name
;
11100 const char *name_new
= avr_addrspace
[as_new
].name
;
11102 warning (OPT_Waddr_space_convert
,
11103 "conversion from address space %qs to address space %qs",
11104 ADDR_SPACE_GENERIC_P (as_old
) ? "generic" : name_old
,
11105 ADDR_SPACE_GENERIC_P (as_new
) ? "generic" : name_new
);
11107 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, expr
);
11115 /* Worker function for movmemhi expander.
11116 XOP[0] Destination as MEM:BLK
11118 XOP[2] # Bytes to copy
11120 Return TRUE if the expansion is accomplished.
11121 Return FALSE if the operand compination is not supported. */
11124 avr_emit_movmemhi (rtx
*xop
)
11126 HOST_WIDE_INT count
;
11127 enum machine_mode loop_mode
;
11128 addr_space_t as
= MEM_ADDR_SPACE (xop
[1]);
11129 rtx loop_reg
, addr1
, a_src
, a_dest
, insn
, xas
;
11130 rtx a_hi8
= NULL_RTX
;
11132 if (avr_mem_flash_p (xop
[0]))
11135 if (!CONST_INT_P (xop
[2]))
11138 count
= INTVAL (xop
[2]);
11142 a_src
= XEXP (xop
[1], 0);
11143 a_dest
= XEXP (xop
[0], 0);
11145 if (PSImode
== GET_MODE (a_src
))
11147 gcc_assert (as
== ADDR_SPACE_MEMX
);
11149 loop_mode
= (count
< 0x100) ? QImode
: HImode
;
11150 loop_reg
= gen_rtx_REG (loop_mode
, 24);
11151 emit_move_insn (loop_reg
, gen_int_mode (count
, loop_mode
));
11153 addr1
= simplify_gen_subreg (HImode
, a_src
, PSImode
, 0);
11154 a_hi8
= simplify_gen_subreg (QImode
, a_src
, PSImode
, 2);
11158 int segment
= avr_addrspace
[as
].segment
;
11161 && avr_current_device
->n_flash
> 1)
11163 a_hi8
= GEN_INT (segment
);
11164 emit_move_insn (rampz_rtx
, a_hi8
= copy_to_mode_reg (QImode
, a_hi8
));
11166 else if (!ADDR_SPACE_GENERIC_P (as
))
11168 as
= ADDR_SPACE_FLASH
;
11173 loop_mode
= (count
<= 0x100) ? QImode
: HImode
;
11174 loop_reg
= copy_to_mode_reg (loop_mode
, gen_int_mode (count
, loop_mode
));
11177 xas
= GEN_INT (as
);
11179 /* FIXME: Register allocator might come up with spill fails if it is left
11180 on its own. Thus, we allocate the pointer registers by hand:
11182 X = destination address */
11184 emit_move_insn (lpm_addr_reg_rtx
, addr1
);
11185 emit_move_insn (gen_rtx_REG (HImode
, REG_X
), a_dest
);
11187 /* FIXME: Register allocator does a bad job and might spill address
11188 register(s) inside the loop leading to additional move instruction
11189 to/from stack which could clobber tmp_reg. Thus, do *not* emit
11190 load and store as separate insns. Instead, we perform the copy
11191 by means of one monolithic insn. */
11193 gcc_assert (TMP_REGNO
== LPM_REGNO
);
11195 if (as
!= ADDR_SPACE_MEMX
)
11197 /* Load instruction ([E]LPM or LD) is known at compile time:
11198 Do the copy-loop inline. */
11200 rtx (*fun
) (rtx
, rtx
, rtx
)
11201 = QImode
== loop_mode
? gen_movmem_qi
: gen_movmem_hi
;
11203 insn
= fun (xas
, loop_reg
, loop_reg
);
11207 rtx (*fun
) (rtx
, rtx
)
11208 = QImode
== loop_mode
? gen_movmemx_qi
: gen_movmemx_hi
;
11210 emit_move_insn (gen_rtx_REG (QImode
, 23), a_hi8
);
11212 insn
= fun (xas
, GEN_INT (avr_addr
.rampz
));
11215 set_mem_addr_space (SET_SRC (XVECEXP (insn
, 0, 0)), as
);
11222 /* Print assembler for movmem_qi, movmem_hi insns...
11224 $1, $2 : Loop register
11226 X : Destination address
11230 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED
, rtx
*op
, int *plen
)
11232 addr_space_t as
= (addr_space_t
) INTVAL (op
[0]);
11233 enum machine_mode loop_mode
= GET_MODE (op
[1]);
11234 bool sbiw_p
= test_hard_reg_class (ADDW_REGS
, op
[1]);
11242 xop
[2] = tmp_reg_rtx
;
11246 avr_asm_len ("0:", xop
, plen
, 0);
11248 /* Load with post-increment */
11255 case ADDR_SPACE_GENERIC
:
11257 avr_asm_len ("ld %2,Z+", xop
, plen
, 1);
11260 case ADDR_SPACE_FLASH
:
11263 avr_asm_len ("lpm %2,Z+", xop
, plen
, 1);
11265 avr_asm_len ("lpm" CR_TAB
11266 "adiw r30,1", xop
, plen
, 2);
11269 case ADDR_SPACE_FLASH1
:
11270 case ADDR_SPACE_FLASH2
:
11271 case ADDR_SPACE_FLASH3
:
11272 case ADDR_SPACE_FLASH4
:
11273 case ADDR_SPACE_FLASH5
:
11275 if (AVR_HAVE_ELPMX
)
11276 avr_asm_len ("elpm %2,Z+", xop
, plen
, 1);
11278 avr_asm_len ("elpm" CR_TAB
11279 "adiw r30,1", xop
, plen
, 2);
11283 /* Store with post-increment */
11285 avr_asm_len ("st X+,%2", xop
, plen
, 1);
11287 /* Decrement loop-counter and set Z-flag */
11289 if (QImode
== loop_mode
)
11291 avr_asm_len ("dec %1", xop
, plen
, 1);
11295 avr_asm_len ("sbiw %1,1", xop
, plen
, 1);
11299 avr_asm_len ("subi %A1,1" CR_TAB
11300 "sbci %B1,0", xop
, plen
, 2);
11303 /* Loop until zero */
11305 return avr_asm_len ("brne 0b", xop
, plen
, 1);
11310 /* Helper for __builtin_avr_delay_cycles */
11313 avr_mem_clobber (void)
11315 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (Pmode
));
11316 MEM_VOLATILE_P (mem
) = 1;
11321 avr_expand_delay_cycles (rtx operands0
)
11323 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
) & GET_MODE_MASK (SImode
);
11324 unsigned HOST_WIDE_INT cycles_used
;
11325 unsigned HOST_WIDE_INT loop_count
;
11327 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
11329 loop_count
= ((cycles
- 9) / 6) + 1;
11330 cycles_used
= ((loop_count
- 1) * 6) + 9;
11331 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
),
11332 avr_mem_clobber()));
11333 cycles
-= cycles_used
;
11336 if (IN_RANGE (cycles
, 262145, 83886081))
11338 loop_count
= ((cycles
- 7) / 5) + 1;
11339 if (loop_count
> 0xFFFFFF)
11340 loop_count
= 0xFFFFFF;
11341 cycles_used
= ((loop_count
- 1) * 5) + 7;
11342 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
),
11343 avr_mem_clobber()));
11344 cycles
-= cycles_used
;
11347 if (IN_RANGE (cycles
, 768, 262144))
11349 loop_count
= ((cycles
- 5) / 4) + 1;
11350 if (loop_count
> 0xFFFF)
11351 loop_count
= 0xFFFF;
11352 cycles_used
= ((loop_count
- 1) * 4) + 5;
11353 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
),
11354 avr_mem_clobber()));
11355 cycles
-= cycles_used
;
11358 if (IN_RANGE (cycles
, 6, 767))
11360 loop_count
= cycles
/ 3;
11361 if (loop_count
> 255)
11363 cycles_used
= loop_count
* 3;
11364 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
),
11365 avr_mem_clobber()));
11366 cycles
-= cycles_used
;
11369 while (cycles
>= 2)
11371 emit_insn (gen_nopv (GEN_INT(2)));
11377 emit_insn (gen_nopv (GEN_INT(1)));
/* Compute the image of x under f, i.e. perform   x --> f(x)
   F encodes a nibble map: nibble X of F (X in 0..7) is the value of
   f(X); arguments outside 0..7 map to 0.  */

static unsigned int
avr_map (unsigned int f, int x)
{
  return x < 8 ? (f >> (4 * x)) & 0xf : 0;
}
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

/* Compute metric MODE (one of the enumerators above) of nibble-map A.  */

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      if (mode == MAP_FIXED_0_7)
        metric += ai == i;
      else if (mode == MAP_NONFIXED_0_7)
        metric += ai < 8 && ai != i;
      else if (mode == MAP_MASK_FIXED_0_7)
        metric |= ((unsigned) (ai == i)) << i;
      else if (mode == MAP_PREIMAGE_0_7)
        metric += ai < 8;
      else if (mode == MAP_MASK_PREIMAGE_F)
        metric |= ((unsigned) (ai == 0xf)) << i;
      else
        gcc_unreachable();
    }

  return metric;
}
11439 /* Return true if IVAL has a 0xf in its hexadecimal representation
11440 and false, otherwise. Only nibbles 0..7 are taken into account.
11441 Used as constraint helper for C0f and Cxf. */
11444 avr_has_nibble_0xf (rtx ival
)
11446 unsigned int map
= UINTVAL (ival
) & GET_MODE_MASK (SImode
);
11447 return 0 != avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
11451 /* We have a set of bits that are mapped by a function F.
11452 Try to decompose F by means of a second function G so that
11458 cost (F o G^-1) + cost (G) < cost (F)
11460 Example: Suppose builtin insert_bits supplies us with the map
11461 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
11462 nibble of the result, we can just as well rotate the bits before inserting
11463 them and use the map 0x7654ffff which is cheaper than the original map.
11464 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
11468 /* tree code of binary function G */
11469 enum tree_code code
;
11471 /* The constant second argument of G */
11474 /* G^-1, the inverse of G (*, arg) */
11477 /* The cost of appplying G (*, arg) */
11480 /* The composition F o G^-1 (*, arg) for some function F */
11483 /* For debug purpose only */
11487 static const avr_map_op_t avr_map_op
[] =
11489 { LROTATE_EXPR
, 0, 0x76543210, 0, 0, "id" },
11490 { LROTATE_EXPR
, 1, 0x07654321, 2, 0, "<<<" },
11491 { LROTATE_EXPR
, 2, 0x10765432, 4, 0, "<<<" },
11492 { LROTATE_EXPR
, 3, 0x21076543, 4, 0, "<<<" },
11493 { LROTATE_EXPR
, 4, 0x32107654, 1, 0, "<<<" },
11494 { LROTATE_EXPR
, 5, 0x43210765, 3, 0, "<<<" },
11495 { LROTATE_EXPR
, 6, 0x54321076, 5, 0, "<<<" },
11496 { LROTATE_EXPR
, 7, 0x65432107, 3, 0, "<<<" },
11497 { RSHIFT_EXPR
, 1, 0x6543210c, 1, 0, ">>" },
11498 { RSHIFT_EXPR
, 1, 0x7543210c, 1, 0, ">>" },
11499 { RSHIFT_EXPR
, 2, 0x543210cc, 2, 0, ">>" },
11500 { RSHIFT_EXPR
, 2, 0x643210cc, 2, 0, ">>" },
11501 { RSHIFT_EXPR
, 2, 0x743210cc, 2, 0, ">>" },
11502 { LSHIFT_EXPR
, 1, 0xc7654321, 1, 0, "<<" },
11503 { LSHIFT_EXPR
, 2, 0xcc765432, 2, 0, "<<" }
11507 /* Try to decompose F as F = (F o G^-1) o G as described above.
11508 The result is a struct representing F o G^-1 and G.
11509 If result.cost < 0 then such a decomposition does not exist. */
11511 static avr_map_op_t
11512 avr_map_decompose (unsigned int f
, const avr_map_op_t
*g
, bool val_const_p
)
11515 bool val_used_p
= 0 != avr_map_metric (f
, MAP_MASK_PREIMAGE_F
);
11516 avr_map_op_t f_ginv
= *g
;
11517 unsigned int ginv
= g
->ginv
;
11521 /* Step 1: Computing F o G^-1 */
11523 for (i
= 7; i
>= 0; i
--)
11525 int x
= avr_map (f
, i
);
11529 x
= avr_map (ginv
, x
);
11531 /* The bit is no element of the image of G: no avail (cost = -1) */
11537 f_ginv
.map
= (f_ginv
.map
<< 4) + x
;
11540 /* Step 2: Compute the cost of the operations.
11541 The overall cost of doing an operation prior to the insertion is
11542 the cost of the insertion plus the cost of the operation. */
11544 /* Step 2a: Compute cost of F o G^-1 */
11546 if (0 == avr_map_metric (f_ginv
.map
, MAP_NONFIXED_0_7
))
11548 /* The mapping consists only of fixed points and can be folded
11549 to AND/OR logic in the remainder. Reasonable cost is 3. */
11551 f_ginv
.cost
= 2 + (val_used_p
&& !val_const_p
);
11557 /* Get the cost of the insn by calling the output worker with some
11558 fake values. Mimic effect of reloading xop[3]: Unused operands
11559 are mapped to 0 and used operands are reloaded to xop[0]. */
11561 xop
[0] = all_regs_rtx
[24];
11562 xop
[1] = gen_int_mode (f_ginv
.map
, SImode
);
11563 xop
[2] = all_regs_rtx
[25];
11564 xop
[3] = val_used_p
? xop
[0] : const0_rtx
;
11566 avr_out_insert_bits (xop
, &f_ginv
.cost
);
11568 f_ginv
.cost
+= val_const_p
&& val_used_p
? 1 : 0;
11571 /* Step 2b: Add cost of G */
11573 f_ginv
.cost
+= g
->cost
;
11575 if (avr_log
.builtin
)
11576 avr_edump (" %s%d=%d", g
->str
, g
->arg
, f_ginv
.cost
);
11582 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11583 XOP[0] and XOP[1] don't overlap.
11584 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11585 If FIXP_P = false: Just move the bit if its position in the destination
11586 is different to its source position. */
11589 avr_move_bits (rtx
*xop
, unsigned int map
, bool fixp_p
, int *plen
)
11593 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11594 int t_bit_src
= -1;
11596 /* We order the operations according to the requested source bit b. */
11598 for (b
= 0; b
< 8; b
++)
11599 for (bit_dest
= 0; bit_dest
< 8; bit_dest
++)
11601 int bit_src
= avr_map (map
, bit_dest
);
11605 /* Same position: No need to copy as requested by FIXP_P. */
11606 || (bit_dest
== bit_src
&& !fixp_p
))
11609 if (t_bit_src
!= bit_src
)
11611 /* Source bit is not yet in T: Store it to T. */
11613 t_bit_src
= bit_src
;
11615 xop
[3] = GEN_INT (bit_src
);
11616 avr_asm_len ("bst %T1%T3", xop
, plen
, 1);
11619 /* Load destination bit with T. */
11621 xop
[3] = GEN_INT (bit_dest
);
11622 avr_asm_len ("bld %T0%T3", xop
, plen
, 1);
11627 /* PLEN == 0: Print assembler code for `insert_bits'.
11628 PLEN != 0: Compute code length in bytes.
11631 OP[1]: The mapping composed of nibbles. If nibble no. N is
11632 0: Bit N of result is copied from bit OP[2].0
11634 7: Bit N of result is copied from bit OP[2].7
11635 0xf: Bit N of result is copied from bit OP[3].N
11636 OP[2]: Bits to be inserted
11637 OP[3]: Target value */
11640 avr_out_insert_bits (rtx
*op
, int *plen
)
11642 unsigned int map
= UINTVAL (op
[1]) & GET_MODE_MASK (SImode
);
11643 unsigned mask_fixed
;
11644 bool fixp_p
= true;
11651 gcc_assert (REG_P (xop
[2]) || CONST_INT_P (xop
[2]));
11655 else if (flag_print_asm_name
)
11656 fprintf (asm_out_file
, ASM_COMMENT_START
"map = 0x%08x\n", map
);
11658 /* If MAP has fixed points it might be better to initialize the result
11659 with the bits to be inserted instead of moving all bits by hand. */
11661 mask_fixed
= avr_map_metric (map
, MAP_MASK_FIXED_0_7
);
11663 if (REGNO (xop
[0]) == REGNO (xop
[1]))
11665 /* Avoid early-clobber conflicts */
11667 avr_asm_len ("mov __tmp_reg__,%1", xop
, plen
, 1);
11668 xop
[1] = tmp_reg_rtx
;
11672 if (avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
11674 /* XOP[2] is used and reloaded to XOP[0] already */
11676 int n_fix
= 0, n_nofix
= 0;
11678 gcc_assert (REG_P (xop
[2]));
11680 /* Get the code size of the bit insertions; once with all bits
11681 moved and once with fixed points omitted. */
11683 avr_move_bits (xop
, map
, true, &n_fix
);
11684 avr_move_bits (xop
, map
, false, &n_nofix
);
11686 if (fixp_p
&& n_fix
- n_nofix
> 3)
11688 xop
[3] = gen_int_mode (~mask_fixed
, QImode
);
11690 avr_asm_len ("eor %0,%1" CR_TAB
11691 "andi %0,%3" CR_TAB
11692 "eor %0,%1", xop
, plen
, 3);
11698 /* XOP[2] is unused */
11700 if (fixp_p
&& mask_fixed
)
11702 avr_asm_len ("mov %0,%1", xop
, plen
, 1);
11707 /* Move/insert remaining bits. */
11709 avr_move_bits (xop
, map
, fixp_p
, plen
);
11715 /* IDs for all the AVR builtins. */
11717 enum avr_builtin_id
11719 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11720 AVR_BUILTIN_ ## NAME,
11721 #include "builtins.def"
11727 struct GTY(()) avr_builtin_description
11729 enum insn_code icode
;
11735 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11736 that a built-in's ID can be used to access the built-in by means of
11739 static GTY(()) struct avr_builtin_description
11740 avr_bdesc
[AVR_BUILTIN_COUNT
] =
11742 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
11743 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11744 #include "builtins.def"
11749 /* Implement `TARGET_BUILTIN_DECL'. */
11752 avr_builtin_decl (unsigned id
, bool initialize_p ATTRIBUTE_UNUSED
)
11754 if (id
< AVR_BUILTIN_COUNT
)
11755 return avr_bdesc
[id
].fndecl
;
11757 return error_mark_node
;
11762 avr_init_builtin_int24 (void)
11764 tree int24_type
= make_signed_type (GET_MODE_BITSIZE (PSImode
));
11765 tree uint24_type
= make_unsigned_type (GET_MODE_BITSIZE (PSImode
));
11767 lang_hooks
.types
.register_builtin_type (int24_type
, "__int24");
11768 lang_hooks
.types
.register_builtin_type (uint24_type
, "__uint24");
11772 /* Implement `TARGET_INIT_BUILTINS' */
11773 /* Set up all builtin functions for this target. */
11776 avr_init_builtins (void)
11778 tree void_ftype_void
11779 = build_function_type_list (void_type_node
, NULL_TREE
);
11780 tree uchar_ftype_uchar
11781 = build_function_type_list (unsigned_char_type_node
,
11782 unsigned_char_type_node
,
11784 tree uint_ftype_uchar_uchar
11785 = build_function_type_list (unsigned_type_node
,
11786 unsigned_char_type_node
,
11787 unsigned_char_type_node
,
11789 tree int_ftype_char_char
11790 = build_function_type_list (integer_type_node
,
11794 tree int_ftype_char_uchar
11795 = build_function_type_list (integer_type_node
,
11797 unsigned_char_type_node
,
11799 tree void_ftype_ulong
11800 = build_function_type_list (void_type_node
,
11801 long_unsigned_type_node
,
11804 tree uchar_ftype_ulong_uchar_uchar
11805 = build_function_type_list (unsigned_char_type_node
,
11806 long_unsigned_type_node
,
11807 unsigned_char_type_node
,
11808 unsigned_char_type_node
,
11811 tree const_memx_void_node
11812 = build_qualified_type (void_type_node
,
11814 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX
));
11816 tree const_memx_ptr_type_node
11817 = build_pointer_type_for_mode (const_memx_void_node
, PSImode
, false);
11819 tree char_ftype_const_memx_ptr
11820 = build_function_type_list (char_type_node
,
11821 const_memx_ptr_type_node
,
11825 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
11827 #define FX_FTYPE_FX(fx) \
11828 tree fx##r_ftype_##fx##r \
11829 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
11830 tree fx##k_ftype_##fx##k \
11831 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
11833 #define FX_FTYPE_FX_INT(fx) \
11834 tree fx##r_ftype_##fx##r_int \
11835 = build_function_type_list (node_##fx##r, node_##fx##r, \
11836 integer_type_node, NULL); \
11837 tree fx##k_ftype_##fx##k_int \
11838 = build_function_type_list (node_##fx##k, node_##fx##k, \
11839 integer_type_node, NULL)
11841 #define INT_FTYPE_FX(fx) \
11842 tree int_ftype_##fx##r \
11843 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
11844 tree int_ftype_##fx##k \
11845 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
11847 #define INTX_FTYPE_FX(fx) \
11848 tree int##fx##r_ftype_##fx##r \
11849 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
11850 tree int##fx##k_ftype_##fx##k \
11851 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
11853 #define FX_FTYPE_INTX(fx) \
11854 tree fx##r_ftype_int##fx##r \
11855 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
11856 tree fx##k_ftype_int##fx##k \
11857 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
11859 tree node_hr
= short_fract_type_node
;
11860 tree node_nr
= fract_type_node
;
11861 tree node_lr
= long_fract_type_node
;
11862 tree node_llr
= long_long_fract_type_node
;
11864 tree node_uhr
= unsigned_short_fract_type_node
;
11865 tree node_unr
= unsigned_fract_type_node
;
11866 tree node_ulr
= unsigned_long_fract_type_node
;
11867 tree node_ullr
= unsigned_long_long_fract_type_node
;
11869 tree node_hk
= short_accum_type_node
;
11870 tree node_nk
= accum_type_node
;
11871 tree node_lk
= long_accum_type_node
;
11872 tree node_llk
= long_long_accum_type_node
;
11874 tree node_uhk
= unsigned_short_accum_type_node
;
11875 tree node_unk
= unsigned_accum_type_node
;
11876 tree node_ulk
= unsigned_long_accum_type_node
;
11877 tree node_ullk
= unsigned_long_long_accum_type_node
;
11880 /* For absfx builtins. */
11887 /* For roundfx builtins. */
11889 FX_FTYPE_FX_INT (h
);
11890 FX_FTYPE_FX_INT (n
);
11891 FX_FTYPE_FX_INT (l
);
11892 FX_FTYPE_FX_INT (ll
);
11894 FX_FTYPE_FX_INT (uh
);
11895 FX_FTYPE_FX_INT (un
);
11896 FX_FTYPE_FX_INT (ul
);
11897 FX_FTYPE_FX_INT (ull
);
11899 /* For countlsfx builtins. */
11909 INT_FTYPE_FX (ull
);
11911 /* For bitsfx builtins. */
11916 INTX_FTYPE_FX (ll
);
11918 INTX_FTYPE_FX (uh
);
11919 INTX_FTYPE_FX (un
);
11920 INTX_FTYPE_FX (ul
);
11921 INTX_FTYPE_FX (ull
);
11923 /* For fxbits builtins. */
11928 FX_FTYPE_INTX (ll
);
11930 FX_FTYPE_INTX (uh
);
11931 FX_FTYPE_INTX (un
);
11932 FX_FTYPE_INTX (ul
);
11933 FX_FTYPE_INTX (ull
);
11936 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11938 int id = AVR_BUILTIN_ ## NAME; \
11939 const char *Name = "__builtin_avr_" #NAME; \
11940 char *name = (char*) alloca (1 + strlen (Name)); \
11942 gcc_assert (id < AVR_BUILTIN_COUNT); \
11943 avr_bdesc[id].fndecl \
11944 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
11945 BUILT_IN_MD, LIBNAME, NULL_TREE); \
11947 #include "builtins.def"
11950 avr_init_builtin_int24 ();
11954 /* Subroutine of avr_expand_builtin to expand vanilla builtins
11955 with non-void result and 1 ... 3 arguments. */
11958 avr_default_expand_builtin (enum insn_code icode
, tree exp
, rtx target
)
11961 int n
, n_args
= call_expr_nargs (exp
);
11962 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
11964 gcc_assert (n_args
>= 1 && n_args
<= 3);
11966 if (target
== NULL_RTX
11967 || GET_MODE (target
) != tmode
11968 || !insn_data
[icode
].operand
[0].predicate (target
, tmode
))
11970 target
= gen_reg_rtx (tmode
);
11973 for (n
= 0; n
< n_args
; n
++)
11975 tree arg
= CALL_EXPR_ARG (exp
, n
);
11976 rtx op
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
11977 enum machine_mode opmode
= GET_MODE (op
);
11978 enum machine_mode mode
= insn_data
[icode
].operand
[n
+1].mode
;
11980 if ((opmode
== SImode
|| opmode
== VOIDmode
) && mode
== HImode
)
11983 op
= gen_lowpart (HImode
, op
);
11986 /* In case the insn wants input operands in modes different from
11987 the result, abort. */
11989 gcc_assert (opmode
== mode
|| opmode
== VOIDmode
);
11991 if (!insn_data
[icode
].operand
[n
+1].predicate (op
, mode
))
11992 op
= copy_to_mode_reg (mode
, op
);
11999 case 1: pat
= GEN_FCN (icode
) (target
, xop
[0]); break;
12000 case 2: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1]); break;
12001 case 3: pat
= GEN_FCN (icode
) (target
, xop
[0], xop
[1], xop
[2]); break;
12007 if (pat
== NULL_RTX
)
12016 /* Implement `TARGET_EXPAND_BUILTIN'. */
12017 /* Expand an expression EXP that calls a built-in function,
12018 with result going to TARGET if that's convenient
12019 (and in mode MODE if that's convenient).
12020 SUBTARGET may be used as the target for computing one of EXP's operands.
12021 IGNORE is nonzero if the value is to be ignored. */
12024 avr_expand_builtin (tree exp
, rtx target
,
12025 rtx subtarget ATTRIBUTE_UNUSED
,
12026 enum machine_mode mode ATTRIBUTE_UNUSED
,
12029 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
12030 const char *bname
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
12031 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
12032 const struct avr_builtin_description
*d
= &avr_bdesc
[id
];
12036 gcc_assert (id
< AVR_BUILTIN_COUNT
);
12040 case AVR_BUILTIN_NOP
:
12041 emit_insn (gen_nopv (GEN_INT(1)));
12044 case AVR_BUILTIN_DELAY_CYCLES
:
12046 arg0
= CALL_EXPR_ARG (exp
, 0);
12047 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12049 if (!CONST_INT_P (op0
))
12050 error ("%s expects a compile time integer constant", bname
);
12052 avr_expand_delay_cycles (op0
);
12057 case AVR_BUILTIN_INSERT_BITS
:
12059 arg0
= CALL_EXPR_ARG (exp
, 0);
12060 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
12062 if (!CONST_INT_P (op0
))
12064 error ("%s expects a compile time long integer constant"
12065 " as first argument", bname
);
12072 case AVR_BUILTIN_ROUNDHR
: case AVR_BUILTIN_ROUNDUHR
:
12073 case AVR_BUILTIN_ROUNDR
: case AVR_BUILTIN_ROUNDUR
:
12074 case AVR_BUILTIN_ROUNDLR
: case AVR_BUILTIN_ROUNDULR
:
12075 case AVR_BUILTIN_ROUNDLLR
: case AVR_BUILTIN_ROUNDULLR
:
12077 case AVR_BUILTIN_ROUNDHK
: case AVR_BUILTIN_ROUNDUHK
:
12078 case AVR_BUILTIN_ROUNDK
: case AVR_BUILTIN_ROUNDUK
:
12079 case AVR_BUILTIN_ROUNDLK
: case AVR_BUILTIN_ROUNDULK
:
12080 case AVR_BUILTIN_ROUNDLLK
: case AVR_BUILTIN_ROUNDULLK
:
12082 /* Warn about odd rounding. Rounding points >= FBIT will have
12085 if (TREE_CODE (CALL_EXPR_ARG (exp
, 1)) != INTEGER_CST
)
12088 int rbit
= (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1));
12090 if (rbit
>= (int) GET_MODE_FBIT (mode
))
12092 warning (OPT_Wextra
, "rounding to %d bits has no effect for "
12093 "fixed-point value with %d fractional bits",
12094 rbit
, GET_MODE_FBIT (mode
));
12096 return expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
, mode
,
12099 else if (rbit
<= - (int) GET_MODE_IBIT (mode
))
12101 warning (0, "rounding result will always be 0");
12102 return CONST0_RTX (mode
);
12105 /* The rounding points RP satisfies now: -IBIT < RP < FBIT.
12107 TR 18037 only specifies results for RP > 0. However, the
12108 remaining cases of -IBIT < RP <= 0 can easily be supported
12109 without any additional overhead. */
12114 /* No fold found and no insn: Call support function from libgcc. */
12116 if (d
->icode
== CODE_FOR_nothing
12117 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp
)) != NULL_TREE
)
12119 return expand_call (exp
, target
, ignore
);
12122 /* No special treatment needed: vanilla expand. */
12124 gcc_assert (d
->icode
!= CODE_FOR_nothing
);
12125 gcc_assert (d
->n_args
== call_expr_nargs (exp
));
12127 if (d
->n_args
== 0)
12129 emit_insn ((GEN_FCN (d
->icode
)) (target
));
12133 return avr_default_expand_builtin (d
->icode
, exp
, target
);
12137 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
12140 avr_fold_absfx (tree tval
)
12142 if (FIXED_CST
!= TREE_CODE (tval
))
12145 /* Our fixed-points have no padding: Use double_int payload directly. */
12147 FIXED_VALUE_TYPE fval
= TREE_FIXED_CST (tval
);
12148 unsigned int bits
= GET_MODE_BITSIZE (fval
.mode
);
12149 double_int ival
= fval
.data
.sext (bits
);
12151 if (!ival
.is_negative())
12154 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
12156 fval
.data
= (ival
== double_int::min_value (bits
, false).sext (bits
))
12157 ? double_int::max_value (bits
, false)
12160 return build_fixed (TREE_TYPE (tval
), fval
);
12164 /* Implement `TARGET_FOLD_BUILTIN'. */
12167 avr_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*arg
,
12168 bool ignore ATTRIBUTE_UNUSED
)
12170 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
12171 tree val_type
= TREE_TYPE (TREE_TYPE (fndecl
));
12181 case AVR_BUILTIN_SWAP
:
12183 return fold_build2 (LROTATE_EXPR
, val_type
, arg
[0],
12184 build_int_cst (val_type
, 4));
12187 case AVR_BUILTIN_ABSHR
:
12188 case AVR_BUILTIN_ABSR
:
12189 case AVR_BUILTIN_ABSLR
:
12190 case AVR_BUILTIN_ABSLLR
:
12192 case AVR_BUILTIN_ABSHK
:
12193 case AVR_BUILTIN_ABSK
:
12194 case AVR_BUILTIN_ABSLK
:
12195 case AVR_BUILTIN_ABSLLK
:
12196 /* GCC is not good with folding ABS for fixed-point. Do it by hand. */
12198 return avr_fold_absfx (arg
[0]);
12200 case AVR_BUILTIN_BITSHR
: case AVR_BUILTIN_HRBITS
:
12201 case AVR_BUILTIN_BITSHK
: case AVR_BUILTIN_HKBITS
:
12202 case AVR_BUILTIN_BITSUHR
: case AVR_BUILTIN_UHRBITS
:
12203 case AVR_BUILTIN_BITSUHK
: case AVR_BUILTIN_UHKBITS
:
12205 case AVR_BUILTIN_BITSR
: case AVR_BUILTIN_RBITS
:
12206 case AVR_BUILTIN_BITSK
: case AVR_BUILTIN_KBITS
:
12207 case AVR_BUILTIN_BITSUR
: case AVR_BUILTIN_URBITS
:
12208 case AVR_BUILTIN_BITSUK
: case AVR_BUILTIN_UKBITS
:
12210 case AVR_BUILTIN_BITSLR
: case AVR_BUILTIN_LRBITS
:
12211 case AVR_BUILTIN_BITSLK
: case AVR_BUILTIN_LKBITS
:
12212 case AVR_BUILTIN_BITSULR
: case AVR_BUILTIN_ULRBITS
:
12213 case AVR_BUILTIN_BITSULK
: case AVR_BUILTIN_ULKBITS
:
12215 case AVR_BUILTIN_BITSLLR
: case AVR_BUILTIN_LLRBITS
:
12216 case AVR_BUILTIN_BITSLLK
: case AVR_BUILTIN_LLKBITS
:
12217 case AVR_BUILTIN_BITSULLR
: case AVR_BUILTIN_ULLRBITS
:
12218 case AVR_BUILTIN_BITSULLK
: case AVR_BUILTIN_ULLKBITS
:
12220 gcc_assert (TYPE_PRECISION (val_type
)
12221 == TYPE_PRECISION (TREE_TYPE (arg
[0])));
12223 return build1 (VIEW_CONVERT_EXPR
, val_type
, arg
[0]);
12225 case AVR_BUILTIN_INSERT_BITS
:
12227 tree tbits
= arg
[1];
12228 tree tval
= arg
[2];
12230 tree map_type
= TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)));
12232 bool changed
= false;
12234 avr_map_op_t best_g
;
12236 if (TREE_CODE (arg
[0]) != INTEGER_CST
)
12238 /* No constant as first argument: Don't fold this and run into
12239 error in avr_expand_builtin. */
12244 tmap
= wide_int_to_tree (map_type
, arg
[0]);
12245 map
= TREE_INT_CST_LOW (tmap
);
12247 if (TREE_CODE (tval
) != INTEGER_CST
12248 && 0 == avr_map_metric (map
, MAP_MASK_PREIMAGE_F
))
12250 /* There are no F in the map, i.e. 3rd operand is unused.
12251 Replace that argument with some constant to render
12252 respective input unused. */
12254 tval
= build_int_cst (val_type
, 0);
12258 if (TREE_CODE (tbits
) != INTEGER_CST
12259 && 0 == avr_map_metric (map
, MAP_PREIMAGE_0_7
))
12261 /* Similar for the bits to be inserted. If they are unused,
12262 we can just as well pass 0. */
12264 tbits
= build_int_cst (val_type
, 0);
12267 if (TREE_CODE (tbits
) == INTEGER_CST
)
12269 /* Inserting bits known at compile time is easy and can be
12270 performed by AND and OR with appropriate masks. */
12272 int bits
= TREE_INT_CST_LOW (tbits
);
12273 int mask_ior
= 0, mask_and
= 0xff;
12275 for (i
= 0; i
< 8; i
++)
12277 int mi
= avr_map (map
, i
);
12281 if (bits
& (1 << mi
)) mask_ior
|= (1 << i
);
12282 else mask_and
&= ~(1 << i
);
12286 tval
= fold_build2 (BIT_IOR_EXPR
, val_type
, tval
,
12287 build_int_cst (val_type
, mask_ior
));
12288 return fold_build2 (BIT_AND_EXPR
, val_type
, tval
,
12289 build_int_cst (val_type
, mask_and
));
12293 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
12295 /* If bits don't change their position we can use vanilla logic
12296 to merge the two arguments. */
12298 if (0 == avr_map_metric (map
, MAP_NONFIXED_0_7
))
12300 int mask_f
= avr_map_metric (map
, MAP_MASK_PREIMAGE_F
);
12301 tree tres
, tmask
= build_int_cst (val_type
, mask_f
^ 0xff);
12303 tres
= fold_build2 (BIT_XOR_EXPR
, val_type
, tbits
, tval
);
12304 tres
= fold_build2 (BIT_AND_EXPR
, val_type
, tres
, tmask
);
12305 return fold_build2 (BIT_XOR_EXPR
, val_type
, tres
, tval
);
12308 /* Try to decomposing map to reduce overall cost. */
12310 if (avr_log
.builtin
)
12311 avr_edump ("\n%?: %x\n%?: ROL cost: ", map
);
12313 best_g
= avr_map_op
[0];
12314 best_g
.cost
= 1000;
12316 for (i
= 0; i
< sizeof (avr_map_op
) / sizeof (*avr_map_op
); i
++)
12319 = avr_map_decompose (map
, avr_map_op
+ i
,
12320 TREE_CODE (tval
) == INTEGER_CST
);
12322 if (g
.cost
>= 0 && g
.cost
< best_g
.cost
)
12326 if (avr_log
.builtin
)
12329 if (best_g
.arg
== 0)
12330 /* No optimization found */
12333 /* Apply operation G to the 2nd argument. */
12335 if (avr_log
.builtin
)
12336 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
12337 best_g
.str
, best_g
.arg
, best_g
.map
, best_g
.cost
);
12339 /* Do right-shifts arithmetically: They copy the MSB instead of
12340 shifting in a non-usable value (0) as with logic right-shift. */
12342 tbits
= fold_convert (signed_char_type_node
, tbits
);
12343 tbits
= fold_build2 (best_g
.code
, signed_char_type_node
, tbits
,
12344 build_int_cst (val_type
, best_g
.arg
));
12345 tbits
= fold_convert (val_type
, tbits
);
12347 /* Use map o G^-1 instead of original map to undo the effect of G. */
12349 tmap
= wide_int_to_tree (map_type
, best_g
.map
);
12351 return build_call_expr (fndecl
, 3, tmap
, tbits
, tval
);
12352 } /* AVR_BUILTIN_INSERT_BITS */
12360 /* Initialize the GCC target structure. */
12362 #undef TARGET_ASM_ALIGNED_HI_OP
12363 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
12364 #undef TARGET_ASM_ALIGNED_SI_OP
12365 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
12366 #undef TARGET_ASM_UNALIGNED_HI_OP
12367 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
12368 #undef TARGET_ASM_UNALIGNED_SI_OP
12369 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
12370 #undef TARGET_ASM_INTEGER
12371 #define TARGET_ASM_INTEGER avr_assemble_integer
12372 #undef TARGET_ASM_FILE_START
12373 #define TARGET_ASM_FILE_START avr_file_start
12374 #undef TARGET_ASM_FILE_END
12375 #define TARGET_ASM_FILE_END avr_file_end
12377 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
12378 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
12379 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
12380 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
12382 #undef TARGET_FUNCTION_VALUE
12383 #define TARGET_FUNCTION_VALUE avr_function_value
12384 #undef TARGET_LIBCALL_VALUE
12385 #define TARGET_LIBCALL_VALUE avr_libcall_value
12386 #undef TARGET_FUNCTION_VALUE_REGNO_P
12387 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
12389 #undef TARGET_ATTRIBUTE_TABLE
12390 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
12391 #undef TARGET_INSERT_ATTRIBUTES
12392 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
12393 #undef TARGET_SECTION_TYPE_FLAGS
12394 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
12396 #undef TARGET_ASM_NAMED_SECTION
12397 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
12398 #undef TARGET_ASM_INIT_SECTIONS
12399 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
12400 #undef TARGET_ENCODE_SECTION_INFO
12401 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
12402 #undef TARGET_ASM_SELECT_SECTION
12403 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
12405 #undef TARGET_REGISTER_MOVE_COST
12406 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
12407 #undef TARGET_MEMORY_MOVE_COST
12408 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
12409 #undef TARGET_RTX_COSTS
12410 #define TARGET_RTX_COSTS avr_rtx_costs
12411 #undef TARGET_ADDRESS_COST
12412 #define TARGET_ADDRESS_COST avr_address_cost
12413 #undef TARGET_MACHINE_DEPENDENT_REORG
12414 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
12415 #undef TARGET_FUNCTION_ARG
12416 #define TARGET_FUNCTION_ARG avr_function_arg
12417 #undef TARGET_FUNCTION_ARG_ADVANCE
12418 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
12420 #undef TARGET_SET_CURRENT_FUNCTION
12421 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
12423 #undef TARGET_RETURN_IN_MEMORY
12424 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
12426 #undef TARGET_STRICT_ARGUMENT_NAMING
12427 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
12429 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
12430 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
12432 #undef TARGET_HARD_REGNO_SCRATCH_OK
12433 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
12434 #undef TARGET_CASE_VALUES_THRESHOLD
12435 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
12437 #undef TARGET_FRAME_POINTER_REQUIRED
12438 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
12439 #undef TARGET_CAN_ELIMINATE
12440 #define TARGET_CAN_ELIMINATE avr_can_eliminate
12442 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
12443 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
12445 #undef TARGET_WARN_FUNC_RETURN
12446 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
12448 #undef TARGET_CLASS_LIKELY_SPILLED_P
12449 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
12451 #undef TARGET_OPTION_OVERRIDE
12452 #define TARGET_OPTION_OVERRIDE avr_option_override
12454 #undef TARGET_CANNOT_MODIFY_JUMPS_P
12455 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
12457 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
12458 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
12460 #undef TARGET_INIT_BUILTINS
12461 #define TARGET_INIT_BUILTINS avr_init_builtins
12463 #undef TARGET_BUILTIN_DECL
12464 #define TARGET_BUILTIN_DECL avr_builtin_decl
12466 #undef TARGET_EXPAND_BUILTIN
12467 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
12469 #undef TARGET_FOLD_BUILTIN
12470 #define TARGET_FOLD_BUILTIN avr_fold_builtin
12472 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
12473 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
12475 #undef TARGET_SCALAR_MODE_SUPPORTED_P
12476 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
12478 #undef TARGET_BUILD_BUILTIN_VA_LIST
12479 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
12481 #undef TARGET_FIXED_POINT_SUPPORTED_P
12482 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
12484 #undef TARGET_CONVERT_TO_TYPE
12485 #define TARGET_CONVERT_TO_TYPE avr_convert_to_type
12487 #undef TARGET_ADDR_SPACE_SUBSET_P
12488 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
12490 #undef TARGET_ADDR_SPACE_CONVERT
12491 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
12493 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
12494 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
12496 #undef TARGET_ADDR_SPACE_POINTER_MODE
12497 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
12499 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
12500 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
12501 avr_addr_space_legitimate_address_p
12503 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
12504 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
12506 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
12507 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
12509 #undef TARGET_SECONDARY_RELOAD
12510 #define TARGET_SECONDARY_RELOAD avr_secondary_reload
12512 #undef TARGET_PRINT_OPERAND
12513 #define TARGET_PRINT_OPERAND avr_print_operand
12514 #undef TARGET_PRINT_OPERAND_ADDRESS
12515 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
12516 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
12517 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
12519 struct gcc_target targetm
= TARGET_INITIALIZER
;
12522 #include "gt-avr.h"