1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree
);
56 static int interrupt_function_p (tree
);
57 static int signal_function_p (tree
);
58 static int avr_OS_task_function_p (tree
);
59 static int avr_OS_main_function_p (tree
);
60 static int avr_regs_to_save (HARD_REG_SET
*);
61 static int get_sequence_length (rtx insns
);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code
);
65 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
67 static RTX_CODE
compare_condition (rtx insn
);
68 static rtx
avr_legitimize_address (rtx
, rtx
, enum machine_mode
);
69 static int compare_sign_p (rtx insn
);
70 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
71 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
72 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
73 static bool avr_assemble_integer (rtx
, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode
, rtx
, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx
avr_function_value (const_tree
, const_tree
, bool);
81 static rtx
avr_libcall_value (enum machine_mode
, const_rtx
);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree
, tree
*);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree
, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx
, int);
89 static void avr_asm_out_dtor (rtx
, int);
90 static int avr_register_move_cost (enum machine_mode
, reg_class_t
, reg_class_t
);
91 static int avr_memory_move_cost (enum machine_mode
, reg_class_t
, bool);
92 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
, bool);
93 static bool avr_rtx_costs (rtx
, int, int, int *, bool);
94 static int avr_address_cost (rtx
, bool);
95 static bool avr_return_in_memory (const_tree
, const_tree
);
96 static struct machine_function
* avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx
avr_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
99 static rtx
avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c
);
105 static rtx
avr_function_arg (cumulative_args_t
, enum machine_mode
,
107 static void avr_function_arg_advance (cumulative_args_t
, enum machine_mode
,
109 static void avr_help (void);
110 static bool avr_function_ok_for_sibcall (tree
, tree
);
111 static void avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
);
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx
;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx
;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames
[] = REGISTER_NAMES
;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro
;
128 /* Current architecture. */
129 const struct base_arch_s
*avr_current_arch
;
131 /* Current device. */
132 const struct mcu_type_s
*avr_current_device
;
134 section
*progmem_section
;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p
= false;
138 bool avr_need_copy_data_p
= false;
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table
[] =
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
157 { NULL
, 0, 0, false, false, false, NULL
, false }
160 /* Initialize the GCC target structure. */
161 #undef TARGET_ASM_ALIGNED_HI_OP
162 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
163 #undef TARGET_ASM_ALIGNED_SI_OP
164 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
165 #undef TARGET_ASM_UNALIGNED_HI_OP
166 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
167 #undef TARGET_ASM_UNALIGNED_SI_OP
168 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
169 #undef TARGET_ASM_INTEGER
170 #define TARGET_ASM_INTEGER avr_assemble_integer
171 #undef TARGET_ASM_FILE_START
172 #define TARGET_ASM_FILE_START avr_file_start
173 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
174 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
175 #undef TARGET_ASM_FILE_END
176 #define TARGET_ASM_FILE_END avr_file_end
178 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
179 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
180 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
181 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
183 #undef TARGET_FUNCTION_VALUE
184 #define TARGET_FUNCTION_VALUE avr_function_value
185 #undef TARGET_LIBCALL_VALUE
186 #define TARGET_LIBCALL_VALUE avr_libcall_value
187 #undef TARGET_FUNCTION_VALUE_REGNO_P
188 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
190 #undef TARGET_ATTRIBUTE_TABLE
191 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
192 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
193 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
194 #undef TARGET_INSERT_ATTRIBUTES
195 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
196 #undef TARGET_SECTION_TYPE_FLAGS
197 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
199 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
201 #undef TARGET_ASM_INIT_SECTIONS
202 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
204 #undef TARGET_REGISTER_MOVE_COST
205 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
206 #undef TARGET_MEMORY_MOVE_COST
207 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
208 #undef TARGET_RTX_COSTS
209 #define TARGET_RTX_COSTS avr_rtx_costs
210 #undef TARGET_ADDRESS_COST
211 #define TARGET_ADDRESS_COST avr_address_cost
212 #undef TARGET_MACHINE_DEPENDENT_REORG
213 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
214 #undef TARGET_FUNCTION_ARG
215 #define TARGET_FUNCTION_ARG avr_function_arg
216 #undef TARGET_FUNCTION_ARG_ADVANCE
217 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
219 #undef TARGET_LEGITIMIZE_ADDRESS
220 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
222 #undef TARGET_RETURN_IN_MEMORY
223 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
225 #undef TARGET_STRICT_ARGUMENT_NAMING
226 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
228 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
229 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
231 #undef TARGET_HARD_REGNO_SCRATCH_OK
232 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
233 #undef TARGET_CASE_VALUES_THRESHOLD
234 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
236 #undef TARGET_LEGITIMATE_ADDRESS_P
237 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
239 #undef TARGET_FRAME_POINTER_REQUIRED
240 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
241 #undef TARGET_CAN_ELIMINATE
242 #define TARGET_CAN_ELIMINATE avr_can_eliminate
244 #undef TARGET_CLASS_LIKELY_SPILLED_P
245 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
247 #undef TARGET_OPTION_OVERRIDE
248 #define TARGET_OPTION_OVERRIDE avr_option_override
250 #undef TARGET_CANNOT_MODIFY_JUMPS_P
251 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
254 #define TARGET_HELP avr_help
256 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
257 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
259 #undef TARGET_INIT_BUILTINS
260 #define TARGET_INIT_BUILTINS avr_init_builtins
262 #undef TARGET_EXPAND_BUILTIN
263 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
266 struct gcc_target targetm
= TARGET_INITIALIZER
;
269 avr_option_override (void)
271 const struct mcu_type_s
*t
;
273 flag_delete_null_pointer_checks
= 0;
275 for (t
= avr_mcu_types
; t
->name
; t
++)
276 if (strcmp (t
->name
, avr_mcu_name
) == 0)
281 error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name
);
282 inform (input_location
, "See --target-help for supported MCUs");
285 avr_current_device
= t
;
286 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
287 avr_extra_arch_macro
= avr_current_device
->macro
;
289 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
290 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
292 init_machine_status
= avr_init_machine_status
;
295 /* Implement TARGET_HELP */
296 /* Report extra information for --target-help */
301 const struct mcu_type_s
*t
;
302 const char * const indent
= " ";
305 /* Give a list of MCUs that are accepted by -mmcu=* .
306 Note that MCUs supported by the compiler might differ from
307 MCUs supported by binutils. */
309 len
= strlen (indent
);
310 printf ("Known MCU names:\n%s", indent
);
312 /* Print a blank-separated list of all supported MCUs */
314 for (t
= avr_mcu_types
; t
->name
; t
++)
316 printf ("%s ", t
->name
);
317 len
+= 1 + strlen (t
->name
);
319 /* Break long lines */
321 if (len
> 66 && (t
+1)->name
)
323 printf ("\n%s", indent
);
324 len
= strlen (indent
);
331 /* return register class from register number. */
333 static const enum reg_class reg_class_tab
[]={
334 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
335 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
336 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
337 GENERAL_REGS
, /* r0 - r15 */
338 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
339 LD_REGS
, /* r16 - 23 */
340 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
341 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
342 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
343 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
344 STACK_REG
,STACK_REG
/* SPL,SPH */
/* Allocate and zero the backend-specific per-function data
   (cfun->machine).  Installed via init_machine_status.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
355 /* Return register class for register R. */
358 avr_regno_reg_class (int r
)
361 return reg_class_tab
[r
];
365 /* A helper for the subsequent function attribute used to dig for
366 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
369 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
371 if (FUNCTION_DECL
== TREE_CODE (func
))
373 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
378 func
= TREE_TYPE (func
);
381 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
382 || TREE_CODE (func
) == METHOD_TYPE
);
384 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
387 /* Return nonzero if FUNC is a naked function. */
390 avr_naked_function_p (tree func
)
392 return avr_lookup_function_attribute1 (func
, "naked");
395 /* Return nonzero if FUNC is an interrupt function as specified
396 by the "interrupt" attribute. */
399 interrupt_function_p (tree func
)
401 return avr_lookup_function_attribute1 (func
, "interrupt");
404 /* Return nonzero if FUNC is a signal function as specified
405 by the "signal" attribute. */
408 signal_function_p (tree func
)
410 return avr_lookup_function_attribute1 (func
, "signal");
413 /* Return nonzero if FUNC is a OS_task function. */
416 avr_OS_task_function_p (tree func
)
418 return avr_lookup_function_attribute1 (func
, "OS_task");
421 /* Return nonzero if FUNC is a OS_main function. */
424 avr_OS_main_function_p (tree func
)
426 return avr_lookup_function_attribute1 (func
, "OS_main");
429 /* Return the number of hard registers to push/pop in the prologue/epilogue
430 of the current function, and optionally store these registers in SET. */
433 avr_regs_to_save (HARD_REG_SET
*set
)
436 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
437 || signal_function_p (current_function_decl
));
440 CLEAR_HARD_REG_SET (*set
);
443 /* No need to save any registers if the function never returns or
444 is have "OS_task" or "OS_main" attribute. */
445 if (TREE_THIS_VOLATILE (current_function_decl
)
446 || cfun
->machine
->is_OS_task
447 || cfun
->machine
->is_OS_main
)
450 for (reg
= 0; reg
< 32; reg
++)
452 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
453 any global register variables. */
457 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
458 || (df_regs_ever_live_p (reg
)
459 && (int_or_sig_p
|| !call_used_regs
[reg
])
460 && !(frame_pointer_needed
461 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
464 SET_HARD_REG_BIT (*set
, reg
);
471 /* Return true if register FROM can be eliminated via register TO. */
474 avr_can_eliminate (const int from
, const int to
)
476 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
477 || ((from
== FRAME_POINTER_REGNUM
478 || from
== FRAME_POINTER_REGNUM
+ 1)
479 && !frame_pointer_needed
));
482 /* Compute offset between arg_pointer and frame_pointer. */
485 avr_initial_elimination_offset (int from
, int to
)
487 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
491 int offset
= frame_pointer_needed
? 2 : 0;
492 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
494 offset
+= avr_regs_to_save (NULL
);
495 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
499 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
500 frame pointer by +STARTING_FRAME_OFFSET.
501 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
502 avoids creating add/sub of offset in nonlocal goto and setjmp. */
504 rtx
avr_builtin_setjmp_frame_value (void)
506 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
507 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
510 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
511 This is return address of function. */
513 avr_return_addr_rtx (int count
, rtx tem
)
517 /* Can only return this functions return address. Others not supported. */
523 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
524 warning (0, "'builtin_return_address' contains only 2 bytes of address");
527 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
529 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
530 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
531 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
535 /* Return 1 if the function epilogue is just a single "ret". */
538 avr_simple_epilogue (void)
540 return (! frame_pointer_needed
541 && get_frame_size () == 0
542 && avr_regs_to_save (NULL
) == 0
543 && ! interrupt_function_p (current_function_decl
)
544 && ! signal_function_p (current_function_decl
)
545 && ! avr_naked_function_p (current_function_decl
)
546 && ! TREE_THIS_VOLATILE (current_function_decl
));
549 /* This function checks sequence of live registers. */
552 sequent_regs_live (void)
558 for (reg
= 0; reg
< 18; ++reg
)
560 if (!call_used_regs
[reg
])
562 if (df_regs_ever_live_p (reg
))
572 if (!frame_pointer_needed
)
574 if (df_regs_ever_live_p (REG_Y
))
582 if (df_regs_ever_live_p (REG_Y
+1))
595 return (cur_seq
== live_seq
) ? live_seq
: 0;
598 /* Obtain the length sequence of insns. */
601 get_sequence_length (rtx insns
)
606 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
607 length
+= get_attr_length (insn
);
612 /* Implement INCOMING_RETURN_ADDR_RTX. */
615 avr_incoming_return_addr_rtx (void)
617 /* The return address is at the top of the stack. Note that the push
618 was via post-decrement, which means the actual address is off by one. */
619 return gen_frame_mem (HImode
, plus_constant (stack_pointer_rtx
, 1));
622 /* Helper for expand_prologue. Emit a push of a byte register. */
625 emit_push_byte (unsigned regno
, bool frame_related_p
)
629 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
630 mem
= gen_frame_mem (QImode
, mem
);
631 reg
= gen_rtx_REG (QImode
, regno
);
633 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
635 RTX_FRAME_RELATED_P (insn
) = 1;
637 cfun
->machine
->stack_usage
++;
641 /* Output function prologue. */
644 expand_prologue (void)
649 HOST_WIDE_INT size
= get_frame_size();
652 /* Init cfun->machine. */
653 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
654 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
655 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
656 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
657 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
658 cfun
->machine
->stack_usage
= 0;
660 /* Prologue: naked. */
661 if (cfun
->machine
->is_naked
)
666 avr_regs_to_save (&set
);
667 live_seq
= sequent_regs_live ();
668 minimize
= (TARGET_CALL_PROLOGUES
669 && !cfun
->machine
->is_interrupt
670 && !cfun
->machine
->is_signal
671 && !cfun
->machine
->is_OS_task
672 && !cfun
->machine
->is_OS_main
675 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
677 /* Enable interrupts. */
678 if (cfun
->machine
->is_interrupt
)
679 emit_insn (gen_enable_interrupt ());
682 emit_push_byte (ZERO_REGNO
, true);
685 emit_push_byte (TMP_REGNO
, true);
688 /* ??? There's no dwarf2 column reserved for SREG. */
689 emit_move_insn (tmp_reg_rtx
, gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
690 emit_push_byte (TMP_REGNO
, false);
693 /* ??? There's no dwarf2 column reserved for RAMPZ. */
695 && TEST_HARD_REG_BIT (set
, REG_Z
)
696 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
698 emit_move_insn (tmp_reg_rtx
,
699 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
700 emit_push_byte (TMP_REGNO
, false);
703 /* Clear zero reg. */
704 emit_move_insn (zero_reg_rtx
, const0_rtx
);
706 /* Prevent any attempt to delete the setting of ZERO_REG! */
707 emit_use (zero_reg_rtx
);
709 if (minimize
&& (frame_pointer_needed
710 || (AVR_2_BYTE_PC
&& live_seq
> 6)
713 int first_reg
, reg
, offset
;
715 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
716 gen_int_mode (size
, HImode
));
718 insn
= emit_insn (gen_call_prologue_saves
719 (gen_int_mode (live_seq
, HImode
),
720 gen_int_mode (size
+ live_seq
, HImode
)));
721 RTX_FRAME_RELATED_P (insn
) = 1;
723 /* Describe the effect of the unspec_volatile call to prologue_saves.
724 Note that this formulation assumes that add_reg_note pushes the
725 notes to the front. Thus we build them in the reverse order of
726 how we want dwarf2out to process them. */
728 /* The function does always set frame_pointer_rtx, but whether that
729 is going to be permanent in the function is frame_pointer_needed. */
730 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
731 gen_rtx_SET (VOIDmode
,
732 (frame_pointer_needed
733 ? frame_pointer_rtx
: stack_pointer_rtx
),
734 plus_constant (stack_pointer_rtx
,
735 -(size
+ live_seq
))));
737 /* Note that live_seq always contains r28+r29, but the other
738 registers to be saved are all below 18. */
739 first_reg
= 18 - (live_seq
- 2);
741 for (reg
= 29, offset
= -live_seq
+ 1;
743 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
747 m
= gen_rtx_MEM (QImode
, plus_constant (stack_pointer_rtx
, offset
));
748 r
= gen_rtx_REG (QImode
, reg
);
749 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
752 cfun
->machine
->stack_usage
+= size
+ live_seq
;
757 for (reg
= 0; reg
< 32; ++reg
)
758 if (TEST_HARD_REG_BIT (set
, reg
))
759 emit_push_byte (reg
, true);
761 if (frame_pointer_needed
)
763 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
765 /* Push frame pointer. Always be consistent about the
766 ordering of pushes -- epilogue_restores expects the
767 register pair to be pushed low byte first. */
768 emit_push_byte (REG_Y
, true);
769 emit_push_byte (REG_Y
+ 1, true);
774 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
775 RTX_FRAME_RELATED_P (insn
) = 1;
779 /* Creating a frame can be done by direct manipulation of the
780 stack or via the frame pointer. These two methods are:
787 the optimum method depends on function type, stack and frame size.
788 To avoid a complex logic, both methods are tested and shortest
793 if (AVR_HAVE_8BIT_SP
)
795 /* The high byte (r29) doesn't change. Prefer 'subi'
796 (1 cycle) over 'sbiw' (2 cycles, same size). */
797 myfp
= gen_rtx_REG (QImode
, FRAME_POINTER_REGNUM
);
801 /* Normal sized addition. */
802 myfp
= frame_pointer_rtx
;
805 /* Method 1-Adjust frame pointer. */
808 /* Normally the dwarf2out frame-related-expr interpreter does
809 not expect to have the CFA change once the frame pointer is
810 set up. Thus we avoid marking the move insn below and
811 instead indicate that the entire operation is complete after
812 the frame pointer subtraction is done. */
814 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
816 insn
= emit_move_insn (myfp
, plus_constant (myfp
, -size
));
817 RTX_FRAME_RELATED_P (insn
) = 1;
818 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
819 gen_rtx_SET (VOIDmode
, frame_pointer_rtx
,
820 plus_constant (stack_pointer_rtx
,
823 /* Copy to stack pointer. Note that since we've already
824 changed the CFA to the frame pointer this operation
825 need not be annotated at all. */
826 if (AVR_HAVE_8BIT_SP
)
828 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
830 else if (TARGET_NO_INTERRUPTS
831 || cfun
->machine
->is_signal
832 || cfun
->machine
->is_OS_main
)
834 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
837 else if (cfun
->machine
->is_interrupt
)
839 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
844 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
847 fp_plus_insns
= get_insns ();
850 /* Method 2-Adjust Stack pointer. */
857 insn
= plus_constant (stack_pointer_rtx
, -size
);
858 insn
= emit_move_insn (stack_pointer_rtx
, insn
);
859 RTX_FRAME_RELATED_P (insn
) = 1;
861 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
862 RTX_FRAME_RELATED_P (insn
) = 1;
864 sp_plus_insns
= get_insns ();
867 /* Use shortest method. */
868 if (get_sequence_length (sp_plus_insns
)
869 < get_sequence_length (fp_plus_insns
))
870 emit_insn (sp_plus_insns
);
872 emit_insn (fp_plus_insns
);
875 emit_insn (fp_plus_insns
);
877 cfun
->machine
->stack_usage
+= size
;
882 if (flag_stack_usage_info
)
883 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
886 /* Output summary at end of function prologue. */
889 avr_asm_function_end_prologue (FILE *file
)
891 if (cfun
->machine
->is_naked
)
893 fputs ("/* prologue: naked */\n", file
);
897 if (cfun
->machine
->is_interrupt
)
899 fputs ("/* prologue: Interrupt */\n", file
);
901 else if (cfun
->machine
->is_signal
)
903 fputs ("/* prologue: Signal */\n", file
);
906 fputs ("/* prologue: function */\n", file
);
908 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
910 fprintf (file
, "/* stack size = %d */\n",
911 cfun
->machine
->stack_usage
);
912 /* Create symbol stack offset here so all functions have it. Add 1 to stack
913 usage for offset so that SP + .L__stack_offset = return address. */
914 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
918 /* Implement EPILOGUE_USES. */
921 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
925 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
930 /* Helper for expand_epilogue. Emit a pop of a byte register. */
933 emit_pop_byte (unsigned regno
)
937 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
938 mem
= gen_frame_mem (QImode
, mem
);
939 reg
= gen_rtx_REG (QImode
, regno
);
941 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
944 /* Output RTL epilogue. */
947 expand_epilogue (bool sibcall_p
)
953 HOST_WIDE_INT size
= get_frame_size();
955 /* epilogue: naked */
956 if (cfun
->machine
->is_naked
)
958 gcc_assert (!sibcall_p
);
960 emit_jump_insn (gen_return ());
964 avr_regs_to_save (&set
);
965 live_seq
= sequent_regs_live ();
966 minimize
= (TARGET_CALL_PROLOGUES
967 && !cfun
->machine
->is_interrupt
968 && !cfun
->machine
->is_signal
969 && !cfun
->machine
->is_OS_task
970 && !cfun
->machine
->is_OS_main
973 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
975 if (frame_pointer_needed
)
977 /* Get rid of frame. */
978 emit_move_insn(frame_pointer_rtx
,
979 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
980 gen_int_mode (size
, HImode
)));
984 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
987 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
991 if (frame_pointer_needed
)
995 /* Try two methods to adjust stack and select shortest. */
999 if (AVR_HAVE_8BIT_SP
)
1001 /* The high byte (r29) doesn't change - prefer 'subi'
1002 (1 cycle) over 'sbiw' (2 cycles, same size). */
1003 myfp
= gen_rtx_REG (QImode
, FRAME_POINTER_REGNUM
);
1007 /* Normal sized addition. */
1008 myfp
= frame_pointer_rtx
;
1011 /* Method 1-Adjust frame pointer. */
1014 emit_move_insn (myfp
, plus_constant (myfp
, size
));
1016 /* Copy to stack pointer. */
1017 if (AVR_HAVE_8BIT_SP
)
1019 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1021 else if (TARGET_NO_INTERRUPTS
1022 || cfun
->machine
->is_signal
)
1024 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
1025 frame_pointer_rtx
));
1027 else if (cfun
->machine
->is_interrupt
)
1029 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
1030 frame_pointer_rtx
));
1034 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1037 fp_plus_insns
= get_insns ();
1040 /* Method 2-Adjust Stack pointer. */
1047 emit_move_insn (stack_pointer_rtx
,
1048 plus_constant (stack_pointer_rtx
, size
));
1050 sp_plus_insns
= get_insns ();
1053 /* Use shortest method. */
1054 if (get_sequence_length (sp_plus_insns
)
1055 < get_sequence_length (fp_plus_insns
))
1056 emit_insn (sp_plus_insns
);
1058 emit_insn (fp_plus_insns
);
1061 emit_insn (fp_plus_insns
);
1063 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1065 /* Restore previous frame_pointer. See expand_prologue for
1066 rationale for not using pophi. */
1067 emit_pop_byte (REG_Y
+ 1);
1068 emit_pop_byte (REG_Y
);
1072 /* Restore used registers. */
1073 for (reg
= 31; reg
>= 0; --reg
)
1074 if (TEST_HARD_REG_BIT (set
, reg
))
1075 emit_pop_byte (reg
);
1077 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1079 /* Restore RAMPZ using tmp reg as scratch. */
1081 && TEST_HARD_REG_BIT (set
, REG_Z
)
1082 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1084 emit_pop_byte (TMP_REGNO
);
1085 emit_move_insn (gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)),
1089 /* Restore SREG using tmp reg as scratch. */
1090 emit_pop_byte (TMP_REGNO
);
1092 emit_move_insn (gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)),
1095 /* Restore tmp REG. */
1096 emit_pop_byte (TMP_REGNO
);
1098 /* Restore zero REG. */
1099 emit_pop_byte (ZERO_REGNO
);
1103 emit_jump_insn (gen_return ());
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.
   Emit a marker comment at the start of the function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1116 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1119 avr_cannot_modify_jumps_p (void)
1122 /* Naked Functions must not have any instructions after
1123 their epilogue, see PR42240 */
1125 if (reload_completed
1127 && cfun
->machine
->is_naked
)
1136 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1137 machine for a memory operand of mode MODE. */
1140 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1142 enum reg_class r
= NO_REGS
;
1144 if (TARGET_ALL_DEBUG
)
1146 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
1147 GET_MODE_NAME(mode
),
1148 strict
? "(strict)": "",
1149 reload_completed
? "(reload_completed)": "",
1150 reload_in_progress
? "(reload_in_progress)": "",
1151 reg_renumber
? "(reg_renumber)" : "");
1152 if (GET_CODE (x
) == PLUS
1153 && REG_P (XEXP (x
, 0))
1154 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1155 && INTVAL (XEXP (x
, 1)) >= 0
1156 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
1159 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1160 true_regnum (XEXP (x
, 0)));
1163 if (!strict
&& GET_CODE (x
) == SUBREG
)
1165 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
1166 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
1168 else if (CONSTANT_ADDRESS_P (x
))
1170 else if (GET_CODE (x
) == PLUS
1171 && REG_P (XEXP (x
, 0))
1172 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1173 && INTVAL (XEXP (x
, 1)) >= 0)
1175 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1179 || REGNO (XEXP (x
,0)) == REG_X
1180 || REGNO (XEXP (x
,0)) == REG_Y
1181 || REGNO (XEXP (x
,0)) == REG_Z
)
1182 r
= BASE_POINTER_REGS
;
1183 if (XEXP (x
,0) == frame_pointer_rtx
1184 || XEXP (x
,0) == arg_pointer_rtx
)
1185 r
= BASE_POINTER_REGS
;
1187 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
1190 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1191 && REG_P (XEXP (x
, 0))
1192 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
1193 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1197 if (TARGET_ALL_DEBUG
)
1199 fprintf (stderr
, " ret = %c\n", r
+ '0');
1201 return r
== NO_REGS
? 0 : (int)r
;
1204 /* Attempts to replace X with a valid
1205 memory address for an operand of mode MODE */
1208 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1211 if (TARGET_ALL_DEBUG
)
1213 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1217 if (GET_CODE (oldx
) == PLUS
1218 && REG_P (XEXP (oldx
,0)))
1220 if (REG_P (XEXP (oldx
,1)))
1221 x
= force_reg (GET_MODE (oldx
), oldx
);
1222 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
1224 int offs
= INTVAL (XEXP (oldx
,1));
1225 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1226 if (offs
> MAX_LD_OFFSET (mode
))
1228 if (TARGET_ALL_DEBUG
)
1229 fprintf (stderr
, "force_reg (big offset)\n");
1230 x
= force_reg (GET_MODE (oldx
), oldx
);
1238 /* Return a pointer register name as a string. */
1241 ptrreg_to_str (int regno
)
1245 case REG_X
: return "X";
1246 case REG_Y
: return "Y";
1247 case REG_Z
: return "Z";
1249 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1254 /* Return the condition name as a string.
1255 Used in conditional jump constructing */
1258 cond_string (enum rtx_code code
)
1267 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1272 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1285 /* Output ADDR to FILE as address. */
1288 print_operand_address (FILE *file
, rtx addr
)
1290 switch (GET_CODE (addr
))
1293 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1297 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1301 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1305 if (CONSTANT_ADDRESS_P (addr
)
1306 && text_segment_operand (addr
, VOIDmode
))
1309 if (GET_CODE (x
) == CONST
)
1311 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1313 /* Assembler gs() will implant word address. Make offset
1314 a byte offset inside gs() for assembler. This is
1315 needed because the more logical (constant+gs(sym)) is not
1316 accepted by gas. For 128K and lower devices this is ok. For
1317 large devices it will create a Trampoline to offset from symbol
1318 which may not be what the user really wanted. */
1319 fprintf (file
, "gs(");
1320 output_addr_const (file
, XEXP (x
,0));
1321 fprintf (file
,"+" HOST_WIDE_INT_PRINT_DEC
")", 2 * INTVAL (XEXP (x
,1)));
1323 if (warning (0, "pointer offset from symbol maybe incorrect"))
1325 output_addr_const (stderr
, addr
);
1326 fprintf(stderr
,"\n");
1331 fprintf (file
, "gs(");
1332 output_addr_const (file
, addr
);
1333 fprintf (file
, ")");
1337 output_addr_const (file
, addr
);
1342 /* Output X as assembler operand to file FILE. */
1345 print_operand (FILE *file
, rtx x
, int code
)
1349 if (code
>= 'A' && code
<= 'D')
1354 if (!AVR_HAVE_JMP_CALL
)
1357 else if (code
== '!')
1359 if (AVR_HAVE_EIJMP_EICALL
)
1364 if (x
== zero_reg_rtx
)
1365 fprintf (file
, "__zero_reg__");
1367 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1369 else if (GET_CODE (x
) == CONST_INT
)
1370 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1371 else if (GET_CODE (x
) == MEM
)
1373 rtx addr
= XEXP (x
,0);
1376 if (!CONSTANT_P (addr
))
1377 fatal_insn ("bad address, not a constant):", addr
);
1378 /* Assembler template with m-code is data - not progmem section */
1379 if (text_segment_operand (addr
, VOIDmode
))
1380 if (warning ( 0, "accessing data memory with program memory address"))
1382 output_addr_const (stderr
, addr
);
1383 fprintf(stderr
,"\n");
1385 output_addr_const (file
, addr
);
1387 else if (code
== 'o')
1389 if (GET_CODE (addr
) != PLUS
)
1390 fatal_insn ("bad address, not (reg+disp):", addr
);
1392 print_operand (file
, XEXP (addr
, 1), 0);
1394 else if (code
== 'p' || code
== 'r')
1396 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1397 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1400 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1402 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1404 else if (GET_CODE (addr
) == PLUS
)
1406 print_operand_address (file
, XEXP (addr
,0));
1407 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1408 fatal_insn ("internal compiler error. Bad address:"
1411 print_operand (file
, XEXP (addr
,1), code
);
1414 print_operand_address (file
, addr
);
1416 else if (code
== 'x')
1418 /* Constant progmem address - like used in jmp or call */
1419 if (0 == text_segment_operand (x
, VOIDmode
))
1420 if (warning ( 0, "accessing program memory with data memory address"))
1422 output_addr_const (stderr
, x
);
1423 fprintf(stderr
,"\n");
1425 /* Use normal symbol for direct address no linker trampoline needed */
1426 output_addr_const (file
, x
);
1428 else if (GET_CODE (x
) == CONST_DOUBLE
)
1432 if (GET_MODE (x
) != SFmode
)
1433 fatal_insn ("internal compiler error. Unknown mode:", x
);
1434 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1435 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1436 fprintf (file
, "0x%lx", val
);
1438 else if (code
== 'j')
1439 fputs (cond_string (GET_CODE (x
)), file
);
1440 else if (code
== 'k')
1441 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1443 print_operand_address (file
, x
);
1446 /* Update the condition code in the INSN. */
1449 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1453 switch (get_attr_cc (insn
))
1456 /* Insn does not affect CC at all. */
1464 set
= single_set (insn
);
1468 cc_status
.flags
|= CC_NO_OVERFLOW
;
1469 cc_status
.value1
= SET_DEST (set
);
1474 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1475 The V flag may or may not be known but that's ok because
1476 alter_cond will change tests to use EQ/NE. */
1477 set
= single_set (insn
);
1481 cc_status
.value1
= SET_DEST (set
);
1482 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1487 set
= single_set (insn
);
1490 cc_status
.value1
= SET_SRC (set
);
1494 /* Insn doesn't leave CC in a usable state. */
1497 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1498 set
= single_set (insn
);
1501 rtx src
= SET_SRC (set
);
1503 if (GET_CODE (src
) == ASHIFTRT
1504 && GET_MODE (src
) == QImode
)
1506 rtx x
= XEXP (src
, 1);
1508 if (GET_CODE (x
) == CONST_INT
1512 cc_status
.value1
= SET_DEST (set
);
1513 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1521 /* Return maximum number of consecutive registers of
1522 class CLASS needed to hold a value of mode MODE. */
1525 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1527 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1530 /* Choose mode for jump insn:
1531 1 - relative jump in range -63 <= x <= 62 ;
1532 2 - relative jump in range -2046 <= x <= 2045 ;
1533 3 - absolute jump (only for ATmega[16]03). */
1536 avr_jump_mode (rtx x
, rtx insn
)
1538 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
1539 ? XEXP (x
, 0) : x
));
1540 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1541 int jump_distance
= cur_addr
- dest_addr
;
1543 if (-63 <= jump_distance
&& jump_distance
<= 62)
1545 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
1547 else if (AVR_HAVE_JMP_CALL
)
1553 /* return an AVR condition jump commands.
1554 X is a comparison RTX.
1555 LEN is a number returned by avr_jump_mode function.
1556 if REVERSE nonzero then condition code in X must be reversed. */
1559 ret_cond_branch (rtx x
, int len
, int reverse
)
1561 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1566 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1567 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1569 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1570 AS1 (brmi
,.+2) CR_TAB
1572 (AS1 (breq
,.+6) CR_TAB
1573 AS1 (brmi
,.+4) CR_TAB
1577 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1579 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1580 AS1 (brlt
,.+2) CR_TAB
1582 (AS1 (breq
,.+6) CR_TAB
1583 AS1 (brlt
,.+4) CR_TAB
1586 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1588 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1589 AS1 (brlo
,.+2) CR_TAB
1591 (AS1 (breq
,.+6) CR_TAB
1592 AS1 (brlo
,.+4) CR_TAB
1595 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1596 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1598 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1599 AS1 (brpl
,.+2) CR_TAB
1601 (AS1 (breq
,.+2) CR_TAB
1602 AS1 (brpl
,.+4) CR_TAB
1605 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1607 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1608 AS1 (brge
,.+2) CR_TAB
1610 (AS1 (breq
,.+2) CR_TAB
1611 AS1 (brge
,.+4) CR_TAB
1614 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1616 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1617 AS1 (brsh
,.+2) CR_TAB
1619 (AS1 (breq
,.+2) CR_TAB
1620 AS1 (brsh
,.+4) CR_TAB
1628 return AS1 (br
%k1
,%0);
1630 return (AS1 (br
%j1
,.+2) CR_TAB
1633 return (AS1 (br
%j1
,.+4) CR_TAB
1642 return AS1 (br
%j1
,%0);
1644 return (AS1 (br
%k1
,.+2) CR_TAB
1647 return (AS1 (br
%k1
,.+4) CR_TAB
1655 /* Predicate function for immediate operand which fits to byte (8bit) */
1658 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1660 return (GET_CODE (op
) == CONST_INT
1661 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
1664 /* Output insn cost for next insn. */
1667 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
1668 int num_operands ATTRIBUTE_UNUSED
)
1670 if (TARGET_ALL_DEBUG
)
1672 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
1673 rtx_cost (PATTERN (insn
), INSN
, !optimize_size
));
1677 /* Return 0 if undefined, 1 if always true or always false. */
1680 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
1682 unsigned int max
= (mode
== QImode
? 0xff :
1683 mode
== HImode
? 0xffff :
1684 mode
== SImode
? 0xffffffff : 0);
1685 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
1687 if (unsigned_condition (op
) != op
)
1690 if (max
!= (INTVAL (x
) & max
)
1691 && INTVAL (x
) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.

   The AVR ABI passes arguments in r8..r25.  */

int
function_arg_regno_p(int r)
{
  return (r >= 8 && r <= 25);
}
1707 /* Initializing the variable cum for the state at the beginning
1708 of the argument list. */
1711 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
1712 tree fndecl ATTRIBUTE_UNUSED
)
1715 cum
->regno
= FIRST_CUM_REG
;
1716 if (!libname
&& stdarg_p (fntype
))
1719 /* Assume the calle may be tail called */
1721 cfun
->machine
->sibcall_fails
= 0;
1724 /* Returns the number of registers to allocate for a function argument. */
1727 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
1731 if (mode
== BLKmode
)
1732 size
= int_size_in_bytes (type
);
1734 size
= GET_MODE_SIZE (mode
);
1736 /* Align all function arguments to start in even-numbered registers.
1737 Odd-sized arguments leave holes above them. */
1739 return (size
+ 1) & ~1;
1742 /* Controls whether a function argument is passed
1743 in a register, and which register. */
1746 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
1747 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1749 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1750 int bytes
= avr_num_arg_regs (mode
, type
);
1752 if (cum
->nregs
&& bytes
<= cum
->nregs
)
1753 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
1758 /* Update the summarizer variable CUM to advance past an argument
1759 in the argument list. */
1762 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
1763 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1765 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1766 int bytes
= avr_num_arg_regs (mode
, type
);
1768 cum
->nregs
-= bytes
;
1769 cum
->regno
-= bytes
;
1771 /* A parameter is being passed in a call-saved register. As the original
1772 contents of these regs has to be restored before leaving the function,
1773 a function must not pass arguments in call-saved regs in order to get
1778 && !call_used_regs
[cum
->regno
])
1780 /* FIXME: We ship info on failing tail-call in struct machine_function.
1781 This uses internals of calls.c:expand_call() and the way args_so_far
1782 is used. targetm.function_ok_for_sibcall() needs to be extended to
1783 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1784 dependent so that such an extension is not wanted. */
1786 cfun
->machine
->sibcall_fails
= 1;
1789 /* Test if all registers needed by the ABI are actually available. If the
1790 user has fixed a GPR needed to pass an argument, an (implicit) function
1791 call would clobber that fixed register. See PR45099 for an example. */
1798 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
1799 if (fixed_regs
[regno
])
1800 error ("Register %s is needed to pass a parameter but is fixed",
1804 if (cum
->nregs
<= 0)
1807 cum
->regno
= FIRST_CUM_REG
;
1811 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1812 /* Decide whether we can make a sibling call to a function. DECL is the
1813 declaration of the function being targeted by the call and EXP is the
1814 CALL_EXPR representing the call. */
1817 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
1821 /* Tail-calling must fail if callee-saved regs are used to pass
1822 function args. We must not tail-call when `epilogue_restores'
1823 is used. Unfortunately, we cannot tell at this point if that
1824 actually will happen or not, and we cannot step back from
1825 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1827 if (cfun
->machine
->sibcall_fails
1828 || TARGET_CALL_PROLOGUES
)
1833 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
1837 decl_callee
= TREE_TYPE (decl_callee
);
1841 decl_callee
= fntype_callee
;
1843 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
1844 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
1846 decl_callee
= TREE_TYPE (decl_callee
);
1850 /* Ensure that caller and callee have compatible epilogues */
1852 if (interrupt_function_p (current_function_decl
)
1853 || signal_function_p (current_function_decl
)
1854 || avr_naked_function_p (decl_callee
)
1855 || avr_naked_function_p (current_function_decl
)
1856 /* FIXME: For OS_task and OS_main, we are over-conservative.
1857 This is due to missing documentation of these attributes
1858 and what they actually should do and should not do. */
1859 || (avr_OS_task_function_p (decl_callee
)
1860 != avr_OS_task_function_p (current_function_decl
))
1861 || (avr_OS_main_function_p (decl_callee
)
1862 != avr_OS_main_function_p (current_function_decl
)))
1870 /***********************************************************************
1871 Functions for outputting various mov's for a various modes
1872 ************************************************************************/
1874 output_movqi (rtx insn
, rtx operands
[], int *l
)
1877 rtx dest
= operands
[0];
1878 rtx src
= operands
[1];
1886 if (register_operand (dest
, QImode
))
1888 if (register_operand (src
, QImode
)) /* mov r,r */
1890 if (test_hard_reg_class (STACK_REG
, dest
))
1891 return AS2 (out
,%0,%1);
1892 else if (test_hard_reg_class (STACK_REG
, src
))
1893 return AS2 (in
,%0,%1);
1895 return AS2 (mov
,%0,%1);
1897 else if (CONSTANT_P (src
))
1899 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1900 return AS2 (ldi
,%0,lo8(%1));
1902 if (GET_CODE (src
) == CONST_INT
)
1904 if (src
== const0_rtx
) /* mov r,L */
1905 return AS1 (clr
,%0);
1906 else if (src
== const1_rtx
)
1909 return (AS1 (clr
,%0) CR_TAB
1912 else if (src
== constm1_rtx
)
1914 /* Immediate constants -1 to any register */
1916 return (AS1 (clr
,%0) CR_TAB
1921 int bit_nr
= exact_log2 (INTVAL (src
));
1927 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1930 avr_output_bld (operands
, bit_nr
);
1937 /* Last resort, larger than loading from memory. */
1939 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1940 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1941 AS2 (mov
,%0,r31
) CR_TAB
1942 AS2 (mov
,r31
,__tmp_reg__
));
1944 else if (GET_CODE (src
) == MEM
)
1945 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1947 else if (GET_CODE (dest
) == MEM
)
1951 if (src
== const0_rtx
)
1952 operands
[1] = zero_reg_rtx
;
1954 templ
= out_movqi_mr_r (insn
, operands
, real_l
);
1957 output_asm_insn (templ
, operands
);
1966 output_movhi (rtx insn
, rtx operands
[], int *l
)
1969 rtx dest
= operands
[0];
1970 rtx src
= operands
[1];
1976 if (register_operand (dest
, HImode
))
1978 if (register_operand (src
, HImode
)) /* mov r,r */
1980 if (test_hard_reg_class (STACK_REG
, dest
))
1982 if (AVR_HAVE_8BIT_SP
)
1983 return *l
= 1, AS2 (out
,__SP_L__
,%A1
);
1984 /* Use simple load of stack pointer if no interrupts are
1986 else if (TARGET_NO_INTERRUPTS
)
1987 return *l
= 2, (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1988 AS2 (out
,__SP_L__
,%A1
));
1990 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
1992 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1993 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
1994 AS2 (out
,__SP_L__
,%A1
));
1996 else if (test_hard_reg_class (STACK_REG
, src
))
1999 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
2000 AS2 (in
,%B0
,__SP_H__
));
2006 return (AS2 (movw
,%0,%1));
2011 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2015 else if (CONSTANT_P (src
))
2017 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2020 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2021 AS2 (ldi
,%B0
,hi8(%1)));
2024 if (GET_CODE (src
) == CONST_INT
)
2026 if (src
== const0_rtx
) /* mov r,L */
2029 return (AS1 (clr
,%A0
) CR_TAB
2032 else if (src
== const1_rtx
)
2035 return (AS1 (clr
,%A0
) CR_TAB
2036 AS1 (clr
,%B0
) CR_TAB
2039 else if (src
== constm1_rtx
)
2041 /* Immediate constants -1 to any register */
2043 return (AS1 (clr
,%0) CR_TAB
2044 AS1 (dec
,%A0
) CR_TAB
2049 int bit_nr
= exact_log2 (INTVAL (src
));
2055 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
2056 AS1 (clr
,%B0
) CR_TAB
2059 avr_output_bld (operands
, bit_nr
);
2065 if ((INTVAL (src
) & 0xff) == 0)
2068 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2069 AS1 (clr
,%A0
) CR_TAB
2070 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2071 AS2 (mov
,%B0
,r31
) CR_TAB
2072 AS2 (mov
,r31
,__tmp_reg__
));
2074 else if ((INTVAL (src
) & 0xff00) == 0)
2077 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2078 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2079 AS2 (mov
,%A0
,r31
) CR_TAB
2080 AS1 (clr
,%B0
) CR_TAB
2081 AS2 (mov
,r31
,__tmp_reg__
));
2085 /* Last resort, equal to loading from memory. */
2087 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2088 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2089 AS2 (mov
,%A0
,r31
) CR_TAB
2090 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2091 AS2 (mov
,%B0
,r31
) CR_TAB
2092 AS2 (mov
,r31
,__tmp_reg__
));
2094 else if (GET_CODE (src
) == MEM
)
2095 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2097 else if (GET_CODE (dest
) == MEM
)
2101 if (src
== const0_rtx
)
2102 operands
[1] = zero_reg_rtx
;
2104 templ
= out_movhi_mr_r (insn
, operands
, real_l
);
2107 output_asm_insn (templ
, operands
);
2112 fatal_insn ("invalid insn:", insn
);
2117 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
2121 rtx x
= XEXP (src
, 0);
2127 if (CONSTANT_ADDRESS_P (x
))
2129 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2132 return AS2 (in
,%0,__SREG__
);
2134 if (optimize
> 0 && io_address_operand (x
, QImode
))
2137 return AS2 (in
,%0,%m1
-0x20);
2140 return AS2 (lds
,%0,%m1
);
2142 /* memory access by reg+disp */
2143 else if (GET_CODE (x
) == PLUS
2144 && REG_P (XEXP (x
,0))
2145 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2147 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
2149 int disp
= INTVAL (XEXP (x
,1));
2150 if (REGNO (XEXP (x
,0)) != REG_Y
)
2151 fatal_insn ("incorrect insn:",insn
);
2153 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2154 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
2155 AS2 (ldd
,%0,Y
+63) CR_TAB
2156 AS2 (sbiw
,r28
,%o1
-63));
2158 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2159 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2160 AS2 (ld
,%0,Y
) CR_TAB
2161 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2162 AS2 (sbci
,r29
,hi8(%o1
)));
2164 else if (REGNO (XEXP (x
,0)) == REG_X
)
2166 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2167 it but I have this situation with extremal optimizing options. */
2168 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
2169 || reg_unused_after (insn
, XEXP (x
,0)))
2170 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
2173 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
2174 AS2 (ld
,%0,X
) CR_TAB
2175 AS2 (sbiw
,r26
,%o1
));
2178 return AS2 (ldd
,%0,%1);
2181 return AS2 (ld
,%0,%1);
2185 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
2189 rtx base
= XEXP (src
, 0);
2190 int reg_dest
= true_regnum (dest
);
2191 int reg_base
= true_regnum (base
);
2192 /* "volatile" forces reading low byte first, even if less efficient,
2193 for correct operation with 16-bit I/O registers. */
2194 int mem_volatile_p
= MEM_VOLATILE_P (src
);
2202 if (reg_dest
== reg_base
) /* R = (R) */
2205 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
2206 AS2 (ld
,%B0
,%1) CR_TAB
2207 AS2 (mov
,%A0
,__tmp_reg__
));
2209 else if (reg_base
== REG_X
) /* (R26) */
2211 if (reg_unused_after (insn
, base
))
2214 return (AS2 (ld
,%A0
,X
+) CR_TAB
2218 return (AS2 (ld
,%A0
,X
+) CR_TAB
2219 AS2 (ld
,%B0
,X
) CR_TAB
2225 return (AS2 (ld
,%A0
,%1) CR_TAB
2226 AS2 (ldd
,%B0
,%1+1));
2229 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2231 int disp
= INTVAL (XEXP (base
, 1));
2232 int reg_base
= true_regnum (XEXP (base
, 0));
2234 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2236 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2237 fatal_insn ("incorrect insn:",insn
);
2239 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2240 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
2241 AS2 (ldd
,%A0
,Y
+62) CR_TAB
2242 AS2 (ldd
,%B0
,Y
+63) CR_TAB
2243 AS2 (sbiw
,r28
,%o1
-62));
2245 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2246 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2247 AS2 (ld
,%A0
,Y
) CR_TAB
2248 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2249 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2250 AS2 (sbci
,r29
,hi8(%o1
)));
2252 if (reg_base
== REG_X
)
2254 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2255 it but I have this situation with extremal
2256 optimization options. */
2259 if (reg_base
== reg_dest
)
2260 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2261 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2262 AS2 (ld
,%B0
,X
) CR_TAB
2263 AS2 (mov
,%A0
,__tmp_reg__
));
2265 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2266 AS2 (ld
,%A0
,X
+) CR_TAB
2267 AS2 (ld
,%B0
,X
) CR_TAB
2268 AS2 (sbiw
,r26
,%o1
+1));
2271 if (reg_base
== reg_dest
)
2274 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
2275 AS2 (ldd
,%B0
,%B1
) CR_TAB
2276 AS2 (mov
,%A0
,__tmp_reg__
));
2280 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
2283 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2285 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2286 fatal_insn ("incorrect insn:", insn
);
2290 if (REGNO (XEXP (base
, 0)) == REG_X
)
2293 return (AS2 (sbiw
,r26
,2) CR_TAB
2294 AS2 (ld
,%A0
,X
+) CR_TAB
2295 AS2 (ld
,%B0
,X
) CR_TAB
2301 return (AS2 (sbiw
,%r1
,2) CR_TAB
2302 AS2 (ld
,%A0
,%p1
) CR_TAB
2303 AS2 (ldd
,%B0
,%p1
+1));
2308 return (AS2 (ld
,%B0
,%1) CR_TAB
2311 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2313 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2314 fatal_insn ("incorrect insn:", insn
);
2317 return (AS2 (ld
,%A0
,%1) CR_TAB
2320 else if (CONSTANT_ADDRESS_P (base
))
2322 if (optimize
> 0 && io_address_operand (base
, HImode
))
2325 return (AS2 (in
,%A0
,%m1
-0x20) CR_TAB
2326 AS2 (in
,%B0
,%m1
+1-0x20));
2329 return (AS2 (lds
,%A0
,%m1
) CR_TAB
2330 AS2 (lds
,%B0
,%m1
+1));
2333 fatal_insn ("unknown move insn:",insn
);
2338 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2342 rtx base
= XEXP (src
, 0);
2343 int reg_dest
= true_regnum (dest
);
2344 int reg_base
= true_regnum (base
);
2352 if (reg_base
== REG_X
) /* (R26) */
2354 if (reg_dest
== REG_X
)
2355 /* "ld r26,-X" is undefined */
2356 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2357 AS2 (ld
,r29
,X
) CR_TAB
2358 AS2 (ld
,r28
,-X
) CR_TAB
2359 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2360 AS2 (sbiw
,r26
,1) CR_TAB
2361 AS2 (ld
,r26
,X
) CR_TAB
2362 AS2 (mov
,r27
,__tmp_reg__
));
2363 else if (reg_dest
== REG_X
- 2)
2364 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2365 AS2 (ld
,%B0
,X
+) CR_TAB
2366 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2367 AS2 (ld
,%D0
,X
) CR_TAB
2368 AS2 (mov
,%C0
,__tmp_reg__
));
2369 else if (reg_unused_after (insn
, base
))
2370 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2371 AS2 (ld
,%B0
,X
+) CR_TAB
2372 AS2 (ld
,%C0
,X
+) CR_TAB
2375 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2376 AS2 (ld
,%B0
,X
+) CR_TAB
2377 AS2 (ld
,%C0
,X
+) CR_TAB
2378 AS2 (ld
,%D0
,X
) CR_TAB
2383 if (reg_dest
== reg_base
)
2384 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2385 AS2 (ldd
,%C0
,%1+2) CR_TAB
2386 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2387 AS2 (ld
,%A0
,%1) CR_TAB
2388 AS2 (mov
,%B0
,__tmp_reg__
));
2389 else if (reg_base
== reg_dest
+ 2)
2390 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2391 AS2 (ldd
,%B0
,%1+1) CR_TAB
2392 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2393 AS2 (ldd
,%D0
,%1+3) CR_TAB
2394 AS2 (mov
,%C0
,__tmp_reg__
));
2396 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2397 AS2 (ldd
,%B0
,%1+1) CR_TAB
2398 AS2 (ldd
,%C0
,%1+2) CR_TAB
2399 AS2 (ldd
,%D0
,%1+3));
2402 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2404 int disp
= INTVAL (XEXP (base
, 1));
2406 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2408 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2409 fatal_insn ("incorrect insn:",insn
);
2411 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2412 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2413 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2414 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2415 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2416 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2417 AS2 (sbiw
,r28
,%o1
-60));
2419 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2420 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2421 AS2 (ld
,%A0
,Y
) CR_TAB
2422 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2423 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2424 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2425 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2426 AS2 (sbci
,r29
,hi8(%o1
)));
2429 reg_base
= true_regnum (XEXP (base
, 0));
2430 if (reg_base
== REG_X
)
2433 if (reg_dest
== REG_X
)
2436 /* "ld r26,-X" is undefined */
2437 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2438 AS2 (ld
,r29
,X
) CR_TAB
2439 AS2 (ld
,r28
,-X
) CR_TAB
2440 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2441 AS2 (sbiw
,r26
,1) CR_TAB
2442 AS2 (ld
,r26
,X
) CR_TAB
2443 AS2 (mov
,r27
,__tmp_reg__
));
2446 if (reg_dest
== REG_X
- 2)
2447 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2448 AS2 (ld
,r24
,X
+) CR_TAB
2449 AS2 (ld
,r25
,X
+) CR_TAB
2450 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2451 AS2 (ld
,r27
,X
) CR_TAB
2452 AS2 (mov
,r26
,__tmp_reg__
));
2454 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2455 AS2 (ld
,%A0
,X
+) CR_TAB
2456 AS2 (ld
,%B0
,X
+) CR_TAB
2457 AS2 (ld
,%C0
,X
+) CR_TAB
2458 AS2 (ld
,%D0
,X
) CR_TAB
2459 AS2 (sbiw
,r26
,%o1
+3));
2461 if (reg_dest
== reg_base
)
2462 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2463 AS2 (ldd
,%C0
,%C1
) CR_TAB
2464 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2465 AS2 (ldd
,%A0
,%A1
) CR_TAB
2466 AS2 (mov
,%B0
,__tmp_reg__
));
2467 else if (reg_dest
== reg_base
- 2)
2468 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2469 AS2 (ldd
,%B0
,%B1
) CR_TAB
2470 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2471 AS2 (ldd
,%D0
,%D1
) CR_TAB
2472 AS2 (mov
,%C0
,__tmp_reg__
));
2473 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2474 AS2 (ldd
,%B0
,%B1
) CR_TAB
2475 AS2 (ldd
,%C0
,%C1
) CR_TAB
2478 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2479 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2480 AS2 (ld
,%C0
,%1) CR_TAB
2481 AS2 (ld
,%B0
,%1) CR_TAB
2483 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2484 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2485 AS2 (ld
,%B0
,%1) CR_TAB
2486 AS2 (ld
,%C0
,%1) CR_TAB
2488 else if (CONSTANT_ADDRESS_P (base
))
2489 return *l
=8, (AS2 (lds
,%A0
,%m1
) CR_TAB
2490 AS2 (lds
,%B0
,%m1
+1) CR_TAB
2491 AS2 (lds
,%C0
,%m1
+2) CR_TAB
2492 AS2 (lds
,%D0
,%m1
+3));
2494 fatal_insn ("unknown move insn:",insn
);
2499 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2503 rtx base
= XEXP (dest
, 0);
2504 int reg_base
= true_regnum (base
);
2505 int reg_src
= true_regnum (src
);
2511 if (CONSTANT_ADDRESS_P (base
))
2512 return *l
=8,(AS2 (sts
,%m0
,%A1
) CR_TAB
2513 AS2 (sts
,%m0
+1,%B1
) CR_TAB
2514 AS2 (sts
,%m0
+2,%C1
) CR_TAB
2515 AS2 (sts
,%m0
+3,%D1
));
2516 if (reg_base
> 0) /* (r) */
2518 if (reg_base
== REG_X
) /* (R26) */
2520 if (reg_src
== REG_X
)
2522 /* "st X+,r26" is undefined */
2523 if (reg_unused_after (insn
, base
))
2524 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2525 AS2 (st
,X
,r26
) CR_TAB
2526 AS2 (adiw
,r26
,1) CR_TAB
2527 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2528 AS2 (st
,X
+,r28
) CR_TAB
2531 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2532 AS2 (st
,X
,r26
) CR_TAB
2533 AS2 (adiw
,r26
,1) CR_TAB
2534 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2535 AS2 (st
,X
+,r28
) CR_TAB
2536 AS2 (st
,X
,r29
) CR_TAB
2539 else if (reg_base
== reg_src
+ 2)
2541 if (reg_unused_after (insn
, base
))
2542 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2543 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2544 AS2 (st
,%0+,%A1
) CR_TAB
2545 AS2 (st
,%0+,%B1
) CR_TAB
2546 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2547 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2548 AS1 (clr
,__zero_reg__
));
2550 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2551 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2552 AS2 (st
,%0+,%A1
) CR_TAB
2553 AS2 (st
,%0+,%B1
) CR_TAB
2554 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2555 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2556 AS1 (clr
,__zero_reg__
) CR_TAB
2559 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2560 AS2 (st
,%0+,%B1
) CR_TAB
2561 AS2 (st
,%0+,%C1
) CR_TAB
2562 AS2 (st
,%0,%D1
) CR_TAB
2566 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2567 AS2 (std
,%0+1,%B1
) CR_TAB
2568 AS2 (std
,%0+2,%C1
) CR_TAB
2569 AS2 (std
,%0+3,%D1
));
2571 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2573 int disp
= INTVAL (XEXP (base
, 1));
2574 reg_base
= REGNO (XEXP (base
, 0));
2575 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2577 if (reg_base
!= REG_Y
)
2578 fatal_insn ("incorrect insn:",insn
);
2580 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2581 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2582 AS2 (std
,Y
+60,%A1
) CR_TAB
2583 AS2 (std
,Y
+61,%B1
) CR_TAB
2584 AS2 (std
,Y
+62,%C1
) CR_TAB
2585 AS2 (std
,Y
+63,%D1
) CR_TAB
2586 AS2 (sbiw
,r28
,%o0
-60));
2588 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2589 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2590 AS2 (st
,Y
,%A1
) CR_TAB
2591 AS2 (std
,Y
+1,%B1
) CR_TAB
2592 AS2 (std
,Y
+2,%C1
) CR_TAB
2593 AS2 (std
,Y
+3,%D1
) CR_TAB
2594 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2595 AS2 (sbci
,r29
,hi8(%o0
)));
2597 if (reg_base
== REG_X
)
2600 if (reg_src
== REG_X
)
2603 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2604 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2605 AS2 (adiw
,r26
,%o0
) CR_TAB
2606 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2607 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2608 AS2 (st
,X
+,r28
) CR_TAB
2609 AS2 (st
,X
,r29
) CR_TAB
2610 AS1 (clr
,__zero_reg__
) CR_TAB
2611 AS2 (sbiw
,r26
,%o0
+3));
2613 else if (reg_src
== REG_X
- 2)
2616 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2617 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2618 AS2 (adiw
,r26
,%o0
) CR_TAB
2619 AS2 (st
,X
+,r24
) CR_TAB
2620 AS2 (st
,X
+,r25
) CR_TAB
2621 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2622 AS2 (st
,X
,__zero_reg__
) CR_TAB
2623 AS1 (clr
,__zero_reg__
) CR_TAB
2624 AS2 (sbiw
,r26
,%o0
+3));
2627 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2628 AS2 (st
,X
+,%A1
) CR_TAB
2629 AS2 (st
,X
+,%B1
) CR_TAB
2630 AS2 (st
,X
+,%C1
) CR_TAB
2631 AS2 (st
,X
,%D1
) CR_TAB
2632 AS2 (sbiw
,r26
,%o0
+3));
2634 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2635 AS2 (std
,%B0
,%B1
) CR_TAB
2636 AS2 (std
,%C0
,%C1
) CR_TAB
2639 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2640 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2641 AS2 (st
,%0,%C1
) CR_TAB
2642 AS2 (st
,%0,%B1
) CR_TAB
2644 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2645 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2646 AS2 (st
,%0,%B1
) CR_TAB
2647 AS2 (st
,%0,%C1
) CR_TAB
2649 fatal_insn ("unknown move insn:",insn
);
2654 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2657 rtx dest
= operands
[0];
2658 rtx src
= operands
[1];
2664 if (register_operand (dest
, VOIDmode
))
2666 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2668 if (true_regnum (dest
) > true_regnum (src
))
2673 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2674 AS2 (movw
,%A0
,%A1
));
2677 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2678 AS2 (mov
,%C0
,%C1
) CR_TAB
2679 AS2 (mov
,%B0
,%B1
) CR_TAB
2687 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2688 AS2 (movw
,%C0
,%C1
));
2691 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2692 AS2 (mov
,%B0
,%B1
) CR_TAB
2693 AS2 (mov
,%C0
,%C1
) CR_TAB
2697 else if (CONSTANT_P (src
))
2699 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2702 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2703 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2704 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2705 AS2 (ldi
,%D0
,hhi8(%1)));
2708 if (GET_CODE (src
) == CONST_INT
)
2710 const char *const clr_op0
=
2711 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2712 AS1 (clr
,%B0
) CR_TAB
2714 : (AS1 (clr
,%A0
) CR_TAB
2715 AS1 (clr
,%B0
) CR_TAB
2716 AS1 (clr
,%C0
) CR_TAB
2719 if (src
== const0_rtx
) /* mov r,L */
2721 *l
= AVR_HAVE_MOVW
? 3 : 4;
2724 else if (src
== const1_rtx
)
2727 output_asm_insn (clr_op0
, operands
);
2728 *l
= AVR_HAVE_MOVW
? 4 : 5;
2729 return AS1 (inc
,%A0
);
2731 else if (src
== constm1_rtx
)
2733 /* Immediate constants -1 to any register */
2737 return (AS1 (clr
,%A0
) CR_TAB
2738 AS1 (dec
,%A0
) CR_TAB
2739 AS2 (mov
,%B0
,%A0
) CR_TAB
2740 AS2 (movw
,%C0
,%A0
));
2743 return (AS1 (clr
,%A0
) CR_TAB
2744 AS1 (dec
,%A0
) CR_TAB
2745 AS2 (mov
,%B0
,%A0
) CR_TAB
2746 AS2 (mov
,%C0
,%A0
) CR_TAB
2751 int bit_nr
= exact_log2 (INTVAL (src
));
2755 *l
= AVR_HAVE_MOVW
? 5 : 6;
2758 output_asm_insn (clr_op0
, operands
);
2759 output_asm_insn ("set", operands
);
2762 avr_output_bld (operands
, bit_nr
);
2769 /* Last resort, better than loading from memory. */
2771 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2772 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2773 AS2 (mov
,%A0
,r31
) CR_TAB
2774 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2775 AS2 (mov
,%B0
,r31
) CR_TAB
2776 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2777 AS2 (mov
,%C0
,r31
) CR_TAB
2778 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2779 AS2 (mov
,%D0
,r31
) CR_TAB
2780 AS2 (mov
,r31
,__tmp_reg__
));
2782 else if (GET_CODE (src
) == MEM
)
2783 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2785 else if (GET_CODE (dest
) == MEM
)
2789 if (src
== const0_rtx
)
2790 operands
[1] = zero_reg_rtx
;
2792 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
2795 output_asm_insn (templ
, operands
);
2800 fatal_insn ("invalid insn:", insn
);
2805 out_movqi_mr_r (rtx insn
, rtx op
[], int *l
)
2809 rtx x
= XEXP (dest
, 0);
2815 if (CONSTANT_ADDRESS_P (x
))
2817 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2820 return AS2 (out
,__SREG__
,%1);
2822 if (optimize
> 0 && io_address_operand (x
, QImode
))
2825 return AS2 (out
,%m0
-0x20,%1);
2828 return AS2 (sts
,%m0
,%1);
2830 /* memory access by reg+disp */
2831 else if (GET_CODE (x
) == PLUS
2832 && REG_P (XEXP (x
,0))
2833 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2835 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (dest
))) >= 63)
2837 int disp
= INTVAL (XEXP (x
,1));
2838 if (REGNO (XEXP (x
,0)) != REG_Y
)
2839 fatal_insn ("incorrect insn:",insn
);
2841 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2842 return *l
= 3, (AS2 (adiw
,r28
,%o0
-63) CR_TAB
2843 AS2 (std
,Y
+63,%1) CR_TAB
2844 AS2 (sbiw
,r28
,%o0
-63));
2846 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2847 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2848 AS2 (st
,Y
,%1) CR_TAB
2849 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2850 AS2 (sbci
,r29
,hi8(%o0
)));
2852 else if (REGNO (XEXP (x
,0)) == REG_X
)
2854 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
2856 if (reg_unused_after (insn
, XEXP (x
,0)))
2857 return *l
= 3, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2858 AS2 (adiw
,r26
,%o0
) CR_TAB
2859 AS2 (st
,X
,__tmp_reg__
));
2861 return *l
= 4, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2862 AS2 (adiw
,r26
,%o0
) CR_TAB
2863 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2864 AS2 (sbiw
,r26
,%o0
));
2868 if (reg_unused_after (insn
, XEXP (x
,0)))
2869 return *l
= 2, (AS2 (adiw
,r26
,%o0
) CR_TAB
2872 return *l
= 3, (AS2 (adiw
,r26
,%o0
) CR_TAB
2873 AS2 (st
,X
,%1) CR_TAB
2874 AS2 (sbiw
,r26
,%o0
));
2878 return AS2 (std
,%0,%1);
2881 return AS2 (st
,%0,%1);
2885 out_movhi_mr_r (rtx insn
, rtx op
[], int *l
)
2889 rtx base
= XEXP (dest
, 0);
2890 int reg_base
= true_regnum (base
);
2891 int reg_src
= true_regnum (src
);
2892 /* "volatile" forces writing high byte first, even if less efficient,
2893 for correct operation with 16-bit I/O registers. */
2894 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
2899 if (CONSTANT_ADDRESS_P (base
))
2901 if (optimize
> 0 && io_address_operand (base
, HImode
))
2904 return (AS2 (out
,%m0
+1-0x20,%B1
) CR_TAB
2905 AS2 (out
,%m0
-0x20,%A1
));
2907 return *l
= 4, (AS2 (sts
,%m0
+1,%B1
) CR_TAB
2912 if (reg_base
== REG_X
)
2914 if (reg_src
== REG_X
)
2916 /* "st X+,r26" and "st -X,r26" are undefined. */
2917 if (!mem_volatile_p
&& reg_unused_after (insn
, src
))
2918 return *l
=4, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2919 AS2 (st
,X
,r26
) CR_TAB
2920 AS2 (adiw
,r26
,1) CR_TAB
2921 AS2 (st
,X
,__tmp_reg__
));
2923 return *l
=5, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2924 AS2 (adiw
,r26
,1) CR_TAB
2925 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2926 AS2 (sbiw
,r26
,1) CR_TAB
2931 if (!mem_volatile_p
&& reg_unused_after (insn
, base
))
2932 return *l
=2, (AS2 (st
,X
+,%A1
) CR_TAB
2935 return *l
=3, (AS2 (adiw
,r26
,1) CR_TAB
2936 AS2 (st
,X
,%B1
) CR_TAB
2941 return *l
=2, (AS2 (std
,%0+1,%B1
) CR_TAB
2944 else if (GET_CODE (base
) == PLUS
)
2946 int disp
= INTVAL (XEXP (base
, 1));
2947 reg_base
= REGNO (XEXP (base
, 0));
2948 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2950 if (reg_base
!= REG_Y
)
2951 fatal_insn ("incorrect insn:",insn
);
2953 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2954 return *l
= 4, (AS2 (adiw
,r28
,%o0
-62) CR_TAB
2955 AS2 (std
,Y
+63,%B1
) CR_TAB
2956 AS2 (std
,Y
+62,%A1
) CR_TAB
2957 AS2 (sbiw
,r28
,%o0
-62));
2959 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2960 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2961 AS2 (std
,Y
+1,%B1
) CR_TAB
2962 AS2 (st
,Y
,%A1
) CR_TAB
2963 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2964 AS2 (sbci
,r29
,hi8(%o0
)));
2966 if (reg_base
== REG_X
)
2969 if (reg_src
== REG_X
)
2972 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2973 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2974 AS2 (adiw
,r26
,%o0
+1) CR_TAB
2975 AS2 (st
,X
,__zero_reg__
) CR_TAB
2976 AS2 (st
,-X
,__tmp_reg__
) CR_TAB
2977 AS1 (clr
,__zero_reg__
) CR_TAB
2978 AS2 (sbiw
,r26
,%o0
));
2981 return (AS2 (adiw
,r26
,%o0
+1) CR_TAB
2982 AS2 (st
,X
,%B1
) CR_TAB
2983 AS2 (st
,-X
,%A1
) CR_TAB
2984 AS2 (sbiw
,r26
,%o0
));
2986 return *l
=2, (AS2 (std
,%B0
,%B1
) CR_TAB
2989 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2990 return *l
=2, (AS2 (st
,%0,%B1
) CR_TAB
2992 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2996 if (REGNO (XEXP (base
, 0)) == REG_X
)
2999 return (AS2 (adiw
,r26
,1) CR_TAB
3000 AS2 (st
,X
,%B1
) CR_TAB
3001 AS2 (st
,-X
,%A1
) CR_TAB
3007 return (AS2 (std
,%p0
+1,%B1
) CR_TAB
3008 AS2 (st
,%p0
,%A1
) CR_TAB
3014 return (AS2 (st
,%0,%A1
) CR_TAB
3017 fatal_insn ("unknown move insn:",insn
);
3021 /* Return 1 if frame pointer for current function required. */
3024 avr_frame_pointer_required_p (void)
3026 return (cfun
->calls_alloca
3027 || crtl
->args
.info
.nregs
== 0
3028 || get_frame_size () > 0);
3031 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3034 compare_condition (rtx insn
)
3036 rtx next
= next_real_insn (insn
);
3037 RTX_CODE cond
= UNKNOWN
;
3038 if (next
&& GET_CODE (next
) == JUMP_INSN
)
3040 rtx pat
= PATTERN (next
);
3041 rtx src
= SET_SRC (pat
);
3042 rtx t
= XEXP (src
, 0);
3043 cond
= GET_CODE (t
);
3048 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
3051 compare_sign_p (rtx insn
)
3053 RTX_CODE cond
= compare_condition (insn
);
3054 return (cond
== GE
|| cond
== LT
);
3057 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3058 that needs to be swapped (GT, GTU, LE, LEU). */
3061 compare_diff_p (rtx insn
)
3063 RTX_CODE cond
= compare_condition (insn
);
3064 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
3067 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3070 compare_eq_p (rtx insn
)
3072 RTX_CODE cond
= compare_condition (insn
);
3073 return (cond
== EQ
|| cond
== NE
);
3077 /* Output test instruction for HImode. */
3080 out_tsthi (rtx insn
, rtx op
, int *l
)
3082 if (compare_sign_p (insn
))
3085 return AS1 (tst
,%B0
);
3087 if (reg_unused_after (insn
, op
)
3088 && compare_eq_p (insn
))
3090 /* Faster than sbiw if we can clobber the operand. */
3092 return "or %A0,%B0";
3094 if (test_hard_reg_class (ADDW_REGS
, op
))
3097 return AS2 (sbiw
,%0,0);
3100 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
3101 AS2 (cpc
,%B0
,__zero_reg__
));
3105 /* Output test instruction for SImode. */
3108 out_tstsi (rtx insn
, rtx op
, int *l
)
3110 if (compare_sign_p (insn
))
3113 return AS1 (tst
,%D0
);
3115 if (test_hard_reg_class (ADDW_REGS
, op
))
3118 return (AS2 (sbiw
,%A0
,0) CR_TAB
3119 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
3120 AS2 (cpc
,%D0
,__zero_reg__
));
3123 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
3124 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
3125 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
3126 AS2 (cpc
,%D0
,__zero_reg__
));
3130 /* Generate asm equivalent for various shifts.
3131 Shift count is a CONST_INT, MEM or REG.
3132 This only handles cases that are not already
3133 carefully hand-optimized in ?sh??i3_out. */
3136 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
3137 int *len
, int t_len
)
3141 int second_label
= 1;
3142 int saved_in_tmp
= 0;
3143 int use_zero_reg
= 0;
3145 op
[0] = operands
[0];
3146 op
[1] = operands
[1];
3147 op
[2] = operands
[2];
3148 op
[3] = operands
[3];
3154 if (GET_CODE (operands
[2]) == CONST_INT
)
3156 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3157 int count
= INTVAL (operands
[2]);
3158 int max_len
= 10; /* If larger than this, always use a loop. */
3167 if (count
< 8 && !scratch
)
3171 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
3173 if (t_len
* count
<= max_len
)
3175 /* Output shifts inline with no loop - faster. */
3177 *len
= t_len
* count
;
3181 output_asm_insn (templ
, op
);
3190 strcat (str
, AS2 (ldi
,%3,%2));
3192 else if (use_zero_reg
)
3194 /* Hack to save one word: use __zero_reg__ as loop counter.
3195 Set one bit, then shift in a loop until it is 0 again. */
3197 op
[3] = zero_reg_rtx
;
3201 strcat (str
, ("set" CR_TAB
3202 AS2 (bld
,%3,%2-1)));
3206 /* No scratch register available, use one from LD_REGS (saved in
3207 __tmp_reg__) that doesn't overlap with registers to shift. */
3209 op
[3] = gen_rtx_REG (QImode
,
3210 ((true_regnum (operands
[0]) - 1) & 15) + 16);
3211 op
[4] = tmp_reg_rtx
;
3215 *len
= 3; /* Includes "mov %3,%4" after the loop. */
3217 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
3223 else if (GET_CODE (operands
[2]) == MEM
)
3227 op
[3] = op_mov
[0] = tmp_reg_rtx
;
3231 out_movqi_r_mr (insn
, op_mov
, len
);
3233 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
3235 else if (register_operand (operands
[2], QImode
))
3237 if (reg_unused_after (insn
, operands
[2]))
3241 op
[3] = tmp_reg_rtx
;
3243 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
3247 fatal_insn ("bad shift insn:", insn
);
3254 strcat (str
, AS1 (rjmp
,2f
));
3258 *len
+= t_len
+ 2; /* template + dec + brXX */
3261 strcat (str
, "\n1:\t");
3262 strcat (str
, templ
);
3263 strcat (str
, second_label
? "\n2:\t" : "\n\t");
3264 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
3265 strcat (str
, CR_TAB
);
3266 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
3268 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
3269 output_asm_insn (str
, op
);
3274 /* 8bit shift left ((char)x << i) */
3277 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
3279 if (GET_CODE (operands
[2]) == CONST_INT
)
3286 switch (INTVAL (operands
[2]))
3289 if (INTVAL (operands
[2]) < 8)
3293 return AS1 (clr
,%0);
3297 return AS1 (lsl
,%0);
3301 return (AS1 (lsl
,%0) CR_TAB
3306 return (AS1 (lsl
,%0) CR_TAB
3311 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3314 return (AS1 (swap
,%0) CR_TAB
3315 AS2 (andi
,%0,0xf0));
3318 return (AS1 (lsl
,%0) CR_TAB
3324 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3327 return (AS1 (swap
,%0) CR_TAB
3329 AS2 (andi
,%0,0xe0));
3332 return (AS1 (lsl
,%0) CR_TAB
3339 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3342 return (AS1 (swap
,%0) CR_TAB
3345 AS2 (andi
,%0,0xc0));
3348 return (AS1 (lsl
,%0) CR_TAB
3357 return (AS1 (ror
,%0) CR_TAB
3362 else if (CONSTANT_P (operands
[2]))
3363 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3365 out_shift_with_cnt (AS1 (lsl
,%0),
3366 insn
, operands
, len
, 1);
3371 /* 16bit shift left ((short)x << i) */
3374 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3376 if (GET_CODE (operands
[2]) == CONST_INT
)
3378 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3379 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3386 switch (INTVAL (operands
[2]))
3389 if (INTVAL (operands
[2]) < 16)
3393 return (AS1 (clr
,%B0
) CR_TAB
3397 if (optimize_size
&& scratch
)
3402 return (AS1 (swap
,%A0
) CR_TAB
3403 AS1 (swap
,%B0
) CR_TAB
3404 AS2 (andi
,%B0
,0xf0) CR_TAB
3405 AS2 (eor
,%B0
,%A0
) CR_TAB
3406 AS2 (andi
,%A0
,0xf0) CR_TAB
3412 return (AS1 (swap
,%A0
) CR_TAB
3413 AS1 (swap
,%B0
) CR_TAB
3414 AS2 (ldi
,%3,0xf0) CR_TAB
3416 AS2 (eor
,%B0
,%A0
) CR_TAB
3420 break; /* optimize_size ? 6 : 8 */
3424 break; /* scratch ? 5 : 6 */
3428 return (AS1 (lsl
,%A0
) CR_TAB
3429 AS1 (rol
,%B0
) CR_TAB
3430 AS1 (swap
,%A0
) CR_TAB
3431 AS1 (swap
,%B0
) CR_TAB
3432 AS2 (andi
,%B0
,0xf0) CR_TAB
3433 AS2 (eor
,%B0
,%A0
) CR_TAB
3434 AS2 (andi
,%A0
,0xf0) CR_TAB
3440 return (AS1 (lsl
,%A0
) CR_TAB
3441 AS1 (rol
,%B0
) CR_TAB
3442 AS1 (swap
,%A0
) CR_TAB
3443 AS1 (swap
,%B0
) CR_TAB
3444 AS2 (ldi
,%3,0xf0) CR_TAB
3446 AS2 (eor
,%B0
,%A0
) CR_TAB
3454 break; /* scratch ? 5 : 6 */
3456 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3457 AS1 (lsr
,%B0
) CR_TAB
3458 AS1 (ror
,%A0
) CR_TAB
3459 AS1 (ror
,__tmp_reg__
) CR_TAB
3460 AS1 (lsr
,%B0
) CR_TAB
3461 AS1 (ror
,%A0
) CR_TAB
3462 AS1 (ror
,__tmp_reg__
) CR_TAB
3463 AS2 (mov
,%B0
,%A0
) CR_TAB
3464 AS2 (mov
,%A0
,__tmp_reg__
));
3468 return (AS1 (lsr
,%B0
) CR_TAB
3469 AS2 (mov
,%B0
,%A0
) CR_TAB
3470 AS1 (clr
,%A0
) CR_TAB
3471 AS1 (ror
,%B0
) CR_TAB
3475 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3480 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3481 AS1 (clr
,%A0
) CR_TAB
3486 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3487 AS1 (clr
,%A0
) CR_TAB
3488 AS1 (lsl
,%B0
) CR_TAB
3493 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3494 AS1 (clr
,%A0
) CR_TAB
3495 AS1 (lsl
,%B0
) CR_TAB
3496 AS1 (lsl
,%B0
) CR_TAB
3503 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3504 AS1 (clr
,%A0
) CR_TAB
3505 AS1 (swap
,%B0
) CR_TAB
3506 AS2 (andi
,%B0
,0xf0));
3511 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3512 AS1 (clr
,%A0
) CR_TAB
3513 AS1 (swap
,%B0
) CR_TAB
3514 AS2 (ldi
,%3,0xf0) CR_TAB
3518 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3519 AS1 (clr
,%A0
) CR_TAB
3520 AS1 (lsl
,%B0
) CR_TAB
3521 AS1 (lsl
,%B0
) CR_TAB
3522 AS1 (lsl
,%B0
) CR_TAB
3529 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3530 AS1 (clr
,%A0
) CR_TAB
3531 AS1 (swap
,%B0
) CR_TAB
3532 AS1 (lsl
,%B0
) CR_TAB
3533 AS2 (andi
,%B0
,0xe0));
3535 if (AVR_HAVE_MUL
&& scratch
)
3538 return (AS2 (ldi
,%3,0x20) CR_TAB
3539 AS2 (mul
,%A0
,%3) CR_TAB
3540 AS2 (mov
,%B0
,r0
) CR_TAB
3541 AS1 (clr
,%A0
) CR_TAB
3542 AS1 (clr
,__zero_reg__
));
3544 if (optimize_size
&& scratch
)
3549 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3550 AS1 (clr
,%A0
) CR_TAB
3551 AS1 (swap
,%B0
) CR_TAB
3552 AS1 (lsl
,%B0
) CR_TAB
3553 AS2 (ldi
,%3,0xe0) CR_TAB
3559 return ("set" CR_TAB
3560 AS2 (bld
,r1
,5) CR_TAB
3561 AS2 (mul
,%A0
,r1
) CR_TAB
3562 AS2 (mov
,%B0
,r0
) CR_TAB
3563 AS1 (clr
,%A0
) CR_TAB
3564 AS1 (clr
,__zero_reg__
));
3567 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3568 AS1 (clr
,%A0
) CR_TAB
3569 AS1 (lsl
,%B0
) CR_TAB
3570 AS1 (lsl
,%B0
) CR_TAB
3571 AS1 (lsl
,%B0
) CR_TAB
3572 AS1 (lsl
,%B0
) CR_TAB
3576 if (AVR_HAVE_MUL
&& ldi_ok
)
3579 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3580 AS2 (mul
,%A0
,%B0
) CR_TAB
3581 AS2 (mov
,%B0
,r0
) CR_TAB
3582 AS1 (clr
,%A0
) CR_TAB
3583 AS1 (clr
,__zero_reg__
));
3585 if (AVR_HAVE_MUL
&& scratch
)
3588 return (AS2 (ldi
,%3,0x40) CR_TAB
3589 AS2 (mul
,%A0
,%3) CR_TAB
3590 AS2 (mov
,%B0
,r0
) CR_TAB
3591 AS1 (clr
,%A0
) CR_TAB
3592 AS1 (clr
,__zero_reg__
));
3594 if (optimize_size
&& ldi_ok
)
3597 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3598 AS2 (ldi
,%A0
,6) "\n1:\t"
3599 AS1 (lsl
,%B0
) CR_TAB
3600 AS1 (dec
,%A0
) CR_TAB
3603 if (optimize_size
&& scratch
)
3606 return (AS1 (clr
,%B0
) CR_TAB
3607 AS1 (lsr
,%A0
) CR_TAB
3608 AS1 (ror
,%B0
) CR_TAB
3609 AS1 (lsr
,%A0
) CR_TAB
3610 AS1 (ror
,%B0
) CR_TAB
3615 return (AS1 (clr
,%B0
) CR_TAB
3616 AS1 (lsr
,%A0
) CR_TAB
3617 AS1 (ror
,%B0
) CR_TAB
3622 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3624 insn
, operands
, len
, 2);
3629 /* 32bit shift left ((long)x << i) */
3632 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3634 if (GET_CODE (operands
[2]) == CONST_INT
)
3642 switch (INTVAL (operands
[2]))
3645 if (INTVAL (operands
[2]) < 32)
3649 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3650 AS1 (clr
,%C0
) CR_TAB
3651 AS2 (movw
,%A0
,%C0
));
3653 return (AS1 (clr
,%D0
) CR_TAB
3654 AS1 (clr
,%C0
) CR_TAB
3655 AS1 (clr
,%B0
) CR_TAB
3660 int reg0
= true_regnum (operands
[0]);
3661 int reg1
= true_regnum (operands
[1]);
3664 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3665 AS2 (mov
,%C0
,%B1
) CR_TAB
3666 AS2 (mov
,%B0
,%A1
) CR_TAB
3669 return (AS1 (clr
,%A0
) CR_TAB
3670 AS2 (mov
,%B0
,%A1
) CR_TAB
3671 AS2 (mov
,%C0
,%B1
) CR_TAB
3677 int reg0
= true_regnum (operands
[0]);
3678 int reg1
= true_regnum (operands
[1]);
3679 if (reg0
+ 2 == reg1
)
3680 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3683 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3684 AS1 (clr
,%B0
) CR_TAB
3687 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3688 AS2 (mov
,%D0
,%B1
) CR_TAB
3689 AS1 (clr
,%B0
) CR_TAB
3695 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3696 AS1 (clr
,%C0
) CR_TAB
3697 AS1 (clr
,%B0
) CR_TAB
3702 return (AS1 (clr
,%D0
) CR_TAB
3703 AS1 (lsr
,%A0
) CR_TAB
3704 AS1 (ror
,%D0
) CR_TAB
3705 AS1 (clr
,%C0
) CR_TAB
3706 AS1 (clr
,%B0
) CR_TAB
3711 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3712 AS1 (rol
,%B0
) CR_TAB
3713 AS1 (rol
,%C0
) CR_TAB
3715 insn
, operands
, len
, 4);
3719 /* 8bit arithmetic shift right ((signed char)x >> i) */
3722 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3724 if (GET_CODE (operands
[2]) == CONST_INT
)
3731 switch (INTVAL (operands
[2]))
3735 return AS1 (asr
,%0);
3739 return (AS1 (asr
,%0) CR_TAB
3744 return (AS1 (asr
,%0) CR_TAB
3750 return (AS1 (asr
,%0) CR_TAB
3757 return (AS1 (asr
,%0) CR_TAB
3765 return (AS2 (bst
,%0,6) CR_TAB
3767 AS2 (sbc
,%0,%0) CR_TAB
3771 if (INTVAL (operands
[2]) < 8)
3778 return (AS1 (lsl
,%0) CR_TAB
3782 else if (CONSTANT_P (operands
[2]))
3783 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3785 out_shift_with_cnt (AS1 (asr
,%0),
3786 insn
, operands
, len
, 1);
3791 /* 16bit arithmetic shift right ((signed short)x >> i) */
3794 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3796 if (GET_CODE (operands
[2]) == CONST_INT
)
3798 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3799 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3806 switch (INTVAL (operands
[2]))
3810 /* XXX try to optimize this too? */
3815 break; /* scratch ? 5 : 6 */
3817 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3818 AS2 (mov
,%A0
,%B0
) CR_TAB
3819 AS1 (lsl
,__tmp_reg__
) CR_TAB
3820 AS1 (rol
,%A0
) CR_TAB
3821 AS2 (sbc
,%B0
,%B0
) CR_TAB
3822 AS1 (lsl
,__tmp_reg__
) CR_TAB
3823 AS1 (rol
,%A0
) CR_TAB
3828 return (AS1 (lsl
,%A0
) CR_TAB
3829 AS2 (mov
,%A0
,%B0
) CR_TAB
3830 AS1 (rol
,%A0
) CR_TAB
3835 int reg0
= true_regnum (operands
[0]);
3836 int reg1
= true_regnum (operands
[1]);
3839 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3840 AS1 (lsl
,%B0
) CR_TAB
3843 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3844 AS1 (clr
,%B0
) CR_TAB
3845 AS2 (sbrc
,%A0
,7) CR_TAB
3851 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3852 AS1 (lsl
,%B0
) CR_TAB
3853 AS2 (sbc
,%B0
,%B0
) CR_TAB
3858 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3859 AS1 (lsl
,%B0
) CR_TAB
3860 AS2 (sbc
,%B0
,%B0
) CR_TAB
3861 AS1 (asr
,%A0
) CR_TAB
3865 if (AVR_HAVE_MUL
&& ldi_ok
)
3868 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3869 AS2 (muls
,%B0
,%A0
) CR_TAB
3870 AS2 (mov
,%A0
,r1
) CR_TAB
3871 AS2 (sbc
,%B0
,%B0
) CR_TAB
3872 AS1 (clr
,__zero_reg__
));
3874 if (optimize_size
&& scratch
)
3877 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3878 AS1 (lsl
,%B0
) CR_TAB
3879 AS2 (sbc
,%B0
,%B0
) CR_TAB
3880 AS1 (asr
,%A0
) CR_TAB
3881 AS1 (asr
,%A0
) CR_TAB
3885 if (AVR_HAVE_MUL
&& ldi_ok
)
3888 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3889 AS2 (muls
,%B0
,%A0
) CR_TAB
3890 AS2 (mov
,%A0
,r1
) CR_TAB
3891 AS2 (sbc
,%B0
,%B0
) CR_TAB
3892 AS1 (clr
,__zero_reg__
));
3894 if (optimize_size
&& scratch
)
3897 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3898 AS1 (lsl
,%B0
) CR_TAB
3899 AS2 (sbc
,%B0
,%B0
) CR_TAB
3900 AS1 (asr
,%A0
) CR_TAB
3901 AS1 (asr
,%A0
) CR_TAB
3902 AS1 (asr
,%A0
) CR_TAB
3906 if (AVR_HAVE_MUL
&& ldi_ok
)
3909 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3910 AS2 (muls
,%B0
,%A0
) CR_TAB
3911 AS2 (mov
,%A0
,r1
) CR_TAB
3912 AS2 (sbc
,%B0
,%B0
) CR_TAB
3913 AS1 (clr
,__zero_reg__
));
3916 break; /* scratch ? 5 : 7 */
3918 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3919 AS1 (lsl
,%B0
) CR_TAB
3920 AS2 (sbc
,%B0
,%B0
) CR_TAB
3921 AS1 (asr
,%A0
) CR_TAB
3922 AS1 (asr
,%A0
) CR_TAB
3923 AS1 (asr
,%A0
) CR_TAB
3924 AS1 (asr
,%A0
) CR_TAB
3929 return (AS1 (lsl
,%B0
) CR_TAB
3930 AS2 (sbc
,%A0
,%A0
) CR_TAB
3931 AS1 (lsl
,%B0
) CR_TAB
3932 AS2 (mov
,%B0
,%A0
) CR_TAB
3936 if (INTVAL (operands
[2]) < 16)
3942 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3943 AS2 (sbc
,%A0
,%A0
) CR_TAB
3948 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3950 insn
, operands
, len
, 2);
3955 /* 32bit arithmetic shift right ((signed long)x >> i) */
3958 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3960 if (GET_CODE (operands
[2]) == CONST_INT
)
3968 switch (INTVAL (operands
[2]))
3972 int reg0
= true_regnum (operands
[0]);
3973 int reg1
= true_regnum (operands
[1]);
3976 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3977 AS2 (mov
,%B0
,%C1
) CR_TAB
3978 AS2 (mov
,%C0
,%D1
) CR_TAB
3979 AS1 (clr
,%D0
) CR_TAB
3980 AS2 (sbrc
,%C0
,7) CR_TAB
3983 return (AS1 (clr
,%D0
) CR_TAB
3984 AS2 (sbrc
,%D1
,7) CR_TAB
3985 AS1 (dec
,%D0
) CR_TAB
3986 AS2 (mov
,%C0
,%D1
) CR_TAB
3987 AS2 (mov
,%B0
,%C1
) CR_TAB
3993 int reg0
= true_regnum (operands
[0]);
3994 int reg1
= true_regnum (operands
[1]);
3996 if (reg0
== reg1
+ 2)
3997 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
3998 AS2 (sbrc
,%B0
,7) CR_TAB
3999 AS1 (com
,%D0
) CR_TAB
4002 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
4003 AS1 (clr
,%D0
) CR_TAB
4004 AS2 (sbrc
,%B0
,7) CR_TAB
4005 AS1 (com
,%D0
) CR_TAB
4008 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
4009 AS2 (mov
,%A0
,%C1
) CR_TAB
4010 AS1 (clr
,%D0
) CR_TAB
4011 AS2 (sbrc
,%B0
,7) CR_TAB
4012 AS1 (com
,%D0
) CR_TAB
4017 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
4018 AS1 (clr
,%D0
) CR_TAB
4019 AS2 (sbrc
,%A0
,7) CR_TAB
4020 AS1 (com
,%D0
) CR_TAB
4021 AS2 (mov
,%B0
,%D0
) CR_TAB
4025 if (INTVAL (operands
[2]) < 32)
4032 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
4033 AS2 (sbc
,%A0
,%A0
) CR_TAB
4034 AS2 (mov
,%B0
,%A0
) CR_TAB
4035 AS2 (movw
,%C0
,%A0
));
4037 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
4038 AS2 (sbc
,%A0
,%A0
) CR_TAB
4039 AS2 (mov
,%B0
,%A0
) CR_TAB
4040 AS2 (mov
,%C0
,%A0
) CR_TAB
4045 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
4046 AS1 (ror
,%C0
) CR_TAB
4047 AS1 (ror
,%B0
) CR_TAB
4049 insn
, operands
, len
, 4);
4053 /* 8bit logic shift right ((unsigned char)x >> i) */
4056 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
4058 if (GET_CODE (operands
[2]) == CONST_INT
)
4065 switch (INTVAL (operands
[2]))
4068 if (INTVAL (operands
[2]) < 8)
4072 return AS1 (clr
,%0);
4076 return AS1 (lsr
,%0);
4080 return (AS1 (lsr
,%0) CR_TAB
4084 return (AS1 (lsr
,%0) CR_TAB
4089 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4092 return (AS1 (swap
,%0) CR_TAB
4093 AS2 (andi
,%0,0x0f));
4096 return (AS1 (lsr
,%0) CR_TAB
4102 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4105 return (AS1 (swap
,%0) CR_TAB
4110 return (AS1 (lsr
,%0) CR_TAB
4117 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4120 return (AS1 (swap
,%0) CR_TAB
4126 return (AS1 (lsr
,%0) CR_TAB
4135 return (AS1 (rol
,%0) CR_TAB
4140 else if (CONSTANT_P (operands
[2]))
4141 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4143 out_shift_with_cnt (AS1 (lsr
,%0),
4144 insn
, operands
, len
, 1);
4148 /* 16bit logic shift right ((unsigned short)x >> i) */
4151 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
4153 if (GET_CODE (operands
[2]) == CONST_INT
)
4155 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4156 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4163 switch (INTVAL (operands
[2]))
4166 if (INTVAL (operands
[2]) < 16)
4170 return (AS1 (clr
,%B0
) CR_TAB
4174 if (optimize_size
&& scratch
)
4179 return (AS1 (swap
,%B0
) CR_TAB
4180 AS1 (swap
,%A0
) CR_TAB
4181 AS2 (andi
,%A0
,0x0f) CR_TAB
4182 AS2 (eor
,%A0
,%B0
) CR_TAB
4183 AS2 (andi
,%B0
,0x0f) CR_TAB
4189 return (AS1 (swap
,%B0
) CR_TAB
4190 AS1 (swap
,%A0
) CR_TAB
4191 AS2 (ldi
,%3,0x0f) CR_TAB
4193 AS2 (eor
,%A0
,%B0
) CR_TAB
4197 break; /* optimize_size ? 6 : 8 */
4201 break; /* scratch ? 5 : 6 */
4205 return (AS1 (lsr
,%B0
) CR_TAB
4206 AS1 (ror
,%A0
) CR_TAB
4207 AS1 (swap
,%B0
) CR_TAB
4208 AS1 (swap
,%A0
) CR_TAB
4209 AS2 (andi
,%A0
,0x0f) CR_TAB
4210 AS2 (eor
,%A0
,%B0
) CR_TAB
4211 AS2 (andi
,%B0
,0x0f) CR_TAB
4217 return (AS1 (lsr
,%B0
) CR_TAB
4218 AS1 (ror
,%A0
) CR_TAB
4219 AS1 (swap
,%B0
) CR_TAB
4220 AS1 (swap
,%A0
) CR_TAB
4221 AS2 (ldi
,%3,0x0f) CR_TAB
4223 AS2 (eor
,%A0
,%B0
) CR_TAB
4231 break; /* scratch ? 5 : 6 */
4233 return (AS1 (clr
,__tmp_reg__
) CR_TAB
4234 AS1 (lsl
,%A0
) CR_TAB
4235 AS1 (rol
,%B0
) CR_TAB
4236 AS1 (rol
,__tmp_reg__
) CR_TAB
4237 AS1 (lsl
,%A0
) CR_TAB
4238 AS1 (rol
,%B0
) CR_TAB
4239 AS1 (rol
,__tmp_reg__
) CR_TAB
4240 AS2 (mov
,%A0
,%B0
) CR_TAB
4241 AS2 (mov
,%B0
,__tmp_reg__
));
4245 return (AS1 (lsl
,%A0
) CR_TAB
4246 AS2 (mov
,%A0
,%B0
) CR_TAB
4247 AS1 (rol
,%A0
) CR_TAB
4248 AS2 (sbc
,%B0
,%B0
) CR_TAB
4252 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
4257 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4258 AS1 (clr
,%B0
) CR_TAB
4263 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4264 AS1 (clr
,%B0
) CR_TAB
4265 AS1 (lsr
,%A0
) CR_TAB
4270 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4271 AS1 (clr
,%B0
) CR_TAB
4272 AS1 (lsr
,%A0
) CR_TAB
4273 AS1 (lsr
,%A0
) CR_TAB
4280 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4281 AS1 (clr
,%B0
) CR_TAB
4282 AS1 (swap
,%A0
) CR_TAB
4283 AS2 (andi
,%A0
,0x0f));
4288 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4289 AS1 (clr
,%B0
) CR_TAB
4290 AS1 (swap
,%A0
) CR_TAB
4291 AS2 (ldi
,%3,0x0f) CR_TAB
4295 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4296 AS1 (clr
,%B0
) CR_TAB
4297 AS1 (lsr
,%A0
) CR_TAB
4298 AS1 (lsr
,%A0
) CR_TAB
4299 AS1 (lsr
,%A0
) CR_TAB
4306 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4307 AS1 (clr
,%B0
) CR_TAB
4308 AS1 (swap
,%A0
) CR_TAB
4309 AS1 (lsr
,%A0
) CR_TAB
4310 AS2 (andi
,%A0
,0x07));
4312 if (AVR_HAVE_MUL
&& scratch
)
4315 return (AS2 (ldi
,%3,0x08) CR_TAB
4316 AS2 (mul
,%B0
,%3) CR_TAB
4317 AS2 (mov
,%A0
,r1
) CR_TAB
4318 AS1 (clr
,%B0
) CR_TAB
4319 AS1 (clr
,__zero_reg__
));
4321 if (optimize_size
&& scratch
)
4326 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4327 AS1 (clr
,%B0
) CR_TAB
4328 AS1 (swap
,%A0
) CR_TAB
4329 AS1 (lsr
,%A0
) CR_TAB
4330 AS2 (ldi
,%3,0x07) CR_TAB
4336 return ("set" CR_TAB
4337 AS2 (bld
,r1
,3) CR_TAB
4338 AS2 (mul
,%B0
,r1
) CR_TAB
4339 AS2 (mov
,%A0
,r1
) CR_TAB
4340 AS1 (clr
,%B0
) CR_TAB
4341 AS1 (clr
,__zero_reg__
));
4344 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4345 AS1 (clr
,%B0
) CR_TAB
4346 AS1 (lsr
,%A0
) CR_TAB
4347 AS1 (lsr
,%A0
) CR_TAB
4348 AS1 (lsr
,%A0
) CR_TAB
4349 AS1 (lsr
,%A0
) CR_TAB
4353 if (AVR_HAVE_MUL
&& ldi_ok
)
4356 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4357 AS2 (mul
,%B0
,%A0
) CR_TAB
4358 AS2 (mov
,%A0
,r1
) CR_TAB
4359 AS1 (clr
,%B0
) CR_TAB
4360 AS1 (clr
,__zero_reg__
));
4362 if (AVR_HAVE_MUL
&& scratch
)
4365 return (AS2 (ldi
,%3,0x04) CR_TAB
4366 AS2 (mul
,%B0
,%3) CR_TAB
4367 AS2 (mov
,%A0
,r1
) CR_TAB
4368 AS1 (clr
,%B0
) CR_TAB
4369 AS1 (clr
,__zero_reg__
));
4371 if (optimize_size
&& ldi_ok
)
4374 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4375 AS2 (ldi
,%B0
,6) "\n1:\t"
4376 AS1 (lsr
,%A0
) CR_TAB
4377 AS1 (dec
,%B0
) CR_TAB
4380 if (optimize_size
&& scratch
)
4383 return (AS1 (clr
,%A0
) CR_TAB
4384 AS1 (lsl
,%B0
) CR_TAB
4385 AS1 (rol
,%A0
) CR_TAB
4386 AS1 (lsl
,%B0
) CR_TAB
4387 AS1 (rol
,%A0
) CR_TAB
4392 return (AS1 (clr
,%A0
) CR_TAB
4393 AS1 (lsl
,%B0
) CR_TAB
4394 AS1 (rol
,%A0
) CR_TAB
4399 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4401 insn
, operands
, len
, 2);
4405 /* 32bit logic shift right ((unsigned int)x >> i) */
4408 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4410 if (GET_CODE (operands
[2]) == CONST_INT
)
4418 switch (INTVAL (operands
[2]))
4421 if (INTVAL (operands
[2]) < 32)
4425 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4426 AS1 (clr
,%C0
) CR_TAB
4427 AS2 (movw
,%A0
,%C0
));
4429 return (AS1 (clr
,%D0
) CR_TAB
4430 AS1 (clr
,%C0
) CR_TAB
4431 AS1 (clr
,%B0
) CR_TAB
4436 int reg0
= true_regnum (operands
[0]);
4437 int reg1
= true_regnum (operands
[1]);
4440 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4441 AS2 (mov
,%B0
,%C1
) CR_TAB
4442 AS2 (mov
,%C0
,%D1
) CR_TAB
4445 return (AS1 (clr
,%D0
) CR_TAB
4446 AS2 (mov
,%C0
,%D1
) CR_TAB
4447 AS2 (mov
,%B0
,%C1
) CR_TAB
4453 int reg0
= true_regnum (operands
[0]);
4454 int reg1
= true_regnum (operands
[1]);
4456 if (reg0
== reg1
+ 2)
4457 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4460 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4461 AS1 (clr
,%C0
) CR_TAB
4464 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4465 AS2 (mov
,%A0
,%C1
) CR_TAB
4466 AS1 (clr
,%C0
) CR_TAB
4471 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4472 AS1 (clr
,%B0
) CR_TAB
4473 AS1 (clr
,%C0
) CR_TAB
4478 return (AS1 (clr
,%A0
) CR_TAB
4479 AS2 (sbrc
,%D0
,7) CR_TAB
4480 AS1 (inc
,%A0
) CR_TAB
4481 AS1 (clr
,%B0
) CR_TAB
4482 AS1 (clr
,%C0
) CR_TAB
4487 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4488 AS1 (ror
,%C0
) CR_TAB
4489 AS1 (ror
,%B0
) CR_TAB
4491 insn
, operands
, len
, 4);
4495 /* Create RTL split patterns for byte sized rotate expressions. This
4496 produces a series of move instructions and considers overlap situations.
4497 Overlapping non-HImode operands need a scratch register. */
4500 avr_rotate_bytes (rtx operands
[])
4503 enum machine_mode mode
= GET_MODE (operands
[0]);
4504 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
4505 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
4506 int num
= INTVAL (operands
[2]);
4507 rtx scratch
= operands
[3];
4508 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4509 Word move if no scratch is needed, otherwise use size of scratch. */
4510 enum machine_mode move_mode
= QImode
;
4511 int move_size
, offset
, size
;
4515 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
4518 move_mode
= GET_MODE (scratch
);
4520 /* Force DI rotate to use QI moves since other DI moves are currently split
4521 into QI moves so forward propagation works better. */
4524 /* Make scratch smaller if needed. */
4525 if (GET_MODE (scratch
) == HImode
&& move_mode
== QImode
)
4526 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
4528 move_size
= GET_MODE_SIZE (move_mode
);
4529 /* Number of bytes/words to rotate. */
4530 offset
= (num
>> 3) / move_size
;
4531 /* Number of moves needed. */
4532 size
= GET_MODE_SIZE (mode
) / move_size
;
4533 /* Himode byte swap is special case to avoid a scratch register. */
4534 if (mode
== HImode
&& same_reg
)
4536 /* HImode byte swap, using xor. This is as quick as using scratch. */
4538 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
4539 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
4540 if (!rtx_equal_p (dst
, src
))
4542 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
4543 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
4544 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
4549 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4550 /* Create linked list of moves to determine move order. */
4554 } move
[MAX_SIZE
+ 8];
4557 gcc_assert (size
<= MAX_SIZE
);
4558 /* Generate list of subreg moves. */
4559 for (i
= 0; i
< size
; i
++)
4562 int to
= (from
+ offset
) % size
;
4563 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
4564 mode
, from
* move_size
);
4565 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
4566 mode
, to
* move_size
);
4569 /* Mark dependence where a dst of one move is the src of another move.
4570 The first move is a conflict as it must wait until second is
4571 performed. We ignore moves to self - we catch this later. */
4573 for (i
= 0; i
< size
; i
++)
4574 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
4575 for (j
= 0; j
< size
; j
++)
4576 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
4578 /* The dst of move i is the src of move j. */
4585 /* Go through move list and perform non-conflicting moves. As each
4586 non-overlapping move is made, it may remove other conflicts
4587 so the process is repeated until no conflicts remain. */
4592 /* Emit move where dst is not also a src or we have used that
4594 for (i
= 0; i
< size
; i
++)
4595 if (move
[i
].src
!= NULL_RTX
)
4597 if (move
[i
].links
== -1
4598 || move
[move
[i
].links
].src
== NULL_RTX
)
4601 /* Ignore NOP moves to self. */
4602 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
4603 emit_move_insn (move
[i
].dst
, move
[i
].src
);
4605 /* Remove conflict from list. */
4606 move
[i
].src
= NULL_RTX
;
4612 /* Check for deadlock. This is when no moves occurred and we have
4613 at least one blocked move. */
4614 if (moves
== 0 && blocked
!= -1)
4616 /* Need to use scratch register to break deadlock.
4617 Add move to put dst of blocked move into scratch.
4618 When this move occurs, it will break chain deadlock.
4619 The scratch register is substituted for real move. */
4621 move
[size
].src
= move
[blocked
].dst
;
4622 move
[size
].dst
= scratch
;
4623 /* Scratch move is never blocked. */
4624 move
[size
].links
= -1;
4625 /* Make sure we have valid link. */
4626 gcc_assert (move
[blocked
].links
!= -1);
4627 /* Replace src of blocking move with scratch reg. */
4628 move
[move
[blocked
].links
].src
= scratch
;
4629 /* Make dependent on scratch move occuring. */
4630 move
[blocked
].links
= size
;
4634 while (blocked
!= -1);
4639 /* Modifies the length assigned to instruction INSN
4640 LEN is the initially computed length of the insn. */
4643 adjust_insn_length (rtx insn
, int len
)
4645 rtx patt
= PATTERN (insn
);
4648 if (GET_CODE (patt
) == SET
)
4651 op
[1] = SET_SRC (patt
);
4652 op
[0] = SET_DEST (patt
);
4653 if (general_operand (op
[1], VOIDmode
)
4654 && general_operand (op
[0], VOIDmode
))
4656 switch (GET_MODE (op
[0]))
4659 output_movqi (insn
, op
, &len
);
4662 output_movhi (insn
, op
, &len
);
4666 output_movsisf (insn
, op
, &len
);
4672 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4674 switch (GET_MODE (op
[1]))
4676 case HImode
: out_tsthi (insn
, op
[1], &len
); break;
4677 case SImode
: out_tstsi (insn
, op
[1], &len
); break;
4681 else if (GET_CODE (op
[1]) == AND
)
4683 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4685 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4686 if (GET_MODE (op
[1]) == SImode
)
4687 len
= (((mask
& 0xff) != 0xff)
4688 + ((mask
& 0xff00) != 0xff00)
4689 + ((mask
& 0xff0000L
) != 0xff0000L
)
4690 + ((mask
& 0xff000000L
) != 0xff000000L
));
4691 else if (GET_MODE (op
[1]) == HImode
)
4692 len
= (((mask
& 0xff) != 0xff)
4693 + ((mask
& 0xff00) != 0xff00));
4696 else if (GET_CODE (op
[1]) == IOR
)
4698 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4700 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4701 if (GET_MODE (op
[1]) == SImode
)
4702 len
= (((mask
& 0xff) != 0)
4703 + ((mask
& 0xff00) != 0)
4704 + ((mask
& 0xff0000L
) != 0)
4705 + ((mask
& 0xff000000L
) != 0));
4706 else if (GET_MODE (op
[1]) == HImode
)
4707 len
= (((mask
& 0xff) != 0)
4708 + ((mask
& 0xff00) != 0));
4712 set
= single_set (insn
);
4717 op
[1] = SET_SRC (set
);
4718 op
[0] = SET_DEST (set
);
4720 if (GET_CODE (patt
) == PARALLEL
4721 && general_operand (op
[1], VOIDmode
)
4722 && general_operand (op
[0], VOIDmode
))
4724 if (XVECLEN (patt
, 0) == 2)
4725 op
[2] = XVECEXP (patt
, 0, 1);
4727 switch (GET_MODE (op
[0]))
4733 output_reload_inhi (insn
, op
, &len
);
4737 output_reload_insisf (insn
, op
, &len
);
4743 else if (GET_CODE (op
[1]) == ASHIFT
4744 || GET_CODE (op
[1]) == ASHIFTRT
4745 || GET_CODE (op
[1]) == LSHIFTRT
)
4749 ops
[1] = XEXP (op
[1],0);
4750 ops
[2] = XEXP (op
[1],1);
4751 switch (GET_CODE (op
[1]))
4754 switch (GET_MODE (op
[0]))
4756 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4757 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4758 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4763 switch (GET_MODE (op
[0]))
4765 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4766 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4767 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4772 switch (GET_MODE (op
[0]))
4774 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4775 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4776 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4788 /* Return nonzero if register REG dead after INSN. */
4791 reg_unused_after (rtx insn
, rtx reg
)
4793 return (dead_or_set_p (insn
, reg
)
4794 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
4797 /* Return nonzero if REG is not used after INSN.
4798 We assume REG is a reload reg, and therefore does
4799 not live past labels. It may live past calls or jumps though. */
4802 _reg_unused_after (rtx insn
, rtx reg
)
4807 /* If the reg is set by this instruction, then it is safe for our
4808 case. Disregard the case where this is a store to memory, since
4809 we are checking a register used in the store address. */
4810 set
= single_set (insn
);
4811 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4812 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4815 while ((insn
= NEXT_INSN (insn
)))
4818 code
= GET_CODE (insn
);
4821 /* If this is a label that existed before reload, then the register
4822 if dead here. However, if this is a label added by reorg, then
4823 the register may still be live here. We can't tell the difference,
4824 so we just ignore labels completely. */
4825 if (code
== CODE_LABEL
)
4833 if (code
== JUMP_INSN
)
4836 /* If this is a sequence, we must handle them all at once.
4837 We could have for instance a call that sets the target register,
4838 and an insn in a delay slot that uses the register. In this case,
4839 we must return 0. */
4840 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4845 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4847 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4848 rtx set
= single_set (this_insn
);
4850 if (GET_CODE (this_insn
) == CALL_INSN
)
4852 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4854 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4859 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4861 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4863 if (GET_CODE (SET_DEST (set
)) != MEM
)
4869 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4874 else if (code
== JUMP_INSN
)
4878 if (code
== CALL_INSN
)
4881 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4882 if (GET_CODE (XEXP (tem
, 0)) == USE
4883 && REG_P (XEXP (XEXP (tem
, 0), 0))
4884 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4886 if (call_used_regs
[REGNO (reg
)])
4890 set
= single_set (insn
);
4892 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4894 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4895 return GET_CODE (SET_DEST (set
)) != MEM
;
4896 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4902 /* Target hook for assembling integer objects. The AVR version needs
4903 special handling for references to certain labels. */
4906 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4908 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4909 && text_segment_operand (x
, VOIDmode
) )
4911 fputs ("\t.word\tgs(", asm_out_file
);
4912 output_addr_const (asm_out_file
, x
);
4913 fputs (")\n", asm_out_file
);
4916 return default_assemble_integer (x
, size
, aligned_p
);
4919 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4922 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
4925 /* If the function has the 'signal' or 'interrupt' attribute, test to
4926 make sure that the name of the function is "__vector_NN" so as to
4927 catch when the user misspells the interrupt vector name. */
4929 if (cfun
->machine
->is_interrupt
)
4931 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4933 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4934 "%qs appears to be a misspelled interrupt handler",
4938 else if (cfun
->machine
->is_signal
)
4940 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4942 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4943 "%qs appears to be a misspelled signal handler",
4948 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
4949 ASM_OUTPUT_LABEL (file
, name
);
4952 /* The routine used to output NUL terminated strings. We use a special
4953 version of this for most svr4 targets because doing so makes the
4954 generated assembly code more compact (and thus faster to assemble)
4955 as well as more readable, especially for targets like the i386
4956 (where the only alternative is to output character sequences as
4957 comma separated lists of numbers). */
4960 gas_output_limited_string(FILE *file
, const char *str
)
4962 const unsigned char *_limited_str
= (const unsigned char *) str
;
4964 fprintf (file
, "%s\"", STRING_ASM_OP
);
4965 for (; (ch
= *_limited_str
); _limited_str
++)
4968 switch (escape
= ESCAPES
[ch
])
4974 fprintf (file
, "\\%03o", ch
);
4978 putc (escape
, file
);
4982 fprintf (file
, "\"\n");
4985 /* The routine used to output sequences of byte values. We use a special
4986 version of this for most svr4 targets because doing so makes the
4987 generated assembly code more compact (and thus faster to assemble)
4988 as well as more readable. Note that if we find subparts of the
4989 character sequence which end with NUL (and which are shorter than
4990 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4993 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
4995 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
4996 const unsigned char *limit
= _ascii_bytes
+ length
;
4997 unsigned bytes_in_chunk
= 0;
4998 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
5000 const unsigned char *p
;
5001 if (bytes_in_chunk
>= 60)
5003 fprintf (file
, "\"\n");
5006 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
5008 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
5010 if (bytes_in_chunk
> 0)
5012 fprintf (file
, "\"\n");
5015 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
5022 if (bytes_in_chunk
== 0)
5023 fprintf (file
, "\t.ascii\t\"");
5024 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
5031 fprintf (file
, "\\%03o", ch
);
5032 bytes_in_chunk
+= 4;
5036 putc (escape
, file
);
5037 bytes_in_chunk
+= 2;
5042 if (bytes_in_chunk
> 0)
5043 fprintf (file
, "\"\n");
5046 /* Return value is nonzero if pseudos that have been
5047 assigned to registers of class CLASS would likely be spilled
5048 because registers of CLASS are needed for spill registers. */
5051 avr_class_likely_spilled_p (reg_class_t c
)
5053 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
5056 /* Valid attributes:
5057 progmem - put data to program memory;
5058 signal - make a function to be hardware interrupt. After function
5059 prologue interrupts are disabled;
5060 interrupt - make a function to be hardware interrupt. After function
5061 prologue interrupts are enabled;
5062 naked - don't generate function prologue/epilogue and `ret' command.
5064 Only `progmem' attribute valid for type. */
5066 /* Handle a "progmem" attribute; arguments as in
5067 struct attribute_spec.handler. */
5069 avr_handle_progmem_attribute (tree
*node
, tree name
,
5070 tree args ATTRIBUTE_UNUSED
,
5071 int flags ATTRIBUTE_UNUSED
,
5076 if (TREE_CODE (*node
) == TYPE_DECL
)
5078 /* This is really a decl attribute, not a type attribute,
5079 but try to handle it for GCC 3.0 backwards compatibility. */
5081 tree type
= TREE_TYPE (*node
);
5082 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
5083 tree newtype
= build_type_attribute_variant (type
, attr
);
5085 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
5086 TREE_TYPE (*node
) = newtype
;
5087 *no_add_attrs
= true;
5089 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
5091 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
5093 warning (0, "only initialized variables can be placed into "
5094 "program memory area");
5095 *no_add_attrs
= true;
5100 warning (OPT_Wattributes
, "%qE attribute ignored",
5102 *no_add_attrs
= true;
5109 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5110 struct attribute_spec.handler. */
5113 avr_handle_fndecl_attribute (tree
*node
, tree name
,
5114 tree args ATTRIBUTE_UNUSED
,
5115 int flags ATTRIBUTE_UNUSED
,
5118 if (TREE_CODE (*node
) != FUNCTION_DECL
)
5120 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
5122 *no_add_attrs
= true;
5129 avr_handle_fntype_attribute (tree
*node
, tree name
,
5130 tree args ATTRIBUTE_UNUSED
,
5131 int flags ATTRIBUTE_UNUSED
,
5134 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
5136 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
5138 *no_add_attrs
= true;
5144 /* Look for attribute `progmem' in DECL
5145 if found return 1, otherwise 0. */
5148 avr_progmem_p (tree decl
, tree attributes
)
5152 if (TREE_CODE (decl
) != VAR_DECL
)
5156 != lookup_attribute ("progmem", attributes
))
5162 while (TREE_CODE (a
) == ARRAY_TYPE
);
5164 if (a
== error_mark_node
)
5167 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
5173 /* Add the section attribute if the variable is in progmem. */
5176 avr_insert_attributes (tree node
, tree
*attributes
)
5178 if (TREE_CODE (node
) == VAR_DECL
5179 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
5180 && avr_progmem_p (node
, *attributes
))
5182 if (TREE_READONLY (node
))
5184 static const char dsec
[] = ".progmem.data";
5186 *attributes
= tree_cons (get_identifier ("section"),
5187 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
5192 error ("variable %q+D must be const in order to be put into"
5193 " read-only section by means of %<__attribute__((progmem))%>",
5199 /* A get_unnamed_section callback for switching to progmem_section. */
5202 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
5204 fprintf (asm_out_file
,
5205 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5206 AVR_HAVE_JMP_CALL
? "a" : "ax");
5207 /* Should already be aligned, this is just to be safe if it isn't. */
5208 fprintf (asm_out_file
, "\t.p2align 1\n");
5212 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5213 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5214 /* Track need of __do_clear_bss. */
5217 avr_asm_output_aligned_decl_common (FILE * stream
, const_tree decl ATTRIBUTE_UNUSED
,
5218 const char *name
, unsigned HOST_WIDE_INT size
,
5219 unsigned int align
, bool local_p
)
5221 avr_need_clear_bss_p
= true;
5225 fputs ("\t.local\t", stream
);
5226 assemble_name (stream
, name
);
5227 fputs ("\n", stream
);
5230 fputs ("\t.comm\t", stream
);
5231 assemble_name (stream
, name
);
5233 "," HOST_WIDE_INT_PRINT_UNSIGNED
",%u\n",
5234 size
, align
/ BITS_PER_UNIT
);
5238 /* Unnamed section callback for data_section
5239 to track need of __do_copy_data. */
5242 avr_output_data_section_asm_op (const void *data
)
5244 avr_need_copy_data_p
= true;
5246 /* Dispatch to default. */
5247 output_section_asm_op (data
);
5251 /* Unnamed section callback for bss_section
5252 to track need of __do_clear_bss. */
5255 avr_output_bss_section_asm_op (const void *data
)
5257 avr_need_clear_bss_p
= true;
5259 /* Dispatch to default. */
5260 output_section_asm_op (data
);
5264 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5267 avr_asm_init_sections (void)
5269 progmem_section
= get_unnamed_section (AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
,
5270 avr_output_progmem_section_asm_op
,
5272 readonly_data_section
= data_section
;
5274 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
5275 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
5279 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5280 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5283 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
5285 if (!avr_need_copy_data_p
)
5286 avr_need_copy_data_p
= (0 == strncmp (name
, ".data", 5)
5287 || 0 == strncmp (name
, ".rodata", 7)
5288 || 0 == strncmp (name
, ".gnu.linkonce.d", 15));
5290 if (!avr_need_clear_bss_p
)
5291 avr_need_clear_bss_p
= (0 == strncmp (name
, ".bss", 4));
5293 default_elf_asm_named_section (name
, flags
, decl
);
5297 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
5299 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
5301 if (strncmp (name
, ".noinit", 7) == 0)
5303 if (decl
&& TREE_CODE (decl
) == VAR_DECL
5304 && DECL_INITIAL (decl
) == NULL_TREE
)
5305 flags
|= SECTION_BSS
; /* @nobits */
5307 warning (0, "only uninitialized variables can be placed in the "
5315 /* Implement `TARGET_ASM_FILE_START'. */
5316 /* Outputs some appropriate text to go at the start of an assembler
5320 avr_file_start (void)
5322 if (avr_current_arch
->asm_only
)
5323 error ("MCU %qs supported for assembler only", avr_mcu_name
);
5325 default_file_start ();
5327 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
5328 fputs ("__SREG__ = 0x3f\n"
5330 "__SP_L__ = 0x3d\n", asm_out_file
);
5332 fputs ("__tmp_reg__ = 0\n"
5333 "__zero_reg__ = 1\n", asm_out_file
);
5337 /* Implement `TARGET_ASM_FILE_END'. */
5338 /* Outputs to the stdio stream FILE some
5339 appropriate text to go at the end of an assembler file. */
5344 /* Output these only if there is anything in the
5345 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5346 input section(s) - some code size can be saved by not
5347 linking in the initialization code from libgcc if resp.
5348 sections are empty. */
5350 if (avr_need_copy_data_p
)
5351 fputs (".global __do_copy_data\n", asm_out_file
);
5353 if (avr_need_clear_bss_p
)
5354 fputs (".global __do_clear_bss\n", asm_out_file
);
5357 /* Choose the order in which to allocate hard registers for
5358 pseudo-registers local to a basic block.
5360 Store the desired register order in the array `reg_alloc_order'.
5361 Element 0 should be the register to allocate first; element 1, the
5362 next register; and so on. */
5365 order_regs_for_local_alloc (void)
5368 static const int order_0
[] = {
5376 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5380 static const int order_1
[] = {
5388 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5392 static const int order_2
[] = {
5401 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5406 const int *order
= (TARGET_ORDER_1
? order_1
:
5407 TARGET_ORDER_2
? order_2
:
5409 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
5410 reg_alloc_order
[i
] = order
[i
];
5414 /* Implement `TARGET_REGISTER_MOVE_COST' */
5417 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
5418 reg_class_t from
, reg_class_t to
)
5420 return (from
== STACK_REG
? 6
5421 : to
== STACK_REG
? 12
5426 /* Implement `TARGET_MEMORY_MOVE_COST' */
5429 avr_memory_move_cost (enum machine_mode mode
, reg_class_t rclass ATTRIBUTE_UNUSED
,
5430 bool in ATTRIBUTE_UNUSED
)
5432 return (mode
== QImode
? 2
5433 : mode
== HImode
? 4
5434 : mode
== SImode
? 8
5435 : mode
== SFmode
? 8
5440 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5441 cost of an RTX operand given its context. X is the rtx of the
5442 operand, MODE is its mode, and OUTER is the rtx_code of this
5443 operand's parent operator. */
5446 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
5449 enum rtx_code code
= GET_CODE (x
);
5460 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5467 avr_rtx_costs (x
, code
, outer
, &total
, speed
);
5471 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5472 is to be calculated. Return true if the complete cost has been
5473 computed, and false if subexpressions should be scanned. In either
5474 case, *TOTAL contains the cost result. */
5477 avr_rtx_costs (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
, int *total
,
5480 enum rtx_code code
= (enum rtx_code
) codearg
;
5481 enum machine_mode mode
= GET_MODE (x
);
5488 /* Immediate constants are as cheap as registers. */
5496 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5504 *total
= COSTS_N_INSNS (1);
5508 *total
= COSTS_N_INSNS (3);
5512 *total
= COSTS_N_INSNS (7);
5518 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5526 *total
= COSTS_N_INSNS (1);
5532 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5536 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5537 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5541 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
5542 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5543 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5547 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
5548 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5549 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5556 *total
= COSTS_N_INSNS (1);
5557 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5558 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5562 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5564 *total
= COSTS_N_INSNS (2);
5565 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5567 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5568 *total
= COSTS_N_INSNS (1);
5570 *total
= COSTS_N_INSNS (2);
5574 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5576 *total
= COSTS_N_INSNS (4);
5577 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5579 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5580 *total
= COSTS_N_INSNS (1);
5582 *total
= COSTS_N_INSNS (4);
5588 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5594 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5595 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5596 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5597 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5601 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5602 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5603 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5611 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
5613 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5620 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
5622 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5630 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5631 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5639 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5642 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5643 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5650 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
5651 *total
= COSTS_N_INSNS (1);
5656 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
5657 *total
= COSTS_N_INSNS (3);
5662 if (CONST_INT_P (XEXP (x
, 1)))
5663 switch (INTVAL (XEXP (x
, 1)))
5667 *total
= COSTS_N_INSNS (5);
5670 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
5678 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5685 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5687 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5688 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5692 val
= INTVAL (XEXP (x
, 1));
5694 *total
= COSTS_N_INSNS (3);
5695 else if (val
>= 0 && val
<= 7)
5696 *total
= COSTS_N_INSNS (val
);
5698 *total
= COSTS_N_INSNS (1);
5703 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5705 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5706 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5709 switch (INTVAL (XEXP (x
, 1)))
5716 *total
= COSTS_N_INSNS (2);
5719 *total
= COSTS_N_INSNS (3);
5725 *total
= COSTS_N_INSNS (4);
5730 *total
= COSTS_N_INSNS (5);
5733 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5736 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5739 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
5742 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5743 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5748 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5750 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5751 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5754 switch (INTVAL (XEXP (x
, 1)))
5760 *total
= COSTS_N_INSNS (3);
5765 *total
= COSTS_N_INSNS (4);
5768 *total
= COSTS_N_INSNS (6);
5771 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5774 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5775 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5782 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5789 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5791 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5792 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5796 val
= INTVAL (XEXP (x
, 1));
5798 *total
= COSTS_N_INSNS (4);
5800 *total
= COSTS_N_INSNS (2);
5801 else if (val
>= 0 && val
<= 7)
5802 *total
= COSTS_N_INSNS (val
);
5804 *total
= COSTS_N_INSNS (1);
5809 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5811 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5812 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5815 switch (INTVAL (XEXP (x
, 1)))
5821 *total
= COSTS_N_INSNS (2);
5824 *total
= COSTS_N_INSNS (3);
5830 *total
= COSTS_N_INSNS (4);
5834 *total
= COSTS_N_INSNS (5);
5837 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5840 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5844 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5847 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5848 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5853 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5855 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5856 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5859 switch (INTVAL (XEXP (x
, 1)))
5865 *total
= COSTS_N_INSNS (4);
5870 *total
= COSTS_N_INSNS (6);
5873 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5876 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5879 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5880 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5887 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5894 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5896 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5897 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5901 val
= INTVAL (XEXP (x
, 1));
5903 *total
= COSTS_N_INSNS (3);
5904 else if (val
>= 0 && val
<= 7)
5905 *total
= COSTS_N_INSNS (val
);
5907 *total
= COSTS_N_INSNS (1);
5912 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5914 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5915 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5918 switch (INTVAL (XEXP (x
, 1)))
5925 *total
= COSTS_N_INSNS (2);
5928 *total
= COSTS_N_INSNS (3);
5933 *total
= COSTS_N_INSNS (4);
5937 *total
= COSTS_N_INSNS (5);
5943 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5946 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5950 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5953 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5954 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5959 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5961 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5962 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5965 switch (INTVAL (XEXP (x
, 1)))
5971 *total
= COSTS_N_INSNS (4);
5974 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5979 *total
= COSTS_N_INSNS (4);
5982 *total
= COSTS_N_INSNS (6);
5985 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5986 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5993 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5997 switch (GET_MODE (XEXP (x
, 0)))
6000 *total
= COSTS_N_INSNS (1);
6001 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
6002 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
6006 *total
= COSTS_N_INSNS (2);
6007 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
6008 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
6009 else if (INTVAL (XEXP (x
, 1)) != 0)
6010 *total
+= COSTS_N_INSNS (1);
6014 *total
= COSTS_N_INSNS (4);
6015 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
6016 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
6017 else if (INTVAL (XEXP (x
, 1)) != 0)
6018 *total
+= COSTS_N_INSNS (3);
6024 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
6033 /* Calculate the cost of a memory address. */
6036 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
6038 if (GET_CODE (x
) == PLUS
6039 && GET_CODE (XEXP (x
,1)) == CONST_INT
6040 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
6041 && INTVAL (XEXP (x
,1)) >= 61)
6043 if (CONSTANT_ADDRESS_P (x
))
6045 if (optimize
> 0 && io_address_operand (x
, QImode
))
6052 /* Test for extra memory constraint 'Q'.
6053 It's a memory address based on Y or Z pointer with valid displacement. */
6056 extra_constraint_Q (rtx x
)
6058 if (GET_CODE (XEXP (x
,0)) == PLUS
6059 && REG_P (XEXP (XEXP (x
,0), 0))
6060 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
6061 && (INTVAL (XEXP (XEXP (x
,0), 1))
6062 <= MAX_LD_OFFSET (GET_MODE (x
))))
6064 rtx xx
= XEXP (XEXP (x
,0), 0);
6065 int regno
= REGNO (xx
);
6066 if (TARGET_ALL_DEBUG
)
6068 fprintf (stderr
, ("extra_constraint:\n"
6069 "reload_completed: %d\n"
6070 "reload_in_progress: %d\n"),
6071 reload_completed
, reload_in_progress
);
6074 if (regno
>= FIRST_PSEUDO_REGISTER
)
6075 return 1; /* allocate pseudos */
6076 else if (regno
== REG_Z
|| regno
== REG_Y
)
6077 return 1; /* strictly check */
6078 else if (xx
== frame_pointer_rtx
6079 || xx
== arg_pointer_rtx
)
6080 return 1; /* XXX frame & arg pointer checks */
6085 /* Convert condition code CONDITION to the valid AVR condition code. */
6088 avr_normalize_condition (RTX_CODE condition
)
6105 /* This function optimizes conditional jumps. */
6112 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
6114 if (! (GET_CODE (insn
) == INSN
6115 || GET_CODE (insn
) == CALL_INSN
6116 || GET_CODE (insn
) == JUMP_INSN
)
6117 || !single_set (insn
))
6120 pattern
= PATTERN (insn
);
6122 if (GET_CODE (pattern
) == PARALLEL
)
6123 pattern
= XVECEXP (pattern
, 0, 0);
6124 if (GET_CODE (pattern
) == SET
6125 && SET_DEST (pattern
) == cc0_rtx
6126 && compare_diff_p (insn
))
6128 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
6130 /* Now we work under compare insn. */
6132 pattern
= SET_SRC (pattern
);
6133 if (true_regnum (XEXP (pattern
,0)) >= 0
6134 && true_regnum (XEXP (pattern
,1)) >= 0 )
6136 rtx x
= XEXP (pattern
,0);
6137 rtx next
= next_real_insn (insn
);
6138 rtx pat
= PATTERN (next
);
6139 rtx src
= SET_SRC (pat
);
6140 rtx t
= XEXP (src
,0);
6141 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
6142 XEXP (pattern
,0) = XEXP (pattern
,1);
6143 XEXP (pattern
,1) = x
;
6144 INSN_CODE (next
) = -1;
6146 else if (true_regnum (XEXP (pattern
, 0)) >= 0
6147 && XEXP (pattern
, 1) == const0_rtx
)
6149 /* This is a tst insn, we can reverse it. */
6150 rtx next
= next_real_insn (insn
);
6151 rtx pat
= PATTERN (next
);
6152 rtx src
= SET_SRC (pat
);
6153 rtx t
= XEXP (src
,0);
6155 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
6156 XEXP (pattern
, 1) = XEXP (pattern
, 0);
6157 XEXP (pattern
, 0) = const0_rtx
;
6158 INSN_CODE (next
) = -1;
6159 INSN_CODE (insn
) = -1;
6161 else if (true_regnum (XEXP (pattern
,0)) >= 0
6162 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
6164 rtx x
= XEXP (pattern
,1);
6165 rtx next
= next_real_insn (insn
);
6166 rtx pat
= PATTERN (next
);
6167 rtx src
= SET_SRC (pat
);
6168 rtx t
= XEXP (src
,0);
6169 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
6171 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
6173 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
6174 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
6175 INSN_CODE (next
) = -1;
6176 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.

   AVR returns scalar values in r25:r24 (and downwards for wider
   modes); 24 is the low register of that pair.
   NOTE(review): the return statement was lost in extraction; the value
   24 is restored from the AVR ABI — confirm against upstream.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
6192 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6195 avr_function_value_regno_p (const unsigned int regno
)
6197 return (regno
== avr_ret_register ());
6200 /* Create an RTX representing the place where a
6201 library function returns a value of mode MODE. */
6204 avr_libcall_value (enum machine_mode mode
,
6205 const_rtx func ATTRIBUTE_UNUSED
)
6207 int offs
= GET_MODE_SIZE (mode
);
6210 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
6213 /* Create an RTX representing the place where a
6214 function returns a value of data type VALTYPE. */
6217 avr_function_value (const_tree type
,
6218 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
6219 bool outgoing ATTRIBUTE_UNUSED
)
6223 if (TYPE_MODE (type
) != BLKmode
)
6224 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
6226 offs
= int_size_in_bytes (type
);
6229 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
6230 offs
= GET_MODE_SIZE (SImode
);
6231 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
6232 offs
= GET_MODE_SIZE (DImode
);
6234 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
6238 test_hard_reg_class (enum reg_class rclass
, rtx x
)
6240 int regno
= true_regnum (x
);
6244 if (TEST_HARD_REG_CLASS (rclass
, regno
))
6252 jump_over_one_insn_p (rtx insn
, rtx dest
)
6254 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
6257 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
6258 int dest_addr
= INSN_ADDRESSES (uid
);
6259 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
6262 /* Returns 1 if a value of mode MODE can be stored starting with hard
6263 register number REGNO. On the enhanced core, anything larger than
6264 1 byte must start in even numbered register for "movw" to work
6265 (this way we don't have to check for odd registers everywhere). */
6268 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
6270 /* Disallow QImode in stack pointer regs. */
6271 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
6274 /* The only thing that can go into registers r28:r29 is a Pmode. */
6275 if (regno
== REG_Y
&& mode
== Pmode
)
6278 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6279 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
6285 /* Modes larger than QImode occupy consecutive registers. */
6286 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
6289 /* All modes larger than QImode should start in an even register. */
6290 return !(regno
& 1);
6294 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
6300 if (GET_CODE (operands
[1]) == CONST_INT
)
6302 int val
= INTVAL (operands
[1]);
6303 if ((val
& 0xff) == 0)
6306 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
6307 AS2 (ldi
,%2,hi8(%1)) CR_TAB
6310 else if ((val
& 0xff00) == 0)
6313 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6314 AS2 (mov
,%A0
,%2) CR_TAB
6315 AS2 (mov
,%B0
,__zero_reg__
));
6317 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
6320 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6321 AS2 (mov
,%A0
,%2) CR_TAB
6326 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6327 AS2 (mov
,%A0
,%2) CR_TAB
6328 AS2 (ldi
,%2,hi8(%1)) CR_TAB
6334 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
6336 rtx src
= operands
[1];
6337 int cnst
= (GET_CODE (src
) == CONST_INT
);
6342 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
6343 + ((INTVAL (src
) & 0xff00) != 0)
6344 + ((INTVAL (src
) & 0xff0000) != 0)
6345 + ((INTVAL (src
) & 0xff000000) != 0);
6352 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
6353 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
6356 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
6357 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
6359 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
6360 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
6363 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
6364 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
6366 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
6367 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
6370 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
6371 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
6373 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
6374 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
6377 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
6378 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
6384 avr_output_bld (rtx operands
[], int bit_nr
)
6386 static char s
[] = "bld %A0,0";
6388 s
[5] = 'A' + (bit_nr
>> 3);
6389 s
[8] = '0' + (bit_nr
& 7);
6390 output_asm_insn (s
, operands
);
6394 avr_output_addr_vec_elt (FILE *stream
, int value
)
6396 switch_to_section (progmem_section
);
6397 if (AVR_HAVE_JMP_CALL
)
6398 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
6400 fprintf (stream
, "\trjmp .L%d\n", value
);
6403 /* Returns true if SCRATCH are safe to be allocated as a scratch
6404 registers (for a define_peephole2) in the current function. */
6407 avr_hard_regno_scratch_ok (unsigned int regno
)
6409 /* Interrupt functions can only use registers that have already been saved
6410 by the prologue, even if they would normally be call-clobbered. */
6412 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6413 && !df_regs_ever_live_p (regno
))
6419 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6422 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
6423 unsigned int new_reg
)
6425 /* Interrupt functions can only use registers that have already been
6426 saved by the prologue, even if they would normally be
6429 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6430 && !df_regs_ever_live_p (new_reg
))
6436 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6437 or memory location in the I/O space (QImode only).
6439 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6440 Operand 1: register operand to test, or CONST_INT memory address.
6441 Operand 2: bit number.
6442 Operand 3: label to jump to if the test is true. */
6445 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
6447 enum rtx_code comp
= GET_CODE (operands
[0]);
6448 int long_jump
= (get_attr_length (insn
) >= 4);
6449 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
6453 else if (comp
== LT
)
6457 comp
= reverse_condition (comp
);
6459 if (GET_CODE (operands
[1]) == CONST_INT
)
6461 if (INTVAL (operands
[1]) < 0x40)
6464 output_asm_insn (AS2 (sbis
,%m1
-0x20,%2), operands
);
6466 output_asm_insn (AS2 (sbic
,%m1
-0x20,%2), operands
);
6470 output_asm_insn (AS2 (in
,__tmp_reg__
,%m1
-0x20), operands
);
6472 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
6474 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
6477 else /* GET_CODE (operands[1]) == REG */
6479 if (GET_MODE (operands
[1]) == QImode
)
6482 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
6484 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
6486 else /* HImode or SImode */
6488 static char buf
[] = "sbrc %A1,0";
6489 int bit_nr
= INTVAL (operands
[2]);
6490 buf
[3] = (comp
== EQ
) ? 's' : 'c';
6491 buf
[6] = 'A' + (bit_nr
>> 3);
6492 buf
[9] = '0' + (bit_nr
& 7);
6493 output_asm_insn (buf
, operands
);
6498 return (AS1 (rjmp
,.+4) CR_TAB
6501 return AS1 (rjmp
,%x3
);
6505 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6508 avr_asm_out_ctor (rtx symbol
, int priority
)
6510 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
6511 default_ctor_section_asm_out_constructor (symbol
, priority
);
6514 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6517 avr_asm_out_dtor (rtx symbol
, int priority
)
6519 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
6520 default_dtor_section_asm_out_destructor (symbol
, priority
);
6523 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6526 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
6528 if (TYPE_MODE (type
) == BLKmode
)
6530 HOST_WIDE_INT size
= int_size_in_bytes (type
);
6531 return (size
== -1 || size
> 8);
6537 /* Worker function for CASE_VALUES_THRESHOLD. */
6539 unsigned int avr_case_values_threshold (void)
6541 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
6544 /* Helper for __builtin_avr_delay_cycles */
6547 avr_expand_delay_cycles (rtx operands0
)
6549 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
);
6550 unsigned HOST_WIDE_INT cycles_used
;
6551 unsigned HOST_WIDE_INT loop_count
;
6553 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
6555 loop_count
= ((cycles
- 9) / 6) + 1;
6556 cycles_used
= ((loop_count
- 1) * 6) + 9;
6557 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
)));
6558 cycles
-= cycles_used
;
6561 if (IN_RANGE (cycles
, 262145, 83886081))
6563 loop_count
= ((cycles
- 7) / 5) + 1;
6564 if (loop_count
> 0xFFFFFF)
6565 loop_count
= 0xFFFFFF;
6566 cycles_used
= ((loop_count
- 1) * 5) + 7;
6567 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
)));
6568 cycles
-= cycles_used
;
6571 if (IN_RANGE (cycles
, 768, 262144))
6573 loop_count
= ((cycles
- 5) / 4) + 1;
6574 if (loop_count
> 0xFFFF)
6575 loop_count
= 0xFFFF;
6576 cycles_used
= ((loop_count
- 1) * 4) + 5;
6577 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
)));
6578 cycles
-= cycles_used
;
6581 if (IN_RANGE (cycles
, 6, 767))
6583 loop_count
= cycles
/ 3;
6584 if (loop_count
> 255)
6586 cycles_used
= loop_count
* 3;
6587 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
)));
6588 cycles
-= cycles_used
;
6593 emit_insn (gen_nopv (GEN_INT(2)));
6599 emit_insn (gen_nopv (GEN_INT(1)));
/* IDs for all the AVR builtins.
   NOTE(review): only the last enumerator survived extraction; the
   others are restored from the DEF_BUILTIN calls in avr_init_builtins
   — confirm order against upstream, as DECL_FUNCTION_CODE depends
   on these values.  */

enum avr_builtin_id
  {
    AVR_BUILTIN_NOP,
    AVR_BUILTIN_SEI,
    AVR_BUILTIN_CLI,
    AVR_BUILTIN_WDR,
    AVR_BUILTIN_SLEEP,
    AVR_BUILTIN_SWAP,
    AVR_BUILTIN_FMUL,
    AVR_BUILTIN_FMULS,
    AVR_BUILTIN_FMULSU,
    AVR_BUILTIN_DELAY_CYCLES
  };
/* Register builtin NAME with function type TYPE and function code
   CODE (one of enum avr_builtin_id) as a machine-specific builtin.
   NOTE(review): the macro tail was lost in extraction and has been
   restored — confirm the trailing arguments against upstream.  */
#define DEF_BUILTIN(NAME, TYPE, CODE)					\
  do									\
    {									\
      add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			    NULL, NULL_TREE);				\
    } while (0)
6628 /* Implement `TARGET_INIT_BUILTINS' */
6629 /* Set up all builtin functions for this target. */
6632 avr_init_builtins (void)
6634 tree void_ftype_void
6635 = build_function_type_list (void_type_node
, NULL_TREE
);
6636 tree uchar_ftype_uchar
6637 = build_function_type_list (unsigned_char_type_node
,
6638 unsigned_char_type_node
,
6640 tree uint_ftype_uchar_uchar
6641 = build_function_type_list (unsigned_type_node
,
6642 unsigned_char_type_node
,
6643 unsigned_char_type_node
,
6645 tree int_ftype_char_char
6646 = build_function_type_list (integer_type_node
,
6650 tree int_ftype_char_uchar
6651 = build_function_type_list (integer_type_node
,
6653 unsigned_char_type_node
,
6655 tree void_ftype_ulong
6656 = build_function_type_list (void_type_node
,
6657 long_unsigned_type_node
,
6660 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void
, AVR_BUILTIN_NOP
);
6661 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void
, AVR_BUILTIN_SEI
);
6662 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void
, AVR_BUILTIN_CLI
);
6663 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void
, AVR_BUILTIN_WDR
);
6664 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void
, AVR_BUILTIN_SLEEP
);
6665 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar
, AVR_BUILTIN_SWAP
);
6666 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong
,
6667 AVR_BUILTIN_DELAY_CYCLES
);
6671 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6672 in libgcc. For fmul and fmuls this is straight forward with
6673 upcoming fixed point support. */
6675 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar
,
6677 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char
,
6679 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar
,
6680 AVR_BUILTIN_FMULSU
);
6686 struct avr_builtin_description
6688 const enum insn_code icode
;
6689 const char *const name
;
6690 const enum avr_builtin_id id
;
6693 static const struct avr_builtin_description
6696 { CODE_FOR_rotlqi3_4
, "__builtin_avr_swap", AVR_BUILTIN_SWAP
}
6699 static const struct avr_builtin_description
6702 { CODE_FOR_fmul
, "__builtin_avr_fmul", AVR_BUILTIN_FMUL
},
6703 { CODE_FOR_fmuls
, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS
},
6704 { CODE_FOR_fmulsu
, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU
}
6707 /* Subroutine of avr_expand_builtin to take care of unop insns. */
6710 avr_expand_unop_builtin (enum insn_code icode
, tree exp
,
6714 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6715 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6716 enum machine_mode op0mode
= GET_MODE (op0
);
6717 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6718 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6721 || GET_MODE (target
) != tmode
6722 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6724 target
= gen_reg_rtx (tmode
);
6727 if (op0mode
== SImode
&& mode0
== HImode
)
6730 op0
= gen_lowpart (HImode
, op0
);
6733 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
6735 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6736 op0
= copy_to_mode_reg (mode0
, op0
);
6738 pat
= GEN_FCN (icode
) (target
, op0
);
6748 /* Subroutine of avr_expand_builtin to take care of binop insns. */
6751 avr_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
)
6754 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6755 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6756 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6757 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6758 enum machine_mode op0mode
= GET_MODE (op0
);
6759 enum machine_mode op1mode
= GET_MODE (op1
);
6760 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6761 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6762 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6765 || GET_MODE (target
) != tmode
6766 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6768 target
= gen_reg_rtx (tmode
);
6771 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
6774 op0
= gen_lowpart (HImode
, op0
);
6777 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
6780 op1
= gen_lowpart (HImode
, op1
);
6783 /* In case the insn wants input operands in modes different from
6784 the result, abort. */
6786 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
6787 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
6789 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6790 op0
= copy_to_mode_reg (mode0
, op0
);
6792 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6793 op1
= copy_to_mode_reg (mode1
, op1
);
6795 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
6805 /* Expand an expression EXP that calls a built-in function,
6806 with result going to TARGET if that's convenient
6807 (and in mode MODE if that's convenient).
6808 SUBTARGET may be used as the target for computing one of EXP's operands.
6809 IGNORE is nonzero if the value is to be ignored. */
6812 avr_expand_builtin (tree exp
, rtx target
,
6813 rtx subtarget ATTRIBUTE_UNUSED
,
6814 enum machine_mode mode ATTRIBUTE_UNUSED
,
6815 int ignore ATTRIBUTE_UNUSED
)
6818 const struct avr_builtin_description
*d
;
6819 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6820 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
6826 case AVR_BUILTIN_NOP
:
6827 emit_insn (gen_nopv (GEN_INT(1)));
6830 case AVR_BUILTIN_SEI
:
6831 emit_insn (gen_enable_interrupt ());
6834 case AVR_BUILTIN_CLI
:
6835 emit_insn (gen_disable_interrupt ());
6838 case AVR_BUILTIN_WDR
:
6839 emit_insn (gen_wdr ());
6842 case AVR_BUILTIN_SLEEP
:
6843 emit_insn (gen_sleep ());
6846 case AVR_BUILTIN_DELAY_CYCLES
:
6848 arg0
= CALL_EXPR_ARG (exp
, 0);
6849 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6851 if (! CONST_INT_P (op0
))
6852 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6854 avr_expand_delay_cycles (op0
);
6859 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6861 return avr_expand_unop_builtin (d
->icode
, exp
, target
);
6863 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
6865 return avr_expand_binop_builtin (d
->icode
, exp
, target
);