1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree
);
56 static int interrupt_function_p (tree
);
57 static int signal_function_p (tree
);
58 static int avr_OS_task_function_p (tree
);
59 static int avr_OS_main_function_p (tree
);
60 static int avr_regs_to_save (HARD_REG_SET
*);
61 static int get_sequence_length (rtx insns
);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code
);
65 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
67 static RTX_CODE
compare_condition (rtx insn
);
68 static rtx
avr_legitimize_address (rtx
, rtx
, enum machine_mode
);
69 static int compare_sign_p (rtx insn
);
70 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
71 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
72 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
73 static bool avr_assemble_integer (rtx
, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode
, rtx
, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx
avr_function_value (const_tree
, const_tree
, bool);
81 static rtx
avr_libcall_value (enum machine_mode
, const_rtx
);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree
, tree
*);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree
, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx
, int);
89 static void avr_asm_out_dtor (rtx
, int);
90 static int avr_register_move_cost (enum machine_mode
, reg_class_t
, reg_class_t
);
91 static int avr_memory_move_cost (enum machine_mode
, reg_class_t
, bool);
92 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
, bool);
93 static bool avr_rtx_costs (rtx
, int, int, int *, bool);
94 static int avr_address_cost (rtx
, bool);
95 static bool avr_return_in_memory (const_tree
, const_tree
);
96 static struct machine_function
* avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx
avr_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
99 static rtx
avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c
);
105 static rtx
avr_function_arg (CUMULATIVE_ARGS
*, enum machine_mode
,
107 static void avr_function_arg_advance (CUMULATIVE_ARGS
*, enum machine_mode
,
109 static void avr_help (void);
110 static bool avr_function_ok_for_sibcall (tree
, tree
);
111 static void avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
);
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx
;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx
;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames
[] = REGISTER_NAMES
;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro
;
128 /* Current architecture. */
129 const struct base_arch_s
*avr_current_arch
;
131 /* Current device. */
132 const struct mcu_type_s
*avr_current_device
;
134 section
*progmem_section
;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p
= false;
138 bool avr_need_copy_data_p
= false;
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table
[] =
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
157 { NULL
, 0, 0, false, false, false, NULL
, false }
160 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
161 static const struct default_options avr_option_optimization_table
[] =
163 { OPT_LEVELS_1_PLUS
, OPT_fomit_frame_pointer
, NULL
, 1 },
164 { OPT_LEVELS_NONE
, 0, NULL
, 0 }
167 /* Initialize the GCC target structure. */
168 #undef TARGET_ASM_ALIGNED_HI_OP
169 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
170 #undef TARGET_ASM_ALIGNED_SI_OP
171 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
172 #undef TARGET_ASM_UNALIGNED_HI_OP
173 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
174 #undef TARGET_ASM_UNALIGNED_SI_OP
175 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
176 #undef TARGET_ASM_INTEGER
177 #define TARGET_ASM_INTEGER avr_assemble_integer
178 #undef TARGET_ASM_FILE_START
179 #define TARGET_ASM_FILE_START avr_file_start
180 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
181 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
182 #undef TARGET_ASM_FILE_END
183 #define TARGET_ASM_FILE_END avr_file_end
185 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
186 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
187 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
188 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
190 #undef TARGET_FUNCTION_VALUE
191 #define TARGET_FUNCTION_VALUE avr_function_value
192 #undef TARGET_LIBCALL_VALUE
193 #define TARGET_LIBCALL_VALUE avr_libcall_value
194 #undef TARGET_FUNCTION_VALUE_REGNO_P
195 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
197 #undef TARGET_ATTRIBUTE_TABLE
198 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
199 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
200 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
201 #undef TARGET_INSERT_ATTRIBUTES
202 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
203 #undef TARGET_SECTION_TYPE_FLAGS
204 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
206 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
208 #undef TARGET_ASM_INIT_SECTIONS
209 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_REGISTER_MOVE_COST
212 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
213 #undef TARGET_MEMORY_MOVE_COST
214 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
215 #undef TARGET_RTX_COSTS
216 #define TARGET_RTX_COSTS avr_rtx_costs
217 #undef TARGET_ADDRESS_COST
218 #define TARGET_ADDRESS_COST avr_address_cost
219 #undef TARGET_MACHINE_DEPENDENT_REORG
220 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
221 #undef TARGET_FUNCTION_ARG
222 #define TARGET_FUNCTION_ARG avr_function_arg
223 #undef TARGET_FUNCTION_ARG_ADVANCE
224 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
226 #undef TARGET_LEGITIMIZE_ADDRESS
227 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
229 #undef TARGET_RETURN_IN_MEMORY
230 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
232 #undef TARGET_STRICT_ARGUMENT_NAMING
233 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
235 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
236 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
238 #undef TARGET_HARD_REGNO_SCRATCH_OK
239 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
240 #undef TARGET_CASE_VALUES_THRESHOLD
241 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
243 #undef TARGET_LEGITIMATE_ADDRESS_P
244 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
246 #undef TARGET_FRAME_POINTER_REQUIRED
247 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
248 #undef TARGET_CAN_ELIMINATE
249 #define TARGET_CAN_ELIMINATE avr_can_eliminate
251 #undef TARGET_CLASS_LIKELY_SPILLED_P
252 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
254 #undef TARGET_OPTION_OVERRIDE
255 #define TARGET_OPTION_OVERRIDE avr_option_override
257 #undef TARGET_OPTION_OPTIMIZATION_TABLE
258 #define TARGET_OPTION_OPTIMIZATION_TABLE avr_option_optimization_table
260 #undef TARGET_CANNOT_MODIFY_JUMPS_P
261 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
264 #define TARGET_HELP avr_help
266 #undef TARGET_EXCEPT_UNWIND_INFO
267 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
269 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
270 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
272 #undef TARGET_INIT_BUILTINS
273 #define TARGET_INIT_BUILTINS avr_init_builtins
275 #undef TARGET_EXPAND_BUILTIN
276 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
279 struct gcc_target targetm
= TARGET_INITIALIZER
;
282 avr_option_override (void)
284 const struct mcu_type_s
*t
;
286 flag_delete_null_pointer_checks
= 0;
288 for (t
= avr_mcu_types
; t
->name
; t
++)
289 if (strcmp (t
->name
, avr_mcu_name
) == 0)
294 error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name
);
295 inform (input_location
, "See --target-help for supported MCUs");
298 avr_current_device
= t
;
299 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
300 avr_extra_arch_macro
= avr_current_device
->macro
;
302 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
303 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
305 init_machine_status
= avr_init_machine_status
;
308 /* Implement TARGET_HELP */
309 /* Report extra information for --target-help */
314 const struct mcu_type_s
*t
;
315 const char * const indent
= " ";
318 /* Give a list of MCUs that are accepted by -mmcu=* .
319 Note that MCUs supported by the compiler might differ from
320 MCUs supported by binutils. */
322 len
= strlen (indent
);
323 printf ("Known MCU names:\n%s", indent
);
325 /* Print a blank-separated list of all supported MCUs */
327 for (t
= avr_mcu_types
; t
->name
; t
++)
329 printf ("%s ", t
->name
);
330 len
+= 1 + strlen (t
->name
);
332 /* Break long lines */
334 if (len
> 66 && (t
+1)->name
)
336 printf ("\n%s", indent
);
337 len
= strlen (indent
);
344 /* return register class from register number. */
346 static const enum reg_class reg_class_tab
[]={
347 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
348 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
349 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
350 GENERAL_REGS
, /* r0 - r15 */
351 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
352 LD_REGS
, /* r16 - 23 */
353 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
354 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
355 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
356 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
357 STACK_REG
,STACK_REG
/* SPL,SPH */
/* Function to set up the backend function structure.  Allocates a
   zero-initialized machine_function in GC memory.  */
static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
368 /* Return register class for register R. */
371 avr_regno_reg_class (int r
)
374 return reg_class_tab
[r
];
378 /* A helper for the subsequent function attribute used to dig for
379 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
382 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
384 if (FUNCTION_DECL
== TREE_CODE (func
))
386 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
391 func
= TREE_TYPE (func
);
394 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
395 || TREE_CODE (func
) == METHOD_TYPE
);
397 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
400 /* Return nonzero if FUNC is a naked function. */
403 avr_naked_function_p (tree func
)
405 return avr_lookup_function_attribute1 (func
, "naked");
408 /* Return nonzero if FUNC is an interrupt function as specified
409 by the "interrupt" attribute. */
412 interrupt_function_p (tree func
)
414 return avr_lookup_function_attribute1 (func
, "interrupt");
417 /* Return nonzero if FUNC is a signal function as specified
418 by the "signal" attribute. */
421 signal_function_p (tree func
)
423 return avr_lookup_function_attribute1 (func
, "signal");
426 /* Return nonzero if FUNC is a OS_task function. */
429 avr_OS_task_function_p (tree func
)
431 return avr_lookup_function_attribute1 (func
, "OS_task");
434 /* Return nonzero if FUNC is a OS_main function. */
437 avr_OS_main_function_p (tree func
)
439 return avr_lookup_function_attribute1 (func
, "OS_main");
442 /* Return the number of hard registers to push/pop in the prologue/epilogue
443 of the current function, and optionally store these registers in SET. */
446 avr_regs_to_save (HARD_REG_SET
*set
)
449 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
450 || signal_function_p (current_function_decl
));
453 CLEAR_HARD_REG_SET (*set
);
456 /* No need to save any registers if the function never returns or
457 is have "OS_task" or "OS_main" attribute. */
458 if (TREE_THIS_VOLATILE (current_function_decl
)
459 || cfun
->machine
->is_OS_task
460 || cfun
->machine
->is_OS_main
)
463 for (reg
= 0; reg
< 32; reg
++)
465 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
466 any global register variables. */
470 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
471 || (df_regs_ever_live_p (reg
)
472 && (int_or_sig_p
|| !call_used_regs
[reg
])
473 && !(frame_pointer_needed
474 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
477 SET_HARD_REG_BIT (*set
, reg
);
484 /* Return true if register FROM can be eliminated via register TO. */
487 avr_can_eliminate (const int from
, const int to
)
489 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
490 || ((from
== FRAME_POINTER_REGNUM
491 || from
== FRAME_POINTER_REGNUM
+ 1)
492 && !frame_pointer_needed
));
495 /* Compute offset between arg_pointer and frame_pointer. */
498 avr_initial_elimination_offset (int from
, int to
)
500 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
504 int offset
= frame_pointer_needed
? 2 : 0;
505 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
507 offset
+= avr_regs_to_save (NULL
);
508 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
512 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
513 frame pointer by +STARTING_FRAME_OFFSET.
514 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
515 avoids creating add/sub of offset in nonlocal goto and setjmp. */
517 rtx
avr_builtin_setjmp_frame_value (void)
519 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
520 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
523 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
524 This is return address of function. */
526 avr_return_addr_rtx (int count
, rtx tem
)
530 /* Can only return this functions return address. Others not supported. */
536 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
537 warning (0, "'builtin_return_address' contains only 2 bytes of address");
540 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
542 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
543 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
544 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
548 /* Return 1 if the function epilogue is just a single "ret". */
551 avr_simple_epilogue (void)
553 return (! frame_pointer_needed
554 && get_frame_size () == 0
555 && avr_regs_to_save (NULL
) == 0
556 && ! interrupt_function_p (current_function_decl
)
557 && ! signal_function_p (current_function_decl
)
558 && ! avr_naked_function_p (current_function_decl
)
559 && ! TREE_THIS_VOLATILE (current_function_decl
));
562 /* This function checks sequence of live registers. */
565 sequent_regs_live (void)
571 for (reg
= 0; reg
< 18; ++reg
)
573 if (!call_used_regs
[reg
])
575 if (df_regs_ever_live_p (reg
))
585 if (!frame_pointer_needed
)
587 if (df_regs_ever_live_p (REG_Y
))
595 if (df_regs_ever_live_p (REG_Y
+1))
608 return (cur_seq
== live_seq
) ? live_seq
: 0;
611 /* Obtain the length sequence of insns. */
614 get_sequence_length (rtx insns
)
619 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
620 length
+= get_attr_length (insn
);
625 /* Implement INCOMING_RETURN_ADDR_RTX. */
628 avr_incoming_return_addr_rtx (void)
630 /* The return address is at the top of the stack. Note that the push
631 was via post-decrement, which means the actual address is off by one. */
632 return gen_frame_mem (HImode
, plus_constant (stack_pointer_rtx
, 1));
635 /* Helper for expand_prologue. Emit a push of a byte register. */
638 emit_push_byte (unsigned regno
, bool frame_related_p
)
642 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
643 mem
= gen_frame_mem (QImode
, mem
);
644 reg
= gen_rtx_REG (QImode
, regno
);
646 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
648 RTX_FRAME_RELATED_P (insn
) = 1;
650 cfun
->machine
->stack_usage
++;
654 /* Output function prologue. */
657 expand_prologue (void)
662 HOST_WIDE_INT size
= get_frame_size();
665 /* Init cfun->machine. */
666 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
667 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
668 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
669 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
670 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
671 cfun
->machine
->stack_usage
= 0;
673 /* Prologue: naked. */
674 if (cfun
->machine
->is_naked
)
679 avr_regs_to_save (&set
);
680 live_seq
= sequent_regs_live ();
681 minimize
= (TARGET_CALL_PROLOGUES
682 && !cfun
->machine
->is_interrupt
683 && !cfun
->machine
->is_signal
684 && !cfun
->machine
->is_OS_task
685 && !cfun
->machine
->is_OS_main
688 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
690 /* Enable interrupts. */
691 if (cfun
->machine
->is_interrupt
)
692 emit_insn (gen_enable_interrupt ());
695 emit_push_byte (ZERO_REGNO
, true);
698 emit_push_byte (TMP_REGNO
, true);
701 /* ??? There's no dwarf2 column reserved for SREG. */
702 emit_move_insn (tmp_reg_rtx
, gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
703 emit_push_byte (TMP_REGNO
, false);
706 /* ??? There's no dwarf2 column reserved for RAMPZ. */
708 && TEST_HARD_REG_BIT (set
, REG_Z
)
709 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
711 emit_move_insn (tmp_reg_rtx
,
712 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
713 emit_push_byte (TMP_REGNO
, false);
716 /* Clear zero reg. */
717 emit_move_insn (zero_reg_rtx
, const0_rtx
);
719 /* Prevent any attempt to delete the setting of ZERO_REG! */
720 emit_use (zero_reg_rtx
);
722 if (minimize
&& (frame_pointer_needed
723 || (AVR_2_BYTE_PC
&& live_seq
> 6)
726 int first_reg
, reg
, offset
;
728 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
729 gen_int_mode (size
, HImode
));
731 insn
= emit_insn (gen_call_prologue_saves
732 (gen_int_mode (live_seq
, HImode
),
733 gen_int_mode (size
+ live_seq
, HImode
)));
734 RTX_FRAME_RELATED_P (insn
) = 1;
736 /* Describe the effect of the unspec_volatile call to prologue_saves.
737 Note that this formulation assumes that add_reg_note pushes the
738 notes to the front. Thus we build them in the reverse order of
739 how we want dwarf2out to process them. */
741 /* The function does always set frame_pointer_rtx, but whether that
742 is going to be permanent in the function is frame_pointer_needed. */
743 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
744 gen_rtx_SET (VOIDmode
,
745 (frame_pointer_needed
746 ? frame_pointer_rtx
: stack_pointer_rtx
),
747 plus_constant (stack_pointer_rtx
,
748 -(size
+ live_seq
))));
750 /* Note that live_seq always contains r28+r29, but the other
751 registers to be saved are all below 18. */
752 first_reg
= 18 - (live_seq
- 2);
754 for (reg
= 29, offset
= -live_seq
+ 1;
756 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
760 m
= gen_rtx_MEM (QImode
, plus_constant (stack_pointer_rtx
, offset
));
761 r
= gen_rtx_REG (QImode
, reg
);
762 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
765 cfun
->machine
->stack_usage
+= size
+ live_seq
;
770 for (reg
= 0; reg
< 32; ++reg
)
771 if (TEST_HARD_REG_BIT (set
, reg
))
772 emit_push_byte (reg
, true);
774 if (frame_pointer_needed
)
776 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
778 /* Push frame pointer. Always be consistent about the
779 ordering of pushes -- epilogue_restores expects the
780 register pair to be pushed low byte first. */
781 emit_push_byte (REG_Y
, true);
782 emit_push_byte (REG_Y
+ 1, true);
787 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
788 RTX_FRAME_RELATED_P (insn
) = 1;
792 /* Creating a frame can be done by direct manipulation of the
793 stack or via the frame pointer. These two methods are:
800 the optimum method depends on function type, stack and frame size.
801 To avoid a complex logic, both methods are tested and shortest
806 if (AVR_HAVE_8BIT_SP
)
808 /* The high byte (r29) doesn't change. Prefer 'subi'
809 (1 cycle) over 'sbiw' (2 cycles, same size). */
810 myfp
= gen_rtx_REG (QImode
, FRAME_POINTER_REGNUM
);
814 /* Normal sized addition. */
815 myfp
= frame_pointer_rtx
;
818 /* Method 1-Adjust frame pointer. */
821 /* Normally the dwarf2out frame-related-expr interpreter does
822 not expect to have the CFA change once the frame pointer is
823 set up. Thus we avoid marking the move insn below and
824 instead indicate that the entire operation is complete after
825 the frame pointer subtraction is done. */
827 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
829 insn
= emit_move_insn (myfp
, plus_constant (myfp
, -size
));
830 RTX_FRAME_RELATED_P (insn
) = 1;
831 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
832 gen_rtx_SET (VOIDmode
, frame_pointer_rtx
,
833 plus_constant (stack_pointer_rtx
,
836 /* Copy to stack pointer. Note that since we've already
837 changed the CFA to the frame pointer this operation
838 need not be annotated at all. */
839 if (AVR_HAVE_8BIT_SP
)
841 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
843 else if (TARGET_NO_INTERRUPTS
844 || cfun
->machine
->is_signal
845 || cfun
->machine
->is_OS_main
)
847 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
850 else if (cfun
->machine
->is_interrupt
)
852 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
857 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
860 fp_plus_insns
= get_insns ();
863 /* Method 2-Adjust Stack pointer. */
870 insn
= plus_constant (stack_pointer_rtx
, -size
);
871 insn
= emit_move_insn (stack_pointer_rtx
, insn
);
872 RTX_FRAME_RELATED_P (insn
) = 1;
874 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
875 RTX_FRAME_RELATED_P (insn
) = 1;
877 sp_plus_insns
= get_insns ();
880 /* Use shortest method. */
881 if (get_sequence_length (sp_plus_insns
)
882 < get_sequence_length (fp_plus_insns
))
883 emit_insn (sp_plus_insns
);
885 emit_insn (fp_plus_insns
);
888 emit_insn (fp_plus_insns
);
890 cfun
->machine
->stack_usage
+= size
;
895 if (flag_stack_usage_info
)
896 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
899 /* Output summary at end of function prologue. */
902 avr_asm_function_end_prologue (FILE *file
)
904 if (cfun
->machine
->is_naked
)
906 fputs ("/* prologue: naked */\n", file
);
910 if (cfun
->machine
->is_interrupt
)
912 fputs ("/* prologue: Interrupt */\n", file
);
914 else if (cfun
->machine
->is_signal
)
916 fputs ("/* prologue: Signal */\n", file
);
919 fputs ("/* prologue: function */\n", file
);
921 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
923 fprintf (file
, "/* stack size = %d */\n",
924 cfun
->machine
->stack_usage
);
925 /* Create symbol stack offset here so all functions have it. Add 1 to stack
926 usage for offset so that SP + .L__stack_offset = return address. */
927 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
931 /* Implement EPILOGUE_USES. */
934 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
938 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
943 /* Helper for expand_epilogue. Emit a pop of a byte register. */
946 emit_pop_byte (unsigned regno
)
950 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
951 mem
= gen_frame_mem (QImode
, mem
);
952 reg
= gen_rtx_REG (QImode
, regno
);
954 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
957 /* Output RTL epilogue. */
960 expand_epilogue (bool sibcall_p
)
966 HOST_WIDE_INT size
= get_frame_size();
968 /* epilogue: naked */
969 if (cfun
->machine
->is_naked
)
971 gcc_assert (!sibcall_p
);
973 emit_jump_insn (gen_return ());
977 avr_regs_to_save (&set
);
978 live_seq
= sequent_regs_live ();
979 minimize
= (TARGET_CALL_PROLOGUES
980 && !cfun
->machine
->is_interrupt
981 && !cfun
->machine
->is_signal
982 && !cfun
->machine
->is_OS_task
983 && !cfun
->machine
->is_OS_main
986 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
988 if (frame_pointer_needed
)
990 /* Get rid of frame. */
991 emit_move_insn(frame_pointer_rtx
,
992 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
993 gen_int_mode (size
, HImode
)));
997 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1000 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
1004 if (frame_pointer_needed
)
1008 /* Try two methods to adjust stack and select shortest. */
1012 if (AVR_HAVE_8BIT_SP
)
1014 /* The high byte (r29) doesn't change - prefer 'subi'
1015 (1 cycle) over 'sbiw' (2 cycles, same size). */
1016 myfp
= gen_rtx_REG (QImode
, FRAME_POINTER_REGNUM
);
1020 /* Normal sized addition. */
1021 myfp
= frame_pointer_rtx
;
1024 /* Method 1-Adjust frame pointer. */
1027 emit_move_insn (myfp
, plus_constant (myfp
, size
));
1029 /* Copy to stack pointer. */
1030 if (AVR_HAVE_8BIT_SP
)
1032 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1034 else if (TARGET_NO_INTERRUPTS
1035 || cfun
->machine
->is_signal
)
1037 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
1038 frame_pointer_rtx
));
1040 else if (cfun
->machine
->is_interrupt
)
1042 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
1043 frame_pointer_rtx
));
1047 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1050 fp_plus_insns
= get_insns ();
1053 /* Method 2-Adjust Stack pointer. */
1060 emit_move_insn (stack_pointer_rtx
,
1061 plus_constant (stack_pointer_rtx
, size
));
1063 sp_plus_insns
= get_insns ();
1066 /* Use shortest method. */
1067 if (get_sequence_length (sp_plus_insns
)
1068 < get_sequence_length (fp_plus_insns
))
1069 emit_insn (sp_plus_insns
);
1071 emit_insn (fp_plus_insns
);
1074 emit_insn (fp_plus_insns
);
1076 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1078 /* Restore previous frame_pointer. See expand_prologue for
1079 rationale for not using pophi. */
1080 emit_pop_byte (REG_Y
+ 1);
1081 emit_pop_byte (REG_Y
);
1085 /* Restore used registers. */
1086 for (reg
= 31; reg
>= 0; --reg
)
1087 if (TEST_HARD_REG_BIT (set
, reg
))
1088 emit_pop_byte (reg
);
1090 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1092 /* Restore RAMPZ using tmp reg as scratch. */
1094 && TEST_HARD_REG_BIT (set
, REG_Z
)
1095 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1097 emit_pop_byte (TMP_REGNO
);
1098 emit_move_insn (gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)),
1102 /* Restore SREG using tmp reg as scratch. */
1103 emit_pop_byte (TMP_REGNO
);
1105 emit_move_insn (gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)),
1108 /* Restore tmp REG. */
1109 emit_pop_byte (TMP_REGNO
);
1111 /* Restore zero REG. */
1112 emit_pop_byte (ZERO_REGNO
);
1116 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.  */
static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1129 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1132 avr_cannot_modify_jumps_p (void)
1135 /* Naked Functions must not have any instructions after
1136 their epilogue, see PR42240 */
1138 if (reload_completed
1140 && cfun
->machine
->is_naked
)
1149 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1150 machine for a memory operand of mode MODE. */
1153 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1155 enum reg_class r
= NO_REGS
;
1157 if (TARGET_ALL_DEBUG
)
1159 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
1160 GET_MODE_NAME(mode
),
1161 strict
? "(strict)": "",
1162 reload_completed
? "(reload_completed)": "",
1163 reload_in_progress
? "(reload_in_progress)": "",
1164 reg_renumber
? "(reg_renumber)" : "");
1165 if (GET_CODE (x
) == PLUS
1166 && REG_P (XEXP (x
, 0))
1167 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1168 && INTVAL (XEXP (x
, 1)) >= 0
1169 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
1172 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1173 true_regnum (XEXP (x
, 0)));
1176 if (!strict
&& GET_CODE (x
) == SUBREG
)
1178 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
1179 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
1181 else if (CONSTANT_ADDRESS_P (x
))
1183 else if (GET_CODE (x
) == PLUS
1184 && REG_P (XEXP (x
, 0))
1185 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1186 && INTVAL (XEXP (x
, 1)) >= 0)
1188 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1192 || REGNO (XEXP (x
,0)) == REG_X
1193 || REGNO (XEXP (x
,0)) == REG_Y
1194 || REGNO (XEXP (x
,0)) == REG_Z
)
1195 r
= BASE_POINTER_REGS
;
1196 if (XEXP (x
,0) == frame_pointer_rtx
1197 || XEXP (x
,0) == arg_pointer_rtx
)
1198 r
= BASE_POINTER_REGS
;
1200 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
1203 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1204 && REG_P (XEXP (x
, 0))
1205 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
1206 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1210 if (TARGET_ALL_DEBUG
)
1212 fprintf (stderr
, " ret = %c\n", r
+ '0');
1214 return r
== NO_REGS
? 0 : (int)r
;
1217 /* Attempts to replace X with a valid
1218 memory address for an operand of mode MODE */
1221 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1224 if (TARGET_ALL_DEBUG
)
1226 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1230 if (GET_CODE (oldx
) == PLUS
1231 && REG_P (XEXP (oldx
,0)))
1233 if (REG_P (XEXP (oldx
,1)))
1234 x
= force_reg (GET_MODE (oldx
), oldx
);
1235 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
1237 int offs
= INTVAL (XEXP (oldx
,1));
1238 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1239 if (offs
> MAX_LD_OFFSET (mode
))
1241 if (TARGET_ALL_DEBUG
)
1242 fprintf (stderr
, "force_reg (big offset)\n");
1243 x
= force_reg (GET_MODE (oldx
), oldx
);
1251 /* Return a pointer register name as a string. */
1254 ptrreg_to_str (int regno
)
1258 case REG_X
: return "X";
1259 case REG_Y
: return "Y";
1260 case REG_Z
: return "Z";
1262 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1267 /* Return the condition name as a string.
1268 Used in conditional jump constructing */
1271 cond_string (enum rtx_code code
)
1280 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1285 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1298 /* Output ADDR to FILE as address. */
1301 print_operand_address (FILE *file
, rtx addr
)
1303 switch (GET_CODE (addr
))
1306 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1310 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1314 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1318 if (CONSTANT_ADDRESS_P (addr
)
1319 && text_segment_operand (addr
, VOIDmode
))
1322 if (GET_CODE (x
) == CONST
)
1324 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1326 /* Assembler gs() will implant word address. Make offset
1327 a byte offset inside gs() for assembler. This is
1328 needed because the more logical (constant+gs(sym)) is not
1329 accepted by gas. For 128K and lower devices this is ok. For
1330 large devices it will create a Trampoline to offset from symbol
1331 which may not be what the user really wanted. */
1332 fprintf (file
, "gs(");
1333 output_addr_const (file
, XEXP (x
,0));
1334 fprintf (file
,"+" HOST_WIDE_INT_PRINT_DEC
")", 2 * INTVAL (XEXP (x
,1)));
1336 if (warning (0, "pointer offset from symbol maybe incorrect"))
1338 output_addr_const (stderr
, addr
);
1339 fprintf(stderr
,"\n");
1344 fprintf (file
, "gs(");
1345 output_addr_const (file
, addr
);
1346 fprintf (file
, ")");
1350 output_addr_const (file
, addr
);
1355 /* Output X as assembler operand to file FILE. */
1358 print_operand (FILE *file
, rtx x
, int code
)
1362 if (code
>= 'A' && code
<= 'D')
1367 if (!AVR_HAVE_JMP_CALL
)
1370 else if (code
== '!')
1372 if (AVR_HAVE_EIJMP_EICALL
)
1377 if (x
== zero_reg_rtx
)
1378 fprintf (file
, "__zero_reg__");
1380 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1382 else if (GET_CODE (x
) == CONST_INT
)
1383 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1384 else if (GET_CODE (x
) == MEM
)
1386 rtx addr
= XEXP (x
,0);
1389 if (!CONSTANT_P (addr
))
1390 fatal_insn ("bad address, not a constant):", addr
);
1391 /* Assembler template with m-code is data - not progmem section */
1392 if (text_segment_operand (addr
, VOIDmode
))
1393 if (warning ( 0, "accessing data memory with program memory address"))
1395 output_addr_const (stderr
, addr
);
1396 fprintf(stderr
,"\n");
1398 output_addr_const (file
, addr
);
1400 else if (code
== 'o')
1402 if (GET_CODE (addr
) != PLUS
)
1403 fatal_insn ("bad address, not (reg+disp):", addr
);
1405 print_operand (file
, XEXP (addr
, 1), 0);
1407 else if (code
== 'p' || code
== 'r')
1409 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1410 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1413 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1415 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1417 else if (GET_CODE (addr
) == PLUS
)
1419 print_operand_address (file
, XEXP (addr
,0));
1420 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1421 fatal_insn ("internal compiler error. Bad address:"
1424 print_operand (file
, XEXP (addr
,1), code
);
1427 print_operand_address (file
, addr
);
1429 else if (code
== 'x')
1431 /* Constant progmem address - like used in jmp or call */
1432 if (0 == text_segment_operand (x
, VOIDmode
))
1433 if (warning ( 0, "accessing program memory with data memory address"))
1435 output_addr_const (stderr
, x
);
1436 fprintf(stderr
,"\n");
1438 /* Use normal symbol for direct address no linker trampoline needed */
1439 output_addr_const (file
, x
);
1441 else if (GET_CODE (x
) == CONST_DOUBLE
)
1445 if (GET_MODE (x
) != SFmode
)
1446 fatal_insn ("internal compiler error. Unknown mode:", x
);
1447 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1448 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1449 fprintf (file
, "0x%lx", val
);
1451 else if (code
== 'j')
1452 fputs (cond_string (GET_CODE (x
)), file
);
1453 else if (code
== 'k')
1454 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1456 print_operand_address (file
, x
);
1459 /* Update the condition code in the INSN. */
1462 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1466 switch (get_attr_cc (insn
))
1469 /* Insn does not affect CC at all. */
1477 set
= single_set (insn
);
1481 cc_status
.flags
|= CC_NO_OVERFLOW
;
1482 cc_status
.value1
= SET_DEST (set
);
1487 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1488 The V flag may or may not be known but that's ok because
1489 alter_cond will change tests to use EQ/NE. */
1490 set
= single_set (insn
);
1494 cc_status
.value1
= SET_DEST (set
);
1495 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1500 set
= single_set (insn
);
1503 cc_status
.value1
= SET_SRC (set
);
1507 /* Insn doesn't leave CC in a usable state. */
1510 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1511 set
= single_set (insn
);
1514 rtx src
= SET_SRC (set
);
1516 if (GET_CODE (src
) == ASHIFTRT
1517 && GET_MODE (src
) == QImode
)
1519 rtx x
= XEXP (src
, 1);
1521 if (GET_CODE (x
) == CONST_INT
1525 cc_status
.value1
= SET_DEST (set
);
1526 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1534 /* Return maximum number of consecutive registers of
1535 class CLASS needed to hold a value of mode MODE. */
1538 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1540 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1543 /* Choose mode for jump insn:
1544 1 - relative jump in range -63 <= x <= 62 ;
1545 2 - relative jump in range -2046 <= x <= 2045 ;
1546 3 - absolute jump (only for ATmega[16]03). */
1549 avr_jump_mode (rtx x
, rtx insn
)
1551 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
1552 ? XEXP (x
, 0) : x
));
1553 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1554 int jump_distance
= cur_addr
- dest_addr
;
1556 if (-63 <= jump_distance
&& jump_distance
<= 62)
1558 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
1560 else if (AVR_HAVE_JMP_CALL
)
1566 /* return an AVR condition jump commands.
1567 X is a comparison RTX.
1568 LEN is a number returned by avr_jump_mode function.
1569 if REVERSE nonzero then condition code in X must be reversed. */
1572 ret_cond_branch (rtx x
, int len
, int reverse
)
1574 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1579 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1580 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1582 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1583 AS1 (brmi
,.+2) CR_TAB
1585 (AS1 (breq
,.+6) CR_TAB
1586 AS1 (brmi
,.+4) CR_TAB
1590 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1592 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1593 AS1 (brlt
,.+2) CR_TAB
1595 (AS1 (breq
,.+6) CR_TAB
1596 AS1 (brlt
,.+4) CR_TAB
1599 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1601 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1602 AS1 (brlo
,.+2) CR_TAB
1604 (AS1 (breq
,.+6) CR_TAB
1605 AS1 (brlo
,.+4) CR_TAB
1608 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1609 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1611 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1612 AS1 (brpl
,.+2) CR_TAB
1614 (AS1 (breq
,.+2) CR_TAB
1615 AS1 (brpl
,.+4) CR_TAB
1618 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1620 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1621 AS1 (brge
,.+2) CR_TAB
1623 (AS1 (breq
,.+2) CR_TAB
1624 AS1 (brge
,.+4) CR_TAB
1627 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1629 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1630 AS1 (brsh
,.+2) CR_TAB
1632 (AS1 (breq
,.+2) CR_TAB
1633 AS1 (brsh
,.+4) CR_TAB
1641 return AS1 (br
%k1
,%0);
1643 return (AS1 (br
%j1
,.+2) CR_TAB
1646 return (AS1 (br
%j1
,.+4) CR_TAB
1655 return AS1 (br
%j1
,%0);
1657 return (AS1 (br
%k1
,.+2) CR_TAB
1660 return (AS1 (br
%k1
,.+4) CR_TAB
1668 /* Predicate function for immediate operand which fits to byte (8bit) */
1671 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1673 return (GET_CODE (op
) == CONST_INT
1674 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
1677 /* Output insn cost for next insn. */
1680 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
1681 int num_operands ATTRIBUTE_UNUSED
)
1683 if (TARGET_ALL_DEBUG
)
1685 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
1686 rtx_cost (PATTERN (insn
), INSN
, !optimize_size
));
1690 /* Return 0 if undefined, 1 if always true or always false. */
1693 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
1695 unsigned int max
= (mode
== QImode
? 0xff :
1696 mode
== HImode
? 0xffff :
1697 mode
== SImode
? 0xffffffff : 0);
1698 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
1700 if (unsigned_condition (op
) != op
)
1703 if (max
!= (INTVAL (x
) & max
)
1704 && INTVAL (x
) != 0xff)
1711 /* Returns nonzero if REGNO is the number of a hard
1712 register in which function arguments are sometimes passed. */
1715 function_arg_regno_p(int r
)
1717 return (r
>= 8 && r
<= 25);
1720 /* Initializing the variable cum for the state at the beginning
1721 of the argument list. */
1724 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
1725 tree fndecl ATTRIBUTE_UNUSED
)
1728 cum
->regno
= FIRST_CUM_REG
;
1729 if (!libname
&& stdarg_p (fntype
))
1732 /* Assume the calle may be tail called */
1734 cfun
->machine
->sibcall_fails
= 0;
1737 /* Returns the number of registers to allocate for a function argument. */
1740 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
1744 if (mode
== BLKmode
)
1745 size
= int_size_in_bytes (type
);
1747 size
= GET_MODE_SIZE (mode
);
1749 /* Align all function arguments to start in even-numbered registers.
1750 Odd-sized arguments leave holes above them. */
1752 return (size
+ 1) & ~1;
1755 /* Controls whether a function argument is passed
1756 in a register, and which register. */
1759 avr_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1760 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1762 int bytes
= avr_num_arg_regs (mode
, type
);
1764 if (cum
->nregs
&& bytes
<= cum
->nregs
)
1765 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
1770 /* Update the summarizer variable CUM to advance past an argument
1771 in the argument list. */
1774 avr_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1775 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1777 int bytes
= avr_num_arg_regs (mode
, type
);
1779 cum
->nregs
-= bytes
;
1780 cum
->regno
-= bytes
;
1782 /* A parameter is being passed in a call-saved register. As the original
1783 contents of these regs has to be restored before leaving the function,
1784 a function must not pass arguments in call-saved regs in order to get
1789 && !call_used_regs
[cum
->regno
])
1791 /* FIXME: We ship info on failing tail-call in struct machine_function.
1792 This uses internals of calls.c:expand_call() and the way args_so_far
1793 is used. targetm.function_ok_for_sibcall() needs to be extended to
1794 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1795 dependent so that such an extension is not wanted. */
1797 cfun
->machine
->sibcall_fails
= 1;
1800 /* Test if all registers needed by the ABI are actually available. If the
1801 user has fixed a GPR needed to pass an argument, an (implicit) function
1802 call would clobber that fixed register. See PR45099 for an example. */
1809 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
1810 if (fixed_regs
[regno
])
1811 error ("Register %s is needed to pass a parameter but is fixed",
1815 if (cum
->nregs
<= 0)
1818 cum
->regno
= FIRST_CUM_REG
;
1822 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1823 /* Decide whether we can make a sibling call to a function. DECL is the
1824 declaration of the function being targeted by the call and EXP is the
1825 CALL_EXPR representing the call. */
1828 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
1832 /* Tail-calling must fail if callee-saved regs are used to pass
1833 function args. We must not tail-call when `epilogue_restores'
1834 is used. Unfortunately, we cannot tell at this point if that
1835 actually will happen or not, and we cannot step back from
1836 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1838 if (cfun
->machine
->sibcall_fails
1839 || TARGET_CALL_PROLOGUES
)
1844 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
1848 decl_callee
= TREE_TYPE (decl_callee
);
1852 decl_callee
= fntype_callee
;
1854 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
1855 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
1857 decl_callee
= TREE_TYPE (decl_callee
);
1861 /* Ensure that caller and callee have compatible epilogues */
1863 if (interrupt_function_p (current_function_decl
)
1864 || signal_function_p (current_function_decl
)
1865 || avr_naked_function_p (decl_callee
)
1866 || avr_naked_function_p (current_function_decl
)
1867 /* FIXME: For OS_task and OS_main, we are over-conservative.
1868 This is due to missing documentation of these attributes
1869 and what they actually should do and should not do. */
1870 || (avr_OS_task_function_p (decl_callee
)
1871 != avr_OS_task_function_p (current_function_decl
))
1872 || (avr_OS_main_function_p (decl_callee
)
1873 != avr_OS_main_function_p (current_function_decl
)))
1881 /***********************************************************************
1882 Functions for outputting various mov's for a various modes
1883 ************************************************************************/
1885 output_movqi (rtx insn
, rtx operands
[], int *l
)
1888 rtx dest
= operands
[0];
1889 rtx src
= operands
[1];
1897 if (register_operand (dest
, QImode
))
1899 if (register_operand (src
, QImode
)) /* mov r,r */
1901 if (test_hard_reg_class (STACK_REG
, dest
))
1902 return AS2 (out
,%0,%1);
1903 else if (test_hard_reg_class (STACK_REG
, src
))
1904 return AS2 (in
,%0,%1);
1906 return AS2 (mov
,%0,%1);
1908 else if (CONSTANT_P (src
))
1910 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1911 return AS2 (ldi
,%0,lo8(%1));
1913 if (GET_CODE (src
) == CONST_INT
)
1915 if (src
== const0_rtx
) /* mov r,L */
1916 return AS1 (clr
,%0);
1917 else if (src
== const1_rtx
)
1920 return (AS1 (clr
,%0) CR_TAB
1923 else if (src
== constm1_rtx
)
1925 /* Immediate constants -1 to any register */
1927 return (AS1 (clr
,%0) CR_TAB
1932 int bit_nr
= exact_log2 (INTVAL (src
));
1938 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1941 avr_output_bld (operands
, bit_nr
);
1948 /* Last resort, larger than loading from memory. */
1950 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1951 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1952 AS2 (mov
,%0,r31
) CR_TAB
1953 AS2 (mov
,r31
,__tmp_reg__
));
1955 else if (GET_CODE (src
) == MEM
)
1956 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1958 else if (GET_CODE (dest
) == MEM
)
1962 if (src
== const0_rtx
)
1963 operands
[1] = zero_reg_rtx
;
1965 templ
= out_movqi_mr_r (insn
, operands
, real_l
);
1968 output_asm_insn (templ
, operands
);
1977 output_movhi (rtx insn
, rtx operands
[], int *l
)
1980 rtx dest
= operands
[0];
1981 rtx src
= operands
[1];
1987 if (register_operand (dest
, HImode
))
1989 if (register_operand (src
, HImode
)) /* mov r,r */
1991 if (test_hard_reg_class (STACK_REG
, dest
))
1993 if (AVR_HAVE_8BIT_SP
)
1994 return *l
= 1, AS2 (out
,__SP_L__
,%A1
);
1995 /* Use simple load of stack pointer if no interrupts are
1997 else if (TARGET_NO_INTERRUPTS
)
1998 return *l
= 2, (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1999 AS2 (out
,__SP_L__
,%A1
));
2001 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
2003 AS2 (out
,__SP_H__
,%B1
) CR_TAB
2004 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
2005 AS2 (out
,__SP_L__
,%A1
));
2007 else if (test_hard_reg_class (STACK_REG
, src
))
2010 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
2011 AS2 (in
,%B0
,__SP_H__
));
2017 return (AS2 (movw
,%0,%1));
2022 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2026 else if (CONSTANT_P (src
))
2028 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2031 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2032 AS2 (ldi
,%B0
,hi8(%1)));
2035 if (GET_CODE (src
) == CONST_INT
)
2037 if (src
== const0_rtx
) /* mov r,L */
2040 return (AS1 (clr
,%A0
) CR_TAB
2043 else if (src
== const1_rtx
)
2046 return (AS1 (clr
,%A0
) CR_TAB
2047 AS1 (clr
,%B0
) CR_TAB
2050 else if (src
== constm1_rtx
)
2052 /* Immediate constants -1 to any register */
2054 return (AS1 (clr
,%0) CR_TAB
2055 AS1 (dec
,%A0
) CR_TAB
2060 int bit_nr
= exact_log2 (INTVAL (src
));
2066 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
2067 AS1 (clr
,%B0
) CR_TAB
2070 avr_output_bld (operands
, bit_nr
);
2076 if ((INTVAL (src
) & 0xff) == 0)
2079 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2080 AS1 (clr
,%A0
) CR_TAB
2081 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2082 AS2 (mov
,%B0
,r31
) CR_TAB
2083 AS2 (mov
,r31
,__tmp_reg__
));
2085 else if ((INTVAL (src
) & 0xff00) == 0)
2088 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2089 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2090 AS2 (mov
,%A0
,r31
) CR_TAB
2091 AS1 (clr
,%B0
) CR_TAB
2092 AS2 (mov
,r31
,__tmp_reg__
));
2096 /* Last resort, equal to loading from memory. */
2098 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2099 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2100 AS2 (mov
,%A0
,r31
) CR_TAB
2101 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2102 AS2 (mov
,%B0
,r31
) CR_TAB
2103 AS2 (mov
,r31
,__tmp_reg__
));
2105 else if (GET_CODE (src
) == MEM
)
2106 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2108 else if (GET_CODE (dest
) == MEM
)
2112 if (src
== const0_rtx
)
2113 operands
[1] = zero_reg_rtx
;
2115 templ
= out_movhi_mr_r (insn
, operands
, real_l
);
2118 output_asm_insn (templ
, operands
);
2123 fatal_insn ("invalid insn:", insn
);
2128 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
2132 rtx x
= XEXP (src
, 0);
2138 if (CONSTANT_ADDRESS_P (x
))
2140 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2143 return AS2 (in
,%0,__SREG__
);
2145 if (optimize
> 0 && io_address_operand (x
, QImode
))
2148 return AS2 (in
,%0,%m1
-0x20);
2151 return AS2 (lds
,%0,%m1
);
2153 /* memory access by reg+disp */
2154 else if (GET_CODE (x
) == PLUS
2155 && REG_P (XEXP (x
,0))
2156 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2158 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
2160 int disp
= INTVAL (XEXP (x
,1));
2161 if (REGNO (XEXP (x
,0)) != REG_Y
)
2162 fatal_insn ("incorrect insn:",insn
);
2164 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2165 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
2166 AS2 (ldd
,%0,Y
+63) CR_TAB
2167 AS2 (sbiw
,r28
,%o1
-63));
2169 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2170 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2171 AS2 (ld
,%0,Y
) CR_TAB
2172 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2173 AS2 (sbci
,r29
,hi8(%o1
)));
2175 else if (REGNO (XEXP (x
,0)) == REG_X
)
2177 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2178 it but I have this situation with extremal optimizing options. */
2179 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
2180 || reg_unused_after (insn
, XEXP (x
,0)))
2181 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
2184 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
2185 AS2 (ld
,%0,X
) CR_TAB
2186 AS2 (sbiw
,r26
,%o1
));
2189 return AS2 (ldd
,%0,%1);
2192 return AS2 (ld
,%0,%1);
2196 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
2200 rtx base
= XEXP (src
, 0);
2201 int reg_dest
= true_regnum (dest
);
2202 int reg_base
= true_regnum (base
);
2203 /* "volatile" forces reading low byte first, even if less efficient,
2204 for correct operation with 16-bit I/O registers. */
2205 int mem_volatile_p
= MEM_VOLATILE_P (src
);
2213 if (reg_dest
== reg_base
) /* R = (R) */
2216 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
2217 AS2 (ld
,%B0
,%1) CR_TAB
2218 AS2 (mov
,%A0
,__tmp_reg__
));
2220 else if (reg_base
== REG_X
) /* (R26) */
2222 if (reg_unused_after (insn
, base
))
2225 return (AS2 (ld
,%A0
,X
+) CR_TAB
2229 return (AS2 (ld
,%A0
,X
+) CR_TAB
2230 AS2 (ld
,%B0
,X
) CR_TAB
2236 return (AS2 (ld
,%A0
,%1) CR_TAB
2237 AS2 (ldd
,%B0
,%1+1));
2240 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2242 int disp
= INTVAL (XEXP (base
, 1));
2243 int reg_base
= true_regnum (XEXP (base
, 0));
2245 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2247 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2248 fatal_insn ("incorrect insn:",insn
);
2250 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2251 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
2252 AS2 (ldd
,%A0
,Y
+62) CR_TAB
2253 AS2 (ldd
,%B0
,Y
+63) CR_TAB
2254 AS2 (sbiw
,r28
,%o1
-62));
2256 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2257 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2258 AS2 (ld
,%A0
,Y
) CR_TAB
2259 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2260 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2261 AS2 (sbci
,r29
,hi8(%o1
)));
2263 if (reg_base
== REG_X
)
2265 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2266 it but I have this situation with extremal
2267 optimization options. */
2270 if (reg_base
== reg_dest
)
2271 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2272 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2273 AS2 (ld
,%B0
,X
) CR_TAB
2274 AS2 (mov
,%A0
,__tmp_reg__
));
2276 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2277 AS2 (ld
,%A0
,X
+) CR_TAB
2278 AS2 (ld
,%B0
,X
) CR_TAB
2279 AS2 (sbiw
,r26
,%o1
+1));
2282 if (reg_base
== reg_dest
)
2285 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
2286 AS2 (ldd
,%B0
,%B1
) CR_TAB
2287 AS2 (mov
,%A0
,__tmp_reg__
));
2291 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
2294 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2296 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2297 fatal_insn ("incorrect insn:", insn
);
2301 if (REGNO (XEXP (base
, 0)) == REG_X
)
2304 return (AS2 (sbiw
,r26
,2) CR_TAB
2305 AS2 (ld
,%A0
,X
+) CR_TAB
2306 AS2 (ld
,%B0
,X
) CR_TAB
2312 return (AS2 (sbiw
,%r1
,2) CR_TAB
2313 AS2 (ld
,%A0
,%p1
) CR_TAB
2314 AS2 (ldd
,%B0
,%p1
+1));
2319 return (AS2 (ld
,%B0
,%1) CR_TAB
2322 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2324 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2325 fatal_insn ("incorrect insn:", insn
);
2328 return (AS2 (ld
,%A0
,%1) CR_TAB
2331 else if (CONSTANT_ADDRESS_P (base
))
2333 if (optimize
> 0 && io_address_operand (base
, HImode
))
2336 return (AS2 (in
,%A0
,%m1
-0x20) CR_TAB
2337 AS2 (in
,%B0
,%m1
+1-0x20));
2340 return (AS2 (lds
,%A0
,%m1
) CR_TAB
2341 AS2 (lds
,%B0
,%m1
+1));
2344 fatal_insn ("unknown move insn:",insn
);
2349 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2353 rtx base
= XEXP (src
, 0);
2354 int reg_dest
= true_regnum (dest
);
2355 int reg_base
= true_regnum (base
);
2363 if (reg_base
== REG_X
) /* (R26) */
2365 if (reg_dest
== REG_X
)
2366 /* "ld r26,-X" is undefined */
2367 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2368 AS2 (ld
,r29
,X
) CR_TAB
2369 AS2 (ld
,r28
,-X
) CR_TAB
2370 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2371 AS2 (sbiw
,r26
,1) CR_TAB
2372 AS2 (ld
,r26
,X
) CR_TAB
2373 AS2 (mov
,r27
,__tmp_reg__
));
2374 else if (reg_dest
== REG_X
- 2)
2375 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2376 AS2 (ld
,%B0
,X
+) CR_TAB
2377 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2378 AS2 (ld
,%D0
,X
) CR_TAB
2379 AS2 (mov
,%C0
,__tmp_reg__
));
2380 else if (reg_unused_after (insn
, base
))
2381 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2382 AS2 (ld
,%B0
,X
+) CR_TAB
2383 AS2 (ld
,%C0
,X
+) CR_TAB
2386 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2387 AS2 (ld
,%B0
,X
+) CR_TAB
2388 AS2 (ld
,%C0
,X
+) CR_TAB
2389 AS2 (ld
,%D0
,X
) CR_TAB
2394 if (reg_dest
== reg_base
)
2395 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2396 AS2 (ldd
,%C0
,%1+2) CR_TAB
2397 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2398 AS2 (ld
,%A0
,%1) CR_TAB
2399 AS2 (mov
,%B0
,__tmp_reg__
));
2400 else if (reg_base
== reg_dest
+ 2)
2401 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2402 AS2 (ldd
,%B0
,%1+1) CR_TAB
2403 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2404 AS2 (ldd
,%D0
,%1+3) CR_TAB
2405 AS2 (mov
,%C0
,__tmp_reg__
));
2407 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2408 AS2 (ldd
,%B0
,%1+1) CR_TAB
2409 AS2 (ldd
,%C0
,%1+2) CR_TAB
2410 AS2 (ldd
,%D0
,%1+3));
2413 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2415 int disp
= INTVAL (XEXP (base
, 1));
2417 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2419 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2420 fatal_insn ("incorrect insn:",insn
);
2422 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2423 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2424 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2425 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2426 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2427 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2428 AS2 (sbiw
,r28
,%o1
-60));
2430 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2431 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2432 AS2 (ld
,%A0
,Y
) CR_TAB
2433 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2434 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2435 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2436 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2437 AS2 (sbci
,r29
,hi8(%o1
)));
2440 reg_base
= true_regnum (XEXP (base
, 0));
2441 if (reg_base
== REG_X
)
2444 if (reg_dest
== REG_X
)
2447 /* "ld r26,-X" is undefined */
2448 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2449 AS2 (ld
,r29
,X
) CR_TAB
2450 AS2 (ld
,r28
,-X
) CR_TAB
2451 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2452 AS2 (sbiw
,r26
,1) CR_TAB
2453 AS2 (ld
,r26
,X
) CR_TAB
2454 AS2 (mov
,r27
,__tmp_reg__
));
2457 if (reg_dest
== REG_X
- 2)
2458 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2459 AS2 (ld
,r24
,X
+) CR_TAB
2460 AS2 (ld
,r25
,X
+) CR_TAB
2461 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2462 AS2 (ld
,r27
,X
) CR_TAB
2463 AS2 (mov
,r26
,__tmp_reg__
));
2465 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2466 AS2 (ld
,%A0
,X
+) CR_TAB
2467 AS2 (ld
,%B0
,X
+) CR_TAB
2468 AS2 (ld
,%C0
,X
+) CR_TAB
2469 AS2 (ld
,%D0
,X
) CR_TAB
2470 AS2 (sbiw
,r26
,%o1
+3));
2472 if (reg_dest
== reg_base
)
2473 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2474 AS2 (ldd
,%C0
,%C1
) CR_TAB
2475 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2476 AS2 (ldd
,%A0
,%A1
) CR_TAB
2477 AS2 (mov
,%B0
,__tmp_reg__
));
2478 else if (reg_dest
== reg_base
- 2)
2479 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2480 AS2 (ldd
,%B0
,%B1
) CR_TAB
2481 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2482 AS2 (ldd
,%D0
,%D1
) CR_TAB
2483 AS2 (mov
,%C0
,__tmp_reg__
));
2484 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2485 AS2 (ldd
,%B0
,%B1
) CR_TAB
2486 AS2 (ldd
,%C0
,%C1
) CR_TAB
2489 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2490 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2491 AS2 (ld
,%C0
,%1) CR_TAB
2492 AS2 (ld
,%B0
,%1) CR_TAB
2494 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2495 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2496 AS2 (ld
,%B0
,%1) CR_TAB
2497 AS2 (ld
,%C0
,%1) CR_TAB
2499 else if (CONSTANT_ADDRESS_P (base
))
2500 return *l
=8, (AS2 (lds
,%A0
,%m1
) CR_TAB
2501 AS2 (lds
,%B0
,%m1
+1) CR_TAB
2502 AS2 (lds
,%C0
,%m1
+2) CR_TAB
2503 AS2 (lds
,%D0
,%m1
+3));
2505 fatal_insn ("unknown move insn:",insn
);
2510 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2514 rtx base
= XEXP (dest
, 0);
2515 int reg_base
= true_regnum (base
);
2516 int reg_src
= true_regnum (src
);
2522 if (CONSTANT_ADDRESS_P (base
))
2523 return *l
=8,(AS2 (sts
,%m0
,%A1
) CR_TAB
2524 AS2 (sts
,%m0
+1,%B1
) CR_TAB
2525 AS2 (sts
,%m0
+2,%C1
) CR_TAB
2526 AS2 (sts
,%m0
+3,%D1
));
2527 if (reg_base
> 0) /* (r) */
2529 if (reg_base
== REG_X
) /* (R26) */
2531 if (reg_src
== REG_X
)
2533 /* "st X+,r26" is undefined */
2534 if (reg_unused_after (insn
, base
))
2535 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2536 AS2 (st
,X
,r26
) CR_TAB
2537 AS2 (adiw
,r26
,1) CR_TAB
2538 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2539 AS2 (st
,X
+,r28
) CR_TAB
2542 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2543 AS2 (st
,X
,r26
) CR_TAB
2544 AS2 (adiw
,r26
,1) CR_TAB
2545 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2546 AS2 (st
,X
+,r28
) CR_TAB
2547 AS2 (st
,X
,r29
) CR_TAB
2550 else if (reg_base
== reg_src
+ 2)
2552 if (reg_unused_after (insn
, base
))
2553 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2554 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2555 AS2 (st
,%0+,%A1
) CR_TAB
2556 AS2 (st
,%0+,%B1
) CR_TAB
2557 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2558 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2559 AS1 (clr
,__zero_reg__
));
2561 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2562 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2563 AS2 (st
,%0+,%A1
) CR_TAB
2564 AS2 (st
,%0+,%B1
) CR_TAB
2565 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2566 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2567 AS1 (clr
,__zero_reg__
) CR_TAB
2570 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2571 AS2 (st
,%0+,%B1
) CR_TAB
2572 AS2 (st
,%0+,%C1
) CR_TAB
2573 AS2 (st
,%0,%D1
) CR_TAB
2577 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2578 AS2 (std
,%0+1,%B1
) CR_TAB
2579 AS2 (std
,%0+2,%C1
) CR_TAB
2580 AS2 (std
,%0+3,%D1
));
2582 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2584 int disp
= INTVAL (XEXP (base
, 1));
2585 reg_base
= REGNO (XEXP (base
, 0));
2586 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2588 if (reg_base
!= REG_Y
)
2589 fatal_insn ("incorrect insn:",insn
);
2591 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2592 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2593 AS2 (std
,Y
+60,%A1
) CR_TAB
2594 AS2 (std
,Y
+61,%B1
) CR_TAB
2595 AS2 (std
,Y
+62,%C1
) CR_TAB
2596 AS2 (std
,Y
+63,%D1
) CR_TAB
2597 AS2 (sbiw
,r28
,%o0
-60));
2599 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2600 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2601 AS2 (st
,Y
,%A1
) CR_TAB
2602 AS2 (std
,Y
+1,%B1
) CR_TAB
2603 AS2 (std
,Y
+2,%C1
) CR_TAB
2604 AS2 (std
,Y
+3,%D1
) CR_TAB
2605 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2606 AS2 (sbci
,r29
,hi8(%o0
)));
2608 if (reg_base
== REG_X
)
2611 if (reg_src
== REG_X
)
2614 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2615 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2616 AS2 (adiw
,r26
,%o0
) CR_TAB
2617 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2618 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2619 AS2 (st
,X
+,r28
) CR_TAB
2620 AS2 (st
,X
,r29
) CR_TAB
2621 AS1 (clr
,__zero_reg__
) CR_TAB
2622 AS2 (sbiw
,r26
,%o0
+3));
2624 else if (reg_src
== REG_X
- 2)
2627 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2628 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2629 AS2 (adiw
,r26
,%o0
) CR_TAB
2630 AS2 (st
,X
+,r24
) CR_TAB
2631 AS2 (st
,X
+,r25
) CR_TAB
2632 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2633 AS2 (st
,X
,__zero_reg__
) CR_TAB
2634 AS1 (clr
,__zero_reg__
) CR_TAB
2635 AS2 (sbiw
,r26
,%o0
+3));
2638 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2639 AS2 (st
,X
+,%A1
) CR_TAB
2640 AS2 (st
,X
+,%B1
) CR_TAB
2641 AS2 (st
,X
+,%C1
) CR_TAB
2642 AS2 (st
,X
,%D1
) CR_TAB
2643 AS2 (sbiw
,r26
,%o0
+3));
2645 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2646 AS2 (std
,%B0
,%B1
) CR_TAB
2647 AS2 (std
,%C0
,%C1
) CR_TAB
2650 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2651 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2652 AS2 (st
,%0,%C1
) CR_TAB
2653 AS2 (st
,%0,%B1
) CR_TAB
2655 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2656 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2657 AS2 (st
,%0,%B1
) CR_TAB
2658 AS2 (st
,%0,%C1
) CR_TAB
2660 fatal_insn ("unknown move insn:",insn
);
2665 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2668 rtx dest
= operands
[0];
2669 rtx src
= operands
[1];
2675 if (register_operand (dest
, VOIDmode
))
2677 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2679 if (true_regnum (dest
) > true_regnum (src
))
2684 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2685 AS2 (movw
,%A0
,%A1
));
2688 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2689 AS2 (mov
,%C0
,%C1
) CR_TAB
2690 AS2 (mov
,%B0
,%B1
) CR_TAB
2698 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2699 AS2 (movw
,%C0
,%C1
));
2702 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2703 AS2 (mov
,%B0
,%B1
) CR_TAB
2704 AS2 (mov
,%C0
,%C1
) CR_TAB
2708 else if (CONSTANT_P (src
))
2710 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2713 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2714 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2715 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2716 AS2 (ldi
,%D0
,hhi8(%1)));
2719 if (GET_CODE (src
) == CONST_INT
)
2721 const char *const clr_op0
=
2722 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2723 AS1 (clr
,%B0
) CR_TAB
2725 : (AS1 (clr
,%A0
) CR_TAB
2726 AS1 (clr
,%B0
) CR_TAB
2727 AS1 (clr
,%C0
) CR_TAB
2730 if (src
== const0_rtx
) /* mov r,L */
2732 *l
= AVR_HAVE_MOVW
? 3 : 4;
2735 else if (src
== const1_rtx
)
2738 output_asm_insn (clr_op0
, operands
);
2739 *l
= AVR_HAVE_MOVW
? 4 : 5;
2740 return AS1 (inc
,%A0
);
2742 else if (src
== constm1_rtx
)
2744 /* Immediate constants -1 to any register */
2748 return (AS1 (clr
,%A0
) CR_TAB
2749 AS1 (dec
,%A0
) CR_TAB
2750 AS2 (mov
,%B0
,%A0
) CR_TAB
2751 AS2 (movw
,%C0
,%A0
));
2754 return (AS1 (clr
,%A0
) CR_TAB
2755 AS1 (dec
,%A0
) CR_TAB
2756 AS2 (mov
,%B0
,%A0
) CR_TAB
2757 AS2 (mov
,%C0
,%A0
) CR_TAB
2762 int bit_nr
= exact_log2 (INTVAL (src
));
2766 *l
= AVR_HAVE_MOVW
? 5 : 6;
2769 output_asm_insn (clr_op0
, operands
);
2770 output_asm_insn ("set", operands
);
2773 avr_output_bld (operands
, bit_nr
);
2780 /* Last resort, better than loading from memory. */
2782 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2783 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2784 AS2 (mov
,%A0
,r31
) CR_TAB
2785 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2786 AS2 (mov
,%B0
,r31
) CR_TAB
2787 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2788 AS2 (mov
,%C0
,r31
) CR_TAB
2789 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2790 AS2 (mov
,%D0
,r31
) CR_TAB
2791 AS2 (mov
,r31
,__tmp_reg__
));
2793 else if (GET_CODE (src
) == MEM
)
2794 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2796 else if (GET_CODE (dest
) == MEM
)
2800 if (src
== const0_rtx
)
2801 operands
[1] = zero_reg_rtx
;
2803 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
2806 output_asm_insn (templ
, operands
);
2811 fatal_insn ("invalid insn:", insn
);
2816 out_movqi_mr_r (rtx insn
, rtx op
[], int *l
)
2820 rtx x
= XEXP (dest
, 0);
2826 if (CONSTANT_ADDRESS_P (x
))
2828 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2831 return AS2 (out
,__SREG__
,%1);
2833 if (optimize
> 0 && io_address_operand (x
, QImode
))
2836 return AS2 (out
,%m0
-0x20,%1);
2839 return AS2 (sts
,%m0
,%1);
2841 /* memory access by reg+disp */
2842 else if (GET_CODE (x
) == PLUS
2843 && REG_P (XEXP (x
,0))
2844 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2846 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (dest
))) >= 63)
2848 int disp
= INTVAL (XEXP (x
,1));
2849 if (REGNO (XEXP (x
,0)) != REG_Y
)
2850 fatal_insn ("incorrect insn:",insn
);
2852 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2853 return *l
= 3, (AS2 (adiw
,r28
,%o0
-63) CR_TAB
2854 AS2 (std
,Y
+63,%1) CR_TAB
2855 AS2 (sbiw
,r28
,%o0
-63));
2857 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2858 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2859 AS2 (st
,Y
,%1) CR_TAB
2860 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2861 AS2 (sbci
,r29
,hi8(%o0
)));
2863 else if (REGNO (XEXP (x
,0)) == REG_X
)
2865 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
2867 if (reg_unused_after (insn
, XEXP (x
,0)))
2868 return *l
= 3, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2869 AS2 (adiw
,r26
,%o0
) CR_TAB
2870 AS2 (st
,X
,__tmp_reg__
));
2872 return *l
= 4, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2873 AS2 (adiw
,r26
,%o0
) CR_TAB
2874 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2875 AS2 (sbiw
,r26
,%o0
));
2879 if (reg_unused_after (insn
, XEXP (x
,0)))
2880 return *l
= 2, (AS2 (adiw
,r26
,%o0
) CR_TAB
2883 return *l
= 3, (AS2 (adiw
,r26
,%o0
) CR_TAB
2884 AS2 (st
,X
,%1) CR_TAB
2885 AS2 (sbiw
,r26
,%o0
));
2889 return AS2 (std
,%0,%1);
2892 return AS2 (st
,%0,%1);
2896 out_movhi_mr_r (rtx insn
, rtx op
[], int *l
)
2900 rtx base
= XEXP (dest
, 0);
2901 int reg_base
= true_regnum (base
);
2902 int reg_src
= true_regnum (src
);
2903 /* "volatile" forces writing high byte first, even if less efficient,
2904 for correct operation with 16-bit I/O registers. */
2905 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
2910 if (CONSTANT_ADDRESS_P (base
))
2912 if (optimize
> 0 && io_address_operand (base
, HImode
))
2915 return (AS2 (out
,%m0
+1-0x20,%B1
) CR_TAB
2916 AS2 (out
,%m0
-0x20,%A1
));
2918 return *l
= 4, (AS2 (sts
,%m0
+1,%B1
) CR_TAB
2923 if (reg_base
== REG_X
)
2925 if (reg_src
== REG_X
)
2927 /* "st X+,r26" and "st -X,r26" are undefined. */
2928 if (!mem_volatile_p
&& reg_unused_after (insn
, src
))
2929 return *l
=4, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2930 AS2 (st
,X
,r26
) CR_TAB
2931 AS2 (adiw
,r26
,1) CR_TAB
2932 AS2 (st
,X
,__tmp_reg__
));
2934 return *l
=5, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2935 AS2 (adiw
,r26
,1) CR_TAB
2936 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2937 AS2 (sbiw
,r26
,1) CR_TAB
2942 if (!mem_volatile_p
&& reg_unused_after (insn
, base
))
2943 return *l
=2, (AS2 (st
,X
+,%A1
) CR_TAB
2946 return *l
=3, (AS2 (adiw
,r26
,1) CR_TAB
2947 AS2 (st
,X
,%B1
) CR_TAB
2952 return *l
=2, (AS2 (std
,%0+1,%B1
) CR_TAB
2955 else if (GET_CODE (base
) == PLUS
)
2957 int disp
= INTVAL (XEXP (base
, 1));
2958 reg_base
= REGNO (XEXP (base
, 0));
2959 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2961 if (reg_base
!= REG_Y
)
2962 fatal_insn ("incorrect insn:",insn
);
2964 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2965 return *l
= 4, (AS2 (adiw
,r28
,%o0
-62) CR_TAB
2966 AS2 (std
,Y
+63,%B1
) CR_TAB
2967 AS2 (std
,Y
+62,%A1
) CR_TAB
2968 AS2 (sbiw
,r28
,%o0
-62));
2970 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2971 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2972 AS2 (std
,Y
+1,%B1
) CR_TAB
2973 AS2 (st
,Y
,%A1
) CR_TAB
2974 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2975 AS2 (sbci
,r29
,hi8(%o0
)));
2977 if (reg_base
== REG_X
)
2980 if (reg_src
== REG_X
)
2983 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2984 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2985 AS2 (adiw
,r26
,%o0
+1) CR_TAB
2986 AS2 (st
,X
,__zero_reg__
) CR_TAB
2987 AS2 (st
,-X
,__tmp_reg__
) CR_TAB
2988 AS1 (clr
,__zero_reg__
) CR_TAB
2989 AS2 (sbiw
,r26
,%o0
));
2992 return (AS2 (adiw
,r26
,%o0
+1) CR_TAB
2993 AS2 (st
,X
,%B1
) CR_TAB
2994 AS2 (st
,-X
,%A1
) CR_TAB
2995 AS2 (sbiw
,r26
,%o0
));
2997 return *l
=2, (AS2 (std
,%B0
,%B1
) CR_TAB
3000 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
3001 return *l
=2, (AS2 (st
,%0,%B1
) CR_TAB
3003 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
3007 if (REGNO (XEXP (base
, 0)) == REG_X
)
3010 return (AS2 (adiw
,r26
,1) CR_TAB
3011 AS2 (st
,X
,%B1
) CR_TAB
3012 AS2 (st
,-X
,%A1
) CR_TAB
3018 return (AS2 (std
,%p0
+1,%B1
) CR_TAB
3019 AS2 (st
,%p0
,%A1
) CR_TAB
3025 return (AS2 (st
,%0,%A1
) CR_TAB
3028 fatal_insn ("unknown move insn:",insn
);
3032 /* Return 1 if frame pointer for current function required. */
3035 avr_frame_pointer_required_p (void)
3037 return (cfun
->calls_alloca
3038 || crtl
->args
.info
.nregs
== 0
3039 || get_frame_size () > 0);
3042 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3045 compare_condition (rtx insn
)
3047 rtx next
= next_real_insn (insn
);
3048 RTX_CODE cond
= UNKNOWN
;
3049 if (next
&& GET_CODE (next
) == JUMP_INSN
)
3051 rtx pat
= PATTERN (next
);
3052 rtx src
= SET_SRC (pat
);
3053 rtx t
= XEXP (src
, 0);
3054 cond
= GET_CODE (t
);
3059 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
3062 compare_sign_p (rtx insn
)
3064 RTX_CODE cond
= compare_condition (insn
);
3065 return (cond
== GE
|| cond
== LT
);
3068 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3069 that needs to be swapped (GT, GTU, LE, LEU). */
3072 compare_diff_p (rtx insn
)
3074 RTX_CODE cond
= compare_condition (insn
);
3075 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
3078 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3081 compare_eq_p (rtx insn
)
3083 RTX_CODE cond
= compare_condition (insn
);
3084 return (cond
== EQ
|| cond
== NE
);
3088 /* Output test instruction for HImode. */
3091 out_tsthi (rtx insn
, rtx op
, int *l
)
3093 if (compare_sign_p (insn
))
3096 return AS1 (tst
,%B0
);
3098 if (reg_unused_after (insn
, op
)
3099 && compare_eq_p (insn
))
3101 /* Faster than sbiw if we can clobber the operand. */
3103 return "or %A0,%B0";
3105 if (test_hard_reg_class (ADDW_REGS
, op
))
3108 return AS2 (sbiw
,%0,0);
3111 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
3112 AS2 (cpc
,%B0
,__zero_reg__
));
3116 /* Output test instruction for SImode. */
3119 out_tstsi (rtx insn
, rtx op
, int *l
)
3121 if (compare_sign_p (insn
))
3124 return AS1 (tst
,%D0
);
3126 if (test_hard_reg_class (ADDW_REGS
, op
))
3129 return (AS2 (sbiw
,%A0
,0) CR_TAB
3130 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
3131 AS2 (cpc
,%D0
,__zero_reg__
));
3134 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
3135 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
3136 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
3137 AS2 (cpc
,%D0
,__zero_reg__
));
3141 /* Generate asm equivalent for various shifts.
3142 Shift count is a CONST_INT, MEM or REG.
3143 This only handles cases that are not already
3144 carefully hand-optimized in ?sh??i3_out. */
3147 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
3148 int *len
, int t_len
)
3152 int second_label
= 1;
3153 int saved_in_tmp
= 0;
3154 int use_zero_reg
= 0;
3156 op
[0] = operands
[0];
3157 op
[1] = operands
[1];
3158 op
[2] = operands
[2];
3159 op
[3] = operands
[3];
3165 if (GET_CODE (operands
[2]) == CONST_INT
)
3167 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3168 int count
= INTVAL (operands
[2]);
3169 int max_len
= 10; /* If larger than this, always use a loop. */
3178 if (count
< 8 && !scratch
)
3182 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
3184 if (t_len
* count
<= max_len
)
3186 /* Output shifts inline with no loop - faster. */
3188 *len
= t_len
* count
;
3192 output_asm_insn (templ
, op
);
3201 strcat (str
, AS2 (ldi
,%3,%2));
3203 else if (use_zero_reg
)
3205 /* Hack to save one word: use __zero_reg__ as loop counter.
3206 Set one bit, then shift in a loop until it is 0 again. */
3208 op
[3] = zero_reg_rtx
;
3212 strcat (str
, ("set" CR_TAB
3213 AS2 (bld
,%3,%2-1)));
3217 /* No scratch register available, use one from LD_REGS (saved in
3218 __tmp_reg__) that doesn't overlap with registers to shift. */
3220 op
[3] = gen_rtx_REG (QImode
,
3221 ((true_regnum (operands
[0]) - 1) & 15) + 16);
3222 op
[4] = tmp_reg_rtx
;
3226 *len
= 3; /* Includes "mov %3,%4" after the loop. */
3228 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
3234 else if (GET_CODE (operands
[2]) == MEM
)
3238 op
[3] = op_mov
[0] = tmp_reg_rtx
;
3242 out_movqi_r_mr (insn
, op_mov
, len
);
3244 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
3246 else if (register_operand (operands
[2], QImode
))
3248 if (reg_unused_after (insn
, operands
[2]))
3252 op
[3] = tmp_reg_rtx
;
3254 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
3258 fatal_insn ("bad shift insn:", insn
);
3265 strcat (str
, AS1 (rjmp
,2f
));
3269 *len
+= t_len
+ 2; /* template + dec + brXX */
3272 strcat (str
, "\n1:\t");
3273 strcat (str
, templ
);
3274 strcat (str
, second_label
? "\n2:\t" : "\n\t");
3275 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
3276 strcat (str
, CR_TAB
);
3277 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
3279 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
3280 output_asm_insn (str
, op
);
3285 /* 8bit shift left ((char)x << i) */
3288 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
3290 if (GET_CODE (operands
[2]) == CONST_INT
)
3297 switch (INTVAL (operands
[2]))
3300 if (INTVAL (operands
[2]) < 8)
3304 return AS1 (clr
,%0);
3308 return AS1 (lsl
,%0);
3312 return (AS1 (lsl
,%0) CR_TAB
3317 return (AS1 (lsl
,%0) CR_TAB
3322 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3325 return (AS1 (swap
,%0) CR_TAB
3326 AS2 (andi
,%0,0xf0));
3329 return (AS1 (lsl
,%0) CR_TAB
3335 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3338 return (AS1 (swap
,%0) CR_TAB
3340 AS2 (andi
,%0,0xe0));
3343 return (AS1 (lsl
,%0) CR_TAB
3350 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3353 return (AS1 (swap
,%0) CR_TAB
3356 AS2 (andi
,%0,0xc0));
3359 return (AS1 (lsl
,%0) CR_TAB
3368 return (AS1 (ror
,%0) CR_TAB
3373 else if (CONSTANT_P (operands
[2]))
3374 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3376 out_shift_with_cnt (AS1 (lsl
,%0),
3377 insn
, operands
, len
, 1);
3382 /* 16bit shift left ((short)x << i) */
3385 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3387 if (GET_CODE (operands
[2]) == CONST_INT
)
3389 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3390 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3397 switch (INTVAL (operands
[2]))
3400 if (INTVAL (operands
[2]) < 16)
3404 return (AS1 (clr
,%B0
) CR_TAB
3408 if (optimize_size
&& scratch
)
3413 return (AS1 (swap
,%A0
) CR_TAB
3414 AS1 (swap
,%B0
) CR_TAB
3415 AS2 (andi
,%B0
,0xf0) CR_TAB
3416 AS2 (eor
,%B0
,%A0
) CR_TAB
3417 AS2 (andi
,%A0
,0xf0) CR_TAB
3423 return (AS1 (swap
,%A0
) CR_TAB
3424 AS1 (swap
,%B0
) CR_TAB
3425 AS2 (ldi
,%3,0xf0) CR_TAB
3427 AS2 (eor
,%B0
,%A0
) CR_TAB
3431 break; /* optimize_size ? 6 : 8 */
3435 break; /* scratch ? 5 : 6 */
3439 return (AS1 (lsl
,%A0
) CR_TAB
3440 AS1 (rol
,%B0
) CR_TAB
3441 AS1 (swap
,%A0
) CR_TAB
3442 AS1 (swap
,%B0
) CR_TAB
3443 AS2 (andi
,%B0
,0xf0) CR_TAB
3444 AS2 (eor
,%B0
,%A0
) CR_TAB
3445 AS2 (andi
,%A0
,0xf0) CR_TAB
3451 return (AS1 (lsl
,%A0
) CR_TAB
3452 AS1 (rol
,%B0
) CR_TAB
3453 AS1 (swap
,%A0
) CR_TAB
3454 AS1 (swap
,%B0
) CR_TAB
3455 AS2 (ldi
,%3,0xf0) CR_TAB
3457 AS2 (eor
,%B0
,%A0
) CR_TAB
3465 break; /* scratch ? 5 : 6 */
3467 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3468 AS1 (lsr
,%B0
) CR_TAB
3469 AS1 (ror
,%A0
) CR_TAB
3470 AS1 (ror
,__tmp_reg__
) CR_TAB
3471 AS1 (lsr
,%B0
) CR_TAB
3472 AS1 (ror
,%A0
) CR_TAB
3473 AS1 (ror
,__tmp_reg__
) CR_TAB
3474 AS2 (mov
,%B0
,%A0
) CR_TAB
3475 AS2 (mov
,%A0
,__tmp_reg__
));
3479 return (AS1 (lsr
,%B0
) CR_TAB
3480 AS2 (mov
,%B0
,%A0
) CR_TAB
3481 AS1 (clr
,%A0
) CR_TAB
3482 AS1 (ror
,%B0
) CR_TAB
3486 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3491 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3492 AS1 (clr
,%A0
) CR_TAB
3497 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3498 AS1 (clr
,%A0
) CR_TAB
3499 AS1 (lsl
,%B0
) CR_TAB
3504 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3505 AS1 (clr
,%A0
) CR_TAB
3506 AS1 (lsl
,%B0
) CR_TAB
3507 AS1 (lsl
,%B0
) CR_TAB
3514 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3515 AS1 (clr
,%A0
) CR_TAB
3516 AS1 (swap
,%B0
) CR_TAB
3517 AS2 (andi
,%B0
,0xf0));
3522 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3523 AS1 (clr
,%A0
) CR_TAB
3524 AS1 (swap
,%B0
) CR_TAB
3525 AS2 (ldi
,%3,0xf0) CR_TAB
3529 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3530 AS1 (clr
,%A0
) CR_TAB
3531 AS1 (lsl
,%B0
) CR_TAB
3532 AS1 (lsl
,%B0
) CR_TAB
3533 AS1 (lsl
,%B0
) CR_TAB
3540 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3541 AS1 (clr
,%A0
) CR_TAB
3542 AS1 (swap
,%B0
) CR_TAB
3543 AS1 (lsl
,%B0
) CR_TAB
3544 AS2 (andi
,%B0
,0xe0));
3546 if (AVR_HAVE_MUL
&& scratch
)
3549 return (AS2 (ldi
,%3,0x20) CR_TAB
3550 AS2 (mul
,%A0
,%3) CR_TAB
3551 AS2 (mov
,%B0
,r0
) CR_TAB
3552 AS1 (clr
,%A0
) CR_TAB
3553 AS1 (clr
,__zero_reg__
));
3555 if (optimize_size
&& scratch
)
3560 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3561 AS1 (clr
,%A0
) CR_TAB
3562 AS1 (swap
,%B0
) CR_TAB
3563 AS1 (lsl
,%B0
) CR_TAB
3564 AS2 (ldi
,%3,0xe0) CR_TAB
3570 return ("set" CR_TAB
3571 AS2 (bld
,r1
,5) CR_TAB
3572 AS2 (mul
,%A0
,r1
) CR_TAB
3573 AS2 (mov
,%B0
,r0
) CR_TAB
3574 AS1 (clr
,%A0
) CR_TAB
3575 AS1 (clr
,__zero_reg__
));
3578 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3579 AS1 (clr
,%A0
) CR_TAB
3580 AS1 (lsl
,%B0
) CR_TAB
3581 AS1 (lsl
,%B0
) CR_TAB
3582 AS1 (lsl
,%B0
) CR_TAB
3583 AS1 (lsl
,%B0
) CR_TAB
3587 if (AVR_HAVE_MUL
&& ldi_ok
)
3590 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3591 AS2 (mul
,%A0
,%B0
) CR_TAB
3592 AS2 (mov
,%B0
,r0
) CR_TAB
3593 AS1 (clr
,%A0
) CR_TAB
3594 AS1 (clr
,__zero_reg__
));
3596 if (AVR_HAVE_MUL
&& scratch
)
3599 return (AS2 (ldi
,%3,0x40) CR_TAB
3600 AS2 (mul
,%A0
,%3) CR_TAB
3601 AS2 (mov
,%B0
,r0
) CR_TAB
3602 AS1 (clr
,%A0
) CR_TAB
3603 AS1 (clr
,__zero_reg__
));
3605 if (optimize_size
&& ldi_ok
)
3608 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3609 AS2 (ldi
,%A0
,6) "\n1:\t"
3610 AS1 (lsl
,%B0
) CR_TAB
3611 AS1 (dec
,%A0
) CR_TAB
3614 if (optimize_size
&& scratch
)
3617 return (AS1 (clr
,%B0
) CR_TAB
3618 AS1 (lsr
,%A0
) CR_TAB
3619 AS1 (ror
,%B0
) CR_TAB
3620 AS1 (lsr
,%A0
) CR_TAB
3621 AS1 (ror
,%B0
) CR_TAB
3626 return (AS1 (clr
,%B0
) CR_TAB
3627 AS1 (lsr
,%A0
) CR_TAB
3628 AS1 (ror
,%B0
) CR_TAB
3633 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3635 insn
, operands
, len
, 2);
3640 /* 32bit shift left ((long)x << i) */
3643 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3645 if (GET_CODE (operands
[2]) == CONST_INT
)
3653 switch (INTVAL (operands
[2]))
3656 if (INTVAL (operands
[2]) < 32)
3660 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3661 AS1 (clr
,%C0
) CR_TAB
3662 AS2 (movw
,%A0
,%C0
));
3664 return (AS1 (clr
,%D0
) CR_TAB
3665 AS1 (clr
,%C0
) CR_TAB
3666 AS1 (clr
,%B0
) CR_TAB
3671 int reg0
= true_regnum (operands
[0]);
3672 int reg1
= true_regnum (operands
[1]);
3675 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3676 AS2 (mov
,%C0
,%B1
) CR_TAB
3677 AS2 (mov
,%B0
,%A1
) CR_TAB
3680 return (AS1 (clr
,%A0
) CR_TAB
3681 AS2 (mov
,%B0
,%A1
) CR_TAB
3682 AS2 (mov
,%C0
,%B1
) CR_TAB
3688 int reg0
= true_regnum (operands
[0]);
3689 int reg1
= true_regnum (operands
[1]);
3690 if (reg0
+ 2 == reg1
)
3691 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3694 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3695 AS1 (clr
,%B0
) CR_TAB
3698 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3699 AS2 (mov
,%D0
,%B1
) CR_TAB
3700 AS1 (clr
,%B0
) CR_TAB
3706 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3707 AS1 (clr
,%C0
) CR_TAB
3708 AS1 (clr
,%B0
) CR_TAB
3713 return (AS1 (clr
,%D0
) CR_TAB
3714 AS1 (lsr
,%A0
) CR_TAB
3715 AS1 (ror
,%D0
) CR_TAB
3716 AS1 (clr
,%C0
) CR_TAB
3717 AS1 (clr
,%B0
) CR_TAB
3722 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3723 AS1 (rol
,%B0
) CR_TAB
3724 AS1 (rol
,%C0
) CR_TAB
3726 insn
, operands
, len
, 4);
3730 /* 8bit arithmetic shift right ((signed char)x >> i) */
3733 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3735 if (GET_CODE (operands
[2]) == CONST_INT
)
3742 switch (INTVAL (operands
[2]))
3746 return AS1 (asr
,%0);
3750 return (AS1 (asr
,%0) CR_TAB
3755 return (AS1 (asr
,%0) CR_TAB
3761 return (AS1 (asr
,%0) CR_TAB
3768 return (AS1 (asr
,%0) CR_TAB
3776 return (AS2 (bst
,%0,6) CR_TAB
3778 AS2 (sbc
,%0,%0) CR_TAB
3782 if (INTVAL (operands
[2]) < 8)
3789 return (AS1 (lsl
,%0) CR_TAB
3793 else if (CONSTANT_P (operands
[2]))
3794 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3796 out_shift_with_cnt (AS1 (asr
,%0),
3797 insn
, operands
, len
, 1);
3802 /* 16bit arithmetic shift right ((signed short)x >> i) */
3805 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3807 if (GET_CODE (operands
[2]) == CONST_INT
)
3809 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3810 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3817 switch (INTVAL (operands
[2]))
3821 /* XXX try to optimize this too? */
3826 break; /* scratch ? 5 : 6 */
3828 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3829 AS2 (mov
,%A0
,%B0
) CR_TAB
3830 AS1 (lsl
,__tmp_reg__
) CR_TAB
3831 AS1 (rol
,%A0
) CR_TAB
3832 AS2 (sbc
,%B0
,%B0
) CR_TAB
3833 AS1 (lsl
,__tmp_reg__
) CR_TAB
3834 AS1 (rol
,%A0
) CR_TAB
3839 return (AS1 (lsl
,%A0
) CR_TAB
3840 AS2 (mov
,%A0
,%B0
) CR_TAB
3841 AS1 (rol
,%A0
) CR_TAB
3846 int reg0
= true_regnum (operands
[0]);
3847 int reg1
= true_regnum (operands
[1]);
3850 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3851 AS1 (lsl
,%B0
) CR_TAB
3854 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3855 AS1 (clr
,%B0
) CR_TAB
3856 AS2 (sbrc
,%A0
,7) CR_TAB
3862 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3863 AS1 (lsl
,%B0
) CR_TAB
3864 AS2 (sbc
,%B0
,%B0
) CR_TAB
3869 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3870 AS1 (lsl
,%B0
) CR_TAB
3871 AS2 (sbc
,%B0
,%B0
) CR_TAB
3872 AS1 (asr
,%A0
) CR_TAB
3876 if (AVR_HAVE_MUL
&& ldi_ok
)
3879 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3880 AS2 (muls
,%B0
,%A0
) CR_TAB
3881 AS2 (mov
,%A0
,r1
) CR_TAB
3882 AS2 (sbc
,%B0
,%B0
) CR_TAB
3883 AS1 (clr
,__zero_reg__
));
3885 if (optimize_size
&& scratch
)
3888 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3889 AS1 (lsl
,%B0
) CR_TAB
3890 AS2 (sbc
,%B0
,%B0
) CR_TAB
3891 AS1 (asr
,%A0
) CR_TAB
3892 AS1 (asr
,%A0
) CR_TAB
3896 if (AVR_HAVE_MUL
&& ldi_ok
)
3899 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3900 AS2 (muls
,%B0
,%A0
) CR_TAB
3901 AS2 (mov
,%A0
,r1
) CR_TAB
3902 AS2 (sbc
,%B0
,%B0
) CR_TAB
3903 AS1 (clr
,__zero_reg__
));
3905 if (optimize_size
&& scratch
)
3908 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3909 AS1 (lsl
,%B0
) CR_TAB
3910 AS2 (sbc
,%B0
,%B0
) CR_TAB
3911 AS1 (asr
,%A0
) CR_TAB
3912 AS1 (asr
,%A0
) CR_TAB
3913 AS1 (asr
,%A0
) CR_TAB
3917 if (AVR_HAVE_MUL
&& ldi_ok
)
3920 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3921 AS2 (muls
,%B0
,%A0
) CR_TAB
3922 AS2 (mov
,%A0
,r1
) CR_TAB
3923 AS2 (sbc
,%B0
,%B0
) CR_TAB
3924 AS1 (clr
,__zero_reg__
));
3927 break; /* scratch ? 5 : 7 */
3929 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3930 AS1 (lsl
,%B0
) CR_TAB
3931 AS2 (sbc
,%B0
,%B0
) CR_TAB
3932 AS1 (asr
,%A0
) CR_TAB
3933 AS1 (asr
,%A0
) CR_TAB
3934 AS1 (asr
,%A0
) CR_TAB
3935 AS1 (asr
,%A0
) CR_TAB
3940 return (AS1 (lsl
,%B0
) CR_TAB
3941 AS2 (sbc
,%A0
,%A0
) CR_TAB
3942 AS1 (lsl
,%B0
) CR_TAB
3943 AS2 (mov
,%B0
,%A0
) CR_TAB
3947 if (INTVAL (operands
[2]) < 16)
3953 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3954 AS2 (sbc
,%A0
,%A0
) CR_TAB
3959 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3961 insn
, operands
, len
, 2);
3966 /* 32bit arithmetic shift right ((signed long)x >> i) */
3969 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3971 if (GET_CODE (operands
[2]) == CONST_INT
)
3979 switch (INTVAL (operands
[2]))
3983 int reg0
= true_regnum (operands
[0]);
3984 int reg1
= true_regnum (operands
[1]);
3987 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3988 AS2 (mov
,%B0
,%C1
) CR_TAB
3989 AS2 (mov
,%C0
,%D1
) CR_TAB
3990 AS1 (clr
,%D0
) CR_TAB
3991 AS2 (sbrc
,%C0
,7) CR_TAB
3994 return (AS1 (clr
,%D0
) CR_TAB
3995 AS2 (sbrc
,%D1
,7) CR_TAB
3996 AS1 (dec
,%D0
) CR_TAB
3997 AS2 (mov
,%C0
,%D1
) CR_TAB
3998 AS2 (mov
,%B0
,%C1
) CR_TAB
4004 int reg0
= true_regnum (operands
[0]);
4005 int reg1
= true_regnum (operands
[1]);
4007 if (reg0
== reg1
+ 2)
4008 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
4009 AS2 (sbrc
,%B0
,7) CR_TAB
4010 AS1 (com
,%D0
) CR_TAB
4013 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
4014 AS1 (clr
,%D0
) CR_TAB
4015 AS2 (sbrc
,%B0
,7) CR_TAB
4016 AS1 (com
,%D0
) CR_TAB
4019 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
4020 AS2 (mov
,%A0
,%C1
) CR_TAB
4021 AS1 (clr
,%D0
) CR_TAB
4022 AS2 (sbrc
,%B0
,7) CR_TAB
4023 AS1 (com
,%D0
) CR_TAB
4028 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
4029 AS1 (clr
,%D0
) CR_TAB
4030 AS2 (sbrc
,%A0
,7) CR_TAB
4031 AS1 (com
,%D0
) CR_TAB
4032 AS2 (mov
,%B0
,%D0
) CR_TAB
4036 if (INTVAL (operands
[2]) < 32)
4043 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
4044 AS2 (sbc
,%A0
,%A0
) CR_TAB
4045 AS2 (mov
,%B0
,%A0
) CR_TAB
4046 AS2 (movw
,%C0
,%A0
));
4048 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
4049 AS2 (sbc
,%A0
,%A0
) CR_TAB
4050 AS2 (mov
,%B0
,%A0
) CR_TAB
4051 AS2 (mov
,%C0
,%A0
) CR_TAB
4056 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
4057 AS1 (ror
,%C0
) CR_TAB
4058 AS1 (ror
,%B0
) CR_TAB
4060 insn
, operands
, len
, 4);
4064 /* 8bit logic shift right ((unsigned char)x >> i) */
4067 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
4069 if (GET_CODE (operands
[2]) == CONST_INT
)
4076 switch (INTVAL (operands
[2]))
4079 if (INTVAL (operands
[2]) < 8)
4083 return AS1 (clr
,%0);
4087 return AS1 (lsr
,%0);
4091 return (AS1 (lsr
,%0) CR_TAB
4095 return (AS1 (lsr
,%0) CR_TAB
4100 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4103 return (AS1 (swap
,%0) CR_TAB
4104 AS2 (andi
,%0,0x0f));
4107 return (AS1 (lsr
,%0) CR_TAB
4113 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4116 return (AS1 (swap
,%0) CR_TAB
4121 return (AS1 (lsr
,%0) CR_TAB
4128 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4131 return (AS1 (swap
,%0) CR_TAB
4137 return (AS1 (lsr
,%0) CR_TAB
4146 return (AS1 (rol
,%0) CR_TAB
4151 else if (CONSTANT_P (operands
[2]))
4152 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4154 out_shift_with_cnt (AS1 (lsr
,%0),
4155 insn
, operands
, len
, 1);
4159 /* 16bit logic shift right ((unsigned short)x >> i) */
4162 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
4164 if (GET_CODE (operands
[2]) == CONST_INT
)
4166 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4167 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4174 switch (INTVAL (operands
[2]))
4177 if (INTVAL (operands
[2]) < 16)
4181 return (AS1 (clr
,%B0
) CR_TAB
4185 if (optimize_size
&& scratch
)
4190 return (AS1 (swap
,%B0
) CR_TAB
4191 AS1 (swap
,%A0
) CR_TAB
4192 AS2 (andi
,%A0
,0x0f) CR_TAB
4193 AS2 (eor
,%A0
,%B0
) CR_TAB
4194 AS2 (andi
,%B0
,0x0f) CR_TAB
4200 return (AS1 (swap
,%B0
) CR_TAB
4201 AS1 (swap
,%A0
) CR_TAB
4202 AS2 (ldi
,%3,0x0f) CR_TAB
4204 AS2 (eor
,%A0
,%B0
) CR_TAB
4208 break; /* optimize_size ? 6 : 8 */
4212 break; /* scratch ? 5 : 6 */
4216 return (AS1 (lsr
,%B0
) CR_TAB
4217 AS1 (ror
,%A0
) CR_TAB
4218 AS1 (swap
,%B0
) CR_TAB
4219 AS1 (swap
,%A0
) CR_TAB
4220 AS2 (andi
,%A0
,0x0f) CR_TAB
4221 AS2 (eor
,%A0
,%B0
) CR_TAB
4222 AS2 (andi
,%B0
,0x0f) CR_TAB
4228 return (AS1 (lsr
,%B0
) CR_TAB
4229 AS1 (ror
,%A0
) CR_TAB
4230 AS1 (swap
,%B0
) CR_TAB
4231 AS1 (swap
,%A0
) CR_TAB
4232 AS2 (ldi
,%3,0x0f) CR_TAB
4234 AS2 (eor
,%A0
,%B0
) CR_TAB
4242 break; /* scratch ? 5 : 6 */
4244 return (AS1 (clr
,__tmp_reg__
) CR_TAB
4245 AS1 (lsl
,%A0
) CR_TAB
4246 AS1 (rol
,%B0
) CR_TAB
4247 AS1 (rol
,__tmp_reg__
) CR_TAB
4248 AS1 (lsl
,%A0
) CR_TAB
4249 AS1 (rol
,%B0
) CR_TAB
4250 AS1 (rol
,__tmp_reg__
) CR_TAB
4251 AS2 (mov
,%A0
,%B0
) CR_TAB
4252 AS2 (mov
,%B0
,__tmp_reg__
));
4256 return (AS1 (lsl
,%A0
) CR_TAB
4257 AS2 (mov
,%A0
,%B0
) CR_TAB
4258 AS1 (rol
,%A0
) CR_TAB
4259 AS2 (sbc
,%B0
,%B0
) CR_TAB
4263 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
4268 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4269 AS1 (clr
,%B0
) CR_TAB
4274 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4275 AS1 (clr
,%B0
) CR_TAB
4276 AS1 (lsr
,%A0
) CR_TAB
4281 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4282 AS1 (clr
,%B0
) CR_TAB
4283 AS1 (lsr
,%A0
) CR_TAB
4284 AS1 (lsr
,%A0
) CR_TAB
4291 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4292 AS1 (clr
,%B0
) CR_TAB
4293 AS1 (swap
,%A0
) CR_TAB
4294 AS2 (andi
,%A0
,0x0f));
4299 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4300 AS1 (clr
,%B0
) CR_TAB
4301 AS1 (swap
,%A0
) CR_TAB
4302 AS2 (ldi
,%3,0x0f) CR_TAB
4306 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4307 AS1 (clr
,%B0
) CR_TAB
4308 AS1 (lsr
,%A0
) CR_TAB
4309 AS1 (lsr
,%A0
) CR_TAB
4310 AS1 (lsr
,%A0
) CR_TAB
4317 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4318 AS1 (clr
,%B0
) CR_TAB
4319 AS1 (swap
,%A0
) CR_TAB
4320 AS1 (lsr
,%A0
) CR_TAB
4321 AS2 (andi
,%A0
,0x07));
4323 if (AVR_HAVE_MUL
&& scratch
)
4326 return (AS2 (ldi
,%3,0x08) CR_TAB
4327 AS2 (mul
,%B0
,%3) CR_TAB
4328 AS2 (mov
,%A0
,r1
) CR_TAB
4329 AS1 (clr
,%B0
) CR_TAB
4330 AS1 (clr
,__zero_reg__
));
4332 if (optimize_size
&& scratch
)
4337 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4338 AS1 (clr
,%B0
) CR_TAB
4339 AS1 (swap
,%A0
) CR_TAB
4340 AS1 (lsr
,%A0
) CR_TAB
4341 AS2 (ldi
,%3,0x07) CR_TAB
4347 return ("set" CR_TAB
4348 AS2 (bld
,r1
,3) CR_TAB
4349 AS2 (mul
,%B0
,r1
) CR_TAB
4350 AS2 (mov
,%A0
,r1
) CR_TAB
4351 AS1 (clr
,%B0
) CR_TAB
4352 AS1 (clr
,__zero_reg__
));
4355 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4356 AS1 (clr
,%B0
) CR_TAB
4357 AS1 (lsr
,%A0
) CR_TAB
4358 AS1 (lsr
,%A0
) CR_TAB
4359 AS1 (lsr
,%A0
) CR_TAB
4360 AS1 (lsr
,%A0
) CR_TAB
4364 if (AVR_HAVE_MUL
&& ldi_ok
)
4367 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4368 AS2 (mul
,%B0
,%A0
) CR_TAB
4369 AS2 (mov
,%A0
,r1
) CR_TAB
4370 AS1 (clr
,%B0
) CR_TAB
4371 AS1 (clr
,__zero_reg__
));
4373 if (AVR_HAVE_MUL
&& scratch
)
4376 return (AS2 (ldi
,%3,0x04) CR_TAB
4377 AS2 (mul
,%B0
,%3) CR_TAB
4378 AS2 (mov
,%A0
,r1
) CR_TAB
4379 AS1 (clr
,%B0
) CR_TAB
4380 AS1 (clr
,__zero_reg__
));
4382 if (optimize_size
&& ldi_ok
)
4385 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4386 AS2 (ldi
,%B0
,6) "\n1:\t"
4387 AS1 (lsr
,%A0
) CR_TAB
4388 AS1 (dec
,%B0
) CR_TAB
4391 if (optimize_size
&& scratch
)
4394 return (AS1 (clr
,%A0
) CR_TAB
4395 AS1 (lsl
,%B0
) CR_TAB
4396 AS1 (rol
,%A0
) CR_TAB
4397 AS1 (lsl
,%B0
) CR_TAB
4398 AS1 (rol
,%A0
) CR_TAB
4403 return (AS1 (clr
,%A0
) CR_TAB
4404 AS1 (lsl
,%B0
) CR_TAB
4405 AS1 (rol
,%A0
) CR_TAB
4410 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4412 insn
, operands
, len
, 2);
4416 /* 32bit logic shift right ((unsigned int)x >> i) */
4419 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4421 if (GET_CODE (operands
[2]) == CONST_INT
)
4429 switch (INTVAL (operands
[2]))
4432 if (INTVAL (operands
[2]) < 32)
4436 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4437 AS1 (clr
,%C0
) CR_TAB
4438 AS2 (movw
,%A0
,%C0
));
4440 return (AS1 (clr
,%D0
) CR_TAB
4441 AS1 (clr
,%C0
) CR_TAB
4442 AS1 (clr
,%B0
) CR_TAB
4447 int reg0
= true_regnum (operands
[0]);
4448 int reg1
= true_regnum (operands
[1]);
4451 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4452 AS2 (mov
,%B0
,%C1
) CR_TAB
4453 AS2 (mov
,%C0
,%D1
) CR_TAB
4456 return (AS1 (clr
,%D0
) CR_TAB
4457 AS2 (mov
,%C0
,%D1
) CR_TAB
4458 AS2 (mov
,%B0
,%C1
) CR_TAB
4464 int reg0
= true_regnum (operands
[0]);
4465 int reg1
= true_regnum (operands
[1]);
4467 if (reg0
== reg1
+ 2)
4468 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4471 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4472 AS1 (clr
,%C0
) CR_TAB
4475 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4476 AS2 (mov
,%A0
,%C1
) CR_TAB
4477 AS1 (clr
,%C0
) CR_TAB
4482 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4483 AS1 (clr
,%B0
) CR_TAB
4484 AS1 (clr
,%C0
) CR_TAB
4489 return (AS1 (clr
,%A0
) CR_TAB
4490 AS2 (sbrc
,%D0
,7) CR_TAB
4491 AS1 (inc
,%A0
) CR_TAB
4492 AS1 (clr
,%B0
) CR_TAB
4493 AS1 (clr
,%C0
) CR_TAB
4498 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4499 AS1 (ror
,%C0
) CR_TAB
4500 AS1 (ror
,%B0
) CR_TAB
4502 insn
, operands
, len
, 4);
4506 /* Create RTL split patterns for byte sized rotate expressions. This
4507 produces a series of move instructions and considers overlap situations.
4508 Overlapping non-HImode operands need a scratch register. */
4511 avr_rotate_bytes (rtx operands
[])
4514 enum machine_mode mode
= GET_MODE (operands
[0]);
4515 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
4516 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
4517 int num
= INTVAL (operands
[2]);
4518 rtx scratch
= operands
[3];
4519 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4520 Word move if no scratch is needed, otherwise use size of scratch. */
4521 enum machine_mode move_mode
= QImode
;
4522 int move_size
, offset
, size
;
4526 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
4529 move_mode
= GET_MODE (scratch
);
4531 /* Force DI rotate to use QI moves since other DI moves are currently split
4532 into QI moves so forward propagation works better. */
4535 /* Make scratch smaller if needed. */
4536 if (GET_MODE (scratch
) == HImode
&& move_mode
== QImode
)
4537 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
4539 move_size
= GET_MODE_SIZE (move_mode
);
4540 /* Number of bytes/words to rotate. */
4541 offset
= (num
>> 3) / move_size
;
4542 /* Number of moves needed. */
4543 size
= GET_MODE_SIZE (mode
) / move_size
;
4544 /* Himode byte swap is special case to avoid a scratch register. */
4545 if (mode
== HImode
&& same_reg
)
4547 /* HImode byte swap, using xor. This is as quick as using scratch. */
4549 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
4550 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
4551 if (!rtx_equal_p (dst
, src
))
4553 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
4554 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
4555 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
4560 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4561 /* Create linked list of moves to determine move order. */
4565 } move
[MAX_SIZE
+ 8];
4568 gcc_assert (size
<= MAX_SIZE
);
4569 /* Generate list of subreg moves. */
4570 for (i
= 0; i
< size
; i
++)
4573 int to
= (from
+ offset
) % size
;
4574 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
4575 mode
, from
* move_size
);
4576 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
4577 mode
, to
* move_size
);
4580 /* Mark dependence where a dst of one move is the src of another move.
4581 The first move is a conflict as it must wait until second is
4582 performed. We ignore moves to self - we catch this later. */
4584 for (i
= 0; i
< size
; i
++)
4585 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
4586 for (j
= 0; j
< size
; j
++)
4587 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
4589 /* The dst of move i is the src of move j. */
4596 /* Go through move list and perform non-conflicting moves. As each
4597 non-overlapping move is made, it may remove other conflicts
4598 so the process is repeated until no conflicts remain. */
4603 /* Emit move where dst is not also a src or we have used that
4605 for (i
= 0; i
< size
; i
++)
4606 if (move
[i
].src
!= NULL_RTX
)
4608 if (move
[i
].links
== -1
4609 || move
[move
[i
].links
].src
== NULL_RTX
)
4612 /* Ignore NOP moves to self. */
4613 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
4614 emit_move_insn (move
[i
].dst
, move
[i
].src
);
4616 /* Remove conflict from list. */
4617 move
[i
].src
= NULL_RTX
;
4623 /* Check for deadlock. This is when no moves occurred and we have
4624 at least one blocked move. */
4625 if (moves
== 0 && blocked
!= -1)
4627 /* Need to use scratch register to break deadlock.
4628 Add move to put dst of blocked move into scratch.
4629 When this move occurs, it will break chain deadlock.
4630 The scratch register is substituted for real move. */
4632 move
[size
].src
= move
[blocked
].dst
;
4633 move
[size
].dst
= scratch
;
4634 /* Scratch move is never blocked. */
4635 move
[size
].links
= -1;
4636 /* Make sure we have valid link. */
4637 gcc_assert (move
[blocked
].links
!= -1);
4638 /* Replace src of blocking move with scratch reg. */
4639 move
[move
[blocked
].links
].src
= scratch
;
4640 /* Make dependent on scratch move occuring. */
4641 move
[blocked
].links
= size
;
4645 while (blocked
!= -1);
4650 /* Modifies the length assigned to instruction INSN
4651 LEN is the initially computed length of the insn. */
4654 adjust_insn_length (rtx insn
, int len
)
4656 rtx patt
= PATTERN (insn
);
4659 if (GET_CODE (patt
) == SET
)
4662 op
[1] = SET_SRC (patt
);
4663 op
[0] = SET_DEST (patt
);
4664 if (general_operand (op
[1], VOIDmode
)
4665 && general_operand (op
[0], VOIDmode
))
4667 switch (GET_MODE (op
[0]))
4670 output_movqi (insn
, op
, &len
);
4673 output_movhi (insn
, op
, &len
);
4677 output_movsisf (insn
, op
, &len
);
4683 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4685 switch (GET_MODE (op
[1]))
4687 case HImode
: out_tsthi (insn
, op
[1], &len
); break;
4688 case SImode
: out_tstsi (insn
, op
[1], &len
); break;
4692 else if (GET_CODE (op
[1]) == AND
)
4694 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4696 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4697 if (GET_MODE (op
[1]) == SImode
)
4698 len
= (((mask
& 0xff) != 0xff)
4699 + ((mask
& 0xff00) != 0xff00)
4700 + ((mask
& 0xff0000L
) != 0xff0000L
)
4701 + ((mask
& 0xff000000L
) != 0xff000000L
));
4702 else if (GET_MODE (op
[1]) == HImode
)
4703 len
= (((mask
& 0xff) != 0xff)
4704 + ((mask
& 0xff00) != 0xff00));
4707 else if (GET_CODE (op
[1]) == IOR
)
4709 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4711 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4712 if (GET_MODE (op
[1]) == SImode
)
4713 len
= (((mask
& 0xff) != 0)
4714 + ((mask
& 0xff00) != 0)
4715 + ((mask
& 0xff0000L
) != 0)
4716 + ((mask
& 0xff000000L
) != 0));
4717 else if (GET_MODE (op
[1]) == HImode
)
4718 len
= (((mask
& 0xff) != 0)
4719 + ((mask
& 0xff00) != 0));
4723 set
= single_set (insn
);
4728 op
[1] = SET_SRC (set
);
4729 op
[0] = SET_DEST (set
);
4731 if (GET_CODE (patt
) == PARALLEL
4732 && general_operand (op
[1], VOIDmode
)
4733 && general_operand (op
[0], VOIDmode
))
4735 if (XVECLEN (patt
, 0) == 2)
4736 op
[2] = XVECEXP (patt
, 0, 1);
4738 switch (GET_MODE (op
[0]))
4744 output_reload_inhi (insn
, op
, &len
);
4748 output_reload_insisf (insn
, op
, &len
);
4754 else if (GET_CODE (op
[1]) == ASHIFT
4755 || GET_CODE (op
[1]) == ASHIFTRT
4756 || GET_CODE (op
[1]) == LSHIFTRT
)
4760 ops
[1] = XEXP (op
[1],0);
4761 ops
[2] = XEXP (op
[1],1);
4762 switch (GET_CODE (op
[1]))
4765 switch (GET_MODE (op
[0]))
4767 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4768 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4769 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4774 switch (GET_MODE (op
[0]))
4776 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4777 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4778 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4783 switch (GET_MODE (op
[0]))
4785 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4786 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4787 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4799 /* Return nonzero if register REG dead after INSN. */
4802 reg_unused_after (rtx insn
, rtx reg
)
4804 return (dead_or_set_p (insn
, reg
)
4805 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
4808 /* Return nonzero if REG is not used after INSN.
4809 We assume REG is a reload reg, and therefore does
4810 not live past labels. It may live past calls or jumps though. */
4813 _reg_unused_after (rtx insn
, rtx reg
)
4818 /* If the reg is set by this instruction, then it is safe for our
4819 case. Disregard the case where this is a store to memory, since
4820 we are checking a register used in the store address. */
4821 set
= single_set (insn
);
4822 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4823 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4826 while ((insn
= NEXT_INSN (insn
)))
4829 code
= GET_CODE (insn
);
4832 /* If this is a label that existed before reload, then the register
4833 if dead here. However, if this is a label added by reorg, then
4834 the register may still be live here. We can't tell the difference,
4835 so we just ignore labels completely. */
4836 if (code
== CODE_LABEL
)
4844 if (code
== JUMP_INSN
)
4847 /* If this is a sequence, we must handle them all at once.
4848 We could have for instance a call that sets the target register,
4849 and an insn in a delay slot that uses the register. In this case,
4850 we must return 0. */
4851 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4856 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4858 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4859 rtx set
= single_set (this_insn
);
4861 if (GET_CODE (this_insn
) == CALL_INSN
)
4863 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4865 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4870 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4872 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4874 if (GET_CODE (SET_DEST (set
)) != MEM
)
4880 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4885 else if (code
== JUMP_INSN
)
4889 if (code
== CALL_INSN
)
4892 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4893 if (GET_CODE (XEXP (tem
, 0)) == USE
4894 && REG_P (XEXP (XEXP (tem
, 0), 0))
4895 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4897 if (call_used_regs
[REGNO (reg
)])
4901 set
= single_set (insn
);
4903 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4905 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4906 return GET_CODE (SET_DEST (set
)) != MEM
;
4907 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4913 /* Target hook for assembling integer objects. The AVR version needs
4914 special handling for references to certain labels. */
4917 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4919 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4920 && text_segment_operand (x
, VOIDmode
) )
4922 fputs ("\t.word\tgs(", asm_out_file
);
4923 output_addr_const (asm_out_file
, x
);
4924 fputs (")\n", asm_out_file
);
4927 return default_assemble_integer (x
, size
, aligned_p
);
4930 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4933 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
4936 /* If the function has the 'signal' or 'interrupt' attribute, test to
4937 make sure that the name of the function is "__vector_NN" so as to
4938 catch when the user misspells the interrupt vector name. */
4940 if (cfun
->machine
->is_interrupt
)
4942 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4944 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4945 "%qs appears to be a misspelled interrupt handler",
4949 else if (cfun
->machine
->is_signal
)
4951 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4953 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4954 "%qs appears to be a misspelled signal handler",
4959 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
4960 ASM_OUTPUT_LABEL (file
, name
);
4963 /* The routine used to output NUL terminated strings. We use a special
4964 version of this for most svr4 targets because doing so makes the
4965 generated assembly code more compact (and thus faster to assemble)
4966 as well as more readable, especially for targets like the i386
4967 (where the only alternative is to output character sequences as
4968 comma separated lists of numbers). */
4971 gas_output_limited_string(FILE *file
, const char *str
)
4973 const unsigned char *_limited_str
= (const unsigned char *) str
;
4975 fprintf (file
, "%s\"", STRING_ASM_OP
);
4976 for (; (ch
= *_limited_str
); _limited_str
++)
4979 switch (escape
= ESCAPES
[ch
])
4985 fprintf (file
, "\\%03o", ch
);
4989 putc (escape
, file
);
4993 fprintf (file
, "\"\n");
4996 /* The routine used to output sequences of byte values. We use a special
4997 version of this for most svr4 targets because doing so makes the
4998 generated assembly code more compact (and thus faster to assemble)
4999 as well as more readable. Note that if we find subparts of the
5000 character sequence which end with NUL (and which are shorter than
5001 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
5004 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
5006 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
5007 const unsigned char *limit
= _ascii_bytes
+ length
;
5008 unsigned bytes_in_chunk
= 0;
5009 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
5011 const unsigned char *p
;
5012 if (bytes_in_chunk
>= 60)
5014 fprintf (file
, "\"\n");
5017 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
5019 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
5021 if (bytes_in_chunk
> 0)
5023 fprintf (file
, "\"\n");
5026 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
5033 if (bytes_in_chunk
== 0)
5034 fprintf (file
, "\t.ascii\t\"");
5035 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
5042 fprintf (file
, "\\%03o", ch
);
5043 bytes_in_chunk
+= 4;
5047 putc (escape
, file
);
5048 bytes_in_chunk
+= 2;
5053 if (bytes_in_chunk
> 0)
5054 fprintf (file
, "\"\n");
5057 /* Return value is nonzero if pseudos that have been
5058 assigned to registers of class CLASS would likely be spilled
5059 because registers of CLASS are needed for spill registers. */
5062 avr_class_likely_spilled_p (reg_class_t c
)
5064 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
5067 /* Valid attributes:
5068 progmem - put data to program memory;
5069 signal - make a function to be hardware interrupt. After function
5070 prologue interrupts are disabled;
5071 interrupt - make a function to be hardware interrupt. After function
5072 prologue interrupts are enabled;
5073 naked - don't generate function prologue/epilogue and `ret' command.
5075 Only `progmem' attribute valid for type. */
5077 /* Handle a "progmem" attribute; arguments as in
5078 struct attribute_spec.handler. */
5080 avr_handle_progmem_attribute (tree
*node
, tree name
,
5081 tree args ATTRIBUTE_UNUSED
,
5082 int flags ATTRIBUTE_UNUSED
,
5087 if (TREE_CODE (*node
) == TYPE_DECL
)
5089 /* This is really a decl attribute, not a type attribute,
5090 but try to handle it for GCC 3.0 backwards compatibility. */
5092 tree type
= TREE_TYPE (*node
);
5093 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
5094 tree newtype
= build_type_attribute_variant (type
, attr
);
5096 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
5097 TREE_TYPE (*node
) = newtype
;
5098 *no_add_attrs
= true;
5100 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
5102 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
5104 warning (0, "only initialized variables can be placed into "
5105 "program memory area");
5106 *no_add_attrs
= true;
5111 warning (OPT_Wattributes
, "%qE attribute ignored",
5113 *no_add_attrs
= true;
5120 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5121 struct attribute_spec.handler. */
5124 avr_handle_fndecl_attribute (tree
*node
, tree name
,
5125 tree args ATTRIBUTE_UNUSED
,
5126 int flags ATTRIBUTE_UNUSED
,
5129 if (TREE_CODE (*node
) != FUNCTION_DECL
)
5131 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
5133 *no_add_attrs
= true;
5140 avr_handle_fntype_attribute (tree
*node
, tree name
,
5141 tree args ATTRIBUTE_UNUSED
,
5142 int flags ATTRIBUTE_UNUSED
,
5145 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
5147 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
5149 *no_add_attrs
= true;
5155 /* Look for attribute `progmem' in DECL
5156 if found return 1, otherwise 0. */
5159 avr_progmem_p (tree decl
, tree attributes
)
5163 if (TREE_CODE (decl
) != VAR_DECL
)
5167 != lookup_attribute ("progmem", attributes
))
5173 while (TREE_CODE (a
) == ARRAY_TYPE
);
5175 if (a
== error_mark_node
)
5178 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
5184 /* Add the section attribute if the variable is in progmem. */
5187 avr_insert_attributes (tree node
, tree
*attributes
)
5189 if (TREE_CODE (node
) == VAR_DECL
5190 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
5191 && avr_progmem_p (node
, *attributes
))
5193 if (TREE_READONLY (node
))
5195 static const char dsec
[] = ".progmem.data";
5197 *attributes
= tree_cons (get_identifier ("section"),
5198 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
5203 error ("variable %q+D must be const in order to be put into"
5204 " read-only section by means of %<__attribute__((progmem))%>",
5210 /* A get_unnamed_section callback for switching to progmem_section. */
5213 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
5215 fprintf (asm_out_file
,
5216 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5217 AVR_HAVE_JMP_CALL
? "a" : "ax");
5218 /* Should already be aligned, this is just to be safe if it isn't. */
5219 fprintf (asm_out_file
, "\t.p2align 1\n");
5223 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5224 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5225 /* Track need of __do_clear_bss. */
5228 avr_asm_output_aligned_decl_common (FILE * stream
, const_tree decl ATTRIBUTE_UNUSED
,
5229 const char *name
, unsigned HOST_WIDE_INT size
,
5230 unsigned int align
, bool local_p
)
5232 avr_need_clear_bss_p
= true;
5236 fputs ("\t.local\t", stream
);
5237 assemble_name (stream
, name
);
5238 fputs ("\n", stream
);
5241 fputs ("\t.comm\t", stream
);
5242 assemble_name (stream
, name
);
5244 "," HOST_WIDE_INT_PRINT_UNSIGNED
",%u\n",
5245 size
, align
/ BITS_PER_UNIT
);
5249 /* Unnamed section callback for data_section
5250 to track need of __do_copy_data. */
5253 avr_output_data_section_asm_op (const void *data
)
5255 avr_need_copy_data_p
= true;
5257 /* Dispatch to default. */
5258 output_section_asm_op (data
);
5262 /* Unnamed section callback for bss_section
5263 to track need of __do_clear_bss. */
5266 avr_output_bss_section_asm_op (const void *data
)
5268 avr_need_clear_bss_p
= true;
5270 /* Dispatch to default. */
5271 output_section_asm_op (data
);
5275 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5278 avr_asm_init_sections (void)
5280 progmem_section
= get_unnamed_section (AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
,
5281 avr_output_progmem_section_asm_op
,
5283 readonly_data_section
= data_section
;
5285 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
5286 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
5290 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5291 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5294 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
5296 if (!avr_need_copy_data_p
)
5297 avr_need_copy_data_p
= (0 == strncmp (name
, ".data", 5)
5298 || 0 == strncmp (name
, ".rodata", 7)
5299 || 0 == strncmp (name
, ".gnu.linkonce.d", 15));
5301 if (!avr_need_clear_bss_p
)
5302 avr_need_clear_bss_p
= (0 == strncmp (name
, ".bss", 4));
5304 default_elf_asm_named_section (name
, flags
, decl
);
5308 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
5310 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
5312 if (strncmp (name
, ".noinit", 7) == 0)
5314 if (decl
&& TREE_CODE (decl
) == VAR_DECL
5315 && DECL_INITIAL (decl
) == NULL_TREE
)
5316 flags
|= SECTION_BSS
; /* @nobits */
5318 warning (0, "only uninitialized variables can be placed in the "
5326 /* Implement `TARGET_ASM_FILE_START'. */
5327 /* Outputs some appropriate text to go at the start of an assembler
5331 avr_file_start (void)
5333 if (avr_current_arch
->asm_only
)
5334 error ("MCU %qs supported for assembler only", avr_mcu_name
);
5336 default_file_start ();
5338 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
5339 fputs ("__SREG__ = 0x3f\n"
5341 "__SP_L__ = 0x3d\n", asm_out_file
);
5343 fputs ("__tmp_reg__ = 0\n"
5344 "__zero_reg__ = 1\n", asm_out_file
);
5348 /* Implement `TARGET_ASM_FILE_END'. */
5349 /* Outputs to the stdio stream FILE some
5350 appropriate text to go at the end of an assembler file. */
5355 /* Output these only if there is anything in the
5356 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5357 input section(s) - some code size can be saved by not
5358 linking in the initialization code from libgcc if resp.
5359 sections are empty. */
5361 if (avr_need_copy_data_p
)
5362 fputs (".global __do_copy_data\n", asm_out_file
);
5364 if (avr_need_clear_bss_p
)
5365 fputs (".global __do_clear_bss\n", asm_out_file
);
5368 /* Choose the order in which to allocate hard registers for
5369 pseudo-registers local to a basic block.
5371 Store the desired register order in the array `reg_alloc_order'.
5372 Element 0 should be the register to allocate first; element 1, the
5373 next register; and so on. */
5376 order_regs_for_local_alloc (void)
/* Three alternative allocation orders; which one applies is selected
   below via TARGET_ORDER_1 / TARGET_ORDER_2.
   NOTE(review): the full contents of order_0/order_1/order_2 are
   truncated in this extract (only one row of each survives) — consult
   the complete gcc/config/avr/avr.c before relying on the tables.  */
5379 static const int order_0
[] = {
5387 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5391 static const int order_1
[] = {
5399 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5403 static const int order_2
[] = {
5412 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick the order requested on the command line; order_0 is the default.  */
5417 const int *order
= (TARGET_ORDER_1
? order_1
:
5418 TARGET_ORDER_2
? order_2
:
/* Copy the chosen table into the global reg_alloc_order array.  */
5420 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
5421 reg_alloc_order
[i
] = order
[i
];
5425 /* Implement `TARGET_REGISTER_MOVE_COST' */
5428 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
5429 reg_class_t from
, reg_class_t to
)
5431 return (from
== STACK_REG
? 6
5432 : to
== STACK_REG
? 12
5437 /* Implement `TARGET_MEMORY_MOVE_COST' */
5440 avr_memory_move_cost (enum machine_mode mode
, reg_class_t rclass ATTRIBUTE_UNUSED
,
5441 bool in ATTRIBUTE_UNUSED
)
5443 return (mode
== QImode
? 2
5444 : mode
== HImode
? 4
5445 : mode
== SImode
? 8
5446 : mode
== SFmode
? 8
5451 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5452 cost of an RTX operand given its context. X is the rtx of the
5453 operand, MODE is its mode, and OUTER is the rtx_code of this
5454 operand's parent operator. */
5457 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
5460 enum rtx_code code
= GET_CODE (x
);
5471 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5478 avr_rtx_costs (x
, code
, outer
, &total
, speed
);
5482 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5483 is to be calculated. Return true if the complete cost has been
5484 computed, and false if subexpressions should be scanned. In either
5485 case, *TOTAL contains the cost result. */
5488 avr_rtx_costs (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
, int *total
,
5491 enum rtx_code code
= (enum rtx_code
) codearg
;
5492 enum machine_mode mode
= GET_MODE (x
);
5499 /* Immediate constants are as cheap as registers. */
5507 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5515 *total
= COSTS_N_INSNS (1);
5519 *total
= COSTS_N_INSNS (3);
5523 *total
= COSTS_N_INSNS (7);
5529 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5537 *total
= COSTS_N_INSNS (1);
5543 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5547 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5548 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5552 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
5553 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5554 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5558 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
5559 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5560 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5567 *total
= COSTS_N_INSNS (1);
5568 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5569 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5573 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5575 *total
= COSTS_N_INSNS (2);
5576 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5578 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5579 *total
= COSTS_N_INSNS (1);
5581 *total
= COSTS_N_INSNS (2);
5585 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5587 *total
= COSTS_N_INSNS (4);
5588 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5590 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5591 *total
= COSTS_N_INSNS (1);
5593 *total
= COSTS_N_INSNS (4);
5599 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5605 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5606 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5607 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5608 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5612 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5613 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5614 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5622 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
5624 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5631 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
5633 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5641 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5642 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5650 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5653 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5654 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5661 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
5662 *total
= COSTS_N_INSNS (1);
5667 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
5668 *total
= COSTS_N_INSNS (3);
5673 if (CONST_INT_P (XEXP (x
, 1)))
5674 switch (INTVAL (XEXP (x
, 1)))
5678 *total
= COSTS_N_INSNS (5);
5681 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
5689 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5696 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5698 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5699 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5703 val
= INTVAL (XEXP (x
, 1));
5705 *total
= COSTS_N_INSNS (3);
5706 else if (val
>= 0 && val
<= 7)
5707 *total
= COSTS_N_INSNS (val
);
5709 *total
= COSTS_N_INSNS (1);
5714 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5716 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5717 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5720 switch (INTVAL (XEXP (x
, 1)))
5727 *total
= COSTS_N_INSNS (2);
5730 *total
= COSTS_N_INSNS (3);
5736 *total
= COSTS_N_INSNS (4);
5741 *total
= COSTS_N_INSNS (5);
5744 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5747 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5750 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
5753 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5754 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5759 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5761 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5762 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5765 switch (INTVAL (XEXP (x
, 1)))
5771 *total
= COSTS_N_INSNS (3);
5776 *total
= COSTS_N_INSNS (4);
5779 *total
= COSTS_N_INSNS (6);
5782 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5785 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5786 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5793 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5800 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5802 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5803 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5807 val
= INTVAL (XEXP (x
, 1));
5809 *total
= COSTS_N_INSNS (4);
5811 *total
= COSTS_N_INSNS (2);
5812 else if (val
>= 0 && val
<= 7)
5813 *total
= COSTS_N_INSNS (val
);
5815 *total
= COSTS_N_INSNS (1);
5820 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5822 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5823 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5826 switch (INTVAL (XEXP (x
, 1)))
5832 *total
= COSTS_N_INSNS (2);
5835 *total
= COSTS_N_INSNS (3);
5841 *total
= COSTS_N_INSNS (4);
5845 *total
= COSTS_N_INSNS (5);
5848 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5851 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5855 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5858 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5859 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5864 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5866 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5867 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5870 switch (INTVAL (XEXP (x
, 1)))
5876 *total
= COSTS_N_INSNS (4);
5881 *total
= COSTS_N_INSNS (6);
5884 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5887 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5890 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5891 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5898 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5905 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5907 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5908 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5912 val
= INTVAL (XEXP (x
, 1));
5914 *total
= COSTS_N_INSNS (3);
5915 else if (val
>= 0 && val
<= 7)
5916 *total
= COSTS_N_INSNS (val
);
5918 *total
= COSTS_N_INSNS (1);
5923 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5925 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5926 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5929 switch (INTVAL (XEXP (x
, 1)))
5936 *total
= COSTS_N_INSNS (2);
5939 *total
= COSTS_N_INSNS (3);
5944 *total
= COSTS_N_INSNS (4);
5948 *total
= COSTS_N_INSNS (5);
5954 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5957 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5961 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5964 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5965 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5970 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5972 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5973 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5976 switch (INTVAL (XEXP (x
, 1)))
5982 *total
= COSTS_N_INSNS (4);
5985 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5990 *total
= COSTS_N_INSNS (4);
5993 *total
= COSTS_N_INSNS (6);
5996 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5997 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
6004 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
6008 switch (GET_MODE (XEXP (x
, 0)))
6011 *total
= COSTS_N_INSNS (1);
6012 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
6013 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
6017 *total
= COSTS_N_INSNS (2);
6018 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
6019 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
6020 else if (INTVAL (XEXP (x
, 1)) != 0)
6021 *total
+= COSTS_N_INSNS (1);
6025 *total
= COSTS_N_INSNS (4);
6026 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
6027 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
6028 else if (INTVAL (XEXP (x
, 1)) != 0)
6029 *total
+= COSTS_N_INSNS (3);
6035 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
6044 /* Calculate the cost of a memory address. */
6047 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
6049 if (GET_CODE (x
) == PLUS
6050 && GET_CODE (XEXP (x
,1)) == CONST_INT
6051 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
6052 && INTVAL (XEXP (x
,1)) >= 61)
6054 if (CONSTANT_ADDRESS_P (x
))
6056 if (optimize
> 0 && io_address_operand (x
, QImode
))
6063 /* Test for extra memory constraint 'Q'.
6064 It's a memory address based on Y or Z pointer with valid displacement. */
6067 extra_constraint_Q (rtx x
)
6069 if (GET_CODE (XEXP (x
,0)) == PLUS
6070 && REG_P (XEXP (XEXP (x
,0), 0))
6071 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
6072 && (INTVAL (XEXP (XEXP (x
,0), 1))
6073 <= MAX_LD_OFFSET (GET_MODE (x
))))
6075 rtx xx
= XEXP (XEXP (x
,0), 0);
6076 int regno
= REGNO (xx
);
6077 if (TARGET_ALL_DEBUG
)
6079 fprintf (stderr
, ("extra_constraint:\n"
6080 "reload_completed: %d\n"
6081 "reload_in_progress: %d\n"),
6082 reload_completed
, reload_in_progress
);
6085 if (regno
>= FIRST_PSEUDO_REGISTER
)
6086 return 1; /* allocate pseudos */
6087 else if (regno
== REG_Z
|| regno
== REG_Y
)
6088 return 1; /* strictly check */
6089 else if (xx
== frame_pointer_rtx
6090 || xx
== arg_pointer_rtx
)
6091 return 1; /* XXX frame & arg pointer checks */
6096 /* Convert condition code CONDITION to the valid AVR condition code. */
6099 avr_normalize_condition (RTX_CODE condition
)
/* NOTE(review): the function header line (name/return type) is elided from
   this excerpt; judging by the original comment and body this is the
   machine-reorg pass that canonicalizes cc0 compare insns -- TODO confirm
   against upstream gcc/config/avr/avr.c.  Several brace/return lines are
   also elided (original line numbers jump).  Code left byte-identical.  */
6116 /* This function optimizes conditional jumps. */
/* Walk every insn in the function.  */
6123 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
/* Only ordinary/call/jump insns that are a single SET are of interest.  */
6125 if (! (GET_CODE (insn
) == INSN
6126 || GET_CODE (insn
) == CALL_INSN
6127 || GET_CODE (insn
) == JUMP_INSN
)
6128 || !single_set (insn
))
6131 pattern
= PATTERN (insn
);
6133 if (GET_CODE (pattern
) == PARALLEL
)
6134 pattern
= XVECEXP (pattern
, 0, 0);
/* Look for a SET of cc0 that compare_diff_p accepts.  */
6135 if (GET_CODE (pattern
) == SET
6136 && SET_DEST (pattern
) == cc0_rtx
6137 && compare_diff_p (insn
))
6139 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
6141 /* Now we work under compare insn. */
6143 pattern
= SET_SRC (pattern
);
/* Case 1: reg-reg compare -- swap the operands and the condition in
   the following (next_real_insn) branch, forcing re-recognition.  */
6144 if (true_regnum (XEXP (pattern
,0)) >= 0
6145 && true_regnum (XEXP (pattern
,1)) >= 0 )
6147 rtx x
= XEXP (pattern
,0);
6148 rtx next
= next_real_insn (insn
);
6149 rtx pat
= PATTERN (next
);
6150 rtx src
= SET_SRC (pat
);
6151 rtx t
= XEXP (src
,0);
6152 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
6153 XEXP (pattern
,0) = XEXP (pattern
,1);
6154 XEXP (pattern
,1) = x
;
/* Force re-recognition of the modified branch insn.  */
6155 INSN_CODE (next
) = -1;
/* Case 2: compare against zero (a "tst") -- reverse operands.  */
6157 else if (true_regnum (XEXP (pattern
, 0)) >= 0
6158 && XEXP (pattern
, 1) == const0_rtx
)
6160 /* This is a tst insn, we can reverse it. */
6161 rtx next
= next_real_insn (insn
);
6162 rtx pat
= PATTERN (next
);
6163 rtx src
= SET_SRC (pat
);
6164 rtx t
= XEXP (src
,0);
6166 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
6167 XEXP (pattern
, 1) = XEXP (pattern
, 0);
6168 XEXP (pattern
, 0) = const0_rtx
;
6169 INSN_CODE (next
) = -1;
6170 INSN_CODE (insn
) = -1;
/* Case 3: compare against a CONST_INT -- if the comparison can be
   simplified, bump the constant by one and normalize the condition.  */
6172 else if (true_regnum (XEXP (pattern
,0)) >= 0
6173 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
6175 rtx x
= XEXP (pattern
,1);
6176 rtx next
= next_real_insn (insn
);
6177 rtx pat
= PATTERN (next
);
6178 rtx src
= SET_SRC (pat
);
6179 rtx t
= XEXP (src
,0);
6180 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
6182 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
6184 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
6185 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
6186 INSN_CODE (next
) = -1;
6187 INSN_CODE (insn
) = -1;
/* NOTE(review): the body (a single return of the ABI return-value register
   number) is elided from this excerpt -- restore from upstream before
   editing.  Code left byte-identical.  */
6195 /* Returns register number for function return value.*/
6197 static inline unsigned int
6198 avr_ret_register (void)
/* NOTE(review): the return type line and braces are elided from this
   fragmented excerpt; the visible logic tests REGNO against the single
   return-value register.  Code left byte-identical.  */
6203 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6206 avr_function_value_regno_p (const unsigned int regno
)
6208 return (regno
== avr_ret_register ());
/* NOTE(review): original lines 6219-6220 are elided here (the jump in
   embedded line numbers) -- presumably a minimum-size clamp on OFFS; confirm
   against upstream before editing.  The visible code places a MODE-sized
   libcall return value ending at the fixed return register.  */
6211 /* Create an RTX representing the place where a
6212 library function returns a value of mode MODE. */
6215 avr_libcall_value (enum machine_mode mode
,
6216 const_rtx func ATTRIBUTE_UNUSED
)
6218 int offs
= GET_MODE_SIZE (mode
);
6221 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
/* NOTE(review): fragmented excerpt; the declaration of OFFS and some
   surrounding lines (6231-6236) are elided.  Visible behavior: non-BLKmode
   types delegate to avr_libcall_value; BLKmode sizes between 2 and 8 bytes
   are rounded up to the next SImode/DImode size before choosing the start
   register.  Code left byte-identical.  */
6224 /* Create an RTX representing the place where a
6225 function returns a value of data type VALTYPE. */
6228 avr_function_value (const_tree type
,
6229 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
6230 bool outgoing ATTRIBUTE_UNUSED
)
6234 if (TYPE_MODE (type
) != BLKmode
)
6235 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
6237 offs
= int_size_in_bytes (type
);
/* Round aggregate sizes 3..7 up to the containing SImode/DImode size.  */
6240 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
6241 offs
= GET_MODE_SIZE (SImode
);
6242 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
6243 offs
= GET_MODE_SIZE (DImode
);
6245 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
/* Test whether the hard register underlying X belongs to class RCLASS.
   NOTE(review): the return statements and the guard for a pseudo/invalid
   regno (lines 6252-6254, 6256+) are elided from this excerpt -- restore
   from upstream before editing.  Code left byte-identical.  */
6249 test_hard_reg_class (enum reg_class rclass
, rtx x
)
6251 int regno
= true_regnum (x
);
6255 if (TEST_HARD_REG_CLASS (rclass
, regno
))
/* Return nonzero if the jump INSN skips exactly one insn: the distance from
   the jump to DEST equals the jump's own length plus one word.
   NOTE(review): the second arm of the ternary choosing UID (lines
   6266-6267, the non-LABEL_REF case) is elided from this excerpt.  */
6263 jump_over_one_insn_p (rtx insn
, rtx dest
)
6265 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
6268 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
6269 int dest_addr
= INSN_ADDRESSES (uid
);
6270 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
/* NOTE(review): each guard's `return 0;`/`return 1;` line is elided from
   this fragmented excerpt (embedded line numbers jump after every `if`) --
   restore from upstream before editing.  Code left byte-identical.  */
6273 /* Returns 1 if a value of mode MODE can be stored starting with hard
6274 register number REGNO. On the enhanced core, anything larger than
6275 1 byte must start in even numbered register for "movw" to work
6276 (this way we don't have to check for odd registers everywhere). */
6279 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
6281 /* Disallow QImode in stack pointer regs. */
6282 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
6285 /* The only thing that can go into registers r28:r29 is a Pmode. */
6286 if (regno
== REG_Y
&& mode
== Pmode
)
6289 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6290 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
6296 /* Modes larger than QImode occupy consecutive registers. */
6297 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
6300 /* All modes larger than QImode should start in an even register. */
6301 return !(regno
& 1);
/* Emit assembler to reload a 16-bit (HImode) constant into %0 via scratch
   register %2, specializing when the low or high byte of the constant is
   zero or both bytes are equal.
   NOTE(review): several continuation lines of the returned AS2/CR_TAB
   templates, the *len bookkeeping, and the non-CONST_INT fallback are
   elided from this excerpt -- restore from upstream before editing.  */
6305 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
6311 if (GET_CODE (operands
[1]) == CONST_INT
)
6313 int val
= INTVAL (operands
[1]);
/* Low byte zero: clear %A0 from __zero_reg__, load only the high byte.  */
6314 if ((val
& 0xff) == 0)
6317 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
6318 AS2 (ldi
,%2,hi8(%1)) CR_TAB
/* High byte zero: load only the low byte, clear %B0.  */
6321 else if ((val
& 0xff00) == 0)
6324 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6325 AS2 (mov
,%A0
,%2) CR_TAB
6326 AS2 (mov
,%B0
,__zero_reg__
));
/* Both bytes equal: one ldi feeds both halves.  */
6328 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
6331 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6332 AS2 (mov
,%A0
,%2) CR_TAB
/* General case: load each byte through the scratch register.  */
6337 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6338 AS2 (mov
,%A0
,%2) CR_TAB
6339 AS2 (ldi
,%2,hi8(%1)) CR_TAB
/* Emit assembler to reload a 32-bit (SImode/SFmode) value into %0 via
   scratch register %2, one byte at a time (%A0..%D0), using __zero_reg__
   for any constant byte that is zero.
   NOTE(review): the braces, `else` keywords and the length computation's
   surrounding lines are elided from this fragmented excerpt -- restore
   from upstream before editing.  Code left byte-identical.  */
6345 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
6347 rtx src
= operands
[1];
6348 int cnst
= (GET_CODE (src
) == CONST_INT
);
/* Length: 4 base insns plus one extra per nonzero constant byte.  */
6353 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
6354 + ((INTVAL (src
) & 0xff00) != 0)
6355 + ((INTVAL (src
) & 0xff0000) != 0)
6356 + ((INTVAL (src
) & 0xff000000) != 0);
/* Byte 0 (%A0).  */
6363 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
6364 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
6367 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
6368 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
/* Byte 1 (%B0).  */
6370 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
6371 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
6374 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
6375 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
/* Byte 2 (%C0).  */
6377 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
6378 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
6381 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
6382 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
/* Byte 3 (%D0).  */
6384 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
6385 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
6388 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
6389 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
/* Emit a "bld" insn for bit BIT_NR of a multi-byte operand %0: the byte
   letter (A + bit_nr/8) is patched into s[5], the bit digit (bit_nr%8)
   into s[8] of the static template.
   NOTE(review): return type and braces are elided from this fragmented
   excerpt.  The static buffer makes this non-reentrant -- standard for
   these avr.c output helpers.  */
6395 avr_output_bld (rtx operands
[], int bit_nr
)
6397 static char s
[] = "bld %A0,0";
6399 s
[5] = 'A' + (bit_nr
>> 3);
6400 s
[8] = '0' + (bit_nr
& 7);
6401 output_asm_insn (s
, operands
);
/* Output one jump-table element for label number VALUE into the progmem
   section: a gs() word on devices with JMP/CALL, an rjmp otherwise.
   NOTE(review): the `else` line between the two fprintf calls is elided
   from this fragmented excerpt -- the two branches are alternatives, not
   sequential.  Code left byte-identical.  */
6405 avr_output_addr_vec_elt (FILE *stream
, int value
)
6407 switch_to_section (progmem_section
);
6408 if (AVR_HAVE_JMP_CALL
)
6409 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
6411 fprintf (stream
, "\trjmp .L%d\n", value
);
/* NOTE(review): the return statements (false inside the if, true at the
   end) are elided from this fragmented excerpt -- restore from upstream
   before editing.  Code left byte-identical.  */
6414 /* Returns true if SCRATCH are safe to be allocated as a scratch
6415 registers (for a define_peephole2) in the current function. */
6418 avr_hard_regno_scratch_ok (unsigned int regno
)
6420 /* Interrupt functions can only use registers that have already been saved
6421 by the prologue, even if they would normally be call-clobbered. */
6423 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6424 && !df_regs_ever_live_p (regno
))
/* NOTE(review): mirrors avr_hard_regno_scratch_ok above; the return
   statements are elided from this fragmented excerpt -- restore from
   upstream before editing.  Code left byte-identical.  */
6430 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6433 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
6434 unsigned int new_reg
)
6436 /* Interrupt functions can only use registers that have already been
6437 saved by the prologue, even if they would normally be
6440 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6441 && !df_regs_ever_live_p (new_reg
))
/* NOTE(review): heavily fragmented -- braces, several `if (reverse)` /
   `else` lines and intermediate branches are elided (original line numbers
   jump).  Restore from upstream before editing.  Code left byte-identical.
   Visible structure: choose skip-if-set vs skip-if-clear depending on
   REVERSE (long jump or jumping over one insn), using SBIS/SBIC for low
   I/O addresses, IN + SBRS/SBRC otherwise, and register-direct SBRS/SBRC
   (with a patched static template for multi-byte regs) for REG operands.  */
6447 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6448 or memory location in the I/O space (QImode only).
6450 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6451 Operand 1: register operand to test, or CONST_INT memory address.
6452 Operand 2: bit number.
6453 Operand 3: label to jump to if the test is true. */
6456 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
6458 enum rtx_code comp
= GET_CODE (operands
[0]);
6459 int long_jump
= (get_attr_length (insn
) >= 4);
6460 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
6464 else if (comp
== LT
)
/* When reversing, invert the comparison so the skip insn does the work.  */
6468 comp
= reverse_condition (comp
);
6470 if (GET_CODE (operands
[1]) == CONST_INT
)
/* I/O address low enough for sbis/sbic (bit-addressable I/O space).  */
6472 if (INTVAL (operands
[1]) < 0x40)
6475 output_asm_insn (AS2 (sbis
,%m1
-0x20,%2), operands
);
6477 output_asm_insn (AS2 (sbic
,%m1
-0x20,%2), operands
);
/* Otherwise read the I/O register into __tmp_reg__ and test the bit.  */
6481 output_asm_insn (AS2 (in
,__tmp_reg__
,%m1
-0x20), operands
);
6483 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
6485 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
6488 else /* GET_CODE (operands[1]) == REG */
6490 if (GET_MODE (operands
[1]) == QImode
)
6493 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
6495 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
6497 else /* HImode or SImode */
/* Patch the skip direction, byte letter and bit digit into the
   static template (non-reentrant, as elsewhere in this file).  */
6499 static char buf
[] = "sbrc %A1,0";
6500 int bit_nr
= INTVAL (operands
[2]);
6501 buf
[3] = (comp
== EQ
) ? 's' : 'c';
6502 buf
[6] = 'A' + (bit_nr
>> 3);
6503 buf
[9] = '0' + (bit_nr
& 7);
6504 output_asm_insn (buf
, operands
);
/* Long branch: skip over a 2-word jump; otherwise a direct rjmp.  */
6509 return (AS1 (rjmp
,.+4) CR_TAB
6512 return AS1 (rjmp
,%x3
);
/* Emit the .global for the libgcc constructor-runner, then delegate to the
   default ctor-section output.  NOTE(review): return type and braces are
   elided from this fragmented excerpt.  */
6516 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6519 avr_asm_out_ctor (rtx symbol
, int priority
)
6521 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
6522 default_ctor_section_asm_out_constructor (symbol
, priority
);
/* Emit the .global for the libgcc destructor-runner, then delegate to the
   default dtor-section output.  NOTE(review): return type and braces are
   elided from this fragmented excerpt.  */
6525 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6528 avr_asm_out_dtor (rtx symbol
, int priority
)
6530 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
6531 default_dtor_section_asm_out_destructor (symbol
, priority
);
/* BLKmode values of unknown size or larger than 8 bytes are returned in
   memory.  NOTE(review): the non-BLKmode branch (returning false) is
   elided from this fragmented excerpt -- restore from upstream before
   editing.  Code left byte-identical.  */
6534 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6537 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
6539 if (TYPE_MODE (type
) == BLKmode
)
6541 HOST_WIDE_INT size
= int_size_in_bytes (type
);
6542 return (size
== -1 || size
> 8);
/* Threshold at which switch statements use a jump table: lower (8) when
   there is no JMP/CALL or when call prologues are requested, 17 otherwise.
   NOTE(review): braces are elided from this fragmented excerpt.  */
6548 /* Worker function for CASE_VALUES_THRESHOLD. */
6550 unsigned int avr_case_values_threshold (void)
6552 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
/* Expand __builtin_avr_delay_cycles: decompose the requested cycle count
   into nested delay loops of decreasing width (SImode, 24-bit, HImode,
   QImode) plus trailing nop insns for the sub-3-cycle remainder.
   NOTE(review): braces and the final remainder-dispatch lines (between
   6599 and 6604/6610) are elided from this fragmented excerpt -- restore
   from upstream before editing.  Code left byte-identical.  */
6555 /* Helper for __builtin_avr_delay_cycles */
6558 avr_expand_delay_cycles (rtx operands0
)
6560 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
);
6561 unsigned HOST_WIDE_INT cycles_used
;
6562 unsigned HOST_WIDE_INT loop_count
;
/* 32-bit loop: 6 cycles per iteration, 9 cycles overhead.  */
6564 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
6566 loop_count
= ((cycles
- 9) / 6) + 1;
6567 cycles_used
= ((loop_count
- 1) * 6) + 9;
6568 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
)));
6569 cycles
-= cycles_used
;
/* 24-bit loop: 5 cycles per iteration, 7 cycles overhead.  */
6572 if (IN_RANGE (cycles
, 262145, 83886081))
6574 loop_count
= ((cycles
- 7) / 5) + 1;
6575 if (loop_count
> 0xFFFFFF)
6576 loop_count
= 0xFFFFFF;
6577 cycles_used
= ((loop_count
- 1) * 5) + 7;
6578 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
)));
6579 cycles
-= cycles_used
;
/* 16-bit loop: 4 cycles per iteration, 5 cycles overhead.  */
6582 if (IN_RANGE (cycles
, 768, 262144))
6584 loop_count
= ((cycles
- 5) / 4) + 1;
6585 if (loop_count
> 0xFFFF)
6586 loop_count
= 0xFFFF;
6587 cycles_used
= ((loop_count
- 1) * 4) + 5;
6588 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
)));
6589 cycles
-= cycles_used
;
/* 8-bit loop: 3 cycles per iteration.  */
6592 if (IN_RANGE (cycles
, 6, 767))
6594 loop_count
= cycles
/ 3;
6595 if (loop_count
> 255)
6597 cycles_used
= loop_count
* 3;
6598 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
)));
6599 cycles
-= cycles_used
;
/* Remaining 1-2 cycles are padded with nop insns.  */
6604 emit_insn (gen_nopv (GEN_INT(2)));
6610 emit_insn (gen_nopv (GEN_INT(1)));
/* NOTE(review): only the tail of the avr_builtin_id enum and a truncated
   DEF_BUILTIN macro survive in this excerpt (the enum keyword, the other
   AVR_BUILTIN_* enumerators, and the macro's do/while wrapper are elided).
   Restore from upstream before editing.  Code left byte-identical.  */
6615 /* IDs for all the AVR builtins. */
6628 AVR_BUILTIN_DELAY_CYCLES
/* Registers one md builtin under NAME with function type TYPE and id CODE.  */
6631 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6634 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6640 /* Set up all builtin functions for this target. */
6643 avr_init_builtins (void)
6645 tree void_ftype_void
6646 = build_function_type_list (void_type_node
, NULL_TREE
);
6647 tree uchar_ftype_uchar
6648 = build_function_type_list (unsigned_char_type_node
,
6649 unsigned_char_type_node
,
6651 tree uint_ftype_uchar_uchar
6652 = build_function_type_list (unsigned_type_node
,
6653 unsigned_char_type_node
,
6654 unsigned_char_type_node
,
6656 tree int_ftype_char_char
6657 = build_function_type_list (integer_type_node
,
6661 tree int_ftype_char_uchar
6662 = build_function_type_list (integer_type_node
,
6664 unsigned_char_type_node
,
6666 tree void_ftype_ulong
6667 = build_function_type_list (void_type_node
,
6668 long_unsigned_type_node
,
6671 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void
, AVR_BUILTIN_NOP
);
6672 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void
, AVR_BUILTIN_SEI
);
6673 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void
, AVR_BUILTIN_CLI
);
6674 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void
, AVR_BUILTIN_WDR
);
6675 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void
, AVR_BUILTIN_SLEEP
);
6676 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar
, AVR_BUILTIN_SWAP
);
6677 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong
,
6678 AVR_BUILTIN_DELAY_CYCLES
);
6682 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6683 in libgcc. For fmul and fmuls this is straight forward with
6684 upcoming fixed point support. */
6686 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar
,
6688 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char
,
6690 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar
,
6691 AVR_BUILTIN_FMULSU
);
/* Descriptor tying an insn pattern to a builtin name and id; used by the
   generic unop/binop expanders below.  NOTE(review): the table variable
   names (presumably bdesc_1arg / bdesc_2arg, matching the loops in
   avr_expand_builtin) and the array brackets are elided from this
   fragmented excerpt -- confirm against upstream.  */
6697 struct avr_builtin_description
6699 const enum insn_code icode
;
6700 const char *const name
;
6701 const enum avr_builtin_id id
;
/* One-operand builtins: swap maps to the rotate-by-4 pattern.  */
6704 static const struct avr_builtin_description
6707 { CODE_FOR_rotlqi3_4
, "__builtin_avr_swap", AVR_BUILTIN_SWAP
}
/* Two-operand builtins: the fractional multiplies.  */
6710 static const struct avr_builtin_description
6713 { CODE_FOR_fmul
, "__builtin_avr_fmul", AVR_BUILTIN_FMUL
},
6714 { CODE_FOR_fmuls
, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS
},
6715 { CODE_FOR_fmulsu
, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU
}
/* NOTE(review): fragmented excerpt -- the target parameter in the
   signature, the `if (! target` condition head, the pat-NULL check and
   the final `return target;` lines are elided.  Restore from upstream
   before editing.  Code left byte-identical.  */
6718 /* Subroutine of avr_expand_builtin to take care of unop insns. */
6721 avr_expand_unop_builtin (enum insn_code icode
, tree exp
,
6725 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6726 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6727 enum machine_mode op0mode
= GET_MODE (op0
);
6728 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6729 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
/* (Re)allocate TARGET when it is absent or fails the predicate.  */
6732 || GET_MODE (target
) != tmode
6733 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6735 target
= gen_reg_rtx (tmode
);
/* Narrow an SImode operand to the HImode the pattern wants.  */
6738 if (op0mode
== SImode
&& mode0
== HImode
)
6741 op0
= gen_lowpart (HImode
, op0
);
6744 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
6746 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6747 op0
= copy_to_mode_reg (mode0
, op0
);
6749 pat
= GEN_FCN (icode
) (target
, op0
);
/* NOTE(review): fragmented excerpt -- the `if (! target` condition head,
   the pat-NULL check, `emit_insn (pat);` and the final `return target;`
   lines are elided (original line numbers jump).  Restore from upstream
   before editing.  Code left byte-identical.  */
6759 /* Subroutine of avr_expand_builtin to take care of binop insns. */
6762 avr_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
)
6765 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6766 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6767 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6768 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6769 enum machine_mode op0mode
= GET_MODE (op0
);
6770 enum machine_mode op1mode
= GET_MODE (op1
);
6771 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6772 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6773 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
/* (Re)allocate TARGET when it is absent or fails the predicate.  */
6776 || GET_MODE (target
) != tmode
6777 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6779 target
= gen_reg_rtx (tmode
);
/* Narrow SImode/VOIDmode operands to the HImode the pattern wants.  */
6782 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
6785 op0
= gen_lowpart (HImode
, op0
);
6788 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
6791 op1
= gen_lowpart (HImode
, op1
);
6794 /* In case the insn wants input operands in modes different from
6795 the result, abort. */
6797 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
6798 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
6800 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6801 op0
= copy_to_mode_reg (mode0
, op0
);
6803 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6804 op1
= copy_to_mode_reg (mode1
, op1
);
6806 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
6816 /* Expand an expression EXP that calls a built-in function,
6817 with result going to TARGET if that's convenient
6818 (and in mode MODE if that's convenient).
6819 SUBTARGET may be used as the target for computing one of EXP's operands.
6820 IGNORE is nonzero if the value is to be ignored. */
6823 avr_expand_builtin (tree exp
, rtx target
,
6824 rtx subtarget ATTRIBUTE_UNUSED
,
6825 enum machine_mode mode ATTRIBUTE_UNUSED
,
6826 int ignore ATTRIBUTE_UNUSED
)
6829 const struct avr_builtin_description
*d
;
6830 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6831 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
6837 case AVR_BUILTIN_NOP
:
6838 emit_insn (gen_nopv (GEN_INT(1)));
6841 case AVR_BUILTIN_SEI
:
6842 emit_insn (gen_enable_interrupt ());
6845 case AVR_BUILTIN_CLI
:
6846 emit_insn (gen_disable_interrupt ());
6849 case AVR_BUILTIN_WDR
:
6850 emit_insn (gen_wdr ());
6853 case AVR_BUILTIN_SLEEP
:
6854 emit_insn (gen_sleep ());
6857 case AVR_BUILTIN_DELAY_CYCLES
:
6859 arg0
= CALL_EXPR_ARG (exp
, 0);
6860 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6862 if (! CONST_INT_P (op0
))
6863 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6865 avr_expand_delay_cycles (op0
);
6870 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6872 return avr_expand_unop_builtin (d
->icode
, exp
, target
);
6874 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
6876 return avr_expand_binop_builtin (d
->icode
, exp
, target
);