1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
51 static int avr_naked_function_p (tree
);
52 static int interrupt_function_p (tree
);
53 static int signal_function_p (tree
);
54 static int avr_OS_task_function_p (tree
);
55 static int avr_regs_to_save (HARD_REG_SET
*);
56 static int sequent_regs_live (void);
57 static const char *ptrreg_to_str (int);
58 static const char *cond_string (enum rtx_code
);
59 static int avr_num_arg_regs (enum machine_mode
, tree
);
61 static RTX_CODE
compare_condition (rtx insn
);
62 static int compare_sign_p (rtx insn
);
63 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
64 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
65 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
66 const struct attribute_spec avr_attribute_table
[];
67 static bool avr_assemble_integer (rtx
, unsigned int, int);
68 static void avr_file_start (void);
69 static void avr_file_end (void);
70 static void avr_asm_function_end_prologue (FILE *);
71 static void avr_asm_function_begin_epilogue (FILE *);
72 static void avr_insert_attributes (tree
, tree
*);
73 static void avr_asm_init_sections (void);
74 static unsigned int avr_section_type_flags (tree
, const char *, int);
76 static void avr_reorg (void);
77 static void avr_asm_out_ctor (rtx
, int);
78 static void avr_asm_out_dtor (rtx
, int);
79 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
);
80 static bool avr_rtx_costs (rtx
, int, int, int *);
81 static int avr_address_cost (rtx
);
82 static bool avr_return_in_memory (const_tree
, const_tree
);
83 static struct machine_function
* avr_init_machine_status (void);
84 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
87 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
88 static GTY(()) rtx tmp_reg_rtx
;
90 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
91 static GTY(()) rtx zero_reg_rtx
;
93 /* AVR register names {"r0", "r1", ..., "r31"} */
94 static const char *const avr_regnames
[] = REGISTER_NAMES
;
96 /* This holds the last insn address. */
97 static int last_insn_address
= 0;
99 /* Preprocessor macros to define depending on MCU type. */
100 const char *avr_base_arch_macro
;
101 const char *avr_extra_arch_macro
;
103 /* Current architecture. */
104 const struct base_arch_s
*avr_current_arch
;
106 section
*progmem_section
;
108 /* More than 8K of program memory: use "call" and "jmp". */
111 /* Core have 'MUL*' instructions. */
112 int avr_have_mul_p
= 0;
114 /* Assembler only. */
115 int avr_asm_only_p
= 0;
117 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
118 int avr_have_movw_lpmx_p
= 0;
120 static const struct base_arch_s avr_arch_types
[] = {
121 { 1, 0, 0, 0, 0, 0, 0, 0, NULL
}, /* unknown device specified */
122 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
123 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
124 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
125 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
126 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
127 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
128 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
129 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
130 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
131 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
134 /* These names are used as the index into the avr_arch_types[] table
/* Description of one known MCU: its command-line name, the index of its
   architecture in avr_arch_types[], and the device CPP macro to define.  */
struct mcu_type_s {
  const char *const name;
  int arch;  /* index in avr_arch_types[] */
  /* Must lie outside user's namespace.  NULL == no macro.  */
  const char *const macro;
};
159 /* List of all known AVR MCU types - if updated, it has to be kept
160 in sync in several places (FIXME: is there a better way?):
162 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
163 - t-avr (MULTILIB_MATCHES)
164 - gas/config/tc-avr.c
167 static const struct mcu_type_s avr_mcu_types
[] = {
168 /* Classic, <= 8K. */
169 { "avr2", ARCH_AVR2
, NULL
},
170 { "at90s2313", ARCH_AVR2
, "__AVR_AT90S2313__" },
171 { "at90s2323", ARCH_AVR2
, "__AVR_AT90S2323__" },
172 { "at90s2333", ARCH_AVR2
, "__AVR_AT90S2333__" },
173 { "at90s2343", ARCH_AVR2
, "__AVR_AT90S2343__" },
174 { "attiny22", ARCH_AVR2
, "__AVR_ATtiny22__" },
175 { "attiny26", ARCH_AVR2
, "__AVR_ATtiny26__" },
176 { "at90s4414", ARCH_AVR2
, "__AVR_AT90S4414__" },
177 { "at90s4433", ARCH_AVR2
, "__AVR_AT90S4433__" },
178 { "at90s4434", ARCH_AVR2
, "__AVR_AT90S4434__" },
179 { "at90s8515", ARCH_AVR2
, "__AVR_AT90S8515__" },
180 { "at90c8534", ARCH_AVR2
, "__AVR_AT90C8534__" },
181 { "at90s8535", ARCH_AVR2
, "__AVR_AT90S8535__" },
182 /* Classic + MOVW, <= 8K. */
183 { "avr25", ARCH_AVR25
, NULL
},
184 { "attiny13", ARCH_AVR25
, "__AVR_ATtiny13__" },
185 { "attiny2313", ARCH_AVR25
, "__AVR_ATtiny2313__" },
186 { "attiny24", ARCH_AVR25
, "__AVR_ATtiny24__" },
187 { "attiny44", ARCH_AVR25
, "__AVR_ATtiny44__" },
188 { "attiny84", ARCH_AVR25
, "__AVR_ATtiny84__" },
189 { "attiny25", ARCH_AVR25
, "__AVR_ATtiny25__" },
190 { "attiny45", ARCH_AVR25
, "__AVR_ATtiny45__" },
191 { "attiny85", ARCH_AVR25
, "__AVR_ATtiny85__" },
192 { "attiny261", ARCH_AVR25
, "__AVR_ATtiny261__" },
193 { "attiny461", ARCH_AVR25
, "__AVR_ATtiny461__" },
194 { "attiny861", ARCH_AVR25
, "__AVR_ATtiny861__" },
195 { "attiny43u", ARCH_AVR25
, "__AVR_ATtiny43U__" },
196 { "attiny48", ARCH_AVR25
, "__AVR_ATtiny48__" },
197 { "attiny88", ARCH_AVR25
, "__AVR_ATtiny88__" },
198 { "at86rf401", ARCH_AVR25
, "__AVR_AT86RF401__" },
199 /* Classic, > 8K, <= 64K. */
200 { "avr3", ARCH_AVR3
, NULL
},
201 { "at43usb320", ARCH_AVR3
, "__AVR_AT43USB320__" },
202 { "at43usb355", ARCH_AVR3
, "__AVR_AT43USB355__" },
203 { "at76c711", ARCH_AVR3
, "__AVR_AT76C711__" },
204 /* Classic, == 128K. */
205 { "avr31", ARCH_AVR31
, NULL
},
206 { "atmega103", ARCH_AVR3
, "__AVR_ATmega103__" },
207 /* Classic + MOVW + JMP/CALL. */
208 { "avr35", ARCH_AVR35
, NULL
},
209 { "at90usb82", ARCH_AVR35
, "__AVR_AT90USB82__" },
210 { "at90usb162", ARCH_AVR35
, "__AVR_AT90USB162__" },
211 /* Enhanced, <= 8K. */
212 { "avr4", ARCH_AVR4
, NULL
},
213 { "atmega8", ARCH_AVR4
, "__AVR_ATmega8__" },
214 { "atmega48", ARCH_AVR4
, "__AVR_ATmega48__" },
215 { "atmega48p", ARCH_AVR4
, "__AVR_ATmega48P__" },
216 { "atmega88", ARCH_AVR4
, "__AVR_ATmega88__" },
217 { "atmega88p", ARCH_AVR4
, "__AVR_ATmega88P__" },
218 { "atmega8515", ARCH_AVR4
, "__AVR_ATmega8515__" },
219 { "atmega8535", ARCH_AVR4
, "__AVR_ATmega8535__" },
220 { "atmega8hva", ARCH_AVR4
, "__AVR_ATmega8HVA__" },
221 { "at90pwm1", ARCH_AVR4
, "__AVR_AT90PWM1__" },
222 { "at90pwm2", ARCH_AVR4
, "__AVR_AT90PWM2__" },
223 { "at90pwm2b", ARCH_AVR4
, "__AVR_AT90PWM2B__" },
224 { "at90pwm3", ARCH_AVR4
, "__AVR_AT90PWM3__" },
225 { "at90pwm3b", ARCH_AVR4
, "__AVR_AT90PWM3B__" },
226 /* Enhanced, > 8K, <= 64K. */
227 { "avr5", ARCH_AVR5
, NULL
},
228 { "atmega16", ARCH_AVR5
, "__AVR_ATmega16__" },
229 { "atmega161", ARCH_AVR5
, "__AVR_ATmega161__" },
230 { "atmega162", ARCH_AVR5
, "__AVR_ATmega162__" },
231 { "atmega163", ARCH_AVR5
, "__AVR_ATmega163__" },
232 { "atmega164p", ARCH_AVR5
, "__AVR_ATmega164P__" },
233 { "atmega165", ARCH_AVR5
, "__AVR_ATmega165__" },
234 { "atmega165p", ARCH_AVR5
, "__AVR_ATmega165P__" },
235 { "atmega168", ARCH_AVR5
, "__AVR_ATmega168__" },
236 { "atmega168p", ARCH_AVR5
, "__AVR_ATmega168P__" },
237 { "atmega169", ARCH_AVR5
, "__AVR_ATmega169__" },
238 { "atmega169p", ARCH_AVR5
, "__AVR_ATmega169P__" },
239 { "atmega32", ARCH_AVR5
, "__AVR_ATmega32__" },
240 { "atmega323", ARCH_AVR5
, "__AVR_ATmega323__" },
241 { "atmega324p", ARCH_AVR5
, "__AVR_ATmega324P__" },
242 { "atmega325", ARCH_AVR5
, "__AVR_ATmega325__" },
243 { "atmega325p", ARCH_AVR5
, "__AVR_ATmega325P__" },
244 { "atmega3250", ARCH_AVR5
, "__AVR_ATmega3250__" },
245 { "atmega3250p", ARCH_AVR5
, "__AVR_ATmega3250P__" },
246 { "atmega328p", ARCH_AVR5
, "__AVR_ATmega328P__" },
247 { "atmega329", ARCH_AVR5
, "__AVR_ATmega329__" },
248 { "atmega329p", ARCH_AVR5
, "__AVR_ATmega329P__" },
249 { "atmega3290", ARCH_AVR5
, "__AVR_ATmega3290__" },
250 { "atmega3290p", ARCH_AVR5
, "__AVR_ATmega3290P__" },
251 { "atmega32hvb", ARCH_AVR5
, "__AVR_ATmega32HVB__" },
252 { "atmega406", ARCH_AVR5
, "__AVR_ATmega406__" },
253 { "atmega64", ARCH_AVR5
, "__AVR_ATmega64__" },
254 { "atmega640", ARCH_AVR5
, "__AVR_ATmega640__" },
255 { "atmega644", ARCH_AVR5
, "__AVR_ATmega644__" },
256 { "atmega644p", ARCH_AVR5
, "__AVR_ATmega644P__" },
257 { "atmega645", ARCH_AVR5
, "__AVR_ATmega645__" },
258 { "atmega6450", ARCH_AVR5
, "__AVR_ATmega6450__" },
259 { "atmega649", ARCH_AVR5
, "__AVR_ATmega649__" },
260 { "atmega6490", ARCH_AVR5
, "__AVR_ATmega6490__" },
261 { "atmega16hva", ARCH_AVR5
, "__AVR_ATmega16HVA__" },
262 { "at90can32", ARCH_AVR5
, "__AVR_AT90CAN32__" },
263 { "at90can64", ARCH_AVR5
, "__AVR_AT90CAN64__" },
264 { "at90pwm216", ARCH_AVR5
, "__AVR_AT90PWM216__" },
265 { "at90pwm316", ARCH_AVR5
, "__AVR_AT90PWM316__" },
266 { "at90usb646", ARCH_AVR5
, "__AVR_AT90USB646__" },
267 { "at90usb647", ARCH_AVR5
, "__AVR_AT90USB647__" },
268 { "at94k", ARCH_AVR5
, "__AVR_AT94K__" },
269 /* Enhanced, == 128K. */
270 { "avr51", ARCH_AVR51
, NULL
},
271 { "atmega128", ARCH_AVR51
, "__AVR_ATmega128__" },
272 { "atmega1280", ARCH_AVR51
, "__AVR_ATmega1280__" },
273 { "atmega1281", ARCH_AVR51
, "__AVR_ATmega1281__" },
274 { "atmega1284p", ARCH_AVR51
, "__AVR_ATmega1284P__" },
275 { "at90can128", ARCH_AVR51
, "__AVR_AT90CAN128__" },
276 { "at90usb1286", ARCH_AVR51
, "__AVR_AT90USB1286__" },
277 { "at90usb1287", ARCH_AVR51
, "__AVR_AT90USB1287__" },
279 { "avr6", ARCH_AVR6
, NULL
},
280 { "atmega2560", ARCH_AVR6
, "__AVR_ATmega2560__" },
281 { "atmega2561", ARCH_AVR6
, "__AVR_ATmega2561__" },
282 /* Assembler only. */
283 { "avr1", ARCH_AVR1
, NULL
},
284 { "at90s1200", ARCH_AVR1
, "__AVR_AT90S1200__" },
285 { "attiny11", ARCH_AVR1
, "__AVR_ATtiny11__" },
286 { "attiny12", ARCH_AVR1
, "__AVR_ATtiny12__" },
287 { "attiny15", ARCH_AVR1
, "__AVR_ATtiny15__" },
288 { "attiny28", ARCH_AVR1
, "__AVR_ATtiny28__" },
289 { NULL
, ARCH_UNKNOWN
, NULL
}
292 int avr_case_values_threshold
= 30000;
294 /* Initialize the GCC target structure. */
295 #undef TARGET_ASM_ALIGNED_HI_OP
296 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
297 #undef TARGET_ASM_ALIGNED_SI_OP
298 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
299 #undef TARGET_ASM_UNALIGNED_HI_OP
300 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
301 #undef TARGET_ASM_UNALIGNED_SI_OP
302 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
303 #undef TARGET_ASM_INTEGER
304 #define TARGET_ASM_INTEGER avr_assemble_integer
305 #undef TARGET_ASM_FILE_START
306 #define TARGET_ASM_FILE_START avr_file_start
307 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
308 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
309 #undef TARGET_ASM_FILE_END
310 #define TARGET_ASM_FILE_END avr_file_end
312 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
313 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
314 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
315 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
316 #undef TARGET_ATTRIBUTE_TABLE
317 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
318 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
319 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
320 #undef TARGET_INSERT_ATTRIBUTES
321 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
322 #undef TARGET_SECTION_TYPE_FLAGS
323 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
324 #undef TARGET_RTX_COSTS
325 #define TARGET_RTX_COSTS avr_rtx_costs
326 #undef TARGET_ADDRESS_COST
327 #define TARGET_ADDRESS_COST avr_address_cost
328 #undef TARGET_MACHINE_DEPENDENT_REORG
329 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
331 #undef TARGET_RETURN_IN_MEMORY
332 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
334 #undef TARGET_STRICT_ARGUMENT_NAMING
335 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
337 struct gcc_target targetm
= TARGET_INITIALIZER
;
340 avr_override_options (void)
342 const struct mcu_type_s
*t
;
343 const struct base_arch_s
*base
;
345 flag_delete_null_pointer_checks
= 0;
347 for (t
= avr_mcu_types
; t
->name
; t
++)
348 if (strcmp (t
->name
, avr_mcu_name
) == 0)
353 fprintf (stderr
, "unknown MCU '%s' specified\nKnown MCU names:\n",
355 for (t
= avr_mcu_types
; t
->name
; t
++)
356 fprintf (stderr
," %s\n", t
->name
);
359 avr_current_arch
= &avr_arch_types
[t
->arch
];
360 base
= &avr_arch_types
[t
->arch
];
361 avr_asm_only_p
= base
->asm_only
;
362 avr_have_mul_p
= base
->have_mul
;
363 avr_mega_p
= base
->have_jmp_call
;
364 avr_have_movw_lpmx_p
= base
->have_movw_lpmx
;
365 avr_base_arch_macro
= base
->macro
;
366 avr_extra_arch_macro
= t
->macro
;
368 if (optimize
&& !TARGET_NO_TABLEJUMP
)
369 avr_case_values_threshold
= (!AVR_MEGA
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
371 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
372 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
374 init_machine_status
= avr_init_machine_status
;
377 /* return register class from register number. */
379 static const int reg_class_tab
[]={
380 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
381 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
382 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
383 GENERAL_REGS
, /* r0 - r15 */
384 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
385 LD_REGS
, /* r16 - 23 */
386 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
387 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
388 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
389 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
390 STACK_REG
,STACK_REG
/* SPL,SPH */
393 /* Function to set up the backend function structure. */
395 static struct machine_function
*
396 avr_init_machine_status (void)
398 return ((struct machine_function
*)
399 ggc_alloc_cleared (sizeof (struct machine_function
)));
402 /* Return register class for register R. */
405 avr_regno_reg_class (int r
)
408 return reg_class_tab
[r
];
412 /* Return nonzero if FUNC is a naked function. */
415 avr_naked_function_p (tree func
)
419 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
421 a
= lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
422 return a
!= NULL_TREE
;
425 /* Return nonzero if FUNC is an interrupt function as specified
426 by the "interrupt" attribute. */
429 interrupt_function_p (tree func
)
433 if (TREE_CODE (func
) != FUNCTION_DECL
)
436 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
437 return a
!= NULL_TREE
;
440 /* Return nonzero if FUNC is a signal function as specified
441 by the "signal" attribute. */
444 signal_function_p (tree func
)
448 if (TREE_CODE (func
) != FUNCTION_DECL
)
451 a
= lookup_attribute ("signal", DECL_ATTRIBUTES (func
));
452 return a
!= NULL_TREE
;
455 /* Return nonzero if FUNC is a OS_task function. */
458 avr_OS_task_function_p (tree func
)
462 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
464 a
= lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
465 return a
!= NULL_TREE
;
468 /* Return the number of hard registers to push/pop in the prologue/epilogue
469 of the current function, and optionally store these registers in SET. */
472 avr_regs_to_save (HARD_REG_SET
*set
)
475 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
476 || signal_function_p (current_function_decl
));
477 int leaf_func_p
= leaf_function_p ();
480 CLEAR_HARD_REG_SET (*set
);
483 /* No need to save any registers if the function never returns or
484 has the "OS_task" attribute. */
485 if (TREE_THIS_VOLATILE (current_function_decl
)
486 || cfun
->machine
->is_OS_task
)
489 for (reg
= 0; reg
< 32; reg
++)
491 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
492 any global register variables. */
496 if ((int_or_sig_p
&& !leaf_func_p
&& call_used_regs
[reg
])
497 || (df_regs_ever_live_p (reg
)
498 && (int_or_sig_p
|| !call_used_regs
[reg
])
499 && !(frame_pointer_needed
500 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
503 SET_HARD_REG_BIT (*set
, reg
);
510 /* Compute offset between arg_pointer and frame_pointer. */
513 initial_elimination_offset (int from
, int to
)
515 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
519 int offset
= frame_pointer_needed
? 2 : 0;
520 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
522 offset
+= avr_regs_to_save (NULL
);
523 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
527 /* Return 1 if the function epilogue is just a single "ret". */
530 avr_simple_epilogue (void)
532 return (! frame_pointer_needed
533 && get_frame_size () == 0
534 && avr_regs_to_save (NULL
) == 0
535 && ! interrupt_function_p (current_function_decl
)
536 && ! signal_function_p (current_function_decl
)
537 && ! avr_naked_function_p (current_function_decl
)
538 && ! TREE_THIS_VOLATILE (current_function_decl
));
541 /* This function checks sequence of live registers. */
544 sequent_regs_live (void)
550 for (reg
= 0; reg
< 18; ++reg
)
552 if (!call_used_regs
[reg
])
554 if (df_regs_ever_live_p (reg
))
564 if (!frame_pointer_needed
)
566 if (df_regs_ever_live_p (REG_Y
))
574 if (df_regs_ever_live_p (REG_Y
+1))
587 return (cur_seq
== live_seq
) ? live_seq
: 0;
590 /* Output function prologue. */
593 expand_prologue (void)
598 HOST_WIDE_INT size
= get_frame_size();
599 /* Define templates for push instructions. */
600 rtx pushbyte
= gen_rtx_MEM (QImode
,
601 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
602 rtx pushword
= gen_rtx_MEM (HImode
,
603 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
606 last_insn_address
= 0;
608 /* Init cfun->machine. */
609 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
610 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
611 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
612 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
614 /* Prologue: naked. */
615 if (cfun
->machine
->is_naked
)
620 avr_regs_to_save (&set
);
621 live_seq
= sequent_regs_live ();
622 minimize
= (TARGET_CALL_PROLOGUES
623 && !cfun
->machine
->is_interrupt
624 && !cfun
->machine
->is_signal
625 && !cfun
->machine
->is_OS_task
628 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
630 if (cfun
->machine
->is_interrupt
)
632 /* Enable interrupts. */
633 insn
= emit_insn (gen_enable_interrupt ());
634 RTX_FRAME_RELATED_P (insn
) = 1;
638 insn
= emit_move_insn (pushbyte
, zero_reg_rtx
);
639 RTX_FRAME_RELATED_P (insn
) = 1;
642 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
643 RTX_FRAME_RELATED_P (insn
) = 1;
646 insn
= emit_move_insn (tmp_reg_rtx
,
647 gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
648 RTX_FRAME_RELATED_P (insn
) = 1;
649 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
650 RTX_FRAME_RELATED_P (insn
) = 1;
654 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
656 insn
= emit_move_insn (tmp_reg_rtx
,
657 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
658 RTX_FRAME_RELATED_P (insn
) = 1;
659 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
660 RTX_FRAME_RELATED_P (insn
) = 1;
663 /* Clear zero reg. */
664 insn
= emit_move_insn (zero_reg_rtx
, const0_rtx
);
665 RTX_FRAME_RELATED_P (insn
) = 1;
667 /* Prevent any attempt to delete the setting of ZERO_REG! */
668 emit_insn (gen_rtx_USE (VOIDmode
, zero_reg_rtx
));
670 if (minimize
&& (frame_pointer_needed
|| live_seq
> 6))
672 insn
= emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
673 gen_int_mode (size
, HImode
));
674 RTX_FRAME_RELATED_P (insn
) = 1;
677 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
678 gen_int_mode (size
+ live_seq
, HImode
)));
679 RTX_FRAME_RELATED_P (insn
) = 1;
684 for (reg
= 0; reg
< 32; ++reg
)
686 if (TEST_HARD_REG_BIT (set
, reg
))
688 /* Emit push of register to save. */
689 insn
=emit_move_insn (pushbyte
, gen_rtx_REG (QImode
, reg
));
690 RTX_FRAME_RELATED_P (insn
) = 1;
693 if (frame_pointer_needed
)
695 if(!cfun
->machine
->is_OS_task
)
697 /* Push frame pointer. */
698 insn
= emit_move_insn (pushword
, frame_pointer_rtx
);
699 RTX_FRAME_RELATED_P (insn
) = 1;
704 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
705 RTX_FRAME_RELATED_P (insn
) = 1;
709 /* Creating a frame can be done by direct manipulation of the
710 stack or via the frame pointer. These two methods are:
717 the optimum method depends on function type, stack and frame size.
718 To avoid a complex logic, both methods are tested and shortest
722 if (TARGET_TINY_STACK
)
724 if (size
< -63 || size
> 63)
725 warning (0, "large frame pointer change (%d) with -mtiny-stack", size
);
727 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
728 over 'sbiw' (2 cycles, same size). */
729 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
733 /* Normal sized addition. */
734 myfp
= frame_pointer_rtx
;
736 /* Calculate length. */
739 get_attr_length (gen_move_insn (frame_pointer_rtx
, stack_pointer_rtx
));
741 get_attr_length (gen_move_insn (myfp
,
742 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
746 get_attr_length (gen_move_insn (stack_pointer_rtx
, frame_pointer_rtx
));
748 /* Method 2-Adjust Stack pointer. */
749 int sp_plus_length
= 0;
753 get_attr_length (gen_move_insn (stack_pointer_rtx
,
754 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
758 get_attr_length (gen_move_insn (frame_pointer_rtx
, stack_pointer_rtx
));
760 /* Use shortest method. */
761 if (size
<= 6 && (sp_plus_length
< method1_length
))
763 insn
= emit_move_insn (stack_pointer_rtx
,
764 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
765 gen_int_mode (-size
, HImode
)));
766 RTX_FRAME_RELATED_P (insn
) = 1;
767 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
768 RTX_FRAME_RELATED_P (insn
) = 1;
772 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
773 RTX_FRAME_RELATED_P (insn
) = 1;
774 insn
= emit_move_insn (myfp
,
775 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
776 gen_int_mode (-size
, GET_MODE(myfp
))));
777 RTX_FRAME_RELATED_P (insn
) = 1;
778 insn
= emit_move_insn ( stack_pointer_rtx
, frame_pointer_rtx
);
779 RTX_FRAME_RELATED_P (insn
) = 1;
786 /* Output summary at end of function prologue. */
789 avr_asm_function_end_prologue (FILE *file
)
791 if (cfun
->machine
->is_naked
)
793 fputs ("/* prologue: naked */\n", file
);
797 if (cfun
->machine
->is_interrupt
)
799 fputs ("/* prologue: Interrupt */\n", file
);
801 else if (cfun
->machine
->is_signal
)
803 fputs ("/* prologue: Signal */\n", file
);
806 fputs ("/* prologue: function */\n", file
);
808 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
813 /* Implement EPILOGUE_USES. */
816 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
820 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
825 /* Output RTL epilogue. */
828 expand_epilogue (void)
834 HOST_WIDE_INT size
= get_frame_size();
836 /* epilogue: naked */
837 if (cfun
->machine
->is_naked
)
839 emit_jump_insn (gen_return ());
843 avr_regs_to_save (&set
);
844 live_seq
= sequent_regs_live ();
845 minimize
= (TARGET_CALL_PROLOGUES
846 && !cfun
->machine
->is_interrupt
847 && !cfun
->machine
->is_signal
848 && !cfun
->machine
->is_OS_task
851 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
853 if (frame_pointer_needed
)
855 /* Get rid of frame. */
856 emit_move_insn(frame_pointer_rtx
,
857 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
858 gen_int_mode (size
, HImode
)));
862 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
865 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
869 if (frame_pointer_needed
)
873 /* Try two methods to adjust stack and select shortest. */
875 /* Method 1-Adjust frame pointer. */
877 get_attr_length (gen_move_insn (frame_pointer_rtx
,
878 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
881 /* Copy to stack pointer. */
883 get_attr_length (gen_move_insn (stack_pointer_rtx
, frame_pointer_rtx
));
885 /* Method 2-Adjust Stack pointer. */
886 int sp_plus_length
= 0;
890 get_attr_length (gen_move_insn (stack_pointer_rtx
,
891 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
895 /* Use shortest method. */
896 if (size
<= 5 && (sp_plus_length
< fp_plus_length
))
898 emit_move_insn (stack_pointer_rtx
,
899 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
900 gen_int_mode (size
, HImode
)));
904 emit_move_insn (frame_pointer_rtx
,
905 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
906 gen_int_mode (size
, HImode
)));
907 /* Copy to stack pointer. */
908 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
911 if(!cfun
->machine
->is_OS_task
)
913 /* Restore previous frame_pointer. */
914 emit_insn (gen_pophi (frame_pointer_rtx
));
917 /* Restore used registers. */
918 for (reg
= 31; reg
>= 0; --reg
)
920 if (TEST_HARD_REG_BIT (set
, reg
))
921 emit_insn (gen_popqi (gen_rtx_REG (QImode
, reg
)));
923 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
925 /* Restore RAMPZ using tmp reg as scratch. */
927 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
929 emit_insn (gen_popqi (tmp_reg_rtx
));
930 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(RAMPZ_ADDR
)),
934 /* Restore SREG using tmp reg as scratch. */
935 emit_insn (gen_popqi (tmp_reg_rtx
));
937 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(SREG_ADDR
)),
940 /* Restore tmp REG. */
941 emit_insn (gen_popqi (tmp_reg_rtx
));
943 /* Restore zero REG. */
944 emit_insn (gen_popqi (zero_reg_rtx
));
947 emit_jump_insn (gen_return ());
/* Emit a marker comment into the asm output at the beginning of the
   function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
959 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
960 machine for a memory operand of mode MODE. */
963 legitimate_address_p (enum machine_mode mode
, rtx x
, int strict
)
965 enum reg_class r
= NO_REGS
;
967 if (TARGET_ALL_DEBUG
)
969 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
971 strict
? "(strict)": "",
972 reload_completed
? "(reload_completed)": "",
973 reload_in_progress
? "(reload_in_progress)": "",
974 reg_renumber
? "(reg_renumber)" : "");
975 if (GET_CODE (x
) == PLUS
976 && REG_P (XEXP (x
, 0))
977 && GET_CODE (XEXP (x
, 1)) == CONST_INT
978 && INTVAL (XEXP (x
, 1)) >= 0
979 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
982 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
983 true_regnum (XEXP (x
, 0)));
986 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
987 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
989 else if (CONSTANT_ADDRESS_P (x
))
991 else if (GET_CODE (x
) == PLUS
992 && REG_P (XEXP (x
, 0))
993 && GET_CODE (XEXP (x
, 1)) == CONST_INT
994 && INTVAL (XEXP (x
, 1)) >= 0)
996 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1000 || REGNO (XEXP (x
,0)) == REG_Y
1001 || REGNO (XEXP (x
,0)) == REG_Z
)
1002 r
= BASE_POINTER_REGS
;
1003 if (XEXP (x
,0) == frame_pointer_rtx
1004 || XEXP (x
,0) == arg_pointer_rtx
)
1005 r
= BASE_POINTER_REGS
;
1007 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
1010 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1011 && REG_P (XEXP (x
, 0))
1012 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
1013 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1017 if (TARGET_ALL_DEBUG
)
1019 fprintf (stderr
, " ret = %c\n", r
+ '0');
1021 return r
== NO_REGS
? 0 : (int)r
;
1024 /* Attempts to replace X with a valid
1025 memory address for an operand of mode MODE */
1028 legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1031 if (TARGET_ALL_DEBUG
)
1033 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1037 if (GET_CODE (oldx
) == PLUS
1038 && REG_P (XEXP (oldx
,0)))
1040 if (REG_P (XEXP (oldx
,1)))
1041 x
= force_reg (GET_MODE (oldx
), oldx
);
1042 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
1044 int offs
= INTVAL (XEXP (oldx
,1));
1045 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1046 if (offs
> MAX_LD_OFFSET (mode
))
1048 if (TARGET_ALL_DEBUG
)
1049 fprintf (stderr
, "force_reg (big offset)\n");
1050 x
= force_reg (GET_MODE (oldx
), oldx
);
1058 /* Return a pointer register name as a string. */
1061 ptrreg_to_str (int regno
)
1065 case REG_X
: return "X";
1066 case REG_Y
: return "Y";
1067 case REG_Z
: return "Z";
1069 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1074 /* Return the condition name as a string.
1075 Used in conditional jump constructing */
1078 cond_string (enum rtx_code code
)
1087 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1092 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1105 /* Output ADDR to FILE as address. */
1108 print_operand_address (FILE *file
, rtx addr
)
1110 switch (GET_CODE (addr
))
1113 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1117 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1121 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1125 if (CONSTANT_ADDRESS_P (addr
)
1126 && ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (addr
))
1127 || GET_CODE (addr
) == LABEL_REF
))
1129 fprintf (file
, "gs(");
1130 output_addr_const (file
,addr
);
1131 fprintf (file
,")");
1134 output_addr_const (file
, addr
);
1139 /* Output X as assembler operand to file FILE. */
1142 print_operand (FILE *file
, rtx x
, int code
)
1146 if (code
>= 'A' && code
<= 'D')
1154 else if (code
== '!')
1156 if (AVR_HAVE_EIJMP_EICALL
)
1161 if (x
== zero_reg_rtx
)
1162 fprintf (file
, "__zero_reg__");
1164 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1166 else if (GET_CODE (x
) == CONST_INT
)
1167 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1168 else if (GET_CODE (x
) == MEM
)
1170 rtx addr
= XEXP (x
,0);
1172 if (CONSTANT_P (addr
) && abcd
)
1175 output_address (addr
);
1176 fprintf (file
, ")+%d", abcd
);
1178 else if (code
== 'o')
1180 if (GET_CODE (addr
) != PLUS
)
1181 fatal_insn ("bad address, not (reg+disp):", addr
);
1183 print_operand (file
, XEXP (addr
, 1), 0);
1185 else if (code
== 'p' || code
== 'r')
1187 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1188 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1191 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1193 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1195 else if (GET_CODE (addr
) == PLUS
)
1197 print_operand_address (file
, XEXP (addr
,0));
1198 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1199 fatal_insn ("internal compiler error. Bad address:"
1202 print_operand (file
, XEXP (addr
,1), code
);
1205 print_operand_address (file
, addr
);
1207 else if (GET_CODE (x
) == CONST_DOUBLE
)
1211 if (GET_MODE (x
) != SFmode
)
1212 fatal_insn ("internal compiler error. Unknown mode:", x
);
1213 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1214 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1215 fprintf (file
, "0x%lx", val
);
1217 else if (code
== 'j')
1218 fputs (cond_string (GET_CODE (x
)), file
);
1219 else if (code
== 'k')
1220 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1222 print_operand_address (file
, x
);
1225 /* Update the condition code in the INSN. */
1228 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1232 switch (get_attr_cc (insn
))
1235 /* Insn does not affect CC at all. */
1243 set
= single_set (insn
);
1247 cc_status
.flags
|= CC_NO_OVERFLOW
;
1248 cc_status
.value1
= SET_DEST (set
);
1253 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1254 The V flag may or may not be known but that's ok because
1255 alter_cond will change tests to use EQ/NE. */
1256 set
= single_set (insn
);
1260 cc_status
.value1
= SET_DEST (set
);
1261 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1266 set
= single_set (insn
);
1269 cc_status
.value1
= SET_SRC (set
);
1273 /* Insn doesn't leave CC in a usable state. */
1276 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1277 set
= single_set (insn
);
1280 rtx src
= SET_SRC (set
);
1282 if (GET_CODE (src
) == ASHIFTRT
1283 && GET_MODE (src
) == QImode
)
1285 rtx x
= XEXP (src
, 1);
1287 if (GET_CODE (x
) == CONST_INT
1291 cc_status
.value1
= SET_DEST (set
);
1292 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1300 /* Return maximum number of consecutive registers of
1301 class CLASS needed to hold a value of mode MODE. */
1304 class_max_nregs (enum reg_class
class ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1306 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1309 /* Choose mode for jump insn:
1310 1 - relative jump in range -63 <= x <= 62 ;
1311 2 - relative jump in range -2046 <= x <= 2045 ;
1312 3 - absolute jump (only for ATmega[16]03). */
1315 avr_jump_mode (rtx x
, rtx insn
)
1317 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_MODE (x
) == LABEL_REF
1318 ? XEXP (x
, 0) : x
));
1319 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1320 int jump_distance
= cur_addr
- dest_addr
;
1322 if (-63 <= jump_distance
&& jump_distance
<= 62)
1324 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
1332 /* return an AVR condition jump commands.
1333 X is a comparison RTX.
1334 LEN is a number returned by avr_jump_mode function.
1335 if REVERSE nonzero then condition code in X must be reversed. */
1338 ret_cond_branch (rtx x
, int len
, int reverse
)
1340 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1345 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1346 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1348 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1349 AS1 (brmi
,.+2) CR_TAB
1351 (AS1 (breq
,.+6) CR_TAB
1352 AS1 (brmi
,.+4) CR_TAB
1356 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1358 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1359 AS1 (brlt
,.+2) CR_TAB
1361 (AS1 (breq
,.+6) CR_TAB
1362 AS1 (brlt
,.+4) CR_TAB
1365 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1367 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1368 AS1 (brlo
,.+2) CR_TAB
1370 (AS1 (breq
,.+6) CR_TAB
1371 AS1 (brlo
,.+4) CR_TAB
1374 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1375 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1377 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1378 AS1 (brpl
,.+2) CR_TAB
1380 (AS1 (breq
,.+2) CR_TAB
1381 AS1 (brpl
,.+4) CR_TAB
1384 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1386 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1387 AS1 (brge
,.+2) CR_TAB
1389 (AS1 (breq
,.+2) CR_TAB
1390 AS1 (brge
,.+4) CR_TAB
1393 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1395 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1396 AS1 (brsh
,.+2) CR_TAB
1398 (AS1 (breq
,.+2) CR_TAB
1399 AS1 (brsh
,.+4) CR_TAB
1407 return AS1 (br
%k1
,%0);
1409 return (AS1 (br
%j1
,.+2) CR_TAB
1412 return (AS1 (br
%j1
,.+4) CR_TAB
1421 return AS1 (br
%j1
,%0);
1423 return (AS1 (br
%k1
,.+2) CR_TAB
1426 return (AS1 (br
%k1
,.+4) CR_TAB
1434 /* Predicate function for immediate operand which fits to byte (8bit) */
1437 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1439 return (GET_CODE (op
) == CONST_INT
1440 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
1443 /* Output all insn addresses and their sizes into the assembly language
1444 output file. This is helpful for debugging whether the length attributes
1445 in the md file are correct.
1446 Output insn cost for next insn. */
1449 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
1450 int num_operands ATTRIBUTE_UNUSED
)
1452 int uid
= INSN_UID (insn
);
1454 if (TARGET_INSN_SIZE_DUMP
|| TARGET_ALL_DEBUG
)
1456 fprintf (asm_out_file
, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1457 INSN_ADDRESSES (uid
),
1458 INSN_ADDRESSES (uid
) - last_insn_address
,
1459 rtx_cost (PATTERN (insn
), INSN
));
1461 last_insn_address
= INSN_ADDRESSES (uid
);
1464 /* Return 0 if undefined, 1 if always true or always false. */
1467 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE
operator, rtx x
)
1469 unsigned int max
= (mode
== QImode
? 0xff :
1470 mode
== HImode
? 0xffff :
1471 mode
== SImode
? 0xffffffff : 0);
1472 if (max
&& operator && GET_CODE (x
) == CONST_INT
)
1474 if (unsigned_condition (operator) != operator)
1477 if (max
!= (INTVAL (x
) & max
)
1478 && INTVAL (x
) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
function_arg_regno_p (int r)
{
  /* Argument registers on AVR are r8 .. r25.  */
  return r >= 8 && r <= 25;
}
1494 /* Initializing the variable cum for the state at the beginning
1495 of the argument list. */
1498 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
1499 tree fndecl ATTRIBUTE_UNUSED
)
1502 cum
->regno
= FIRST_CUM_REG
;
1503 if (!libname
&& fntype
)
1505 int stdarg
= (TYPE_ARG_TYPES (fntype
) != 0
1506 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
1507 != void_type_node
));
1513 /* Returns the number of registers to allocate for a function argument. */
1516 avr_num_arg_regs (enum machine_mode mode
, tree type
)
1520 if (mode
== BLKmode
)
1521 size
= int_size_in_bytes (type
);
1523 size
= GET_MODE_SIZE (mode
);
1525 /* Align all function arguments to start in even-numbered registers.
1526 Odd-sized arguments leave holes above them. */
1528 return (size
+ 1) & ~1;
1531 /* Controls whether a function argument is passed
1532 in a register, and which register. */
1535 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1536 int named ATTRIBUTE_UNUSED
)
1538 int bytes
= avr_num_arg_regs (mode
, type
);
1540 if (cum
->nregs
&& bytes
<= cum
->nregs
)
1541 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
1546 /* Update the summarizer variable CUM to advance past an argument
1547 in the argument list. */
1550 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1551 int named ATTRIBUTE_UNUSED
)
1553 int bytes
= avr_num_arg_regs (mode
, type
);
1555 cum
->nregs
-= bytes
;
1556 cum
->regno
-= bytes
;
1558 if (cum
->nregs
<= 0)
1561 cum
->regno
= FIRST_CUM_REG
;
1565 /***********************************************************************
1566 Functions for outputting various mov's for a various modes
1567 ************************************************************************/
1569 output_movqi (rtx insn
, rtx operands
[], int *l
)
1572 rtx dest
= operands
[0];
1573 rtx src
= operands
[1];
1581 if (register_operand (dest
, QImode
))
1583 if (register_operand (src
, QImode
)) /* mov r,r */
1585 if (test_hard_reg_class (STACK_REG
, dest
))
1586 return AS2 (out
,%0,%1);
1587 else if (test_hard_reg_class (STACK_REG
, src
))
1588 return AS2 (in
,%0,%1);
1590 return AS2 (mov
,%0,%1);
1592 else if (CONSTANT_P (src
))
1594 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1595 return AS2 (ldi
,%0,lo8(%1));
1597 if (GET_CODE (src
) == CONST_INT
)
1599 if (src
== const0_rtx
) /* mov r,L */
1600 return AS1 (clr
,%0);
1601 else if (src
== const1_rtx
)
1604 return (AS1 (clr
,%0) CR_TAB
1607 else if (src
== constm1_rtx
)
1609 /* Immediate constants -1 to any register */
1611 return (AS1 (clr
,%0) CR_TAB
1616 int bit_nr
= exact_log2 (INTVAL (src
));
1622 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1625 avr_output_bld (operands
, bit_nr
);
1632 /* Last resort, larger than loading from memory. */
1634 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1635 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1636 AS2 (mov
,%0,r31
) CR_TAB
1637 AS2 (mov
,r31
,__tmp_reg__
));
1639 else if (GET_CODE (src
) == MEM
)
1640 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1642 else if (GET_CODE (dest
) == MEM
)
1644 const char *template;
1646 if (src
== const0_rtx
)
1647 operands
[1] = zero_reg_rtx
;
1649 template = out_movqi_mr_r (insn
, operands
, real_l
);
1652 output_asm_insn (template, operands
);
1661 output_movhi (rtx insn
, rtx operands
[], int *l
)
1664 rtx dest
= operands
[0];
1665 rtx src
= operands
[1];
1671 if (register_operand (dest
, HImode
))
1673 if (register_operand (src
, HImode
)) /* mov r,r */
1675 if (test_hard_reg_class (STACK_REG
, dest
))
1677 if (TARGET_TINY_STACK
)
1680 return AS2 (out
,__SP_L__
,%A1
);
1682 /* Use simple load of stack pointer if no interrupts are used
1683 or inside main or signal function prologue where they disabled. */
1684 else if (TARGET_NO_INTERRUPTS
1685 || (reload_completed
1686 && cfun
->machine
->is_signal
1687 && prologue_epilogue_contains (insn
)))
1690 return (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1691 AS2 (out
,__SP_L__
,%A1
));
1693 /* In interrupt prolog we know interrupts are enabled. */
1694 else if (reload_completed
1695 && cfun
->machine
->is_interrupt
1696 && prologue_epilogue_contains (insn
))
1699 return ("cli" CR_TAB
1700 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1702 AS2 (out
,__SP_L__
,%A1
));
1705 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
1707 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1708 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
1709 AS2 (out
,__SP_L__
,%A1
));
1711 else if (test_hard_reg_class (STACK_REG
, src
))
1714 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
1715 AS2 (in
,%B0
,__SP_H__
));
1721 return (AS2 (movw
,%0,%1));
1726 return (AS2 (mov
,%A0
,%A1
) CR_TAB
1730 else if (CONSTANT_P (src
))
1732 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1735 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
1736 AS2 (ldi
,%B0
,hi8(%1)));
1739 if (GET_CODE (src
) == CONST_INT
)
1741 if (src
== const0_rtx
) /* mov r,L */
1744 return (AS1 (clr
,%A0
) CR_TAB
1747 else if (src
== const1_rtx
)
1750 return (AS1 (clr
,%A0
) CR_TAB
1751 AS1 (clr
,%B0
) CR_TAB
1754 else if (src
== constm1_rtx
)
1756 /* Immediate constants -1 to any register */
1758 return (AS1 (clr
,%0) CR_TAB
1759 AS1 (dec
,%A0
) CR_TAB
1764 int bit_nr
= exact_log2 (INTVAL (src
));
1770 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
1771 AS1 (clr
,%B0
) CR_TAB
1774 avr_output_bld (operands
, bit_nr
);
1780 if ((INTVAL (src
) & 0xff) == 0)
1783 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1784 AS1 (clr
,%A0
) CR_TAB
1785 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1786 AS2 (mov
,%B0
,r31
) CR_TAB
1787 AS2 (mov
,r31
,__tmp_reg__
));
1789 else if ((INTVAL (src
) & 0xff00) == 0)
1792 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1793 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1794 AS2 (mov
,%A0
,r31
) CR_TAB
1795 AS1 (clr
,%B0
) CR_TAB
1796 AS2 (mov
,r31
,__tmp_reg__
));
1800 /* Last resort, equal to loading from memory. */
1802 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1803 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1804 AS2 (mov
,%A0
,r31
) CR_TAB
1805 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
1806 AS2 (mov
,%B0
,r31
) CR_TAB
1807 AS2 (mov
,r31
,__tmp_reg__
));
1809 else if (GET_CODE (src
) == MEM
)
1810 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1812 else if (GET_CODE (dest
) == MEM
)
1814 const char *template;
1816 if (src
== const0_rtx
)
1817 operands
[1] = zero_reg_rtx
;
1819 template = out_movhi_mr_r (insn
, operands
, real_l
);
1822 output_asm_insn (template, operands
);
1827 fatal_insn ("invalid insn:", insn
);
1832 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
1836 rtx x
= XEXP (src
, 0);
1842 if (CONSTANT_ADDRESS_P (x
))
1844 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
1847 return AS2 (in
,%0,__SREG__
);
1849 if (avr_io_address_p (x
, 1))
1852 return AS2 (in
,%0,%1-0x20);
1855 return AS2 (lds
,%0,%1);
1857 /* memory access by reg+disp */
1858 else if (GET_CODE (x
) == PLUS
1859 && REG_P (XEXP (x
,0))
1860 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
1862 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
1864 int disp
= INTVAL (XEXP (x
,1));
1865 if (REGNO (XEXP (x
,0)) != REG_Y
)
1866 fatal_insn ("incorrect insn:",insn
);
1868 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1869 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
1870 AS2 (ldd
,%0,Y
+63) CR_TAB
1871 AS2 (sbiw
,r28
,%o1
-63));
1873 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1874 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1875 AS2 (ld
,%0,Y
) CR_TAB
1876 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1877 AS2 (sbci
,r29
,hi8(%o1
)));
1879 else if (REGNO (XEXP (x
,0)) == REG_X
)
1881 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1882 it but I have this situation with extremal optimizing options. */
1883 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
1884 || reg_unused_after (insn
, XEXP (x
,0)))
1885 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
1888 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
1889 AS2 (ld
,%0,X
) CR_TAB
1890 AS2 (sbiw
,r26
,%o1
));
1893 return AS2 (ldd
,%0,%1);
1896 return AS2 (ld
,%0,%1);
1900 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
1904 rtx base
= XEXP (src
, 0);
1905 int reg_dest
= true_regnum (dest
);
1906 int reg_base
= true_regnum (base
);
1907 /* "volatile" forces reading low byte first, even if less efficient,
1908 for correct operation with 16-bit I/O registers. */
1909 int mem_volatile_p
= MEM_VOLATILE_P (src
);
1917 if (reg_dest
== reg_base
) /* R = (R) */
1920 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
1921 AS2 (ld
,%B0
,%1) CR_TAB
1922 AS2 (mov
,%A0
,__tmp_reg__
));
1924 else if (reg_base
== REG_X
) /* (R26) */
1926 if (reg_unused_after (insn
, base
))
1929 return (AS2 (ld
,%A0
,X
+) CR_TAB
1933 return (AS2 (ld
,%A0
,X
+) CR_TAB
1934 AS2 (ld
,%B0
,X
) CR_TAB
1940 return (AS2 (ld
,%A0
,%1) CR_TAB
1941 AS2 (ldd
,%B0
,%1+1));
1944 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
1946 int disp
= INTVAL (XEXP (base
, 1));
1947 int reg_base
= true_regnum (XEXP (base
, 0));
1949 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
1951 if (REGNO (XEXP (base
, 0)) != REG_Y
)
1952 fatal_insn ("incorrect insn:",insn
);
1954 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
1955 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
1956 AS2 (ldd
,%A0
,Y
+62) CR_TAB
1957 AS2 (ldd
,%B0
,Y
+63) CR_TAB
1958 AS2 (sbiw
,r28
,%o1
-62));
1960 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
1961 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
1962 AS2 (ld
,%A0
,Y
) CR_TAB
1963 AS2 (ldd
,%B0
,Y
+1) CR_TAB
1964 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
1965 AS2 (sbci
,r29
,hi8(%o1
)));
1967 if (reg_base
== REG_X
)
1969 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1970 it but I have this situation with extremal
1971 optimization options. */
1974 if (reg_base
== reg_dest
)
1975 return (AS2 (adiw
,r26
,%o1
) CR_TAB
1976 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
1977 AS2 (ld
,%B0
,X
) CR_TAB
1978 AS2 (mov
,%A0
,__tmp_reg__
));
1980 return (AS2 (adiw
,r26
,%o1
) CR_TAB
1981 AS2 (ld
,%A0
,X
+) CR_TAB
1982 AS2 (ld
,%B0
,X
) CR_TAB
1983 AS2 (sbiw
,r26
,%o1
+1));
1986 if (reg_base
== reg_dest
)
1989 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
1990 AS2 (ldd
,%B0
,%B1
) CR_TAB
1991 AS2 (mov
,%A0
,__tmp_reg__
));
1995 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
1998 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2000 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2001 fatal_insn ("incorrect insn:", insn
);
2005 if (REGNO (XEXP (base
, 0)) == REG_X
)
2008 return (AS2 (sbiw
,r26
,2) CR_TAB
2009 AS2 (ld
,%A0
,X
+) CR_TAB
2010 AS2 (ld
,%B0
,X
) CR_TAB
2016 return (AS2 (sbiw
,%r1
,2) CR_TAB
2017 AS2 (ld
,%A0
,%p1
) CR_TAB
2018 AS2 (ldd
,%B0
,%p1
+1));
2023 return (AS2 (ld
,%B0
,%1) CR_TAB
2026 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2028 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2029 fatal_insn ("incorrect insn:", insn
);
2032 return (AS2 (ld
,%A0
,%1) CR_TAB
2035 else if (CONSTANT_ADDRESS_P (base
))
2037 if (avr_io_address_p (base
, 2))
2040 return (AS2 (in
,%A0
,%A1
-0x20) CR_TAB
2041 AS2 (in
,%B0
,%B1
-0x20));
2044 return (AS2 (lds
,%A0
,%A1
) CR_TAB
2048 fatal_insn ("unknown move insn:",insn
);
2053 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2057 rtx base
= XEXP (src
, 0);
2058 int reg_dest
= true_regnum (dest
);
2059 int reg_base
= true_regnum (base
);
2067 if (reg_base
== REG_X
) /* (R26) */
2069 if (reg_dest
== REG_X
)
2070 /* "ld r26,-X" is undefined */
2071 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2072 AS2 (ld
,r29
,X
) CR_TAB
2073 AS2 (ld
,r28
,-X
) CR_TAB
2074 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2075 AS2 (sbiw
,r26
,1) CR_TAB
2076 AS2 (ld
,r26
,X
) CR_TAB
2077 AS2 (mov
,r27
,__tmp_reg__
));
2078 else if (reg_dest
== REG_X
- 2)
2079 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2080 AS2 (ld
,%B0
,X
+) CR_TAB
2081 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2082 AS2 (ld
,%D0
,X
) CR_TAB
2083 AS2 (mov
,%C0
,__tmp_reg__
));
2084 else if (reg_unused_after (insn
, base
))
2085 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2086 AS2 (ld
,%B0
,X
+) CR_TAB
2087 AS2 (ld
,%C0
,X
+) CR_TAB
2090 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2091 AS2 (ld
,%B0
,X
+) CR_TAB
2092 AS2 (ld
,%C0
,X
+) CR_TAB
2093 AS2 (ld
,%D0
,X
) CR_TAB
2098 if (reg_dest
== reg_base
)
2099 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2100 AS2 (ldd
,%C0
,%1+2) CR_TAB
2101 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2102 AS2 (ld
,%A0
,%1) CR_TAB
2103 AS2 (mov
,%B0
,__tmp_reg__
));
2104 else if (reg_base
== reg_dest
+ 2)
2105 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2106 AS2 (ldd
,%B0
,%1+1) CR_TAB
2107 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2108 AS2 (ldd
,%D0
,%1+3) CR_TAB
2109 AS2 (mov
,%C0
,__tmp_reg__
));
2111 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2112 AS2 (ldd
,%B0
,%1+1) CR_TAB
2113 AS2 (ldd
,%C0
,%1+2) CR_TAB
2114 AS2 (ldd
,%D0
,%1+3));
2117 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2119 int disp
= INTVAL (XEXP (base
, 1));
2121 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2123 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2124 fatal_insn ("incorrect insn:",insn
);
2126 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2127 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2128 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2129 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2130 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2131 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2132 AS2 (sbiw
,r28
,%o1
-60));
2134 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2135 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2136 AS2 (ld
,%A0
,Y
) CR_TAB
2137 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2138 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2139 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2140 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2141 AS2 (sbci
,r29
,hi8(%o1
)));
2144 reg_base
= true_regnum (XEXP (base
, 0));
2145 if (reg_base
== REG_X
)
2148 if (reg_dest
== REG_X
)
2151 /* "ld r26,-X" is undefined */
2152 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2153 AS2 (ld
,r29
,X
) CR_TAB
2154 AS2 (ld
,r28
,-X
) CR_TAB
2155 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2156 AS2 (sbiw
,r26
,1) CR_TAB
2157 AS2 (ld
,r26
,X
) CR_TAB
2158 AS2 (mov
,r27
,__tmp_reg__
));
2161 if (reg_dest
== REG_X
- 2)
2162 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2163 AS2 (ld
,r24
,X
+) CR_TAB
2164 AS2 (ld
,r25
,X
+) CR_TAB
2165 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2166 AS2 (ld
,r27
,X
) CR_TAB
2167 AS2 (mov
,r26
,__tmp_reg__
));
2169 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2170 AS2 (ld
,%A0
,X
+) CR_TAB
2171 AS2 (ld
,%B0
,X
+) CR_TAB
2172 AS2 (ld
,%C0
,X
+) CR_TAB
2173 AS2 (ld
,%D0
,X
) CR_TAB
2174 AS2 (sbiw
,r26
,%o1
+3));
2176 if (reg_dest
== reg_base
)
2177 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2178 AS2 (ldd
,%C0
,%C1
) CR_TAB
2179 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2180 AS2 (ldd
,%A0
,%A1
) CR_TAB
2181 AS2 (mov
,%B0
,__tmp_reg__
));
2182 else if (reg_dest
== reg_base
- 2)
2183 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2184 AS2 (ldd
,%B0
,%B1
) CR_TAB
2185 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2186 AS2 (ldd
,%D0
,%D1
) CR_TAB
2187 AS2 (mov
,%C0
,__tmp_reg__
));
2188 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2189 AS2 (ldd
,%B0
,%B1
) CR_TAB
2190 AS2 (ldd
,%C0
,%C1
) CR_TAB
2193 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2194 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2195 AS2 (ld
,%C0
,%1) CR_TAB
2196 AS2 (ld
,%B0
,%1) CR_TAB
2198 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2199 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2200 AS2 (ld
,%B0
,%1) CR_TAB
2201 AS2 (ld
,%C0
,%1) CR_TAB
2203 else if (CONSTANT_ADDRESS_P (base
))
2204 return *l
=8, (AS2 (lds
,%A0
,%A1
) CR_TAB
2205 AS2 (lds
,%B0
,%B1
) CR_TAB
2206 AS2 (lds
,%C0
,%C1
) CR_TAB
2209 fatal_insn ("unknown move insn:",insn
);
2214 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2218 rtx base
= XEXP (dest
, 0);
2219 int reg_base
= true_regnum (base
);
2220 int reg_src
= true_regnum (src
);
2226 if (CONSTANT_ADDRESS_P (base
))
2227 return *l
=8,(AS2 (sts
,%A0
,%A1
) CR_TAB
2228 AS2 (sts
,%B0
,%B1
) CR_TAB
2229 AS2 (sts
,%C0
,%C1
) CR_TAB
2231 if (reg_base
> 0) /* (r) */
2233 if (reg_base
== REG_X
) /* (R26) */
2235 if (reg_src
== REG_X
)
2237 /* "st X+,r26" is undefined */
2238 if (reg_unused_after (insn
, base
))
2239 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2240 AS2 (st
,X
,r26
) CR_TAB
2241 AS2 (adiw
,r26
,1) CR_TAB
2242 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2243 AS2 (st
,X
+,r28
) CR_TAB
2246 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2247 AS2 (st
,X
,r26
) CR_TAB
2248 AS2 (adiw
,r26
,1) CR_TAB
2249 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2250 AS2 (st
,X
+,r28
) CR_TAB
2251 AS2 (st
,X
,r29
) CR_TAB
2254 else if (reg_base
== reg_src
+ 2)
2256 if (reg_unused_after (insn
, base
))
2257 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2258 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2259 AS2 (st
,%0+,%A1
) CR_TAB
2260 AS2 (st
,%0+,%B1
) CR_TAB
2261 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2262 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2263 AS1 (clr
,__zero_reg__
));
2265 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2266 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2267 AS2 (st
,%0+,%A1
) CR_TAB
2268 AS2 (st
,%0+,%B1
) CR_TAB
2269 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2270 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2271 AS1 (clr
,__zero_reg__
) CR_TAB
2274 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2275 AS2 (st
,%0+,%B1
) CR_TAB
2276 AS2 (st
,%0+,%C1
) CR_TAB
2277 AS2 (st
,%0,%D1
) CR_TAB
2281 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2282 AS2 (std
,%0+1,%B1
) CR_TAB
2283 AS2 (std
,%0+2,%C1
) CR_TAB
2284 AS2 (std
,%0+3,%D1
));
2286 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2288 int disp
= INTVAL (XEXP (base
, 1));
2289 reg_base
= REGNO (XEXP (base
, 0));
2290 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2292 if (reg_base
!= REG_Y
)
2293 fatal_insn ("incorrect insn:",insn
);
2295 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2296 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2297 AS2 (std
,Y
+60,%A1
) CR_TAB
2298 AS2 (std
,Y
+61,%B1
) CR_TAB
2299 AS2 (std
,Y
+62,%C1
) CR_TAB
2300 AS2 (std
,Y
+63,%D1
) CR_TAB
2301 AS2 (sbiw
,r28
,%o0
-60));
2303 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2304 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2305 AS2 (st
,Y
,%A1
) CR_TAB
2306 AS2 (std
,Y
+1,%B1
) CR_TAB
2307 AS2 (std
,Y
+2,%C1
) CR_TAB
2308 AS2 (std
,Y
+3,%D1
) CR_TAB
2309 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2310 AS2 (sbci
,r29
,hi8(%o0
)));
2312 if (reg_base
== REG_X
)
2315 if (reg_src
== REG_X
)
2318 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2319 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2320 AS2 (adiw
,r26
,%o0
) CR_TAB
2321 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2322 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2323 AS2 (st
,X
+,r28
) CR_TAB
2324 AS2 (st
,X
,r29
) CR_TAB
2325 AS1 (clr
,__zero_reg__
) CR_TAB
2326 AS2 (sbiw
,r26
,%o0
+3));
2328 else if (reg_src
== REG_X
- 2)
2331 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2332 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2333 AS2 (adiw
,r26
,%o0
) CR_TAB
2334 AS2 (st
,X
+,r24
) CR_TAB
2335 AS2 (st
,X
+,r25
) CR_TAB
2336 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2337 AS2 (st
,X
,__zero_reg__
) CR_TAB
2338 AS1 (clr
,__zero_reg__
) CR_TAB
2339 AS2 (sbiw
,r26
,%o0
+3));
2342 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2343 AS2 (st
,X
+,%A1
) CR_TAB
2344 AS2 (st
,X
+,%B1
) CR_TAB
2345 AS2 (st
,X
+,%C1
) CR_TAB
2346 AS2 (st
,X
,%D1
) CR_TAB
2347 AS2 (sbiw
,r26
,%o0
+3));
2349 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2350 AS2 (std
,%B0
,%B1
) CR_TAB
2351 AS2 (std
,%C0
,%C1
) CR_TAB
2354 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2355 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2356 AS2 (st
,%0,%C1
) CR_TAB
2357 AS2 (st
,%0,%B1
) CR_TAB
2359 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2360 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2361 AS2 (st
,%0,%B1
) CR_TAB
2362 AS2 (st
,%0,%C1
) CR_TAB
2364 fatal_insn ("unknown move insn:",insn
);
2369 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2372 rtx dest
= operands
[0];
2373 rtx src
= operands
[1];
2379 if (register_operand (dest
, VOIDmode
))
2381 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2383 if (true_regnum (dest
) > true_regnum (src
))
2388 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2389 AS2 (movw
,%A0
,%A1
));
2392 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2393 AS2 (mov
,%C0
,%C1
) CR_TAB
2394 AS2 (mov
,%B0
,%B1
) CR_TAB
2402 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2403 AS2 (movw
,%C0
,%C1
));
2406 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2407 AS2 (mov
,%B0
,%B1
) CR_TAB
2408 AS2 (mov
,%C0
,%C1
) CR_TAB
2412 else if (CONSTANT_P (src
))
2414 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2417 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2418 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2419 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2420 AS2 (ldi
,%D0
,hhi8(%1)));
2423 if (GET_CODE (src
) == CONST_INT
)
2425 const char *const clr_op0
=
2426 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2427 AS1 (clr
,%B0
) CR_TAB
2429 : (AS1 (clr
,%A0
) CR_TAB
2430 AS1 (clr
,%B0
) CR_TAB
2431 AS1 (clr
,%C0
) CR_TAB
2434 if (src
== const0_rtx
) /* mov r,L */
2436 *l
= AVR_HAVE_MOVW
? 3 : 4;
2439 else if (src
== const1_rtx
)
2442 output_asm_insn (clr_op0
, operands
);
2443 *l
= AVR_HAVE_MOVW
? 4 : 5;
2444 return AS1 (inc
,%A0
);
2446 else if (src
== constm1_rtx
)
2448 /* Immediate constants -1 to any register */
2452 return (AS1 (clr
,%A0
) CR_TAB
2453 AS1 (dec
,%A0
) CR_TAB
2454 AS2 (mov
,%B0
,%A0
) CR_TAB
2455 AS2 (movw
,%C0
,%A0
));
2458 return (AS1 (clr
,%A0
) CR_TAB
2459 AS1 (dec
,%A0
) CR_TAB
2460 AS2 (mov
,%B0
,%A0
) CR_TAB
2461 AS2 (mov
,%C0
,%A0
) CR_TAB
2466 int bit_nr
= exact_log2 (INTVAL (src
));
2470 *l
= AVR_HAVE_MOVW
? 5 : 6;
2473 output_asm_insn (clr_op0
, operands
);
2474 output_asm_insn ("set", operands
);
2477 avr_output_bld (operands
, bit_nr
);
2484 /* Last resort, better than loading from memory. */
2486 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2487 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2488 AS2 (mov
,%A0
,r31
) CR_TAB
2489 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2490 AS2 (mov
,%B0
,r31
) CR_TAB
2491 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2492 AS2 (mov
,%C0
,r31
) CR_TAB
2493 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2494 AS2 (mov
,%D0
,r31
) CR_TAB
2495 AS2 (mov
,r31
,__tmp_reg__
));
2497 else if (GET_CODE (src
) == MEM
)
2498 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2500 else if (GET_CODE (dest
) == MEM
)
2502 const char *template;
2504 if (src
== const0_rtx
)
2505 operands
[1] = zero_reg_rtx
;
2507 template = out_movsi_mr_r (insn
, operands
, real_l
);
2510 output_asm_insn (template, operands
);
2515 fatal_insn ("invalid insn:", insn
);
2520 out_movqi_mr_r (rtx insn
, rtx op
[], int *l
)
2524 rtx x
= XEXP (dest
, 0);
2530 if (CONSTANT_ADDRESS_P (x
))
2532 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2535 return AS2 (out
,__SREG__
,%1);
2537 if (avr_io_address_p (x
, 1))
2540 return AS2 (out
,%0-0x20,%1);
2543 return AS2 (sts
,%0,%1);
2545 /* memory access by reg+disp */
2546 else if (GET_CODE (x
) == PLUS
2547 && REG_P (XEXP (x
,0))
2548 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2550 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (dest
))) >= 63)
2552 int disp
= INTVAL (XEXP (x
,1));
2553 if (REGNO (XEXP (x
,0)) != REG_Y
)
2554 fatal_insn ("incorrect insn:",insn
);
2556 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2557 return *l
= 3, (AS2 (adiw
,r28
,%o0
-63) CR_TAB
2558 AS2 (std
,Y
+63,%1) CR_TAB
2559 AS2 (sbiw
,r28
,%o0
-63));
2561 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2562 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2563 AS2 (st
,Y
,%1) CR_TAB
2564 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2565 AS2 (sbci
,r29
,hi8(%o0
)));
2567 else if (REGNO (XEXP (x
,0)) == REG_X
)
2569 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
2571 if (reg_unused_after (insn
, XEXP (x
,0)))
2572 return *l
= 3, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2573 AS2 (adiw
,r26
,%o0
) CR_TAB
2574 AS2 (st
,X
,__tmp_reg__
));
2576 return *l
= 4, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2577 AS2 (adiw
,r26
,%o0
) CR_TAB
2578 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2579 AS2 (sbiw
,r26
,%o0
));
2583 if (reg_unused_after (insn
, XEXP (x
,0)))
2584 return *l
= 2, (AS2 (adiw
,r26
,%o0
) CR_TAB
2587 return *l
= 3, (AS2 (adiw
,r26
,%o0
) CR_TAB
2588 AS2 (st
,X
,%1) CR_TAB
2589 AS2 (sbiw
,r26
,%o0
));
2593 return AS2 (std
,%0,%1);
2596 return AS2 (st
,%0,%1);
2600 out_movhi_mr_r (rtx insn
, rtx op
[], int *l
)
2604 rtx base
= XEXP (dest
, 0);
2605 int reg_base
= true_regnum (base
);
2606 int reg_src
= true_regnum (src
);
2607 /* "volatile" forces writing high byte first, even if less efficient,
2608 for correct operation with 16-bit I/O registers. */
2609 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
2614 if (CONSTANT_ADDRESS_P (base
))
2616 if (avr_io_address_p (base
, 2))
2619 return (AS2 (out
,%B0
-0x20,%B1
) CR_TAB
2620 AS2 (out
,%A0
-0x20,%A1
));
2622 return *l
= 4, (AS2 (sts
,%B0
,%B1
) CR_TAB
2627 if (reg_base
== REG_X
)
2629 if (reg_src
== REG_X
)
2631 /* "st X+,r26" and "st -X,r26" are undefined. */
2632 if (!mem_volatile_p
&& reg_unused_after (insn
, src
))
2633 return *l
=4, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2634 AS2 (st
,X
,r26
) CR_TAB
2635 AS2 (adiw
,r26
,1) CR_TAB
2636 AS2 (st
,X
,__tmp_reg__
));
2638 return *l
=5, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2639 AS2 (adiw
,r26
,1) CR_TAB
2640 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2641 AS2 (sbiw
,r26
,1) CR_TAB
2646 if (!mem_volatile_p
&& reg_unused_after (insn
, base
))
2647 return *l
=2, (AS2 (st
,X
+,%A1
) CR_TAB
2650 return *l
=3, (AS2 (adiw
,r26
,1) CR_TAB
2651 AS2 (st
,X
,%B1
) CR_TAB
2656 return *l
=2, (AS2 (std
,%0+1,%B1
) CR_TAB
2659 else if (GET_CODE (base
) == PLUS
)
2661 int disp
= INTVAL (XEXP (base
, 1));
2662 reg_base
= REGNO (XEXP (base
, 0));
2663 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2665 if (reg_base
!= REG_Y
)
2666 fatal_insn ("incorrect insn:",insn
);
2668 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2669 return *l
= 4, (AS2 (adiw
,r28
,%o0
-62) CR_TAB
2670 AS2 (std
,Y
+63,%B1
) CR_TAB
2671 AS2 (std
,Y
+62,%A1
) CR_TAB
2672 AS2 (sbiw
,r28
,%o0
-62));
2674 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2675 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2676 AS2 (std
,Y
+1,%B1
) CR_TAB
2677 AS2 (st
,Y
,%A1
) CR_TAB
2678 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2679 AS2 (sbci
,r29
,hi8(%o0
)));
2681 if (reg_base
== REG_X
)
2684 if (reg_src
== REG_X
)
2687 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2688 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2689 AS2 (adiw
,r26
,%o0
+1) CR_TAB
2690 AS2 (st
,X
,__zero_reg__
) CR_TAB
2691 AS2 (st
,-X
,__tmp_reg__
) CR_TAB
2692 AS1 (clr
,__zero_reg__
) CR_TAB
2693 AS2 (sbiw
,r26
,%o0
));
2696 return (AS2 (adiw
,r26
,%o0
+1) CR_TAB
2697 AS2 (st
,X
,%B1
) CR_TAB
2698 AS2 (st
,-X
,%A1
) CR_TAB
2699 AS2 (sbiw
,r26
,%o0
));
2701 return *l
=2, (AS2 (std
,%B0
,%B1
) CR_TAB
2704 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2705 return *l
=2, (AS2 (st
,%0,%B1
) CR_TAB
2707 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2711 if (REGNO (XEXP (base
, 0)) == REG_X
)
2714 return (AS2 (adiw
,r26
,1) CR_TAB
2715 AS2 (st
,X
,%B1
) CR_TAB
2716 AS2 (st
,-X
,%A1
) CR_TAB
2722 return (AS2 (std
,%p0
+1,%B1
) CR_TAB
2723 AS2 (st
,%p0
,%A1
) CR_TAB
2729 return (AS2 (st
,%0,%A1
) CR_TAB
2732 fatal_insn ("unknown move insn:",insn
);
2736 /* Return 1 if frame pointer for current function required. */
2739 frame_pointer_required_p (void)
2741 return (current_function_calls_alloca
2742 || current_function_args_info
.nregs
== 0
2743 || get_frame_size () > 0);
2746 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2749 compare_condition (rtx insn
)
2751 rtx next
= next_real_insn (insn
);
2752 RTX_CODE cond
= UNKNOWN
;
2753 if (next
&& GET_CODE (next
) == JUMP_INSN
)
2755 rtx pat
= PATTERN (next
);
2756 rtx src
= SET_SRC (pat
);
2757 rtx t
= XEXP (src
, 0);
2758 cond
= GET_CODE (t
);
2763 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2766 compare_sign_p (rtx insn
)
2768 RTX_CODE cond
= compare_condition (insn
);
2769 return (cond
== GE
|| cond
== LT
);
2772 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2773 that needs to be swapped (GT, GTU, LE, LEU). */
2776 compare_diff_p (rtx insn
)
2778 RTX_CODE cond
= compare_condition (insn
);
2779 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
2782 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2785 compare_eq_p (rtx insn
)
2787 RTX_CODE cond
= compare_condition (insn
);
2788 return (cond
== EQ
|| cond
== NE
);
2792 /* Output test instruction for HImode. */
2795 out_tsthi (rtx insn
, int *l
)
2797 if (compare_sign_p (insn
))
2800 return AS1 (tst
,%B0
);
2802 if (reg_unused_after (insn
, SET_SRC (PATTERN (insn
)))
2803 && compare_eq_p (insn
))
2805 /* Faster than sbiw if we can clobber the operand. */
2807 return AS2 (or,%A0
,%B0
);
2809 if (test_hard_reg_class (ADDW_REGS
, SET_SRC (PATTERN (insn
))))
2812 return AS2 (sbiw
,%0,0);
2815 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2816 AS2 (cpc
,%B0
,__zero_reg__
));
2820 /* Output test instruction for SImode. */
2823 out_tstsi (rtx insn
, int *l
)
2825 if (compare_sign_p (insn
))
2828 return AS1 (tst
,%D0
);
2830 if (test_hard_reg_class (ADDW_REGS
, SET_SRC (PATTERN (insn
))))
2833 return (AS2 (sbiw
,%A0
,0) CR_TAB
2834 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2835 AS2 (cpc
,%D0
,__zero_reg__
));
2838 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2839 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
2840 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2841 AS2 (cpc
,%D0
,__zero_reg__
));
2845 /* Generate asm equivalent for various shifts.
2846 Shift count is a CONST_INT, MEM or REG.
2847 This only handles cases that are not already
2848 carefully hand-optimized in ?sh??i3_out. */
2851 out_shift_with_cnt (const char *template, rtx insn
, rtx operands
[],
2852 int *len
, int t_len
)
2856 int second_label
= 1;
2857 int saved_in_tmp
= 0;
2858 int use_zero_reg
= 0;
2860 op
[0] = operands
[0];
2861 op
[1] = operands
[1];
2862 op
[2] = operands
[2];
2863 op
[3] = operands
[3];
2869 if (GET_CODE (operands
[2]) == CONST_INT
)
2871 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
2872 int count
= INTVAL (operands
[2]);
2873 int max_len
= 10; /* If larger than this, always use a loop. */
2882 if (count
< 8 && !scratch
)
2886 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
2888 if (t_len
* count
<= max_len
)
2890 /* Output shifts inline with no loop - faster. */
2892 *len
= t_len
* count
;
2896 output_asm_insn (template, op
);
2905 strcat (str
, AS2 (ldi
,%3,%2));
2907 else if (use_zero_reg
)
2909 /* Hack to save one word: use __zero_reg__ as loop counter.
2910 Set one bit, then shift in a loop until it is 0 again. */
2912 op
[3] = zero_reg_rtx
;
2916 strcat (str
, ("set" CR_TAB
2917 AS2 (bld
,%3,%2-1)));
2921 /* No scratch register available, use one from LD_REGS (saved in
2922 __tmp_reg__) that doesn't overlap with registers to shift. */
2924 op
[3] = gen_rtx_REG (QImode
,
2925 ((true_regnum (operands
[0]) - 1) & 15) + 16);
2926 op
[4] = tmp_reg_rtx
;
2930 *len
= 3; /* Includes "mov %3,%4" after the loop. */
2932 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
2938 else if (GET_CODE (operands
[2]) == MEM
)
2942 op
[3] = op_mov
[0] = tmp_reg_rtx
;
2946 out_movqi_r_mr (insn
, op_mov
, len
);
2948 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
2950 else if (register_operand (operands
[2], QImode
))
2952 if (reg_unused_after (insn
, operands
[2]))
2956 op
[3] = tmp_reg_rtx
;
2958 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
2962 fatal_insn ("bad shift insn:", insn
);
2969 strcat (str
, AS1 (rjmp
,2f
));
2973 *len
+= t_len
+ 2; /* template + dec + brXX */
2976 strcat (str
, "\n1:\t");
2977 strcat (str
, template);
2978 strcat (str
, second_label
? "\n2:\t" : "\n\t");
2979 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
2980 strcat (str
, CR_TAB
);
2981 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
2983 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
2984 output_asm_insn (str
, op
);
2989 /* 8bit shift left ((char)x << i) */
2992 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
2994 if (GET_CODE (operands
[2]) == CONST_INT
)
3001 switch (INTVAL (operands
[2]))
3004 if (INTVAL (operands
[2]) < 8)
3008 return AS1 (clr
,%0);
3012 return AS1 (lsl
,%0);
3016 return (AS1 (lsl
,%0) CR_TAB
3021 return (AS1 (lsl
,%0) CR_TAB
3026 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3029 return (AS1 (swap
,%0) CR_TAB
3030 AS2 (andi
,%0,0xf0));
3033 return (AS1 (lsl
,%0) CR_TAB
3039 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3042 return (AS1 (swap
,%0) CR_TAB
3044 AS2 (andi
,%0,0xe0));
3047 return (AS1 (lsl
,%0) CR_TAB
3054 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3057 return (AS1 (swap
,%0) CR_TAB
3060 AS2 (andi
,%0,0xc0));
3063 return (AS1 (lsl
,%0) CR_TAB
3072 return (AS1 (ror
,%0) CR_TAB
3077 else if (CONSTANT_P (operands
[2]))
3078 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3080 out_shift_with_cnt (AS1 (lsl
,%0),
3081 insn
, operands
, len
, 1);
3086 /* 16bit shift left ((short)x << i) */
3089 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3091 if (GET_CODE (operands
[2]) == CONST_INT
)
3093 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3094 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3101 switch (INTVAL (operands
[2]))
3104 if (INTVAL (operands
[2]) < 16)
3108 return (AS1 (clr
,%B0
) CR_TAB
3112 if (optimize_size
&& scratch
)
3117 return (AS1 (swap
,%A0
) CR_TAB
3118 AS1 (swap
,%B0
) CR_TAB
3119 AS2 (andi
,%B0
,0xf0) CR_TAB
3120 AS2 (eor
,%B0
,%A0
) CR_TAB
3121 AS2 (andi
,%A0
,0xf0) CR_TAB
3127 return (AS1 (swap
,%A0
) CR_TAB
3128 AS1 (swap
,%B0
) CR_TAB
3129 AS2 (ldi
,%3,0xf0) CR_TAB
3130 AS2 (and,%B0
,%3) CR_TAB
3131 AS2 (eor
,%B0
,%A0
) CR_TAB
3132 AS2 (and,%A0
,%3) CR_TAB
3135 break; /* optimize_size ? 6 : 8 */
3139 break; /* scratch ? 5 : 6 */
3143 return (AS1 (lsl
,%A0
) CR_TAB
3144 AS1 (rol
,%B0
) CR_TAB
3145 AS1 (swap
,%A0
) CR_TAB
3146 AS1 (swap
,%B0
) CR_TAB
3147 AS2 (andi
,%B0
,0xf0) CR_TAB
3148 AS2 (eor
,%B0
,%A0
) CR_TAB
3149 AS2 (andi
,%A0
,0xf0) CR_TAB
3155 return (AS1 (lsl
,%A0
) CR_TAB
3156 AS1 (rol
,%B0
) CR_TAB
3157 AS1 (swap
,%A0
) CR_TAB
3158 AS1 (swap
,%B0
) CR_TAB
3159 AS2 (ldi
,%3,0xf0) CR_TAB
3160 AS2 (and,%B0
,%3) CR_TAB
3161 AS2 (eor
,%B0
,%A0
) CR_TAB
3162 AS2 (and,%A0
,%3) CR_TAB
3169 break; /* scratch ? 5 : 6 */
3171 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3172 AS1 (lsr
,%B0
) CR_TAB
3173 AS1 (ror
,%A0
) CR_TAB
3174 AS1 (ror
,__tmp_reg__
) CR_TAB
3175 AS1 (lsr
,%B0
) CR_TAB
3176 AS1 (ror
,%A0
) CR_TAB
3177 AS1 (ror
,__tmp_reg__
) CR_TAB
3178 AS2 (mov
,%B0
,%A0
) CR_TAB
3179 AS2 (mov
,%A0
,__tmp_reg__
));
3183 return (AS1 (lsr
,%B0
) CR_TAB
3184 AS2 (mov
,%B0
,%A0
) CR_TAB
3185 AS1 (clr
,%A0
) CR_TAB
3186 AS1 (ror
,%B0
) CR_TAB
3190 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3195 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3196 AS1 (clr
,%A0
) CR_TAB
3201 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3202 AS1 (clr
,%A0
) CR_TAB
3203 AS1 (lsl
,%B0
) CR_TAB
3208 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3209 AS1 (clr
,%A0
) CR_TAB
3210 AS1 (lsl
,%B0
) CR_TAB
3211 AS1 (lsl
,%B0
) CR_TAB
3218 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3219 AS1 (clr
,%A0
) CR_TAB
3220 AS1 (swap
,%B0
) CR_TAB
3221 AS2 (andi
,%B0
,0xf0));
3226 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3227 AS1 (clr
,%A0
) CR_TAB
3228 AS1 (swap
,%B0
) CR_TAB
3229 AS2 (ldi
,%3,0xf0) CR_TAB
3233 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3234 AS1 (clr
,%A0
) CR_TAB
3235 AS1 (lsl
,%B0
) CR_TAB
3236 AS1 (lsl
,%B0
) CR_TAB
3237 AS1 (lsl
,%B0
) CR_TAB
3244 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3245 AS1 (clr
,%A0
) CR_TAB
3246 AS1 (swap
,%B0
) CR_TAB
3247 AS1 (lsl
,%B0
) CR_TAB
3248 AS2 (andi
,%B0
,0xe0));
3250 if (AVR_HAVE_MUL
&& scratch
)
3253 return (AS2 (ldi
,%3,0x20) CR_TAB
3254 AS2 (mul
,%A0
,%3) CR_TAB
3255 AS2 (mov
,%B0
,r0
) CR_TAB
3256 AS1 (clr
,%A0
) CR_TAB
3257 AS1 (clr
,__zero_reg__
));
3259 if (optimize_size
&& scratch
)
3264 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3265 AS1 (clr
,%A0
) CR_TAB
3266 AS1 (swap
,%B0
) CR_TAB
3267 AS1 (lsl
,%B0
) CR_TAB
3268 AS2 (ldi
,%3,0xe0) CR_TAB
3274 return ("set" CR_TAB
3275 AS2 (bld
,r1
,5) CR_TAB
3276 AS2 (mul
,%A0
,r1
) CR_TAB
3277 AS2 (mov
,%B0
,r0
) CR_TAB
3278 AS1 (clr
,%A0
) CR_TAB
3279 AS1 (clr
,__zero_reg__
));
3282 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3283 AS1 (clr
,%A0
) CR_TAB
3284 AS1 (lsl
,%B0
) CR_TAB
3285 AS1 (lsl
,%B0
) CR_TAB
3286 AS1 (lsl
,%B0
) CR_TAB
3287 AS1 (lsl
,%B0
) CR_TAB
3291 if (AVR_HAVE_MUL
&& ldi_ok
)
3294 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3295 AS2 (mul
,%A0
,%B0
) CR_TAB
3296 AS2 (mov
,%B0
,r0
) CR_TAB
3297 AS1 (clr
,%A0
) CR_TAB
3298 AS1 (clr
,__zero_reg__
));
3300 if (AVR_HAVE_MUL
&& scratch
)
3303 return (AS2 (ldi
,%3,0x40) CR_TAB
3304 AS2 (mul
,%A0
,%3) CR_TAB
3305 AS2 (mov
,%B0
,r0
) CR_TAB
3306 AS1 (clr
,%A0
) CR_TAB
3307 AS1 (clr
,__zero_reg__
));
3309 if (optimize_size
&& ldi_ok
)
3312 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3313 AS2 (ldi
,%A0
,6) "\n1:\t"
3314 AS1 (lsl
,%B0
) CR_TAB
3315 AS1 (dec
,%A0
) CR_TAB
3318 if (optimize_size
&& scratch
)
3321 return (AS1 (clr
,%B0
) CR_TAB
3322 AS1 (lsr
,%A0
) CR_TAB
3323 AS1 (ror
,%B0
) CR_TAB
3324 AS1 (lsr
,%A0
) CR_TAB
3325 AS1 (ror
,%B0
) CR_TAB
3330 return (AS1 (clr
,%B0
) CR_TAB
3331 AS1 (lsr
,%A0
) CR_TAB
3332 AS1 (ror
,%B0
) CR_TAB
3337 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3339 insn
, operands
, len
, 2);
3344 /* 32bit shift left ((long)x << i) */
3347 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3349 if (GET_CODE (operands
[2]) == CONST_INT
)
3357 switch (INTVAL (operands
[2]))
3360 if (INTVAL (operands
[2]) < 32)
3364 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3365 AS1 (clr
,%C0
) CR_TAB
3366 AS2 (movw
,%A0
,%C0
));
3368 return (AS1 (clr
,%D0
) CR_TAB
3369 AS1 (clr
,%C0
) CR_TAB
3370 AS1 (clr
,%B0
) CR_TAB
3375 int reg0
= true_regnum (operands
[0]);
3376 int reg1
= true_regnum (operands
[1]);
3379 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3380 AS2 (mov
,%C0
,%B1
) CR_TAB
3381 AS2 (mov
,%B0
,%A1
) CR_TAB
3384 return (AS1 (clr
,%A0
) CR_TAB
3385 AS2 (mov
,%B0
,%A1
) CR_TAB
3386 AS2 (mov
,%C0
,%B1
) CR_TAB
3392 int reg0
= true_regnum (operands
[0]);
3393 int reg1
= true_regnum (operands
[1]);
3394 if (reg0
+ 2 == reg1
)
3395 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3398 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3399 AS1 (clr
,%B0
) CR_TAB
3402 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3403 AS2 (mov
,%D0
,%B1
) CR_TAB
3404 AS1 (clr
,%B0
) CR_TAB
3410 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3411 AS1 (clr
,%C0
) CR_TAB
3412 AS1 (clr
,%B0
) CR_TAB
3417 return (AS1 (clr
,%D0
) CR_TAB
3418 AS1 (lsr
,%A0
) CR_TAB
3419 AS1 (ror
,%D0
) CR_TAB
3420 AS1 (clr
,%C0
) CR_TAB
3421 AS1 (clr
,%B0
) CR_TAB
3426 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3427 AS1 (rol
,%B0
) CR_TAB
3428 AS1 (rol
,%C0
) CR_TAB
3430 insn
, operands
, len
, 4);
3434 /* 8bit arithmetic shift right ((signed char)x >> i) */
3437 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3439 if (GET_CODE (operands
[2]) == CONST_INT
)
3446 switch (INTVAL (operands
[2]))
3450 return AS1 (asr
,%0);
3454 return (AS1 (asr
,%0) CR_TAB
3459 return (AS1 (asr
,%0) CR_TAB
3465 return (AS1 (asr
,%0) CR_TAB
3472 return (AS1 (asr
,%0) CR_TAB
3480 return (AS2 (bst
,%0,6) CR_TAB
3482 AS2 (sbc
,%0,%0) CR_TAB
3486 if (INTVAL (operands
[2]) < 8)
3493 return (AS1 (lsl
,%0) CR_TAB
3497 else if (CONSTANT_P (operands
[2]))
3498 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3500 out_shift_with_cnt (AS1 (asr
,%0),
3501 insn
, operands
, len
, 1);
3506 /* 16bit arithmetic shift right ((signed short)x >> i) */
3509 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3511 if (GET_CODE (operands
[2]) == CONST_INT
)
3513 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3514 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3521 switch (INTVAL (operands
[2]))
3525 /* XXX try to optimize this too? */
3530 break; /* scratch ? 5 : 6 */
3532 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3533 AS2 (mov
,%A0
,%B0
) CR_TAB
3534 AS1 (lsl
,__tmp_reg__
) CR_TAB
3535 AS1 (rol
,%A0
) CR_TAB
3536 AS2 (sbc
,%B0
,%B0
) CR_TAB
3537 AS1 (lsl
,__tmp_reg__
) CR_TAB
3538 AS1 (rol
,%A0
) CR_TAB
3543 return (AS1 (lsl
,%A0
) CR_TAB
3544 AS2 (mov
,%A0
,%B0
) CR_TAB
3545 AS1 (rol
,%A0
) CR_TAB
3550 int reg0
= true_regnum (operands
[0]);
3551 int reg1
= true_regnum (operands
[1]);
3554 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3555 AS1 (lsl
,%B0
) CR_TAB
3558 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3559 AS1 (clr
,%B0
) CR_TAB
3560 AS2 (sbrc
,%A0
,7) CR_TAB
3566 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3567 AS1 (lsl
,%B0
) CR_TAB
3568 AS2 (sbc
,%B0
,%B0
) CR_TAB
3573 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3574 AS1 (lsl
,%B0
) CR_TAB
3575 AS2 (sbc
,%B0
,%B0
) CR_TAB
3576 AS1 (asr
,%A0
) CR_TAB
3580 if (AVR_HAVE_MUL
&& ldi_ok
)
3583 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3584 AS2 (muls
,%B0
,%A0
) CR_TAB
3585 AS2 (mov
,%A0
,r1
) CR_TAB
3586 AS2 (sbc
,%B0
,%B0
) CR_TAB
3587 AS1 (clr
,__zero_reg__
));
3589 if (optimize_size
&& scratch
)
3592 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3593 AS1 (lsl
,%B0
) CR_TAB
3594 AS2 (sbc
,%B0
,%B0
) CR_TAB
3595 AS1 (asr
,%A0
) CR_TAB
3596 AS1 (asr
,%A0
) CR_TAB
3600 if (AVR_HAVE_MUL
&& ldi_ok
)
3603 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3604 AS2 (muls
,%B0
,%A0
) CR_TAB
3605 AS2 (mov
,%A0
,r1
) CR_TAB
3606 AS2 (sbc
,%B0
,%B0
) CR_TAB
3607 AS1 (clr
,__zero_reg__
));
3609 if (optimize_size
&& scratch
)
3612 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3613 AS1 (lsl
,%B0
) CR_TAB
3614 AS2 (sbc
,%B0
,%B0
) CR_TAB
3615 AS1 (asr
,%A0
) CR_TAB
3616 AS1 (asr
,%A0
) CR_TAB
3617 AS1 (asr
,%A0
) CR_TAB
3621 if (AVR_HAVE_MUL
&& ldi_ok
)
3624 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3625 AS2 (muls
,%B0
,%A0
) CR_TAB
3626 AS2 (mov
,%A0
,r1
) CR_TAB
3627 AS2 (sbc
,%B0
,%B0
) CR_TAB
3628 AS1 (clr
,__zero_reg__
));
3631 break; /* scratch ? 5 : 7 */
3633 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3634 AS1 (lsl
,%B0
) CR_TAB
3635 AS2 (sbc
,%B0
,%B0
) CR_TAB
3636 AS1 (asr
,%A0
) CR_TAB
3637 AS1 (asr
,%A0
) CR_TAB
3638 AS1 (asr
,%A0
) CR_TAB
3639 AS1 (asr
,%A0
) CR_TAB
3644 return (AS1 (lsl
,%B0
) CR_TAB
3645 AS2 (sbc
,%A0
,%A0
) CR_TAB
3646 AS1 (lsl
,%B0
) CR_TAB
3647 AS2 (mov
,%B0
,%A0
) CR_TAB
3651 if (INTVAL (operands
[2]) < 16)
3657 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3658 AS2 (sbc
,%A0
,%A0
) CR_TAB
3663 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3665 insn
, operands
, len
, 2);
3670 /* 32bit arithmetic shift right ((signed long)x >> i) */
3673 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3675 if (GET_CODE (operands
[2]) == CONST_INT
)
3683 switch (INTVAL (operands
[2]))
3687 int reg0
= true_regnum (operands
[0]);
3688 int reg1
= true_regnum (operands
[1]);
3691 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3692 AS2 (mov
,%B0
,%C1
) CR_TAB
3693 AS2 (mov
,%C0
,%D1
) CR_TAB
3694 AS1 (clr
,%D0
) CR_TAB
3695 AS2 (sbrc
,%C0
,7) CR_TAB
3698 return (AS1 (clr
,%D0
) CR_TAB
3699 AS2 (sbrc
,%D1
,7) CR_TAB
3700 AS1 (dec
,%D0
) CR_TAB
3701 AS2 (mov
,%C0
,%D1
) CR_TAB
3702 AS2 (mov
,%B0
,%C1
) CR_TAB
3708 int reg0
= true_regnum (operands
[0]);
3709 int reg1
= true_regnum (operands
[1]);
3711 if (reg0
== reg1
+ 2)
3712 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
3713 AS2 (sbrc
,%B0
,7) CR_TAB
3714 AS1 (com
,%D0
) CR_TAB
3717 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
3718 AS1 (clr
,%D0
) CR_TAB
3719 AS2 (sbrc
,%B0
,7) CR_TAB
3720 AS1 (com
,%D0
) CR_TAB
3723 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
3724 AS2 (mov
,%A0
,%C1
) CR_TAB
3725 AS1 (clr
,%D0
) CR_TAB
3726 AS2 (sbrc
,%B0
,7) CR_TAB
3727 AS1 (com
,%D0
) CR_TAB
3732 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
3733 AS1 (clr
,%D0
) CR_TAB
3734 AS2 (sbrc
,%A0
,7) CR_TAB
3735 AS1 (com
,%D0
) CR_TAB
3736 AS2 (mov
,%B0
,%D0
) CR_TAB
3740 if (INTVAL (operands
[2]) < 32)
3747 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
3748 AS2 (sbc
,%A0
,%A0
) CR_TAB
3749 AS2 (mov
,%B0
,%A0
) CR_TAB
3750 AS2 (movw
,%C0
,%A0
));
3752 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
3753 AS2 (sbc
,%A0
,%A0
) CR_TAB
3754 AS2 (mov
,%B0
,%A0
) CR_TAB
3755 AS2 (mov
,%C0
,%A0
) CR_TAB
3760 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
3761 AS1 (ror
,%C0
) CR_TAB
3762 AS1 (ror
,%B0
) CR_TAB
3764 insn
, operands
, len
, 4);
3768 /* 8bit logic shift right ((unsigned char)x >> i) */
3771 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
3773 if (GET_CODE (operands
[2]) == CONST_INT
)
3780 switch (INTVAL (operands
[2]))
3783 if (INTVAL (operands
[2]) < 8)
3787 return AS1 (clr
,%0);
3791 return AS1 (lsr
,%0);
3795 return (AS1 (lsr
,%0) CR_TAB
3799 return (AS1 (lsr
,%0) CR_TAB
3804 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3807 return (AS1 (swap
,%0) CR_TAB
3808 AS2 (andi
,%0,0x0f));
3811 return (AS1 (lsr
,%0) CR_TAB
3817 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3820 return (AS1 (swap
,%0) CR_TAB
3825 return (AS1 (lsr
,%0) CR_TAB
3832 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3835 return (AS1 (swap
,%0) CR_TAB
3841 return (AS1 (lsr
,%0) CR_TAB
3850 return (AS1 (rol
,%0) CR_TAB
3855 else if (CONSTANT_P (operands
[2]))
3856 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3858 out_shift_with_cnt (AS1 (lsr
,%0),
3859 insn
, operands
, len
, 1);
3863 /* 16bit logic shift right ((unsigned short)x >> i) */
3866 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
3868 if (GET_CODE (operands
[2]) == CONST_INT
)
3870 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3871 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3878 switch (INTVAL (operands
[2]))
3881 if (INTVAL (operands
[2]) < 16)
3885 return (AS1 (clr
,%B0
) CR_TAB
3889 if (optimize_size
&& scratch
)
3894 return (AS1 (swap
,%B0
) CR_TAB
3895 AS1 (swap
,%A0
) CR_TAB
3896 AS2 (andi
,%A0
,0x0f) CR_TAB
3897 AS2 (eor
,%A0
,%B0
) CR_TAB
3898 AS2 (andi
,%B0
,0x0f) CR_TAB
3904 return (AS1 (swap
,%B0
) CR_TAB
3905 AS1 (swap
,%A0
) CR_TAB
3906 AS2 (ldi
,%3,0x0f) CR_TAB
3907 AS2 (and,%A0
,%3) CR_TAB
3908 AS2 (eor
,%A0
,%B0
) CR_TAB
3909 AS2 (and,%B0
,%3) CR_TAB
3912 break; /* optimize_size ? 6 : 8 */
3916 break; /* scratch ? 5 : 6 */
3920 return (AS1 (lsr
,%B0
) CR_TAB
3921 AS1 (ror
,%A0
) CR_TAB
3922 AS1 (swap
,%B0
) CR_TAB
3923 AS1 (swap
,%A0
) CR_TAB
3924 AS2 (andi
,%A0
,0x0f) CR_TAB
3925 AS2 (eor
,%A0
,%B0
) CR_TAB
3926 AS2 (andi
,%B0
,0x0f) CR_TAB
3932 return (AS1 (lsr
,%B0
) CR_TAB
3933 AS1 (ror
,%A0
) CR_TAB
3934 AS1 (swap
,%B0
) CR_TAB
3935 AS1 (swap
,%A0
) CR_TAB
3936 AS2 (ldi
,%3,0x0f) CR_TAB
3937 AS2 (and,%A0
,%3) CR_TAB
3938 AS2 (eor
,%A0
,%B0
) CR_TAB
3939 AS2 (and,%B0
,%3) CR_TAB
3946 break; /* scratch ? 5 : 6 */
3948 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3949 AS1 (lsl
,%A0
) CR_TAB
3950 AS1 (rol
,%B0
) CR_TAB
3951 AS1 (rol
,__tmp_reg__
) CR_TAB
3952 AS1 (lsl
,%A0
) CR_TAB
3953 AS1 (rol
,%B0
) CR_TAB
3954 AS1 (rol
,__tmp_reg__
) CR_TAB
3955 AS2 (mov
,%A0
,%B0
) CR_TAB
3956 AS2 (mov
,%B0
,__tmp_reg__
));
3960 return (AS1 (lsl
,%A0
) CR_TAB
3961 AS2 (mov
,%A0
,%B0
) CR_TAB
3962 AS1 (rol
,%A0
) CR_TAB
3963 AS2 (sbc
,%B0
,%B0
) CR_TAB
3967 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
3972 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3973 AS1 (clr
,%B0
) CR_TAB
3978 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3979 AS1 (clr
,%B0
) CR_TAB
3980 AS1 (lsr
,%A0
) CR_TAB
3985 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3986 AS1 (clr
,%B0
) CR_TAB
3987 AS1 (lsr
,%A0
) CR_TAB
3988 AS1 (lsr
,%A0
) CR_TAB
3995 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3996 AS1 (clr
,%B0
) CR_TAB
3997 AS1 (swap
,%A0
) CR_TAB
3998 AS2 (andi
,%A0
,0x0f));
4003 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4004 AS1 (clr
,%B0
) CR_TAB
4005 AS1 (swap
,%A0
) CR_TAB
4006 AS2 (ldi
,%3,0x0f) CR_TAB
4010 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4011 AS1 (clr
,%B0
) CR_TAB
4012 AS1 (lsr
,%A0
) CR_TAB
4013 AS1 (lsr
,%A0
) CR_TAB
4014 AS1 (lsr
,%A0
) CR_TAB
4021 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4022 AS1 (clr
,%B0
) CR_TAB
4023 AS1 (swap
,%A0
) CR_TAB
4024 AS1 (lsr
,%A0
) CR_TAB
4025 AS2 (andi
,%A0
,0x07));
4027 if (AVR_HAVE_MUL
&& scratch
)
4030 return (AS2 (ldi
,%3,0x08) CR_TAB
4031 AS2 (mul
,%B0
,%3) CR_TAB
4032 AS2 (mov
,%A0
,r1
) CR_TAB
4033 AS1 (clr
,%B0
) CR_TAB
4034 AS1 (clr
,__zero_reg__
));
4036 if (optimize_size
&& scratch
)
4041 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4042 AS1 (clr
,%B0
) CR_TAB
4043 AS1 (swap
,%A0
) CR_TAB
4044 AS1 (lsr
,%A0
) CR_TAB
4045 AS2 (ldi
,%3,0x07) CR_TAB
4051 return ("set" CR_TAB
4052 AS2 (bld
,r1
,3) CR_TAB
4053 AS2 (mul
,%B0
,r1
) CR_TAB
4054 AS2 (mov
,%A0
,r1
) CR_TAB
4055 AS1 (clr
,%B0
) CR_TAB
4056 AS1 (clr
,__zero_reg__
));
4059 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4060 AS1 (clr
,%B0
) CR_TAB
4061 AS1 (lsr
,%A0
) CR_TAB
4062 AS1 (lsr
,%A0
) CR_TAB
4063 AS1 (lsr
,%A0
) CR_TAB
4064 AS1 (lsr
,%A0
) CR_TAB
4068 if (AVR_HAVE_MUL
&& ldi_ok
)
4071 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4072 AS2 (mul
,%B0
,%A0
) CR_TAB
4073 AS2 (mov
,%A0
,r1
) CR_TAB
4074 AS1 (clr
,%B0
) CR_TAB
4075 AS1 (clr
,__zero_reg__
));
4077 if (AVR_HAVE_MUL
&& scratch
)
4080 return (AS2 (ldi
,%3,0x04) CR_TAB
4081 AS2 (mul
,%B0
,%3) CR_TAB
4082 AS2 (mov
,%A0
,r1
) CR_TAB
4083 AS1 (clr
,%B0
) CR_TAB
4084 AS1 (clr
,__zero_reg__
));
4086 if (optimize_size
&& ldi_ok
)
4089 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4090 AS2 (ldi
,%B0
,6) "\n1:\t"
4091 AS1 (lsr
,%A0
) CR_TAB
4092 AS1 (dec
,%B0
) CR_TAB
4095 if (optimize_size
&& scratch
)
4098 return (AS1 (clr
,%A0
) CR_TAB
4099 AS1 (lsl
,%B0
) CR_TAB
4100 AS1 (rol
,%A0
) CR_TAB
4101 AS1 (lsl
,%B0
) CR_TAB
4102 AS1 (rol
,%A0
) CR_TAB
4107 return (AS1 (clr
,%A0
) CR_TAB
4108 AS1 (lsl
,%B0
) CR_TAB
4109 AS1 (rol
,%A0
) CR_TAB
4114 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4116 insn
, operands
, len
, 2);
4120 /* 32bit logic shift right ((unsigned int)x >> i) */
4123 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4125 if (GET_CODE (operands
[2]) == CONST_INT
)
4133 switch (INTVAL (operands
[2]))
4136 if (INTVAL (operands
[2]) < 32)
4140 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4141 AS1 (clr
,%C0
) CR_TAB
4142 AS2 (movw
,%A0
,%C0
));
4144 return (AS1 (clr
,%D0
) CR_TAB
4145 AS1 (clr
,%C0
) CR_TAB
4146 AS1 (clr
,%B0
) CR_TAB
4151 int reg0
= true_regnum (operands
[0]);
4152 int reg1
= true_regnum (operands
[1]);
4155 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4156 AS2 (mov
,%B0
,%C1
) CR_TAB
4157 AS2 (mov
,%C0
,%D1
) CR_TAB
4160 return (AS1 (clr
,%D0
) CR_TAB
4161 AS2 (mov
,%C0
,%D1
) CR_TAB
4162 AS2 (mov
,%B0
,%C1
) CR_TAB
4168 int reg0
= true_regnum (operands
[0]);
4169 int reg1
= true_regnum (operands
[1]);
4171 if (reg0
== reg1
+ 2)
4172 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4175 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4176 AS1 (clr
,%C0
) CR_TAB
4179 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4180 AS2 (mov
,%A0
,%C1
) CR_TAB
4181 AS1 (clr
,%C0
) CR_TAB
4186 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4187 AS1 (clr
,%B0
) CR_TAB
4188 AS1 (clr
,%C0
) CR_TAB
4193 return (AS1 (clr
,%A0
) CR_TAB
4194 AS2 (sbrc
,%D0
,7) CR_TAB
4195 AS1 (inc
,%A0
) CR_TAB
4196 AS1 (clr
,%B0
) CR_TAB
4197 AS1 (clr
,%C0
) CR_TAB
4202 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4203 AS1 (ror
,%C0
) CR_TAB
4204 AS1 (ror
,%B0
) CR_TAB
4206 insn
, operands
, len
, 4);
4210 /* Modifies the length assigned to instruction INSN
4211 LEN is the initially computed length of the insn. */
4214 adjust_insn_length (rtx insn
, int len
)
4216 rtx patt
= PATTERN (insn
);
4219 if (GET_CODE (patt
) == SET
)
4222 op
[1] = SET_SRC (patt
);
4223 op
[0] = SET_DEST (patt
);
4224 if (general_operand (op
[1], VOIDmode
)
4225 && general_operand (op
[0], VOIDmode
))
4227 switch (GET_MODE (op
[0]))
4230 output_movqi (insn
, op
, &len
);
4233 output_movhi (insn
, op
, &len
);
4237 output_movsisf (insn
, op
, &len
);
4243 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4245 switch (GET_MODE (op
[1]))
4247 case HImode
: out_tsthi (insn
,&len
); break;
4248 case SImode
: out_tstsi (insn
,&len
); break;
4252 else if (GET_CODE (op
[1]) == AND
)
4254 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4256 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4257 if (GET_MODE (op
[1]) == SImode
)
4258 len
= (((mask
& 0xff) != 0xff)
4259 + ((mask
& 0xff00) != 0xff00)
4260 + ((mask
& 0xff0000L
) != 0xff0000L
)
4261 + ((mask
& 0xff000000L
) != 0xff000000L
));
4262 else if (GET_MODE (op
[1]) == HImode
)
4263 len
= (((mask
& 0xff) != 0xff)
4264 + ((mask
& 0xff00) != 0xff00));
4267 else if (GET_CODE (op
[1]) == IOR
)
4269 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4271 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4272 if (GET_MODE (op
[1]) == SImode
)
4273 len
= (((mask
& 0xff) != 0)
4274 + ((mask
& 0xff00) != 0)
4275 + ((mask
& 0xff0000L
) != 0)
4276 + ((mask
& 0xff000000L
) != 0));
4277 else if (GET_MODE (op
[1]) == HImode
)
4278 len
= (((mask
& 0xff) != 0)
4279 + ((mask
& 0xff00) != 0));
4283 set
= single_set (insn
);
4288 op
[1] = SET_SRC (set
);
4289 op
[0] = SET_DEST (set
);
4291 if (GET_CODE (patt
) == PARALLEL
4292 && general_operand (op
[1], VOIDmode
)
4293 && general_operand (op
[0], VOIDmode
))
4295 if (XVECLEN (patt
, 0) == 2)
4296 op
[2] = XVECEXP (patt
, 0, 1);
4298 switch (GET_MODE (op
[0]))
4304 output_reload_inhi (insn
, op
, &len
);
4308 output_reload_insisf (insn
, op
, &len
);
4314 else if (GET_CODE (op
[1]) == ASHIFT
4315 || GET_CODE (op
[1]) == ASHIFTRT
4316 || GET_CODE (op
[1]) == LSHIFTRT
)
4320 ops
[1] = XEXP (op
[1],0);
4321 ops
[2] = XEXP (op
[1],1);
4322 switch (GET_CODE (op
[1]))
4325 switch (GET_MODE (op
[0]))
4327 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4328 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4329 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4334 switch (GET_MODE (op
[0]))
4336 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4337 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4338 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4343 switch (GET_MODE (op
[0]))
4345 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4346 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4347 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4359 /* Return nonzero if register REG dead after INSN. */
4362 reg_unused_after (rtx insn
, rtx reg
)
4364 return (dead_or_set_p (insn
, reg
)
4365 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
4368 /* Return nonzero if REG is not used after INSN.
4369 We assume REG is a reload reg, and therefore does
4370 not live past labels. It may live past calls or jumps though. */
4373 _reg_unused_after (rtx insn
, rtx reg
)
4378 /* If the reg is set by this instruction, then it is safe for our
4379 case. Disregard the case where this is a store to memory, since
4380 we are checking a register used in the store address. */
4381 set
= single_set (insn
);
4382 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4383 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4386 while ((insn
= NEXT_INSN (insn
)))
4389 code
= GET_CODE (insn
);
4392 /* If this is a label that existed before reload, then the register
4393 if dead here. However, if this is a label added by reorg, then
4394 the register may still be live here. We can't tell the difference,
4395 so we just ignore labels completely. */
4396 if (code
== CODE_LABEL
)
4404 if (code
== JUMP_INSN
)
4407 /* If this is a sequence, we must handle them all at once.
4408 We could have for instance a call that sets the target register,
4409 and an insn in a delay slot that uses the register. In this case,
4410 we must return 0. */
4411 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4416 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4418 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4419 rtx set
= single_set (this_insn
);
4421 if (GET_CODE (this_insn
) == CALL_INSN
)
4423 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4425 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4430 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4432 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4434 if (GET_CODE (SET_DEST (set
)) != MEM
)
4440 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4445 else if (code
== JUMP_INSN
)
4449 if (code
== CALL_INSN
)
4452 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4453 if (GET_CODE (XEXP (tem
, 0)) == USE
4454 && REG_P (XEXP (XEXP (tem
, 0), 0))
4455 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4457 if (call_used_regs
[REGNO (reg
)])
4461 set
= single_set (insn
);
4463 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4465 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4466 return GET_CODE (SET_DEST (set
)) != MEM
;
4467 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4473 /* Target hook for assembling integer objects. The AVR version needs
4474 special handling for references to certain labels. */
4477 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4479 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4480 && ((GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (x
))
4481 || GET_CODE (x
) == LABEL_REF
))
4483 fputs ("\t.word\tgs(", asm_out_file
);
4484 output_addr_const (asm_out_file
, x
);
4485 fputs (")\n", asm_out_file
);
4488 return default_assemble_integer (x
, size
, aligned_p
);
4491 /* The routine used to output NUL terminated strings. We use a special
4492 version of this for most svr4 targets because doing so makes the
4493 generated assembly code more compact (and thus faster to assemble)
4494 as well as more readable, especially for targets like the i386
4495 (where the only alternative is to output character sequences as
4496 comma separated lists of numbers). */
4499 gas_output_limited_string(FILE *file
, const char *str
)
4501 const unsigned char *_limited_str
= (const unsigned char *) str
;
4503 fprintf (file
, "%s\"", STRING_ASM_OP
);
4504 for (; (ch
= *_limited_str
); _limited_str
++)
4507 switch (escape
= ESCAPES
[ch
])
4513 fprintf (file
, "\\%03o", ch
);
4517 putc (escape
, file
);
4521 fprintf (file
, "\"\n");
4524 /* The routine used to output sequences of byte values. We use a special
4525 version of this for most svr4 targets because doing so makes the
4526 generated assembly code more compact (and thus faster to assemble)
4527 as well as more readable. Note that if we find subparts of the
4528 character sequence which end with NUL (and which are shorter than
4529 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4532 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
4534 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
4535 const unsigned char *limit
= _ascii_bytes
+ length
;
4536 unsigned bytes_in_chunk
= 0;
4537 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
4539 const unsigned char *p
;
4540 if (bytes_in_chunk
>= 60)
4542 fprintf (file
, "\"\n");
4545 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
4547 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
4549 if (bytes_in_chunk
> 0)
4551 fprintf (file
, "\"\n");
4554 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
4561 if (bytes_in_chunk
== 0)
4562 fprintf (file
, "\t.ascii\t\"");
4563 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
4570 fprintf (file
, "\\%03o", ch
);
4571 bytes_in_chunk
+= 4;
4575 putc (escape
, file
);
4576 bytes_in_chunk
+= 2;
4581 if (bytes_in_chunk
> 0)
4582 fprintf (file
, "\"\n");
4585 /* Return value is nonzero if pseudos that have been
4586 assigned to registers of class CLASS would likely be spilled
4587 because registers of CLASS are needed for spill registers. */
4590 class_likely_spilled_p (int c
)
4592 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
4595 /* Valid attributes:
4596 progmem - put data to program memory;
4597 signal - make a function to be hardware interrupt. After function
4598 prologue interrupts are disabled;
4599 interrupt - make a function to be hardware interrupt. After function
4600 prologue interrupts are enabled;
4601 naked - don't generate function prologue/epilogue and `ret' command.
4603 Only `progmem' attribute valid for type. */
4605 const struct attribute_spec avr_attribute_table
[] =
4607 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4608 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
},
4609 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
4610 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
4611 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4612 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4613 { NULL
, 0, 0, false, false, false, NULL
}
4616 /* Handle a "progmem" attribute; arguments as in
4617 struct attribute_spec.handler. */
4619 avr_handle_progmem_attribute (tree
*node
, tree name
,
4620 tree args ATTRIBUTE_UNUSED
,
4621 int flags ATTRIBUTE_UNUSED
,
4626 if (TREE_CODE (*node
) == TYPE_DECL
)
4628 /* This is really a decl attribute, not a type attribute,
4629 but try to handle it for GCC 3.0 backwards compatibility. */
4631 tree type
= TREE_TYPE (*node
);
4632 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
4633 tree newtype
= build_type_attribute_variant (type
, attr
);
4635 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
4636 TREE_TYPE (*node
) = newtype
;
4637 *no_add_attrs
= true;
4639 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
4641 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
4643 warning (0, "only initialized variables can be placed into "
4644 "program memory area");
4645 *no_add_attrs
= true;
4650 warning (OPT_Wattributes
, "%qs attribute ignored",
4651 IDENTIFIER_POINTER (name
));
4652 *no_add_attrs
= true;
4659 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4660 struct attribute_spec.handler. */
4663 avr_handle_fndecl_attribute (tree
*node
, tree name
,
4664 tree args ATTRIBUTE_UNUSED
,
4665 int flags ATTRIBUTE_UNUSED
,
4668 if (TREE_CODE (*node
) != FUNCTION_DECL
)
4670 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
4671 IDENTIFIER_POINTER (name
));
4672 *no_add_attrs
= true;
4676 const char *func_name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node
));
4677 const char *attr
= IDENTIFIER_POINTER (name
);
4679 /* If the function has the 'signal' or 'interrupt' attribute, test to
4680 make sure that the name of the function is "__vector_NN" so as to
4681 catch when the user misspells the interrupt vector name. */
4683 if (strncmp (attr
, "interrupt", strlen ("interrupt")) == 0)
4685 if (strncmp (func_name
, "__vector", strlen ("__vector")) != 0)
4687 warning (0, "%qs appears to be a misspelled interrupt handler",
4691 else if (strncmp (attr
, "signal", strlen ("signal")) == 0)
4693 if (strncmp (func_name
, "__vector", strlen ("__vector")) != 0)
4695 warning (0, "%qs appears to be a misspelled signal handler",
4705 avr_handle_fntype_attribute (tree
*node
, tree name
,
4706 tree args ATTRIBUTE_UNUSED
,
4707 int flags ATTRIBUTE_UNUSED
,
4710 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4712 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
4713 IDENTIFIER_POINTER (name
));
4714 *no_add_attrs
= true;
4720 /* Look for attribute `progmem' in DECL
4721 if found return 1, otherwise 0. */
4724 avr_progmem_p (tree decl
, tree attributes
)
4728 if (TREE_CODE (decl
) != VAR_DECL
)
4732 != lookup_attribute ("progmem", attributes
))
4738 while (TREE_CODE (a
) == ARRAY_TYPE
);
4740 if (a
== error_mark_node
)
4743 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
4749 /* Add the section attribute if the variable is in progmem. */
4752 avr_insert_attributes (tree node
, tree
*attributes
)
4754 if (TREE_CODE (node
) == VAR_DECL
4755 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
4756 && avr_progmem_p (node
, *attributes
))
4758 static const char dsec
[] = ".progmem.data";
4759 *attributes
= tree_cons (get_identifier ("section"),
4760 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
4763 /* ??? This seems sketchy. Why can't the user declare the
4764 thing const in the first place? */
4765 TREE_READONLY (node
) = 1;
4769 /* A get_unnamed_section callback for switching to progmem_section. */
4772 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
4774 fprintf (asm_out_file
,
4775 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4776 AVR_MEGA
? "a" : "ax");
4777 /* Should already be aligned, this is just to be safe if it isn't. */
4778 fprintf (asm_out_file
, "\t.p2align 1\n");
4781 /* Implement TARGET_ASM_INIT_SECTIONS. */
4784 avr_asm_init_sections (void)
4786 progmem_section
= get_unnamed_section (AVR_MEGA
? 0 : SECTION_CODE
,
4787 avr_output_progmem_section_asm_op
,
4789 readonly_data_section
= data_section
;
4793 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
4795 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4797 if (strncmp (name
, ".noinit", 7) == 0)
4799 if (decl
&& TREE_CODE (decl
) == VAR_DECL
4800 && DECL_INITIAL (decl
) == NULL_TREE
)
4801 flags
|= SECTION_BSS
; /* @nobits */
4803 warning (0, "only uninitialized variables can be placed in the "
/* NOTE(review): several physical lines of this function are missing
   from this excerpt (the function's return type and braces, the `if'
   guarding the assembler-only error, and presumably an "__SP_H__"
   definition line between the two shown string pieces -- verify
   against the full source).  The surviving text is kept verbatim.  */
4810 /* Outputs some appropriate text to go at the start of an assembler
4814 avr_file_start (void)
/* Hard error (not a warning) for MCUs with assembler-only support.  */
4817 error ("MCU %qs supported for assembler only", avr_mcu_name
);
4819 default_file_start ();
4821 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
/* Define assembler shorthand symbols for SREG / stack-pointer I/O
   addresses and the fixed temp/zero registers used by emitted code.  */
4822 fputs ("__SREG__ = 0x3f\n"
4824 "__SP_L__ = 0x3d\n", asm_out_file
);
4826 fputs ("__tmp_reg__ = 0\n"
4827 "__zero_reg__ = 1\n", asm_out_file
);
4829 /* FIXME: output these only if there is anything in the .data / .bss
4830 sections - some code size could be saved by not linking in the
4831 initialization code from libgcc if one or both sections are empty. */
/* Force the libgcc data-copy and bss-clear startup routines to be
   linked in.  */
4832 fputs ("\t.global __do_copy_data\n", asm_out_file
);
4833 fputs ("\t.global __do_clear_bss\n", asm_out_file
);
4836 /* Outputs to the stdio stream FILE some
4837 appropriate text to go at the end of an assembler file. */
4844 /* Choose the order in which to allocate hard registers for
4845 pseudo-registers local to a basic block.
4847 Store the desired register order in the array `reg_alloc_order'.
4848 Element 0 should be the register to allocate first; element 1, the
4849 next register; and so on. */
/* NOTE(review): most rows of the three ordering tables and the loop
   variable declaration are elided in this excerpt; only one row of
   each array survives.  Do not infer the full orders from here.  */
4852 order_regs_for_local_alloc (void)
4855 static const int order_0
[] = {
4863 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4867 static const int order_1
[] = {
4875 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4879 static const int order_2
[] = {
4888 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Select one of the three tables based on -morder1/-morder2,
   falling back to the default ordering.  */
4893 const int *order
= (TARGET_ORDER_1
? order_1
:
4894 TARGET_ORDER_2
? order_2
:
/* Copy the chosen ordering into the global reg_alloc_order array.  */
4896 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
4897 reg_alloc_order
[i
] = order
[i
];
4901 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4902 cost of an RTX operand given its context. X is the rtx of the
4903 operand, MODE is its mode, and OUTER is the rtx_code of this
4904 operand's parent operator. */
/* NOTE(review): the switch statement on GET_CODE (x) (its case labels
   and early returns) is elided in this excerpt; only the constant-cost
   return and the recursive fall-through into avr_rtx_costs remain.  */
4907 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
)
4909 enum rtx_code code
= GET_CODE (x
);
/* Constant operands cost one instruction per byte of MODE.  */
4920 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
/* Otherwise defer to the full cost function, accumulating into a
   local total (declaration elided here).  */
4927 avr_rtx_costs (x
, code
, outer
, &total
);
4931 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4932 is to be calculated. Return true if the complete cost has been
4933 computed, and false if subexpressions should be scanned. In either
4934 case, *TOTAL contains the cost result. */
/* NOTE(review): this excerpt elides the outer switch's case labels
   (rtx codes) and the inner per-mode switch labels, as well as all
   break/return statements and braces.  The surviving assignments are
   kept verbatim; section comments below are best-effort landmarks
   inferred from the original line numbering -- confirm against the
   full source before relying on them.  */
4937 avr_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
, int *total
)
4939 enum machine_mode mode
= GET_MODE (x
);
4946 /* Immediate constants are as cheap as registers. */
/* Memory/symbolic operands: one insn per byte of the mode.  */
4954 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
/* Unary operators (negation and friends), costed per mode.  */
4962 *total
= COSTS_N_INSNS (1);
4966 *total
= COSTS_N_INSNS (3);
4970 *total
= COSTS_N_INSNS (7);
4976 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
4984 *total
= COSTS_N_INSNS (1);
4990 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
4994 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
4995 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
/* Zero extension: one insn per byte added.  */
4999 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
5000 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5001 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
/* Sign extension: two extra insns over zero extension.  */
5005 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
5006 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5007 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
/* Addition, per mode; small CONST_INT addends (-63..63) are cheap
   (adiw/sbiw range).  */
5014 *total
= COSTS_N_INSNS (1);
5015 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5016 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5020 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5022 *total
= COSTS_N_INSNS (2);
5023 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5025 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5026 *total
= COSTS_N_INSNS (1);
5028 *total
= COSTS_N_INSNS (2);
5032 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5034 *total
= COSTS_N_INSNS (4);
5035 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5037 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5038 *total
= COSTS_N_INSNS (1);
5040 *total
= COSTS_N_INSNS (4);
5046 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
/* Two-operand logical/arithmetic forms: one insn per byte, plus
   operand costs when the second operand is not a constant.  */
5052 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5053 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5054 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5055 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5059 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5060 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5061 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
/* Multiplication: depends on hardware multiplier availability and
   -Os (library calls on MEGA cost relatively less).  */
5069 *total
= COSTS_N_INSNS (optimize_size
? 3 : 4);
5070 else if (optimize_size
)
5071 *total
= COSTS_N_INSNS (AVR_MEGA
? 2 : 1);
5078 *total
= COSTS_N_INSNS (optimize_size
? 7 : 10);
5079 else if (optimize_size
)
5080 *total
= COSTS_N_INSNS (AVR_MEGA
? 2 : 1);
5088 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5089 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
/* Division/modulo: only costed when optimizing for size.  */
5097 *total
= COSTS_N_INSNS (AVR_MEGA
? 2 : 1);
5100 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5101 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
/* Shift operators: variable shift counts are very expensive (loop);
   constant counts are table-driven per mode (labels elided here).  */
5108 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5110 *total
= COSTS_N_INSNS (optimize_size
? 4 : 17);
5111 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5115 val
= INTVAL (XEXP (x
, 1));
5117 *total
= COSTS_N_INSNS (3);
5118 else if (val
>= 0 && val
<= 7)
5119 *total
= COSTS_N_INSNS (val
);
5121 *total
= COSTS_N_INSNS (1);
5126 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5128 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5129 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5132 switch (INTVAL (XEXP (x
, 1)))
5139 *total
= COSTS_N_INSNS (2);
5142 *total
= COSTS_N_INSNS (3);
5148 *total
= COSTS_N_INSNS (4);
5153 *total
= COSTS_N_INSNS (5);
5156 *total
= COSTS_N_INSNS (optimize_size
? 5 : 8);
5159 *total
= COSTS_N_INSNS (optimize_size
? 5 : 9);
5162 *total
= COSTS_N_INSNS (optimize_size
? 5 : 10);
5165 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5166 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5171 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5173 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5174 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5177 switch (INTVAL (XEXP (x
, 1)))
5183 *total
= COSTS_N_INSNS (3);
5188 *total
= COSTS_N_INSNS (4);
5191 *total
= COSTS_N_INSNS (6);
5194 *total
= COSTS_N_INSNS (optimize_size
? 7 : 8);
5197 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5198 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5205 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
/* Arithmetic right shift: same structure as above.  */
5212 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5214 *total
= COSTS_N_INSNS (optimize_size
? 4 : 17);
5215 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5219 val
= INTVAL (XEXP (x
, 1));
5221 *total
= COSTS_N_INSNS (4);
5223 *total
= COSTS_N_INSNS (2);
5224 else if (val
>= 0 && val
<= 7)
5225 *total
= COSTS_N_INSNS (val
);
5227 *total
= COSTS_N_INSNS (1);
5232 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5234 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5235 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5238 switch (INTVAL (XEXP (x
, 1)))
5244 *total
= COSTS_N_INSNS (2);
5247 *total
= COSTS_N_INSNS (3);
5253 *total
= COSTS_N_INSNS (4);
5257 *total
= COSTS_N_INSNS (5);
5260 *total
= COSTS_N_INSNS (optimize_size
? 5 : 6);
5263 *total
= COSTS_N_INSNS (optimize_size
? 5 : 7);
5267 *total
= COSTS_N_INSNS (optimize_size
? 5 : 8);
5270 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5271 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5276 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5278 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5279 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5282 switch (INTVAL (XEXP (x
, 1)))
5288 *total
= COSTS_N_INSNS (4);
5293 *total
= COSTS_N_INSNS (6);
5296 *total
= COSTS_N_INSNS (optimize_size
? 7 : 8);
5299 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5302 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5303 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5310 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
/* Logical right shift: same structure as above.  */
5317 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5319 *total
= COSTS_N_INSNS (optimize_size
? 4 : 17);
5320 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5324 val
= INTVAL (XEXP (x
, 1));
5326 *total
= COSTS_N_INSNS (3);
5327 else if (val
>= 0 && val
<= 7)
5328 *total
= COSTS_N_INSNS (val
);
5330 *total
= COSTS_N_INSNS (1);
5335 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5337 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5338 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5341 switch (INTVAL (XEXP (x
, 1)))
5348 *total
= COSTS_N_INSNS (2);
5351 *total
= COSTS_N_INSNS (3);
5356 *total
= COSTS_N_INSNS (4);
5360 *total
= COSTS_N_INSNS (5);
5366 *total
= COSTS_N_INSNS (optimize_size
? 5 : 6);
5369 *total
= COSTS_N_INSNS (optimize_size
? 5 : 7);
5373 *total
= COSTS_N_INSNS (optimize_size
? 5 : 9);
5376 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5377 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5382 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5384 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5385 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5388 switch (INTVAL (XEXP (x
, 1)))
5394 *total
= COSTS_N_INSNS (4);
5397 *total
= COSTS_N_INSNS (optimize_size
? 7 : 8);
5402 *total
= COSTS_N_INSNS (4);
5405 *total
= COSTS_N_INSNS (6);
5408 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5409 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5416 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
/* Comparison: costed by the mode of the first operand; a non-zero
   constant second operand adds the cost of loading it.  */
5420 switch (GET_MODE (XEXP (x
, 0)))
5423 *total
= COSTS_N_INSNS (1);
5424 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5425 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5429 *total
= COSTS_N_INSNS (2);
5430 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5431 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5432 else if (INTVAL (XEXP (x
, 1)) != 0)
5433 *total
+= COSTS_N_INSNS (1);
5437 *total
= COSTS_N_INSNS (4);
5438 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5439 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5440 else if (INTVAL (XEXP (x
, 1)) != 0)
5441 *total
+= COSTS_N_INSNS (3);
5447 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5456 /* Calculate the cost of a memory address. */
/* NOTE(review): the return statements (the concrete cost values for
   each branch), the function's return type and braces are elided in
   this excerpt; only the classifying conditions survive.  */
5459 avr_address_cost (rtx x
)
/* Base-plus-large-displacement addresses (beyond the LD offset range)
   are the expensive case.  */
5461 if (GET_CODE (x
) == PLUS
5462 && GET_CODE (XEXP (x
,1)) == CONST_INT
5463 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
5464 && INTVAL (XEXP (x
,1)) >= 61)
/* Constant addresses: cheaper still if they land in I/O space.  */
5466 if (CONSTANT_ADDRESS_P (x
))
5468 if (avr_io_address_p (x
, 1))
5475 /* Test for extra memory constraint 'Q'.
5476 It's a memory address based on Y or Z pointer with valid displacement. */
5479 extra_constraint_Q (rtx x
)
5481 if (GET_CODE (XEXP (x
,0)) == PLUS
5482 && REG_P (XEXP (XEXP (x
,0), 0))
5483 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
5484 && (INTVAL (XEXP (XEXP (x
,0), 1))
5485 <= MAX_LD_OFFSET (GET_MODE (x
))))
5487 rtx xx
= XEXP (XEXP (x
,0), 0);
5488 int regno
= REGNO (xx
);
5489 if (TARGET_ALL_DEBUG
)
5491 fprintf (stderr
, ("extra_constraint:\n"
5492 "reload_completed: %d\n"
5493 "reload_in_progress: %d\n"),
5494 reload_completed
, reload_in_progress
);
5497 if (regno
>= FIRST_PSEUDO_REGISTER
)
5498 return 1; /* allocate pseudos */
5499 else if (regno
== REG_Z
|| regno
== REG_Y
)
5500 return 1; /* strictly check */
5501 else if (xx
== frame_pointer_rtx
5502 || xx
== arg_pointer_rtx
)
5503 return 1; /* XXX frame & arg pointer checks */
5508 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the entire body of this function (a switch mapping
   condition codes) is elided in this excerpt; only the prototype line
   survives.  */
5511 avr_normalize_condition (RTX_CODE condition
)
5528 /* This function optimizes conditional jumps. */
/* NOTE(review): the function's signature, braces and several interior
   lines (including the NEG construction's second argument and the
   continue/closing statements) are elided in this excerpt; the
   surviving lines are kept verbatim.  The pass walks every insn,
   looks for cc0-setting compare insns, and rewrites compare/branch
   pairs in place.  */
5535 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
/* Only ordinary/call/jump insns with a single SET are candidates.  */
5537 if (! (GET_CODE (insn
) == INSN
5538 || GET_CODE (insn
) == CALL_INSN
5539 || GET_CODE (insn
) == JUMP_INSN
)
5540 || !single_set (insn
))
5543 pattern
= PATTERN (insn
);
5545 if (GET_CODE (pattern
) == PARALLEL
)
5546 pattern
= XVECEXP (pattern
, 0, 0);
5547 if (GET_CODE (pattern
) == SET
5548 && SET_DEST (pattern
) == cc0_rtx
5549 && compare_diff_p (insn
))
5551 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
5553 /* Now we work under compare insn. */
5555 pattern
= SET_SRC (pattern
);
/* reg-reg compare: swap the operands and the condition of the
   following branch, then force re-recognition of that branch.  */
5556 if (true_regnum (XEXP (pattern
,0)) >= 0
5557 && true_regnum (XEXP (pattern
,1)) >= 0 )
5559 rtx x
= XEXP (pattern
,0);
5560 rtx next
= next_real_insn (insn
);
5561 rtx pat
= PATTERN (next
);
5562 rtx src
= SET_SRC (pat
);
5563 rtx t
= XEXP (src
,0);
5564 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5565 XEXP (pattern
,0) = XEXP (pattern
,1);
5566 XEXP (pattern
,1) = x
;
5567 INSN_CODE (next
) = -1;
/* reg-constant compare: if profitable, bump the constant by one and
   normalize the branch condition accordingly.  */
5569 else if (true_regnum (XEXP (pattern
,0)) >= 0
5570 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
5572 rtx x
= XEXP (pattern
,1);
5573 rtx next
= next_real_insn (insn
);
5574 rtx pat
= PATTERN (next
);
5575 rtx src
= SET_SRC (pat
);
5576 rtx t
= XEXP (src
,0);
5577 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
5579 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
5581 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
5582 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
5583 INSN_CODE (next
) = -1;
5584 INSN_CODE (insn
) = -1;
5588 else if (true_regnum (SET_SRC (pattern
)) >= 0)
5590 /* This is a tst insn */
5591 rtx next
= next_real_insn (insn
);
5592 rtx pat
= PATTERN (next
);
5593 rtx src
= SET_SRC (pat
);
5594 rtx t
= XEXP (src
,0);
/* Swap the branch condition and negate the tested value (the NEG's
   operand line is elided here).  */
5596 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5597 SET_SRC (pattern
) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern
)),
5599 INSN_CODE (next
) = -1;
5600 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.  */

int
avr_ret_register (void)
{
  /* Return values are passed ending in r25:r24.  */
  return 24;
}
5614 /* Create an RTX representing the place where a
5615 library function returns a value of mode MODE. */
5618 avr_libcall_value (enum machine_mode mode
)
5620 int offs
= GET_MODE_SIZE (mode
);
5623 return gen_rtx_REG (mode
, RET_REGISTER
+ 2 - offs
);
5626 /* Create an RTX representing the place where a
5627 function returns a value of data type VALTYPE. */
5630 avr_function_value (const_tree type
, const_tree func ATTRIBUTE_UNUSED
)
5634 if (TYPE_MODE (type
) != BLKmode
)
5635 return avr_libcall_value (TYPE_MODE (type
));
5637 offs
= int_size_in_bytes (type
);
5640 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
5641 offs
= GET_MODE_SIZE (SImode
);
5642 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
5643 offs
= GET_MODE_SIZE (DImode
);
5645 return gen_rtx_REG (BLKmode
, RET_REGISTER
+ 2 - offs
);
5648 /* Places additional restrictions on the register class to
5649 use when it is necessary to copy value X into a register
5653 preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class
class)
5659 test_hard_reg_class (enum reg_class
class, rtx x
)
5661 int regno
= true_regnum (x
);
5665 if (TEST_HARD_REG_CLASS (class, regno
))
5673 jump_over_one_insn_p (rtx insn
, rtx dest
)
5675 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
5678 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
5679 int dest_addr
= INSN_ADDRESSES (uid
);
5680 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
5683 /* Returns 1 if a value of mode MODE can be stored starting with hard
5684 register number REGNO. On the enhanced core, anything larger than
5685 1 byte must start in even numbered register for "movw" to work
5686 (this way we don't have to check for odd registers everywhere). */
5689 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
5691 /* Disallow QImode in stack pointer regs. */
5692 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
5695 /* The only thing that can go into registers r28:r29 is a Pmode. */
5696 if (regno
== REG_Y
&& mode
== Pmode
)
5699 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5700 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
5706 /* Modes larger than QImode occupy consecutive registers. */
5707 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
5710 /* All modes larger than QImode should start in an even register. */
5711 return !(regno
& 1);
5714 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5715 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5716 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
5719 avr_io_address_p (rtx x
, int size
)
5721 return (optimize
> 0 && GET_CODE (x
) == CONST_INT
5722 && INTVAL (x
) >= 0x20 && INTVAL (x
) <= 0x60 - size
);
/* Emit code to load a 16-bit constant into a register pair, using the
   scratch register %2 and __zero_reg__ to shorten common patterns.
   NOTE(review): this excerpt elides the function's return type and
   braces, the *len bookkeeping, several closing template lines and
   the final template's tail; the surviving lines are kept verbatim.  */
5726 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5732 if (GET_CODE (operands
[1]) == CONST_INT
)
5734 int val
= INTVAL (operands
[1]);
/* Low byte zero: clear %A0 from __zero_reg__, load only the high byte.  */
5735 if ((val
& 0xff) == 0)
5738 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
5739 AS2 (ldi
,%2,hi8(%1)) CR_TAB
/* High byte zero: load only the low byte, clear %B0.  */
5742 else if ((val
& 0xff00) == 0)
5745 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5746 AS2 (mov
,%A0
,%2) CR_TAB
5747 AS2 (mov
,%B0
,__zero_reg__
));
/* Both bytes equal: load the scratch once, copy it twice.  */
5749 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
5752 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5753 AS2 (mov
,%A0
,%2) CR_TAB
/* General case: load both halves through the scratch register.  */
5758 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5759 AS2 (mov
,%A0
,%2) CR_TAB
5760 AS2 (ldi
,%2,hi8(%1)) CR_TAB
/* Emit code to load a 32-bit (SI/SF) value into registers byte by
   byte, skipping zero bytes of a constant via __zero_reg__.
   NOTE(review): this excerpt elides the function's return type and
   braces, the `if (len)' length-only path's structure, the `else'
   keywords pairing each zero-byte shortcut with its two-insn load,
   and the final return; surviving lines are kept verbatim.  */
5766 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5768 rtx src
= operands
[1];
5769 int cnst
= (GET_CODE (src
) == CONST_INT
);
/* Length-only query: 4 base insns plus one extra per non-zero byte.  */
5774 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
5775 + ((INTVAL (src
) & 0xff00) != 0)
5776 + ((INTVAL (src
) & 0xff0000) != 0)
5777 + ((INTVAL (src
) & 0xff000000) != 0);
/* Byte 0 (%A0): zero shortcut, otherwise ldi+mov via scratch %2.  */
5784 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
5785 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
5788 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
5789 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
/* Byte 1 (%B0).  */
5791 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
5792 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
5795 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
5796 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
/* Byte 2 (%C0).  */
5798 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
5799 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
5802 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
5803 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
/* Byte 3 (%D0).  */
5805 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
5806 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
5809 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
5810 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
5816 avr_output_bld (rtx operands
[], int bit_nr
)
5818 static char s
[] = "bld %A0,0";
5820 s
[5] = 'A' + (bit_nr
>> 3);
5821 s
[8] = '0' + (bit_nr
& 7);
5822 output_asm_insn (s
, operands
);
5826 avr_output_addr_vec_elt (FILE *stream
, int value
)
5828 switch_to_section (progmem_section
);
5829 if (AVR_HAVE_JMP_CALL
)
5830 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
5832 fprintf (stream
, "\trjmp .L%d\n", value
);
5835 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5836 registers (for a define_peephole2) in the current function. */
5839 avr_peep2_scratch_safe (rtx scratch
)
5841 if ((interrupt_function_p (current_function_decl
)
5842 || signal_function_p (current_function_decl
))
5843 && leaf_function_p ())
5845 int first_reg
= true_regnum (scratch
);
5846 int last_reg
= first_reg
+ GET_MODE_SIZE (GET_MODE (scratch
)) - 1;
5849 for (reg
= first_reg
; reg
<= last_reg
; reg
++)
5851 if (!df_regs_ever_live_p (reg
))
5858 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5859 or memory location in the I/O space (QImode only).
5861 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5862 Operand 1: register operand to test, or CONST_INT memory address.
5863 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5864 Operand 3: label to jump to if the test is true. */
/* NOTE(review): this excerpt elides the function's return type and
   braces, the GE branch of the condition normalization, the `if'
   guarding reverse_condition, the else keywords pairing each
   sbis/sbic and sbrs/sbrc alternative, and the jmp line of the
   long-jump return; surviving lines are kept verbatim.  */
5867 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
5869 enum rtx_code comp
= GET_CODE (operands
[0]);
5870 int long_jump
= (get_attr_length (insn
) >= 4);
5871 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
/* Normalize the comparison, then invert it when the branch sense
   must be reversed.  */
5875 else if (comp
== LT
)
5879 comp
= reverse_condition (comp
);
/* CONST_INT operand: an I/O address.  Low I/O space can use the
   one-word sbis/sbic skip instructions ...  */
5881 if (GET_CODE (operands
[1]) == CONST_INT
)
5883 if (INTVAL (operands
[1]) < 0x40)
5886 output_asm_insn (AS2 (sbis
,%1-0x20,%2), operands
);
5888 output_asm_insn (AS2 (sbic
,%1-0x20,%2), operands
);
/* ... higher I/O addresses must be read into __tmp_reg__ first and
   tested with sbrs/sbrc.  */
5892 output_asm_insn (AS2 (in
,__tmp_reg__
,%1-0x20), operands
);
5894 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
5896 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
5899 else /* GET_CODE (operands[1]) == REG */
5901 if (GET_MODE (operands
[1]) == QImode
)
5904 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
5906 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
5908 else /* HImode or SImode */
/* Multi-byte register: patch byte letter, bit digit and skip sense
   into the "sbrc %A1,0" template in place.  */
5910 static char buf
[] = "sbrc %A1,0";
5911 int bit_nr
= exact_log2 (INTVAL (operands
[2])
5912 & GET_MODE_MASK (GET_MODE (operands
[1])));
5914 buf
[3] = (comp
== EQ
) ? 's' : 'c';
5915 buf
[6] = 'A' + (bit_nr
>> 3);
5916 buf
[9] = '0' + (bit_nr
& 7);
5917 output_asm_insn (buf
, operands
);
/* Long jumps skip over an absolute jmp; short ones rjmp directly.  */
5922 return (AS1 (rjmp
,.+4) CR_TAB
5925 return AS1 (rjmp
,%3);
5929 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5932 avr_asm_out_ctor (rtx symbol
, int priority
)
5934 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
5935 default_ctor_section_asm_out_constructor (symbol
, priority
);
5938 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5941 avr_asm_out_dtor (rtx symbol
, int priority
)
5943 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
5944 default_dtor_section_asm_out_destructor (symbol
, priority
);
5947 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5950 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
5952 if (TYPE_MODE (type
) == BLKmode
)
5954 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5955 return (size
== -1 || size
> 8);