]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/avr/avr.c
8d9b00dde24406bf12b1e312d3f45798f760975a
[thirdparty/gcc.git] / gcc / config / avr / avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
47
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
50
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_regs_to_save (HARD_REG_SET *);
56 static int sequent_regs_live (void);
57 static const char *ptrreg_to_str (int);
58 static const char *cond_string (enum rtx_code);
59 static int avr_num_arg_regs (enum machine_mode, tree);
60
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
66 const struct attribute_spec avr_attribute_table[];
67 static bool avr_assemble_integer (rtx, unsigned int, int);
68 static void avr_file_start (void);
69 static void avr_file_end (void);
70 static void avr_asm_function_end_prologue (FILE *);
71 static void avr_asm_function_begin_epilogue (FILE *);
72 static void avr_insert_attributes (tree, tree *);
73 static void avr_asm_init_sections (void);
74 static unsigned int avr_section_type_flags (tree, const char *, int);
75
76 static void avr_reorg (void);
77 static void avr_asm_out_ctor (rtx, int);
78 static void avr_asm_out_dtor (rtx, int);
79 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
80 static bool avr_rtx_costs (rtx, int, int, int *);
81 static int avr_address_cost (rtx);
82 static bool avr_return_in_memory (const_tree, const_tree);
83 static struct machine_function * avr_init_machine_status (void);
84 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
86
87 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
88 static GTY(()) rtx tmp_reg_rtx;
89
90 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
91 static GTY(()) rtx zero_reg_rtx;
92
93 /* AVR register names {"r0", "r1", ..., "r31"} */
94 static const char *const avr_regnames[] = REGISTER_NAMES;
95
96 /* This holds the last insn address. */
97 static int last_insn_address = 0;
98
99 /* Preprocessor macros to define depending on MCU type. */
100 const char *avr_base_arch_macro;
101 const char *avr_extra_arch_macro;
102
103 /* Current architecture. */
104 const struct base_arch_s *avr_current_arch;
105
106 section *progmem_section;
107
108 /* More than 8K of program memory: use "call" and "jmp". */
109 int avr_mega_p = 0;
110
111 /* Core have 'MUL*' instructions. */
112 int avr_have_mul_p = 0;
113
114 /* Assembler only. */
115 int avr_asm_only_p = 0;
116
117 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
118 int avr_have_movw_lpmx_p = 0;
119
/* Per-architecture capability table, indexed by enum avr_arch (below).
   Field order follows struct base_arch_s (declared in avr.h).  From the
   uses in avr_override_options, the first four flags are asm_only,
   have_mul, have_jmp_call and have_movw_lpmx, and the final member is
   the architecture-specific preprocessor macro (NULL for the "unknown"
   entry).  NOTE(review): the meaning of the remaining flag columns is
   defined in avr.h -- confirm there before relying on them.  */
static const struct base_arch_s avr_arch_types[] = {
  { 1, 0, 0, 0, 0, 0, 0, 0, NULL },  /* unknown device specified */
  { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
  { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
  { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
  { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
  { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
  { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
  { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
  { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
  { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
  { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
};
133
/* These names are used as the index into the avr_arch_types[] table
   above, so the enumerator order must match the table row order
   exactly (ARCH_UNKNOWN == row 0, ARCH_AVR1 == row 1, ...).  */

enum avr_arch
{
  ARCH_UNKNOWN,
  ARCH_AVR1,
  ARCH_AVR2,
  ARCH_AVR25,
  ARCH_AVR3,
  ARCH_AVR31,
  ARCH_AVR35,
  ARCH_AVR4,
  ARCH_AVR5,
  ARCH_AVR51,
  ARCH_AVR6
};
151
/* One entry per supported MCU: the -mmcu= name, its architecture, and
   the device-specific preprocessor macro.  */
struct mcu_type_s {
  const char *const name;     /* name accepted by -mmcu= */
  int arch;                   /* index in avr_arch_types[] */
  /* Must lie outside user's namespace.  NULL == no macro.  */
  const char *const macro;
};
158
159 /* List of all known AVR MCU types - if updated, it has to be kept
160 in sync in several places (FIXME: is there a better way?):
161 - here
162 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
163 - t-avr (MULTILIB_MATCHES)
164 - gas/config/tc-avr.c
165 - avr-libc */
166
167 static const struct mcu_type_s avr_mcu_types[] = {
168 /* Classic, <= 8K. */
169 { "avr2", ARCH_AVR2, NULL },
170 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
171 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
172 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
173 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
174 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
175 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
176 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
177 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
178 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
179 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
180 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
181 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
182 /* Classic + MOVW, <= 8K. */
183 { "avr25", ARCH_AVR25, NULL },
184 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
185 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
186 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
187 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
188 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
189 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
190 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
191 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
192 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
193 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
194 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
195 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
196 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
197 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
198 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
199 /* Classic, > 8K, <= 64K. */
200 { "avr3", ARCH_AVR3, NULL },
201 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
202 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
203 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
204 /* Classic, == 128K. */
205 { "avr31", ARCH_AVR31, NULL },
206 { "atmega103", ARCH_AVR3, "__AVR_ATmega103__" },
207 /* Classic + MOVW + JMP/CALL. */
208 { "avr35", ARCH_AVR35, NULL },
209 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
210 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
211 /* Enhanced, <= 8K. */
212 { "avr4", ARCH_AVR4, NULL },
213 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
214 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
215 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
216 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
217 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
218 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
219 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
220 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
221 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
222 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
223 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
224 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
225 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
226 /* Enhanced, > 8K, <= 64K. */
227 { "avr5", ARCH_AVR5, NULL },
228 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
229 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
230 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
231 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
232 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
233 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
234 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
235 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
236 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
237 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
238 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
239 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
240 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
241 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
242 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
243 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
244 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
245 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
246 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
247 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
248 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
249 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
250 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
251 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
252 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
253 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
254 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
255 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
256 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
257 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
258 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
259 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
260 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
261 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
262 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
263 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
264 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
265 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
266 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
267 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
268 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
269 /* Enhanced, == 128K. */
270 { "avr51", ARCH_AVR51, NULL },
271 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
272 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
273 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
274 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
275 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
276 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
277 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
278 /* 3-Byte PC. */
279 { "avr6", ARCH_AVR6, NULL },
280 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
281 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
282 /* Assembler only. */
283 { "avr1", ARCH_AVR1, NULL },
284 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
285 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
286 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
287 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
288 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
289 { NULL, ARCH_UNKNOWN, NULL }
290 };
291
292 int avr_case_values_threshold = 30000;
293 \f
294 /* Initialize the GCC target structure. */
295 #undef TARGET_ASM_ALIGNED_HI_OP
296 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
297 #undef TARGET_ASM_ALIGNED_SI_OP
298 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
299 #undef TARGET_ASM_UNALIGNED_HI_OP
300 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
301 #undef TARGET_ASM_UNALIGNED_SI_OP
302 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
303 #undef TARGET_ASM_INTEGER
304 #define TARGET_ASM_INTEGER avr_assemble_integer
305 #undef TARGET_ASM_FILE_START
306 #define TARGET_ASM_FILE_START avr_file_start
307 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
308 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
309 #undef TARGET_ASM_FILE_END
310 #define TARGET_ASM_FILE_END avr_file_end
311
312 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
313 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
314 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
315 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
316 #undef TARGET_ATTRIBUTE_TABLE
317 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
318 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
319 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
320 #undef TARGET_INSERT_ATTRIBUTES
321 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
322 #undef TARGET_SECTION_TYPE_FLAGS
323 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
324 #undef TARGET_RTX_COSTS
325 #define TARGET_RTX_COSTS avr_rtx_costs
326 #undef TARGET_ADDRESS_COST
327 #define TARGET_ADDRESS_COST avr_address_cost
328 #undef TARGET_MACHINE_DEPENDENT_REORG
329 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
330
331 #undef TARGET_RETURN_IN_MEMORY
332 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
333
334 #undef TARGET_STRICT_ARGUMENT_NAMING
335 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
336
337 struct gcc_target targetm = TARGET_INITIALIZER;
338 \f
/* Process -mmcu= and derive all per-device/per-architecture backend
   state: capability flags, CPP macros, the case-values threshold, and
   the cached tmp/zero register RTXes.  Called once at option-override
   time.

   NOTE(review): if the MCU name is unknown, this only prints the list
   of known MCUs to stderr and then falls through with the sentinel
   entry (arch index ARCH_UNKNOWN == 0, macro NULL) -- it does not stop
   compilation.  Confirm that is intended before changing.  */
void
avr_override_options (void)
{
  const struct mcu_type_s *t;
  const struct base_arch_s *base;

  /* On AVR, address 0 is a valid (RAM) address, so accesses through a
     null pointer must not be deleted.  */
  flag_delete_null_pointer_checks = 0;

  /* Look up the -mmcu= name; loop ends on the NULL-name sentinel.  */
  for (t = avr_mcu_types; t->name; t++)
    if (strcmp (t->name, avr_mcu_name) == 0)
      break;

  if (!t->name)
    {
      fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
               avr_mcu_name);
      for (t = avr_mcu_types; t->name; t++)
        fprintf (stderr," %s\n", t->name);
    }

  /* Cache the architecture description and mirror its flags into the
     file-scope convenience globals.  */
  avr_current_arch = &avr_arch_types[t->arch];
  base = &avr_arch_types[t->arch];
  avr_asm_only_p = base->asm_only;
  avr_have_mul_p = base->have_mul;
  avr_mega_p = base->have_jmp_call;
  avr_have_movw_lpmx_p = base->have_movw_lpmx;
  avr_base_arch_macro = base->macro;
  avr_extra_arch_macro = t->macro;

  /* Tablejumps are only profitable below a device-dependent number of
     case values; keep the default (huge) threshold when tablejumps are
     disabled.  */
  if (optimize && !TARGET_NO_TABLEJUMP)
    avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;

  /* Pre-built QImode RTXes for r0 (tmp) and r1 (zero).  */
  tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
  zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);

  init_machine_status = avr_init_machine_status;
}
376
/* Map hard register number (0..33) to its register class; consumed by
   avr_regno_reg_class below.  Indices 0..31 are r0..r31, 32/33 are the
   stack pointer bytes SPL/SPH.  */

static const int reg_class_tab[]={
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS, /* r0 - r15 */
  LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
  LD_REGS,                      /* r16 - 23 */
  ADDW_REGS,ADDW_REGS,          /* r24,r25 */
  POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
  POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
  POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
  STACK_REG,STACK_REG           /* SPL,SPH */
};
392
393 /* Function to set up the backend function structure. */
394
395 static struct machine_function *
396 avr_init_machine_status (void)
397 {
398 return ((struct machine_function *)
399 ggc_alloc_cleared (sizeof (struct machine_function)));
400 }
401
402 /* Return register class for register R. */
403
404 enum reg_class
405 avr_regno_reg_class (int r)
406 {
407 if (r <= 33)
408 return reg_class_tab[r];
409 return ALL_REGS;
410 }
411
412 /* Return nonzero if FUNC is a naked function. */
413
414 static int
415 avr_naked_function_p (tree func)
416 {
417 tree a;
418
419 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
420
421 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
422 return a != NULL_TREE;
423 }
424
425 /* Return nonzero if FUNC is an interrupt function as specified
426 by the "interrupt" attribute. */
427
428 static int
429 interrupt_function_p (tree func)
430 {
431 tree a;
432
433 if (TREE_CODE (func) != FUNCTION_DECL)
434 return 0;
435
436 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
437 return a != NULL_TREE;
438 }
439
440 /* Return nonzero if FUNC is a signal function as specified
441 by the "signal" attribute. */
442
443 static int
444 signal_function_p (tree func)
445 {
446 tree a;
447
448 if (TREE_CODE (func) != FUNCTION_DECL)
449 return 0;
450
451 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
452 return a != NULL_TREE;
453 }
454
455 /* Return nonzero if FUNC is a OS_task function. */
456
457 static int
458 avr_OS_task_function_p (tree func)
459 {
460 tree a;
461
462 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
463
464 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
465 return a != NULL_TREE;
466 }
467
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET
   (SET may be NULL when only the count is wanted).  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  /* Interrupt/signal handlers must preserve even call-used registers.  */
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));
  int leaf_func_p = leaf_function_p ();

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" attribute.  */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */
      if (fixed_regs[reg])
        continue;

      /* Save a register when either (a) this is a non-leaf
         interrupt/signal handler and the register is call-used (a
         callee might clobber it), or (b) the register is live somewhere
         in the function and would not otherwise be preserved --
         excluding the Y pair when it serves as the frame pointer, which
         is saved separately.  */
      if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
509
510 /* Compute offset between arg_pointer and frame_pointer. */
511
512 int
513 initial_elimination_offset (int from, int to)
514 {
515 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
516 return 0;
517 else
518 {
519 int offset = frame_pointer_needed ? 2 : 0;
520 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
521
522 offset += avr_regs_to_save (NULL);
523 return get_frame_size () + (avr_pc_size) + 1 + offset;
524 }
525 }
526
527 /* Return 1 if the function epilogue is just a single "ret". */
528
529 int
530 avr_simple_epilogue (void)
531 {
532 return (! frame_pointer_needed
533 && get_frame_size () == 0
534 && avr_regs_to_save (NULL) == 0
535 && ! interrupt_function_p (current_function_decl)
536 && ! signal_function_p (current_function_decl)
537 && ! avr_naked_function_p (current_function_decl)
538 && ! TREE_THIS_VOLATILE (current_function_decl));
539 }
540
/* This function checks sequence of live registers.  It counts the
   callee-saved registers (r0..r17 that are not call-used, plus the Y
   pair) that are live, and returns that count only when the live ones
   form one contiguous run ending at the top of the sequence
   (cur_seq == live_seq); otherwise 0.  Used to decide whether the
   out-of-line prologue/epilogue helpers can be used.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq=0;   /* total number of live registers in the sequence */
  int cur_seq=0;    /* length of the current trailing run of live regs */

  for (reg = 0; reg < 18; ++reg)
    {
      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;   /* gap: trailing run restarts */
        }
    }

  if (!frame_pointer_needed)
    {
      /* Y low/high bytes participate in the sequence individually.  */
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* Frame pointer (Y pair) is always saved/live.  */
      cur_seq += 2;
      live_seq += 2;
    }
  return (cur_seq == live_seq) ? live_seq : 0;
}
589
590 /* Output function prologue. */
591
592 void
593 expand_prologue (void)
594 {
595 int live_seq;
596 HARD_REG_SET set;
597 int minimize;
598 HOST_WIDE_INT size = get_frame_size();
599 /* Define templates for push instructions. */
600 rtx pushbyte = gen_rtx_MEM (QImode,
601 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
602 rtx pushword = gen_rtx_MEM (HImode,
603 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
604 rtx insn;
605
606 last_insn_address = 0;
607
608 /* Init cfun->machine. */
609 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
610 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
611 cfun->machine->is_signal = signal_function_p (current_function_decl);
612 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
613
614 /* Prologue: naked. */
615 if (cfun->machine->is_naked)
616 {
617 return;
618 }
619
620 avr_regs_to_save (&set);
621 live_seq = sequent_regs_live ();
622 minimize = (TARGET_CALL_PROLOGUES
623 && !cfun->machine->is_interrupt
624 && !cfun->machine->is_signal
625 && !cfun->machine->is_OS_task
626 && live_seq);
627
628 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
629 {
630 if (cfun->machine->is_interrupt)
631 {
632 /* Enable interrupts. */
633 insn = emit_insn (gen_enable_interrupt ());
634 RTX_FRAME_RELATED_P (insn) = 1;
635 }
636
637 /* Push zero reg. */
638 insn = emit_move_insn (pushbyte, zero_reg_rtx);
639 RTX_FRAME_RELATED_P (insn) = 1;
640
641 /* Push tmp reg. */
642 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
643 RTX_FRAME_RELATED_P (insn) = 1;
644
645 /* Push SREG. */
646 insn = emit_move_insn (tmp_reg_rtx,
647 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
648 RTX_FRAME_RELATED_P (insn) = 1;
649 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
650 RTX_FRAME_RELATED_P (insn) = 1;
651
652 /* Push RAMPZ. */
653 if(AVR_HAVE_RAMPZ
654 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
655 {
656 insn = emit_move_insn (tmp_reg_rtx,
657 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
658 RTX_FRAME_RELATED_P (insn) = 1;
659 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
660 RTX_FRAME_RELATED_P (insn) = 1;
661 }
662
663 /* Clear zero reg. */
664 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
665 RTX_FRAME_RELATED_P (insn) = 1;
666
667 /* Prevent any attempt to delete the setting of ZERO_REG! */
668 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
669 }
670 if (minimize && (frame_pointer_needed || live_seq > 6))
671 {
672 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
673 gen_int_mode (size, HImode));
674 RTX_FRAME_RELATED_P (insn) = 1;
675
676 insn =
677 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
678 gen_int_mode (size + live_seq, HImode)));
679 RTX_FRAME_RELATED_P (insn) = 1;
680 }
681 else
682 {
683 int reg;
684 for (reg = 0; reg < 32; ++reg)
685 {
686 if (TEST_HARD_REG_BIT (set, reg))
687 {
688 /* Emit push of register to save. */
689 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
690 RTX_FRAME_RELATED_P (insn) = 1;
691 }
692 }
693 if (frame_pointer_needed)
694 {
695 if(!cfun->machine->is_OS_task)
696 {
697 /* Push frame pointer. */
698 insn = emit_move_insn (pushword, frame_pointer_rtx);
699 RTX_FRAME_RELATED_P (insn) = 1;
700 }
701
702 if (!size)
703 {
704 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
705 RTX_FRAME_RELATED_P (insn) = 1;
706 }
707 else
708 {
709 /* Creating a frame can be done by direct manipulation of the
710 stack or via the frame pointer. These two methods are:
711 fp=sp
712 fp-=size
713 sp=fp
714 OR
715 sp-=size
716 fp=sp
717 the optimum method depends on function type, stack and frame size.
718 To avoid a complex logic, both methods are tested and shortest
719 is selected. */
720 rtx myfp;
721 /* First method. */
722 if (TARGET_TINY_STACK)
723 {
724 if (size < -63 || size > 63)
725 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
726
727 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
728 over 'sbiw' (2 cycles, same size). */
729 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
730 }
731 else
732 {
733 /* Normal sized addition. */
734 myfp = frame_pointer_rtx;
735 }
736 /* Calculate length. */
737 int method1_length;
738 method1_length =
739 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
740 method1_length +=
741 get_attr_length (gen_move_insn (myfp,
742 gen_rtx_PLUS (GET_MODE(myfp), myfp,
743 gen_int_mode (-size,
744 GET_MODE(myfp)))));
745 method1_length +=
746 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
747
748 /* Method 2-Adjust Stack pointer. */
749 int sp_plus_length = 0;
750 if (size <= 6)
751 {
752 sp_plus_length =
753 get_attr_length (gen_move_insn (stack_pointer_rtx,
754 gen_rtx_PLUS (HImode, stack_pointer_rtx,
755 gen_int_mode (-size,
756 HImode))));
757 sp_plus_length +=
758 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
759 }
760 /* Use shortest method. */
761 if (size <= 6 && (sp_plus_length < method1_length))
762 {
763 insn = emit_move_insn (stack_pointer_rtx,
764 gen_rtx_PLUS (HImode, stack_pointer_rtx,
765 gen_int_mode (-size, HImode)));
766 RTX_FRAME_RELATED_P (insn) = 1;
767 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
768 RTX_FRAME_RELATED_P (insn) = 1;
769 }
770 else
771 {
772 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
773 RTX_FRAME_RELATED_P (insn) = 1;
774 insn = emit_move_insn (myfp,
775 gen_rtx_PLUS (GET_MODE(myfp), myfp,
776 gen_int_mode (-size, GET_MODE(myfp))));
777 RTX_FRAME_RELATED_P (insn) = 1;
778 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
779 RTX_FRAME_RELATED_P (insn) = 1;
780 }
781 }
782 }
783 }
784 }
785
786 /* Output summary at end of function prologue. */
787
788 static void
789 avr_asm_function_end_prologue (FILE *file)
790 {
791 if (cfun->machine->is_naked)
792 {
793 fputs ("/* prologue: naked */\n", file);
794 }
795 else
796 {
797 if (cfun->machine->is_interrupt)
798 {
799 fputs ("/* prologue: Interrupt */\n", file);
800 }
801 else if (cfun->machine->is_signal)
802 {
803 fputs ("/* prologue: Signal */\n", file);
804 }
805 else
806 fputs ("/* prologue: function */\n", file);
807 }
808 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
809 get_frame_size());
810 }
811
812
813 /* Implement EPILOGUE_USES. */
814
815 int
816 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
817 {
818 if (reload_completed
819 && cfun->machine
820 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
821 return 1;
822 return 0;
823 }
824
/* Emit RTL for the function epilogue: tear down the frame, restore the
   saved registers (inline pops or the out-of-line __epilogue_restores__
   helper), undo the interrupt/signal entry sequence, and emit the
   return.  Mirrors expand_prologue in reverse order.  */

void
expand_epilogue (void)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size = get_frame_size();

  /* epilogue: naked -- user supplies the whole epilogue.  */
  if (cfun->machine->is_naked)
    {
      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  /* Same eligibility condition as in expand_prologue.  */
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && live_seq);

  if (minimize && (frame_pointer_needed || live_seq > 4))
    {
      /* Out-of-line restore: the helper expects the frame already
         released and the frame pointer positioned.  */
      if (frame_pointer_needed)
        {
          /* Get rid of frame.  */
          emit_move_insn(frame_pointer_rtx,
                         gen_rtx_PLUS (HImode, frame_pointer_rtx,
                                       gen_int_mode (size, HImode)));
        }
      else
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
    }
  else
    {
      if (frame_pointer_needed)
        {
          if (size)
            {
              /* Try two methods to adjust stack and select shortest.  */
              int fp_plus_length;
              /* Method 1-Adjust frame pointer.  */
              fp_plus_length =
                get_attr_length (gen_move_insn (frame_pointer_rtx,
                                                gen_rtx_PLUS (HImode, frame_pointer_rtx,
                                                              gen_int_mode (size,
                                                                            HImode))));
              /* Copy to stack pointer.  */
              fp_plus_length +=
                get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));

              /* Method 2-Adjust Stack pointer.  */
              int sp_plus_length = 0;
              if (size <= 5)
                {
                  sp_plus_length =
                    get_attr_length (gen_move_insn (stack_pointer_rtx,
                                                    gen_rtx_PLUS (HImode, stack_pointer_rtx,
                                                                  gen_int_mode (size,
                                                                                HImode))));
                }
              /* Use shortest method.  */
              if (size <= 5 && (sp_plus_length < fp_plus_length))
                {
                  emit_move_insn (stack_pointer_rtx,
                                  gen_rtx_PLUS (HImode, stack_pointer_rtx,
                                                gen_int_mode (size, HImode)));
                }
              else
                {
                  emit_move_insn (frame_pointer_rtx,
                                  gen_rtx_PLUS (HImode, frame_pointer_rtx,
                                                gen_int_mode (size, HImode)));
                  /* Copy to stack pointer.  */
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }
            }
          if(!cfun->machine->is_OS_task)
            {
              /* Restore previous frame_pointer.  */
              emit_insn (gen_pophi (frame_pointer_rtx));
            }
        }
      /* Restore used registers, in reverse order of the prologue
         pushes.  */
      for (reg = 31; reg >= 0; --reg)
        {
          if (TEST_HARD_REG_BIT (set, reg))
            emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
        }
      if (cfun->machine->is_interrupt || cfun->machine->is_signal)
        {
          /* Restore RAMPZ using tmp reg as scratch -- same condition
             under which the prologue saved it.  */
          if(AVR_HAVE_RAMPZ
             && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
            {
              emit_insn (gen_popqi (tmp_reg_rtx));
              emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
                              tmp_reg_rtx);
            }

          /* Restore SREG using tmp reg as scratch.  */
          emit_insn (gen_popqi (tmp_reg_rtx));

          emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
                          tmp_reg_rtx);

          /* Restore tmp REG.  */
          emit_insn (gen_popqi (tmp_reg_rtx));

          /* Restore zero REG.  */
          emit_insn (gen_popqi (zero_reg_rtx));
        }

      emit_jump_insn (gen_return ());
    }
}
950
/* Write a marker comment to the assembly output at the beginning of
   the function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
958
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.

   The nonzero value returned is actually the register class that can
   hold the address base (callers treat it as a boolean).  Accepted
   forms: a base register, a constant address, base-plus-small-offset
   (offset within MAX_LD_OFFSET, or a frame-pointer offset), and
   pre-decrement/post-increment on a base register.  */

int
legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  enum reg_class r = NO_REGS;

  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, "mode: (%s) %s %s %s %s:",
               GET_MODE_NAME(mode),
               strict ? "(strict)": "",
               reload_completed ? "(reload_completed)": "",
               reload_in_progress ? "(reload_in_progress)": "",
               reg_renumber ? "(reg_renumber)" : "");
      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
          && reg_renumber
          )
        fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                 true_regnum (XEXP (x, 0)));
      debug_rtx (x);
    }
  /* Plain base register.  */
  if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
                    : REG_OK_FOR_BASE_NOSTRICT_P (x)))
    r = POINTER_REGS;
  /* Constant address.  */
  else if (CONSTANT_ADDRESS_P (x))
    r = ALL_REGS;
  /* Base register plus non-negative constant offset.  */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && GET_CODE (XEXP (x, 1)) == CONST_INT
           && INTVAL (XEXP (x, 1)) >= 0)
    {
      int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
      if (fit)
        {
          /* Only Y and Z support displacement (LDD/STD); X does not.  */
          if (! strict
              || REGNO (XEXP (x,0)) == REG_Y
              || REGNO (XEXP (x,0)) == REG_Z)
            r = BASE_POINTER_REGS;
          if (XEXP (x,0) == frame_pointer_rtx
              || XEXP (x,0) == arg_pointer_rtx)
            r = BASE_POINTER_REGS;
        }
      else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
        r = POINTER_Y_REGS;
    }
  /* Pre-decrement / post-increment on a base register.  */
  else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
           && REG_P (XEXP (x, 0))
           && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
               : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
    {
      r = POINTER_REGS;
    }
  if (TARGET_ALL_DEBUG)
    {
      /* Debug print only; class values above 9 yield a non-digit
         character here.  */
      fprintf (stderr, " ret = %c\n", r + '0');
    }
  return r == NO_REGS ? 0 : (int)r;
}
1023
/* Attempts to replace X with a valid memory address for an operand of
   mode MODE.  Note that X is ignored on entry and reset to OLDX; the
   result is either OLDX itself or a force_reg'ed copy of it.  */

rtx
legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
  x = oldx;
  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
      debug_rtx (oldx);
    }

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx,0)))
    {
      /* reg + reg has no addressing mode on AVR: compute the sum.  */
      if (REG_P (XEXP (oldx,1)))
	x = force_reg (GET_MODE (oldx), oldx);
      else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
	{
	  /* reg + const: only displacements up to MAX_LD_OFFSET are
	     directly addressable; frame-pointer offsets are left alone
	     (handled elsewhere).  */
	  int offs = INTVAL (XEXP (oldx,1));
	  if (frame_pointer_rtx != XEXP (oldx,0))
	    if (offs > MAX_LD_OFFSET (mode))
	      {
		if (TARGET_ALL_DEBUG)
		  fprintf (stderr, "force_reg (big offset)\n");
		x = force_reg (GET_MODE (oldx), oldx);
	      }
	}
    }
  return x;
}
1056
1057
1058 /* Return a pointer register name as a string. */
1059
1060 static const char *
1061 ptrreg_to_str (int regno)
1062 {
1063 switch (regno)
1064 {
1065 case REG_X: return "X";
1066 case REG_Y: return "Y";
1067 case REG_Z: return "Z";
1068 default:
1069 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1070 }
1071 return NULL;
1072 }
1073
1074 /* Return the condition name as a string.
1075 Used in conditional jump constructing */
1076
1077 static const char *
1078 cond_string (enum rtx_code code)
1079 {
1080 switch (code)
1081 {
1082 case NE:
1083 return "ne";
1084 case EQ:
1085 return "eq";
1086 case GE:
1087 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1088 return "pl";
1089 else
1090 return "ge";
1091 case LT:
1092 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1093 return "mi";
1094 else
1095 return "lt";
1096 case GEU:
1097 return "sh";
1098 case LTU:
1099 return "lo";
1100 default:
1101 gcc_unreachable ();
1102 }
1103 }
1104
1105 /* Output ADDR to FILE as address. */
1106
1107 void
1108 print_operand_address (FILE *file, rtx addr)
1109 {
1110 switch (GET_CODE (addr))
1111 {
1112 case REG:
1113 fprintf (file, ptrreg_to_str (REGNO (addr)));
1114 break;
1115
1116 case PRE_DEC:
1117 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1118 break;
1119
1120 case POST_INC:
1121 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1122 break;
1123
1124 default:
1125 if (CONSTANT_ADDRESS_P (addr)
1126 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1127 || GET_CODE (addr) == LABEL_REF))
1128 {
1129 fprintf (file, "gs(");
1130 output_addr_const (file,addr);
1131 fprintf (file ,")");
1132 }
1133 else
1134 output_addr_const (file, addr);
1135 }
1136 }
1137
1138
1139 /* Output X as assembler operand to file FILE. */
1140
1141 void
1142 print_operand (FILE *file, rtx x, int code)
1143 {
1144 int abcd = 0;
1145
1146 if (code >= 'A' && code <= 'D')
1147 abcd = code - 'A';
1148
1149 if (code == '~')
1150 {
1151 if (!AVR_MEGA)
1152 fputc ('r', file);
1153 }
1154 else if (code == '!')
1155 {
1156 if (AVR_HAVE_EIJMP_EICALL)
1157 fputc ('e', file);
1158 }
1159 else if (REG_P (x))
1160 {
1161 if (x == zero_reg_rtx)
1162 fprintf (file, "__zero_reg__");
1163 else
1164 fprintf (file, reg_names[true_regnum (x) + abcd]);
1165 }
1166 else if (GET_CODE (x) == CONST_INT)
1167 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1168 else if (GET_CODE (x) == MEM)
1169 {
1170 rtx addr = XEXP (x,0);
1171
1172 if (CONSTANT_P (addr) && abcd)
1173 {
1174 fputc ('(', file);
1175 output_address (addr);
1176 fprintf (file, ")+%d", abcd);
1177 }
1178 else if (code == 'o')
1179 {
1180 if (GET_CODE (addr) != PLUS)
1181 fatal_insn ("bad address, not (reg+disp):", addr);
1182
1183 print_operand (file, XEXP (addr, 1), 0);
1184 }
1185 else if (code == 'p' || code == 'r')
1186 {
1187 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1188 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1189
1190 if (code == 'p')
1191 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1192 else
1193 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1194 }
1195 else if (GET_CODE (addr) == PLUS)
1196 {
1197 print_operand_address (file, XEXP (addr,0));
1198 if (REGNO (XEXP (addr, 0)) == REG_X)
1199 fatal_insn ("internal compiler error. Bad address:"
1200 ,addr);
1201 fputc ('+', file);
1202 print_operand (file, XEXP (addr,1), code);
1203 }
1204 else
1205 print_operand_address (file, addr);
1206 }
1207 else if (GET_CODE (x) == CONST_DOUBLE)
1208 {
1209 long val;
1210 REAL_VALUE_TYPE rv;
1211 if (GET_MODE (x) != SFmode)
1212 fatal_insn ("internal compiler error. Unknown mode:", x);
1213 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1214 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1215 fprintf (file, "0x%lx", val);
1216 }
1217 else if (code == 'j')
1218 fputs (cond_string (GET_CODE (x)), file);
1219 else if (code == 'k')
1220 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1221 else
1222 print_operand_address (file, x);
1223 }
1224
/* Update the condition-code status for INSN according to the insn's
   CC attribute from the machine description.  BODY is unused.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;

  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      /* Z and N reflect the set destination; V is declared unusable
	 via CC_NO_OVERFLOW.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
	{
	  cc_status.flags |= CC_NO_OVERFLOW;
	  cc_status.value1 = SET_DEST (set);
	}
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
	 The V flag may or may not be known but that's ok because
	 alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
	{
	  cc_status.value1 = SET_DEST (set);
	  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
	}
      break;

    case CC_COMPARE:
      /* A compare: CC reflects the source of the set.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
	cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;

      /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
      set = single_set (insn);
      if (set)
	{
	  rtx src = SET_SRC (set);

	  if (GET_CODE (src) == ASHIFTRT
	      && GET_MODE (src) == QImode)
	    {
	      rtx x = XEXP (src, 1);

	      /* NOTE(review): presumably the ashrqi3 expansion for these
		 shift counts does leave usable Z/N — confirm against the
		 md file before relying on this.  */
	      if (GET_CODE (x) == CONST_INT
		  && INTVAL (x) > 0
		  && INTVAL (x) != 6)
		{
		  cc_status.value1 = SET_DEST (set);
		  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
		}
	    }
	}
      break;
    }
}
1299
1300 /* Return maximum number of consecutive registers of
1301 class CLASS needed to hold a value of mode MODE. */
1302
1303 int
1304 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1305 {
1306 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1307 }
1308
1309 /* Choose mode for jump insn:
1310 1 - relative jump in range -63 <= x <= 62 ;
1311 2 - relative jump in range -2046 <= x <= 2045 ;
1312 3 - absolute jump (only for ATmega[16]03). */
1313
1314 int
1315 avr_jump_mode (rtx x, rtx insn)
1316 {
1317 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1318 ? XEXP (x, 0) : x));
1319 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1320 int jump_distance = cur_addr - dest_addr;
1321
1322 if (-63 <= jump_distance && jump_distance <= 62)
1323 return 1;
1324 else if (-2046 <= jump_distance && jump_distance <= 2045)
1325 return 2;
1326 else if (AVR_MEGA)
1327 return 3;
1328
1329 return 2;
1330 }
1331
/* Return an AVR conditional-branch sequence.
   X is a comparison RTX whose code selects the condition.
   LEN is the value returned by avr_jump_mode (1 = short branch,
   2 = branch around an rjmp, otherwise branch around a jmp).
   If REVERSE is nonzero the condition code in X must be reversed.  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
      /* GT/GTU/LE/LEU have no single branch insn on AVR; they are
	 synthesized from breq plus the corresponding >= / < branch,
	 skipping over the final jump when the condition fails.  */
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	/* V unusable: use brpl/brmi (N flag) instead of brge/brlt.  */
	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brpl,%0)) :
		len == 2 ? (AS1 (breq,.+4) CR_TAB
			    AS1 (brmi,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+6) CR_TAB
		 AS1 (brmi,.+4) CR_TAB
		 AS1 (jmp,%0)));

      else
	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brge,%0)) :
		len == 2 ? (AS1 (breq,.+4) CR_TAB
			    AS1 (brlt,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+6) CR_TAB
		 AS1 (brlt,.+4) CR_TAB
		 AS1 (jmp,%0)));
    case GTU:
      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
			  AS1 (brsh,%0)) :
	      len == 2 ? (AS1 (breq,.+4) CR_TAB
			  AS1 (brlo,.+2) CR_TAB
			  AS1 (rjmp,%0)) :
	      (AS1 (breq,.+6) CR_TAB
	       AS1 (brlo,.+4) CR_TAB
	       AS1 (jmp,%0)));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	return (len == 1 ? (AS1 (breq,%0) CR_TAB
			    AS1 (brmi,%0)) :
		len == 2 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brpl,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+2) CR_TAB
		 AS1 (brpl,.+4) CR_TAB
		 AS1 (jmp,%0)));
      else
	return (len == 1 ? (AS1 (breq,%0) CR_TAB
			    AS1 (brlt,%0)) :
		len == 2 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brge,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+2) CR_TAB
		 AS1 (brge,.+4) CR_TAB
		 AS1 (jmp,%0)));
    case LEU:
      return (len == 1 ? (AS1 (breq,%0) CR_TAB
			  AS1 (brlo,%0)) :
	      len == 2 ? (AS1 (breq,.+2) CR_TAB
			  AS1 (brsh,.+2) CR_TAB
			  AS1 (rjmp,%0)) :
	      (AS1 (breq,.+2) CR_TAB
	       AS1 (brsh,.+4) CR_TAB
	       AS1 (jmp,%0)));
    default:
      /* Conditions with a direct branch insn: use the %j1 / %k1
	 operand letters (condition / reversed condition), branching
	 around a longer jump when out of branch range.  */
      if (reverse)
	{
	  switch (len)
	    {
	    case 1:
	      return AS1 (br%k1,%0);
	    case 2:
	      return (AS1 (br%j1,.+2) CR_TAB
		      AS1 (rjmp,%0));
	    default:
	      return (AS1 (br%j1,.+4) CR_TAB
		      AS1 (jmp,%0));
	    }
	}
      else
	{
	  switch (len)
	    {
	    case 1:
	      return AS1 (br%j1,%0);
	    case 2:
	      return (AS1 (br%k1,.+2) CR_TAB
		      AS1 (rjmp,%0));
	    default:
	      return (AS1 (br%k1,.+4) CR_TAB
		      AS1 (jmp,%0));
	    }
	}
    }
  /* Not reached: every case above returns.  */
  return "";
}
1433
1434 /* Predicate function for immediate operand which fits to byte (8bit) */
1435
1436 int
1437 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1438 {
1439 return (GET_CODE (op) == CONST_INT
1440 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1441 }
1442
/* Output all insn addresses and their sizes into the assembly language
   output file.  This is helpful for debugging whether the length attributes
   in the md file are correct.
   Output insn cost for next insn.  */

void
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
		    int num_operands ATTRIBUTE_UNUSED)
{
  int uid = INSN_UID (insn);

  if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
    {
      /* Address, size (delta from the previous insn) and rtx cost.  */
      fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
	       INSN_ADDRESSES (uid),
	       INSN_ADDRESSES (uid) - last_insn_address,
	       rtx_cost (PATTERN (insn), INSN));
    }
  /* File-scope state: remember this address for the next size delta.  */
  last_insn_address = INSN_ADDRESSES (uid);
}
1463
1464 /* Return 0 if undefined, 1 if always true or always false. */
1465
1466 int
1467 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1468 {
1469 unsigned int max = (mode == QImode ? 0xff :
1470 mode == HImode ? 0xffff :
1471 mode == SImode ? 0xffffffff : 0);
1472 if (max && operator && GET_CODE (x) == CONST_INT)
1473 {
1474 if (unsigned_condition (operator) != operator)
1475 max >>= 1;
1476
1477 if (max != (INTVAL (x) & max)
1478 && INTVAL (x) != 0xff)
1479 return 1;
1480 }
1481 return 0;
1482 }
1483
1484
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR those are r8 through r25.  */

int
function_arg_regno_p (int r)
{
  return 8 <= r && r <= 25;
}
1493
/* Initializing the variable cum for the state at the beginning
   of the argument list.  FNTYPE is the called function's type (may be
   NULL), LIBNAME nonzero for a library call.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
		      tree fndecl ATTRIBUTE_UNUSED)
{
  /* By default, 18 registers starting at FIRST_CUM_REG are available.  */
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && fntype)
    {
      /* Variadic functions (prototype whose last listed argument type is
	 not void) pass everything on the stack.  */
      int stdarg = (TYPE_ARG_TYPES (fntype) != 0
                    && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                        != void_type_node));
      if (stdarg)
        cum->nregs = 0;
    }
}
1512
1513 /* Returns the number of registers to allocate for a function argument. */
1514
1515 static int
1516 avr_num_arg_regs (enum machine_mode mode, tree type)
1517 {
1518 int size;
1519
1520 if (mode == BLKmode)
1521 size = int_size_in_bytes (type);
1522 else
1523 size = GET_MODE_SIZE (mode);
1524
1525 /* Align all function arguments to start in even-numbered registers.
1526 Odd-sized arguments leave holes above them. */
1527
1528 return (size + 1) & ~1;
1529 }
1530
1531 /* Controls whether a function argument is passed
1532 in a register, and which register. */
1533
1534 rtx
1535 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1536 int named ATTRIBUTE_UNUSED)
1537 {
1538 int bytes = avr_num_arg_regs (mode, type);
1539
1540 if (cum->nregs && bytes <= cum->nregs)
1541 return gen_rtx_REG (mode, cum->regno - bytes);
1542
1543 return NULL_RTX;
1544 }
1545
1546 /* Update the summarizer variable CUM to advance past an argument
1547 in the argument list. */
1548
1549 void
1550 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1551 int named ATTRIBUTE_UNUSED)
1552 {
1553 int bytes = avr_num_arg_regs (mode, type);
1554
1555 cum->nregs -= bytes;
1556 cum->regno -= bytes;
1557
1558 if (cum->nregs <= 0)
1559 {
1560 cum->nregs = 0;
1561 cum->regno = FIRST_CUM_REG;
1562 }
1563 }
1564
1565 /***********************************************************************
1566 Functions for outputting various mov's for a various modes
1567 ************************************************************************/
1568 const char *
1569 output_movqi (rtx insn, rtx operands[], int *l)
1570 {
1571 int dummy;
1572 rtx dest = operands[0];
1573 rtx src = operands[1];
1574 int *real_l = l;
1575
1576 if (!l)
1577 l = &dummy;
1578
1579 *l = 1;
1580
1581 if (register_operand (dest, QImode))
1582 {
1583 if (register_operand (src, QImode)) /* mov r,r */
1584 {
1585 if (test_hard_reg_class (STACK_REG, dest))
1586 return AS2 (out,%0,%1);
1587 else if (test_hard_reg_class (STACK_REG, src))
1588 return AS2 (in,%0,%1);
1589
1590 return AS2 (mov,%0,%1);
1591 }
1592 else if (CONSTANT_P (src))
1593 {
1594 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1595 return AS2 (ldi,%0,lo8(%1));
1596
1597 if (GET_CODE (src) == CONST_INT)
1598 {
1599 if (src == const0_rtx) /* mov r,L */
1600 return AS1 (clr,%0);
1601 else if (src == const1_rtx)
1602 {
1603 *l = 2;
1604 return (AS1 (clr,%0) CR_TAB
1605 AS1 (inc,%0));
1606 }
1607 else if (src == constm1_rtx)
1608 {
1609 /* Immediate constants -1 to any register */
1610 *l = 2;
1611 return (AS1 (clr,%0) CR_TAB
1612 AS1 (dec,%0));
1613 }
1614 else
1615 {
1616 int bit_nr = exact_log2 (INTVAL (src));
1617
1618 if (bit_nr >= 0)
1619 {
1620 *l = 3;
1621 if (!real_l)
1622 output_asm_insn ((AS1 (clr,%0) CR_TAB
1623 "set"), operands);
1624 if (!real_l)
1625 avr_output_bld (operands, bit_nr);
1626
1627 return "";
1628 }
1629 }
1630 }
1631
1632 /* Last resort, larger than loading from memory. */
1633 *l = 4;
1634 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1635 AS2 (ldi,r31,lo8(%1)) CR_TAB
1636 AS2 (mov,%0,r31) CR_TAB
1637 AS2 (mov,r31,__tmp_reg__));
1638 }
1639 else if (GET_CODE (src) == MEM)
1640 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1641 }
1642 else if (GET_CODE (dest) == MEM)
1643 {
1644 const char *template;
1645
1646 if (src == const0_rtx)
1647 operands[1] = zero_reg_rtx;
1648
1649 template = out_movqi_mr_r (insn, operands, real_l);
1650
1651 if (!real_l)
1652 output_asm_insn (template, operands);
1653
1654 operands[1] = src;
1655 }
1656 return "";
1657 }
1658
1659
1660 const char *
1661 output_movhi (rtx insn, rtx operands[], int *l)
1662 {
1663 int dummy;
1664 rtx dest = operands[0];
1665 rtx src = operands[1];
1666 int *real_l = l;
1667
1668 if (!l)
1669 l = &dummy;
1670
1671 if (register_operand (dest, HImode))
1672 {
1673 if (register_operand (src, HImode)) /* mov r,r */
1674 {
1675 if (test_hard_reg_class (STACK_REG, dest))
1676 {
1677 if (TARGET_TINY_STACK)
1678 {
1679 *l = 1;
1680 return AS2 (out,__SP_L__,%A1);
1681 }
1682 /* Use simple load of stack pointer if no interrupts are used
1683 or inside main or signal function prologue where they disabled. */
1684 else if (TARGET_NO_INTERRUPTS
1685 || (reload_completed
1686 && cfun->machine->is_signal
1687 && prologue_epilogue_contains (insn)))
1688 {
1689 *l = 2;
1690 return (AS2 (out,__SP_H__,%B1) CR_TAB
1691 AS2 (out,__SP_L__,%A1));
1692 }
1693 /* In interrupt prolog we know interrupts are enabled. */
1694 else if (reload_completed
1695 && cfun->machine->is_interrupt
1696 && prologue_epilogue_contains (insn))
1697 {
1698 *l = 4;
1699 return ("cli" CR_TAB
1700 AS2 (out,__SP_H__,%B1) CR_TAB
1701 "sei" CR_TAB
1702 AS2 (out,__SP_L__,%A1));
1703 }
1704 *l = 5;
1705 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1706 "cli" CR_TAB
1707 AS2 (out,__SP_H__,%B1) CR_TAB
1708 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1709 AS2 (out,__SP_L__,%A1));
1710 }
1711 else if (test_hard_reg_class (STACK_REG, src))
1712 {
1713 *l = 2;
1714 return (AS2 (in,%A0,__SP_L__) CR_TAB
1715 AS2 (in,%B0,__SP_H__));
1716 }
1717
1718 if (AVR_HAVE_MOVW)
1719 {
1720 *l = 1;
1721 return (AS2 (movw,%0,%1));
1722 }
1723 else
1724 {
1725 *l = 2;
1726 return (AS2 (mov,%A0,%A1) CR_TAB
1727 AS2 (mov,%B0,%B1));
1728 }
1729 }
1730 else if (CONSTANT_P (src))
1731 {
1732 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1733 {
1734 *l = 2;
1735 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1736 AS2 (ldi,%B0,hi8(%1)));
1737 }
1738
1739 if (GET_CODE (src) == CONST_INT)
1740 {
1741 if (src == const0_rtx) /* mov r,L */
1742 {
1743 *l = 2;
1744 return (AS1 (clr,%A0) CR_TAB
1745 AS1 (clr,%B0));
1746 }
1747 else if (src == const1_rtx)
1748 {
1749 *l = 3;
1750 return (AS1 (clr,%A0) CR_TAB
1751 AS1 (clr,%B0) CR_TAB
1752 AS1 (inc,%A0));
1753 }
1754 else if (src == constm1_rtx)
1755 {
1756 /* Immediate constants -1 to any register */
1757 *l = 3;
1758 return (AS1 (clr,%0) CR_TAB
1759 AS1 (dec,%A0) CR_TAB
1760 AS2 (mov,%B0,%A0));
1761 }
1762 else
1763 {
1764 int bit_nr = exact_log2 (INTVAL (src));
1765
1766 if (bit_nr >= 0)
1767 {
1768 *l = 4;
1769 if (!real_l)
1770 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1771 AS1 (clr,%B0) CR_TAB
1772 "set"), operands);
1773 if (!real_l)
1774 avr_output_bld (operands, bit_nr);
1775
1776 return "";
1777 }
1778 }
1779
1780 if ((INTVAL (src) & 0xff) == 0)
1781 {
1782 *l = 5;
1783 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1784 AS1 (clr,%A0) CR_TAB
1785 AS2 (ldi,r31,hi8(%1)) CR_TAB
1786 AS2 (mov,%B0,r31) CR_TAB
1787 AS2 (mov,r31,__tmp_reg__));
1788 }
1789 else if ((INTVAL (src) & 0xff00) == 0)
1790 {
1791 *l = 5;
1792 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1793 AS2 (ldi,r31,lo8(%1)) CR_TAB
1794 AS2 (mov,%A0,r31) CR_TAB
1795 AS1 (clr,%B0) CR_TAB
1796 AS2 (mov,r31,__tmp_reg__));
1797 }
1798 }
1799
1800 /* Last resort, equal to loading from memory. */
1801 *l = 6;
1802 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1803 AS2 (ldi,r31,lo8(%1)) CR_TAB
1804 AS2 (mov,%A0,r31) CR_TAB
1805 AS2 (ldi,r31,hi8(%1)) CR_TAB
1806 AS2 (mov,%B0,r31) CR_TAB
1807 AS2 (mov,r31,__tmp_reg__));
1808 }
1809 else if (GET_CODE (src) == MEM)
1810 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1811 }
1812 else if (GET_CODE (dest) == MEM)
1813 {
1814 const char *template;
1815
1816 if (src == const0_rtx)
1817 operands[1] = zero_reg_rtx;
1818
1819 template = out_movhi_mr_r (insn, operands, real_l);
1820
1821 if (!real_l)
1822 output_asm_insn (template, operands);
1823
1824 operands[1] = src;
1825 return "";
1826 }
1827 fatal_insn ("invalid insn:", insn);
1828 return "";
1829 }
1830
/* Output a QImode load, register OP[0] <- memory OP[1], for INSN.
   If L is non-NULL only store the insn length there.  */

const char *
out_movqi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);	/* the address */
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* SREG and other I/O-space addresses can use IN instead of LDS.  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
	{
	  *l = 1;
	  return AS2 (in,%0,__SREG__);
	}
      if (avr_io_address_p (x, 1))
	{
	  *l = 1;
	  return AS2 (in,%0,%1-0x20);
	}
      *l = 2;
      return AS2 (lds,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x,0))
	   && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      /* NOTE(review): this looks like it is meant to test
	 "disp > MAX_LD_OFFSET (mode)"; the ">= 63" form with the mode
	 size subtracted differs from the check used by out_movhi_r_mr —
	 confirm intent before touching.  */
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
	{
	  int disp = INTVAL (XEXP (x,1));
	  /* Over-large displacements are only expected on Y.  */
	  if (REGNO (XEXP (x,0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Temporarily adjust Y to bring the operand in reach, then
	     restore it.  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
			    AS2 (ldd,%0,Y+63)     CR_TAB
			    AS2 (sbiw,r28,%o1-63));

	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%0,Y)            CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}
      else if (REGNO (XEXP (x,0)) == REG_X)
	{
	  /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
	     it but I have this situation with extremal optimizing options.  */
	  /* X supports no displacement: add the offset, load, and undo the
	     addition unless X is dead or overwritten by the load.  */
	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
	      || reg_unused_after (insn, XEXP (x,0)))
	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
			    AS2 (ld,%0,X));

	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
			  AS2 (ld,%0,X)      CR_TAB
			  AS2 (sbiw,r26,%o1));
	}
      *l = 1;
      return AS2 (ldd,%0,%1);
    }
  *l = 1;
  return AS2 (ld,%0,%1);
}
1898
/* Output a HImode load, register OP[0] <- memory OP[1], for INSN.
   If L is non-NULL only store the insn length there.  */

const char *
out_movhi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
	{
	  /* Destination overlaps the base register: buffer the low byte
	     in __tmp_reg__ so the base survives until the high load.  */
	  *l = 3;
	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));
	}
      else if (reg_base == REG_X)        /* (R26) */
	{
	  if (reg_unused_after (insn, base))
	    {
	      *l = 2;
	      return (AS2 (ld,%A0,X+) CR_TAB
		      AS2 (ld,%B0,X));
	    }
	  /* Undo the post-increment when X is still live.  */
	  *l  = 3;
	  return (AS2 (ld,%A0,X+) CR_TAB
		  AS2 (ld,%B0,X) CR_TAB
		  AS2 (sbiw,r26,1));
	}
      else                      /* (R)  */
	{
	  *l = 2;
	  return (AS2 (ld,%A0,%1)    CR_TAB
		  AS2 (ldd,%B0,%1+1));
	}
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      /* NOTE(review): this declaration shadows the function-scope
	 reg_base above — intentional here, but easy to misread.  */
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  /* Over-large displacements are only expected on Y.  */
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Adjust Y to bring the operand in reach, then restore it.  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
			    AS2 (ldd,%A0,Y+62)    CR_TAB
			    AS2 (ldd,%B0,Y+63)    CR_TAB
			    AS2 (sbiw,r28,%o1-62));

	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%A0,Y)           CR_TAB
			  AS2 (ldd,%B0,Y+1)        CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}
      if (reg_base == REG_X)
	{
	  /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
	     it but I have this situation with extremal
	     optimization options.  */

	  *l = 4;
	  if (reg_base == reg_dest)
	    return (AS2 (adiw,r26,%o1) CR_TAB
		    AS2 (ld,__tmp_reg__,X+) CR_TAB
		    AS2 (ld,%B0,X) CR_TAB
		    AS2 (mov,%A0,__tmp_reg__));

	  return (AS2 (adiw,r26,%o1) CR_TAB
		  AS2 (ld,%A0,X+) CR_TAB
		  AS2 (ld,%B0,X) CR_TAB
		  AS2 (sbiw,r26,%o1+1));
	}

      if (reg_base == reg_dest)
	{
	  /* Base overlaps destination: buffer the low byte.  */
	  *l = 3;
	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
		  AS2 (ldd,%B0,%B1)         CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));
	}

      *l = 2;
      return (AS2 (ldd,%A0,%A1) CR_TAB
	      AS2 (ldd,%B0,%B1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
	fatal_insn ("incorrect insn:", insn);

      if (mem_volatile_p)
	{
	  /* Volatile: decrement first, then read low byte before high.  */
	  if (REGNO (XEXP (base, 0)) == REG_X)
	    {
	      *l = 4;
	      return (AS2 (sbiw,r26,2)  CR_TAB
		      AS2 (ld,%A0,X+)   CR_TAB
		      AS2 (ld,%B0,X)    CR_TAB
		      AS2 (sbiw,r26,1));
	    }
	  else
	    {
	      *l = 3;
	      return (AS2 (sbiw,%r1,2)   CR_TAB
		      AS2 (ld,%A0,%p1)  CR_TAB
		      AS2 (ldd,%B0,%p1+1));
	    }
	}

      /* Non-volatile: two pre-decrement loads, high byte first.  */
      *l = 2;
      return (AS2 (ld,%B0,%1) CR_TAB
	      AS2 (ld,%A0,%1));
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
	fatal_insn ("incorrect insn:", insn);

      *l = 2;
      return (AS2 (ld,%A0,%1)  CR_TAB
	      AS2 (ld,%B0,%1));
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* I/O-space addresses can use IN instead of LDS.  */
      if (avr_io_address_p (base, 2))
	{
	  *l = 2;
	  return (AS2 (in,%A0,%A1-0x20) CR_TAB
		  AS2 (in,%B0,%B1-0x20));
	}
      *l = 4;
      return (AS2 (lds,%A0,%A1) CR_TAB
	      AS2 (lds,%B0,%B1));
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
2051
/* Output a SImode load, register OP[0] <- memory OP[1], for INSN.
   If L is non-NULL only store the insn length there.  */

const char *
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
	{
	  if (reg_dest == REG_X)
	    /* "ld r26,-X" is undefined */
	    /* Load from the top down, keeping r27 in __tmp_reg__ so the
	       base halves are overwritten last.  */
	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
			  AS2 (ld,r29,X)          CR_TAB
			  AS2 (ld,r28,-X)         CR_TAB
			  AS2 (ld,__tmp_reg__,-X) CR_TAB
			  AS2 (sbiw,r26,1)        CR_TAB
			  AS2 (ld,r26,X)          CR_TAB
			  AS2 (mov,r27,__tmp_reg__));
	  else if (reg_dest == REG_X - 2)
	    /* Destination's top half is X itself: buffer byte C.  */
	    return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
			  AS2 (ld,%B0,X+)          CR_TAB
			  AS2 (ld,__tmp_reg__,X+)  CR_TAB
			  AS2 (ld,%D0,X)           CR_TAB
			  AS2 (mov,%C0,__tmp_reg__));
	  else if (reg_unused_after (insn, base))
	    return *l=4, (AS2 (ld,%A0,X+)  CR_TAB
			  AS2 (ld,%B0,X+) CR_TAB
			  AS2 (ld,%C0,X+) CR_TAB
			  AS2 (ld,%D0,X));
	  else
	    return *l=5, (AS2 (ld,%A0,X+)  CR_TAB
			  AS2 (ld,%B0,X+) CR_TAB
			  AS2 (ld,%C0,X+) CR_TAB
			  AS2 (ld,%D0,X)  CR_TAB
			  AS2 (sbiw,r26,3));
	}
      else
	{
	  /* Overlapping base/destination: order the loads so the base
	     is consumed before it is overwritten.  */
	  if (reg_dest == reg_base)
	    return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
			  AS2 (ldd,%C0,%1+2) CR_TAB
			  AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
			  AS2 (ld,%A0,%1)  CR_TAB
			  AS2 (mov,%B0,__tmp_reg__));
	  else if (reg_base == reg_dest + 2)
	    return *l=5, (AS2 (ld ,%A0,%1)    CR_TAB
			  AS2 (ldd,%B0,%1+1) CR_TAB
			  AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
			  AS2 (ldd,%D0,%1+3) CR_TAB
			  AS2 (mov,%C0,__tmp_reg__));
	  else
	    return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
			  AS2 (ldd,%B0,%1+1) CR_TAB
			  AS2 (ldd,%C0,%1+2) CR_TAB
			  AS2 (ldd,%D0,%1+3));
	}
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  /* Over-large displacements are only expected on Y; adjust Y,
	     load, and restore it.  */
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
			    AS2 (ldd,%A0,Y+60)    CR_TAB
			    AS2 (ldd,%B0,Y+61)    CR_TAB
			    AS2 (ldd,%C0,Y+62)    CR_TAB
			    AS2 (ldd,%D0,Y+63)    CR_TAB
			    AS2 (sbiw,r28,%o1-60));

	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%A0,Y)           CR_TAB
			  AS2 (ldd,%B0,Y+1)        CR_TAB
			  AS2 (ldd,%C0,Y+2)        CR_TAB
			  AS2 (ldd,%D0,Y+3)        CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
	{
	  /* R = (X + d) */
	  if (reg_dest == REG_X)
	    {
	      *l = 7;
	      /* "ld r26,-X" is undefined */
	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
		      AS2 (ld,r29,X)          CR_TAB
		      AS2 (ld,r28,-X)         CR_TAB
		      AS2 (ld,__tmp_reg__,-X) CR_TAB
		      AS2 (sbiw,r26,1)        CR_TAB
		      AS2 (ld,r26,X)          CR_TAB
		      AS2 (mov,r27,__tmp_reg__));
	    }
	  *l = 6;
	  if (reg_dest == REG_X - 2)
	    return (AS2 (adiw,r26,%o1)      CR_TAB
		    AS2 (ld,r24,X+)         CR_TAB
		    AS2 (ld,r25,X+)         CR_TAB
		    AS2 (ld,__tmp_reg__,X+) CR_TAB
		    AS2 (ld,r27,X)          CR_TAB
		    AS2 (mov,r26,__tmp_reg__));

	  return (AS2 (adiw,r26,%o1) CR_TAB
		  AS2 (ld,%A0,X+)    CR_TAB
		  AS2 (ld,%B0,X+)    CR_TAB
		  AS2 (ld,%C0,X+)    CR_TAB
		  AS2 (ld,%D0,X)     CR_TAB
		  AS2 (sbiw,r26,%o1+3));
	}
      /* Overlap handling as in the plain (R) case above.  */
      if (reg_dest == reg_base)
	return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
		      AS2 (ldd,%C0,%C1) CR_TAB
		      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
		      AS2 (ldd,%A0,%A1) CR_TAB
		      AS2 (mov,%B0,__tmp_reg__));
      else if (reg_dest == reg_base - 2)
	return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
		      AS2 (ldd,%B0,%B1) CR_TAB
		      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
		      AS2 (ldd,%D0,%D1) CR_TAB
		      AS2 (mov,%C0,__tmp_reg__));
      return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
		    AS2 (ldd,%B0,%B1) CR_TAB
		    AS2 (ldd,%C0,%C1) CR_TAB
		    AS2 (ldd,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, (AS2 (ld,%D0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%A0,%1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (ld,%A0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%D0,%1));
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
		  AS2 (lds,%B0,%B1) CR_TAB
		  AS2 (lds,%C0,%C1) CR_TAB
		  AS2 (lds,%D0,%D1));

  fatal_insn ("unknown move insn:",insn);
  return "";
}
2212
/* Output the asm template for a 4-byte (SImode) store of register
   OP[1] into memory OP[0].  INSN is used to query register liveness
   so the address register can be left clobbered when it is dead
   afterwards.  If L is non-NULL, only the instruction count is stored
   through it (nothing else changes).  Returns the asm template.  */

const char *
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);            /* Address expression of the MEM.  */
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;                              /* Dummy length when caller passed no L.  */

  if (!l)
    l = &tmp;

  /* Absolute address: four "sts" instructions (2 words each).  */
  if (CONSTANT_ADDRESS_P (base))
    return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
                 AS2 (sts,%B0,%B1) CR_TAB
                 AS2 (sts,%C0,%C1) CR_TAB
                 AS2 (sts,%D0,%D1));
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* Source SImode starts at r26, i.e. it fully overlaps the
                 X address register pair (r26/r27).
                 "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (st,X,r26) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X+,__tmp_reg__) CR_TAB
                              AS2 (st,X+,r28) CR_TAB
                              AS2 (st,X,r29));
              else
                /* X is live after INSN: restore it with the final sbiw.  */
                return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (st,X,r26) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X+,__tmp_reg__) CR_TAB
                              AS2 (st,X+,r28) CR_TAB
                              AS2 (st,X,r29) CR_TAB
                              AS2 (sbiw,r26,3));
            }
          else if (reg_base == reg_src + 2)
            {
              /* The two high bytes of src are r26/r27 themselves; park
                 them in __zero_reg__/__tmp_reg__ before X is advanced.  */
              if (reg_unused_after (insn, base))
                return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
                              AS2 (mov,__tmp_reg__,%D1) CR_TAB
                              AS2 (st,%0+,%A1) CR_TAB
                              AS2 (st,%0+,%B1) CR_TAB
                              AS2 (st,%0+,__zero_reg__) CR_TAB
                              AS2 (st,%0,__tmp_reg__) CR_TAB
                              AS1 (clr,__zero_reg__));
              else
                return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
                              AS2 (mov,__tmp_reg__,%D1) CR_TAB
                              AS2 (st,%0+,%A1) CR_TAB
                              AS2 (st,%0+,%B1) CR_TAB
                              AS2 (st,%0+,__zero_reg__) CR_TAB
                              AS2 (st,%0,__tmp_reg__) CR_TAB
                              AS1 (clr,__zero_reg__) CR_TAB
                              AS2 (sbiw,r26,3));
            }
          /* No overlap: plain post-increment stores, then restore X.  */
          return *l=5, (AS2 (st,%0+,%A1) CR_TAB
                        AS2 (st,%0+,%B1) CR_TAB
                        AS2 (st,%0+,%C1) CR_TAB
                        AS2 (st,%0,%D1) CR_TAB
                        AS2 (sbiw,r26,3));
        }
      else
        /* Base is Y or Z: "std" takes the displacement directly.  */
        return *l=4, (AS2 (st,%0,%A1)    CR_TAB
                      AS2 (std,%0+1,%B1) CR_TAB
                      AS2 (std,%0+2,%C1) CR_TAB
                      AS2 (std,%0+3,%D1));
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds the std range; only Y is expected here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Close enough for a single adiw/sbiw adjustment pair.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
                            AS2 (std,Y+60,%A1) CR_TAB
                            AS2 (std,Y+61,%B1) CR_TAB
                            AS2 (std,Y+62,%C1) CR_TAB
                            AS2 (std,Y+63,%D1) CR_TAB
                            AS2 (sbiw,r28,%o0-60));

          /* Otherwise add/remove the full displacement with subi/sbci.  */
          return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o0)) CR_TAB
                          AS2 (st,Y,%A1) CR_TAB
                          AS2 (std,Y+1,%B1) CR_TAB
                          AS2 (std,Y+2,%C1) CR_TAB
                          AS2 (std,Y+3,%D1) CR_TAB
                          AS2 (subi,r28,lo8(%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(%o0)));
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source overlaps X completely: save r26/r27 first.  */
              *l = 9;
              return (AS2 (mov,__tmp_reg__,r26) CR_TAB
                      AS2 (mov,__zero_reg__,r27) CR_TAB
                      AS2 (adiw,r26,%o0) CR_TAB
                      AS2 (st,X+,__tmp_reg__) CR_TAB
                      AS2 (st,X+,__zero_reg__) CR_TAB
                      AS2 (st,X+,r28) CR_TAB
                      AS2 (st,X,r29) CR_TAB
                      AS1 (clr,__zero_reg__) CR_TAB
                      AS2 (sbiw,r26,%o0+3));
            }
          else if (reg_src == REG_X - 2)
            {
              /* Source is r24..r27: its top half is X itself.  */
              *l = 9;
              return (AS2 (mov,__tmp_reg__,r26) CR_TAB
                      AS2 (mov,__zero_reg__,r27) CR_TAB
                      AS2 (adiw,r26,%o0) CR_TAB
                      AS2 (st,X+,r24) CR_TAB
                      AS2 (st,X+,r25) CR_TAB
                      AS2 (st,X+,__tmp_reg__) CR_TAB
                      AS2 (st,X,__zero_reg__) CR_TAB
                      AS1 (clr,__zero_reg__) CR_TAB
                      AS2 (sbiw,r26,%o0+3));
            }
          /* X has no displacement addressing: adjust, store, restore.  */
          *l = 6;
          return (AS2 (adiw,r26,%o0) CR_TAB
                  AS2 (st,X+,%A1) CR_TAB
                  AS2 (st,X+,%B1) CR_TAB
                  AS2 (st,X+,%C1) CR_TAB
                  AS2 (st,X,%D1) CR_TAB
                  AS2 (sbiw,r26,%o0+3));
        }
      /* Y or Z with an in-range displacement.  */
      return *l=4, (AS2 (std,%A0,%A1) CR_TAB
                    AS2 (std,%B0,%B1) CR_TAB
                    AS2 (std,%C0,%C1) CR_TAB
                    AS2 (std,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: bytes go out high-to-low.  */
    return *l=4, (AS2 (st,%0,%D1) CR_TAB
                  AS2 (st,%0,%C1) CR_TAB
                  AS2 (st,%0,%B1) CR_TAB
                  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (st,%0,%A1) CR_TAB
                  AS2 (st,%0,%B1) CR_TAB
                  AS2 (st,%0,%C1) CR_TAB
                  AS2 (st,%0,%D1));
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2367
/* Output a 4-byte (SImode/SFmode) move for INSN with OPERANDS[0] =
   destination and OPERANDS[1] = source.  Dispatches on operand kinds:
   reg<-reg, reg<-constant, reg<-mem and mem<-reg.  If L is non-NULL
   only the instruction count is reported; otherwise the asm text is
   returned (or printed directly for the multi-part cases).  */

const char *
output_movsisf(rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;      /* NULL means "really emit", non-NULL means "just count".  */

  if (!l)
    l = &dummy;

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy in the direction that avoids clobbering not-yet-read
             bytes of an overlapping source.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return (AS2 (movw,%C0,%C1) CR_TAB
                          AS2 (movw,%A0,%A1));
                }
              *l = 4;
              return (AS2 (mov,%D0,%D1) CR_TAB
                      AS2 (mov,%C0,%C1) CR_TAB
                      AS2 (mov,%B0,%B1) CR_TAB
                      AS2 (mov,%A0,%A1));
            }
          else
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return (AS2 (movw,%A0,%A1) CR_TAB
                          AS2 (movw,%C0,%C1));
                }
              *l = 4;
              return (AS2 (mov,%A0,%A1) CR_TAB
                      AS2 (mov,%B0,%B1) CR_TAB
                      AS2 (mov,%C0,%C1) CR_TAB
                      AS2 (mov,%D0,%D1));
            }
        }
      else if (CONSTANT_P (src))
        {
          /* ldi only works on the upper registers (LD_REGS).  */
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            {
              *l = 4;
              return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
                      AS2 (ldi,%B0,hi8(%1)) CR_TAB
                      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
                      AS2 (ldi,%D0,hhi8(%1)));
            }

          if (GET_CODE (src) == CONST_INT)
            {
              /* Template to clear all four bytes of the destination.  */
              const char *const clr_op0 =
                AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
                                 AS1 (clr,%B0) CR_TAB
                                 AS2 (movw,%C0,%A0))
                              : (AS1 (clr,%A0) CR_TAB
                                 AS1 (clr,%B0) CR_TAB
                                 AS1 (clr,%C0) CR_TAB
                                 AS1 (clr,%D0));

              if (src == const0_rtx) /* mov r,L */
                {
                  *l = AVR_HAVE_MOVW ? 3 : 4;
                  return clr_op0;
                }
              else if (src == const1_rtx)
                {
                  /* Clear, then bump the low byte to 1.  */
                  if (!real_l)
                    output_asm_insn (clr_op0, operands);
                  *l = AVR_HAVE_MOVW ? 4 : 5;
                  return AS1 (inc,%A0);
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constants -1 to any register */
                  if (AVR_HAVE_MOVW)
                    {
                      *l = 4;
                      return (AS1 (clr,%A0) CR_TAB
                              AS1 (dec,%A0) CR_TAB
                              AS2 (mov,%B0,%A0) CR_TAB
                              AS2 (movw,%C0,%A0));
                    }
                  *l = 5;
                  return (AS1 (clr,%A0) CR_TAB
                          AS1 (dec,%A0) CR_TAB
                          AS2 (mov,%B0,%A0) CR_TAB
                          AS2 (mov,%C0,%A0) CR_TAB
                          AS2 (mov,%D0,%A0));
                }
              else
                {
                  /* Powers of two: clear, set T flag, then bld the bit.  */
                  int bit_nr = exact_log2 (INTVAL (src));

                  if (bit_nr >= 0)
                    {
                      *l = AVR_HAVE_MOVW ? 5 : 6;
                      if (!real_l)
                        {
                          output_asm_insn (clr_op0, operands);
                          output_asm_insn ("set", operands);
                        }
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }
            }

          /* Last resort, better than loading from memory.  */
          *l = 10;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1)) CR_TAB
                  AS2 (mov,%A0,r31) CR_TAB
                  AS2 (ldi,r31,hi8(%1)) CR_TAB
                  AS2 (mov,%B0,r31) CR_TAB
                  AS2 (ldi,r31,hlo8(%1)) CR_TAB
                  AS2 (mov,%C0,r31) CR_TAB
                  AS2 (ldi,r31,hhi8(%1)) CR_TAB
                  AS2 (mov,%D0,r31) CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
      else if (GET_CODE (src) == MEM)
        return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *template;

      /* Storing zero: reuse __zero_reg__ instead of materializing 0.  */
      if (src == const0_rtx)
          operands[1] = zero_reg_rtx;

      template = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (template, operands);

      /* Restore the original operand for any later passes.  */
      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
2518
/* Output the asm template for a 1-byte (QImode) store of register
   OP[1] into memory OP[0].  Uses "out" for I/O-space addresses,
   "sts" for other absolute addresses, and st/std for register-based
   addressing.  If L is non-NULL only the instruction count is set.  */

const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);       /* Address expression of the MEM.  */
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Writing SREG has a dedicated symbolic form.  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
        {
          *l = 1;
          return AS2 (out,__SREG__,%1);
        }
      /* I/O space: "out" wants the I/O address (data address - 0x20).  */
      if (avr_io_address_p (x, 1))
        {
          *l = 1;
          return AS2 (out,%0-0x20,%1);
        }
      *l = 2;
      return AS2 (sts,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x,0))
           && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
        {
          /* Displacement out of std range; only Y is expected here.  */
          int disp = INTVAL (XEXP (x,1));
          if (REGNO (XEXP (x,0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
                            AS2 (std,Y+63,%1) CR_TAB
                            AS2 (sbiw,r28,%o0-63));

          return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o0)) CR_TAB
                          AS2 (st,Y,%1) CR_TAB
                          AS2 (subi,r28,lo8(%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(%o0)));
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement mode: adjust X, store, maybe restore.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              /* Source overlaps X: copy it out before adiw clobbers it.  */
              if (reg_unused_after (insn, XEXP (x,0)))
                return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
                                AS2 (adiw,r26,%o0) CR_TAB
                                AS2 (st,X,__tmp_reg__));

              return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
                              AS2 (adiw,r26,%o0) CR_TAB
                              AS2 (st,X,__tmp_reg__) CR_TAB
                              AS2 (sbiw,r26,%o0));
            }
          else
            {
              if (reg_unused_after (insn, XEXP (x,0)))
                return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
                                AS2 (st,X,%1));

              return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
                              AS2 (st,X,%1) CR_TAB
                              AS2 (sbiw,r26,%o0));
            }
        }
      /* Y or Z with in-range displacement.  */
      *l = 1;
      return AS2 (std,%0,%1);
    }
  /* Plain (reg), pre-decrement or post-increment address.  */
  *l = 1;
  return AS2 (st,%0,%1);
}
2598
/* Output the asm template for a 2-byte (HImode) store of register
   OP[1] into memory OP[0].  A volatile destination forces the high
   byte to be written first, which is what 16-bit I/O registers
   require.  If L is non-NULL only the instruction count is set.  */

const char *
out_movhi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);
  int tmp;      /* Dummy length when caller passed no L.  */

  if (!l)
    l = &tmp;
  if (CONSTANT_ADDRESS_P (base))
    {
      /* I/O space: two "out" instructions, high byte first.  */
      if (avr_io_address_p (base, 2))
        {
          *l = 2;
          return (AS2 (out,%B0-0x20,%B1) CR_TAB
                  AS2 (out,%A0-0x20,%A1));
        }
      return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
                      AS2 (sts,%A0,%A1));
    }
  if (reg_base > 0)
    {
      if (reg_base == REG_X)
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" and "st -X,r26" are undefined.  */
              if (!mem_volatile_p && reg_unused_after (insn, src))
                return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (st,X,r26) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,__tmp_reg__));
              else
                /* Volatile (or X still live): high byte first, then
                   step back and store the low byte.  */
                return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,__tmp_reg__) CR_TAB
                              AS2 (sbiw,r26,1) CR_TAB
                              AS2 (st,X,r26));
            }
          else
            {
              if (!mem_volatile_p && reg_unused_after (insn, base))
                return *l=2, (AS2 (st,X+,%A1) CR_TAB
                              AS2 (st,X,%B1));
              else
                return *l=3, (AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,%B1) CR_TAB
                              AS2 (st,-X,%A1));
            }
        }
      else
        /* Y or Z base: std handles the +1 displacement directly.  */
        return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
                      AS2 (st,%0,%A1));
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds std range; only Y is expected.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
                            AS2 (std,Y+63,%B1) CR_TAB
                            AS2 (std,Y+62,%A1) CR_TAB
                            AS2 (sbiw,r28,%o0-62));

          return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o0)) CR_TAB
                          AS2 (std,Y+1,%B1) CR_TAB
                          AS2 (st,Y,%A1) CR_TAB
                          AS2 (subi,r28,lo8(%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(%o0)));
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source overlaps X: save r26/r27 in the temp registers.  */
              *l = 7;
              return (AS2 (mov,__tmp_reg__,r26) CR_TAB
                      AS2 (mov,__zero_reg__,r27) CR_TAB
                      AS2 (adiw,r26,%o0+1) CR_TAB
                      AS2 (st,X,__zero_reg__) CR_TAB
                      AS2 (st,-X,__tmp_reg__) CR_TAB
                      AS1 (clr,__zero_reg__) CR_TAB
                      AS2 (sbiw,r26,%o0));
            }
          *l = 4;
          return (AS2 (adiw,r26,%o0+1) CR_TAB
                  AS2 (st,X,%B1) CR_TAB
                  AS2 (st,-X,%A1) CR_TAB
                  AS2 (sbiw,r26,%o0));
        }
      return *l=2, (AS2 (std,%B0,%B1) CR_TAB
                    AS2 (std,%A0,%A1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement stores go high byte first.  */
    return *l=2, (AS2 (st,%0,%B1) CR_TAB
                  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (mem_volatile_p)
        {
          /* Must write high byte first; emulate the post-increment
             with explicit pointer arithmetic.  */
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (adiw,r26,1) CR_TAB
                      AS2 (st,X,%B1) CR_TAB
                      AS2 (st,-X,%A1) CR_TAB
                      AS2 (adiw,r26,2));
            }
          else
            {
              *l = 3;
              return (AS2 (std,%p0+1,%B1) CR_TAB
                      AS2 (st,%p0,%A1) CR_TAB
                      AS2 (adiw,%r0,2));
            }
        }

      *l = 2;
      return (AS2 (st,%0,%A1) CR_TAB
              AS2 (st,%0,%B1));
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2735
2736 /* Return 1 if frame pointer for current function required. */
2737
2738 int
2739 frame_pointer_required_p (void)
2740 {
2741 return (current_function_calls_alloca
2742 || current_function_args_info.nregs == 0
2743 || get_frame_size () > 0);
2744 }
2745
2746 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2747
2748 static RTX_CODE
2749 compare_condition (rtx insn)
2750 {
2751 rtx next = next_real_insn (insn);
2752 RTX_CODE cond = UNKNOWN;
2753 if (next && GET_CODE (next) == JUMP_INSN)
2754 {
2755 rtx pat = PATTERN (next);
2756 rtx src = SET_SRC (pat);
2757 rtx t = XEXP (src, 0);
2758 cond = GET_CODE (t);
2759 }
2760 return cond;
2761 }
2762
2763 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2764
2765 static int
2766 compare_sign_p (rtx insn)
2767 {
2768 RTX_CODE cond = compare_condition (insn);
2769 return (cond == GE || cond == LT);
2770 }
2771
2772 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2773 that needs to be swapped (GT, GTU, LE, LEU). */
2774
2775 int
2776 compare_diff_p (rtx insn)
2777 {
2778 RTX_CODE cond = compare_condition (insn);
2779 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2780 }
2781
2782 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2783
2784 int
2785 compare_eq_p (rtx insn)
2786 {
2787 RTX_CODE cond = compare_condition (insn);
2788 return (cond == EQ || cond == NE);
2789 }
2790
2791
2792 /* Output test instruction for HImode. */
2793
2794 const char *
2795 out_tsthi (rtx insn, int *l)
2796 {
2797 if (compare_sign_p (insn))
2798 {
2799 if (l) *l = 1;
2800 return AS1 (tst,%B0);
2801 }
2802 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2803 && compare_eq_p (insn))
2804 {
2805 /* Faster than sbiw if we can clobber the operand. */
2806 if (l) *l = 1;
2807 return AS2 (or,%A0,%B0);
2808 }
2809 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2810 {
2811 if (l) *l = 1;
2812 return AS2 (sbiw,%0,0);
2813 }
2814 if (l) *l = 2;
2815 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2816 AS2 (cpc,%B0,__zero_reg__));
2817 }
2818
2819
2820 /* Output test instruction for SImode. */
2821
2822 const char *
2823 out_tstsi (rtx insn, int *l)
2824 {
2825 if (compare_sign_p (insn))
2826 {
2827 if (l) *l = 1;
2828 return AS1 (tst,%D0);
2829 }
2830 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2831 {
2832 if (l) *l = 3;
2833 return (AS2 (sbiw,%A0,0) CR_TAB
2834 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2835 AS2 (cpc,%D0,__zero_reg__));
2836 }
2837 if (l) *l = 4;
2838 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2839 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2840 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2841 AS2 (cpc,%D0,__zero_reg__));
2842 }
2843
2844
/* Generate asm equivalent for various shifts.
   Shift count is a CONST_INT, MEM or REG.
   This only handles cases that are not already
   carefully hand-optimized in ?sh??i3_out.

   TEMPLATE is the asm for one shift step, T_LEN its length in insns.
   OPERANDS[2] is the shift count; OPERANDS[3] may be a scratch reg
   (when the insn pattern is a PARALLEL).  If LEN is non-NULL, only
   the total instruction count is stored; otherwise the full sequence
   (possibly a loop with local labels 1:/2:) is printed.  */

void
out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
                    int *len, int t_len)
{
  rtx op[10];
  char str[500];
  int second_label = 1;     /* Need a "2:" label to enter the loop test.  */
  int saved_in_tmp = 0;     /* Loop counter borrowed and saved in __tmp_reg__.  */
  int use_zero_reg = 0;     /* __zero_reg__ used as the loop counter.  */

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];
  str[0] = 0;

  if (len)
    *len = 1;

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means a scratch register is available.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      /* Non-positive count: nothing to emit.  */
      if (count <= 0)
        {
          if (len)
            *len = 0;
          return;
        }

      if (count < 8 && !scratch)
        use_zero_reg = 1;

      /* When optimizing for size, inline only if not longer than the
         equivalent loop (whose setup cost depends on the counter used).  */
      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */
          if (len)
            *len = t_len * count;
          else
            {
              while (count-- > 0)
                output_asm_insn (template, op);
            }

          return;
        }

      if (scratch)
        {
          /* Load the count into the scratch register.  */
          if (!len)
            strcat (str, AS2 (ldi,%3,%2));
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;
          if (len)
            *len = 2;
          else
            strcat (str, ("set" CR_TAB
                          AS2 (bld,%3,%2-1)));
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = gen_rtx_REG (QImode,
                               ((true_regnum (operands[0]) - 1) & 15) + 16);
          op[4] = tmp_reg_rtx;
          saved_in_tmp = 1;

          if (len)
            *len = 3;  /* Includes "mov %3,%4" after the loop.  */
          else
            strcat (str, (AS2 (mov,%4,%3) CR_TAB
                          AS2 (ldi,%3,%2)));
        }

      /* Count is known nonzero: no need to jump to the loop test first.  */
      second_label = 0;
    }
  else if (GET_CODE (operands[2]) == MEM)
    {
      /* Count in memory: load it into __tmp_reg__ first.  */
      rtx op_mov[10];

      op[3] = op_mov[0] = tmp_reg_rtx;
      op_mov[1] = op[2];

      if (len)
        out_movqi_r_mr (insn, op_mov, len);
      else
        output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
    }
  else if (register_operand (operands[2], QImode))
    {
      /* Count in a register: use it directly if it dies here, else
         work on a copy in __tmp_reg__.  */
      if (reg_unused_after (insn, operands[2]))
        op[3] = op[2];
      else
        {
          op[3] = tmp_reg_rtx;
          if (!len)
            strcat (str, (AS2 (mov,%3,%2) CR_TAB));
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    {
      /* Enter the loop at its test so a zero count shifts nothing.  */
      if (len)
        ++*len;
      else
        strcat (str, AS1 (rjmp,2f));
    }

  if (len)
    *len += t_len + 2;  /* template + dec + brXX */
  else
    {
      /* Emit: 1: <shift>; [2:] dec/lsr counter; branch back to 1.  */
      strcat (str, "\n1:\t");
      strcat (str, template);
      strcat (str, second_label ? "\n2:\t" : "\n\t");
      strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
      strcat (str, CR_TAB);
      strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
      if (saved_in_tmp)
        strcat (str, (CR_TAB AS2 (mov,%3,%4)));
      output_asm_insn (str, op);
    }
}
2987
2988
/* 8bit shift left ((char)x << i)

   OPERANDS[0] is the destination/source byte, OPERANDS[2] the count.
   Known constant counts get hand-optimized sequences; everything else
   falls through to the generic loop in out_shift_with_cnt.  If LEN is
   non-NULL only the instruction count is stored.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift by 8 or more: result is zero.  */
          *len = 1;
          return AS1 (clr,%0);

        case 1:
          *len = 1;
          return AS1 (lsl,%0);

        case 2:
          *len = 2;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 3:
          *len = 3;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 4:
          /* swap + mask beats four lsl, but andi needs an upper reg.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return (AS1 (swap,%0) CR_TAB
                      AS2 (andi,%0,0xf0));
            }
          *len = 4;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsl,%0)  CR_TAB
                      AS2 (andi,%0,0xe0));
            }
          *len = 5;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsl,%0)  CR_TAB
                      AS1 (lsl,%0)  CR_TAB
                      AS2 (andi,%0,0xc0));
            }
          *len = 6;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 7:
          /* Rotate bit 0 through carry into bit 7 of a cleared byte.  */
          *len = 3;
          return (AS1 (ror,%0) CR_TAB
                  AS1 (clr,%0) CR_TAB
                  AS1 (ror,%0));
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Variable or small unhandled count: generic loop.  */
  out_shift_with_cnt (AS1 (lsl,%0),
                      insn, operands, len, 1);
  return "";
}
3084
3085
/* 16bit shift left ((short)x << i)

   Hand-optimized sequences for constant counts; LDI_OK means the
   destination is in LD_REGS (andi/ldi usable), SCRATCH means the insn
   pattern supplies a scratch register as operand 3.  Counts without a
   special case (or rejected under optimize_size) fall through to the
   generic loop.  If LEN is non-NULL only the length is stored.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;     /* Remember caller's LEN to restore before fallthrough.  */

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift by 16 or more: result is zero.  */
          *len = 2;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes, then fix up the crossing bits
                 with the eor/andi/eor masking trick.  */
              *len = 6;
              return (AS1 (swap,%A0)      CR_TAB
                      AS1 (swap,%B0)      CR_TAB
                      AS2 (andi,%B0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0)   CR_TAB
                      AS2 (andi,%A0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          if (scratch)
            {
              *len = 7;
              return (AS1 (swap,%A0)    CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      AS2 (and,%B0,%3)  CR_TAB
                      AS2 (eor,%B0,%A0) CR_TAB
                      AS2 (and,%A0,%3)  CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              /* One lsl/rol, then the same swap/mask trick as count 4.  */
              *len = 8;
              return (AS1 (lsl,%A0)       CR_TAB
                      AS1 (rol,%B0)       CR_TAB
                      AS1 (swap,%A0)      CR_TAB
                      AS1 (swap,%B0)      CR_TAB
                      AS2 (andi,%B0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0)   CR_TAB
                      AS2 (andi,%A0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          if (scratch)
            {
              *len = 9;
              return (AS1 (lsl,%A0)     CR_TAB
                      AS1 (rol,%B0)     CR_TAB
                      AS1 (swap,%A0)    CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      AS2 (and,%B0,%3)  CR_TAB
                      AS2 (eor,%B0,%A0) CR_TAB
                      AS2 (and,%A0,%3)  CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Implement <<6 as a right-shift by 2 of a byte-rotated value.  */
          *len = 9;
          return (AS1 (clr,__tmp_reg__) CR_TAB
                  AS1 (lsr,%B0)         CR_TAB
                  AS1 (ror,%A0)         CR_TAB
                  AS1 (ror,__tmp_reg__) CR_TAB
                  AS1 (lsr,%B0)         CR_TAB
                  AS1 (ror,%A0)         CR_TAB
                  AS1 (ror,__tmp_reg__) CR_TAB
                  AS2 (mov,%B0,%A0)     CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));

        case 7:
          /* <<7 = byte move plus right-rotate by one through carry.  */
          *len = 5;
          return (AS1 (lsr,%B0)     CR_TAB
                  AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (ror,%B0)     CR_TAB
                  AS1 (ror,%A0));

        case 8:
          /* Whole-byte move; note the source operand (%A1) is used so
             dest and source need not be the same register.  */
          return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
                            AS1 (clr,%A0));

        case 9:
          *len = 3;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0));

        case 10:
          *len = 4;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0));

        case 11:
          *len = 5;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0));

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS2 (andi,%B0,0xf0));
            }
          if (scratch)
            {
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      AS2 (and,%B0,%3));
            }
          *len = 6;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0));

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS1 (lsl,%B0)     CR_TAB
                      AS2 (andi,%B0,0xe0));
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x20 to shift left by 5 into the product
                 high byte; mul clobbers r1, so clear it afterwards.  */
              *len = 5;
              return (AS2 (ldi,%3,0x20) CR_TAB
                      AS2 (mul,%A0,%3)  CR_TAB
                      AS2 (mov,%B0,r0)  CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS1 (lsl,%B0)     CR_TAB
                      AS2 (ldi,%3,0xe0) CR_TAB
                      AS2 (and,%B0,%3));
            }
          if (AVR_HAVE_MUL)
            {
              /* No scratch: build the 0x20 multiplier in r1 via set/bld.  */
              *len = 6;
              return ("set"            CR_TAB
                      AS2 (bld,r1,5)   CR_TAB
                      AS2 (mul,%A0,r1) CR_TAB
                      AS2 (mov,%B0,r0) CR_TAB
                      AS1 (clr,%A0)    CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          *len = 7;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0));

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Multiply by 0x40: shift by 6 lands in the high product
                 byte; r1 must be re-cleared after mul.  */
              *len = 5;
              return (AS2 (ldi,%B0,0x40) CR_TAB
                      AS2 (mul,%A0,%B0)  CR_TAB
                      AS2 (mov,%B0,r0)   CR_TAB
                      AS1 (clr,%A0)      CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return (AS2 (ldi,%3,0x40) CR_TAB
                      AS2 (mul,%A0,%3)  CR_TAB
                      AS2 (mov,%B0,r0)  CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: 6 single-byte shifts of the high byte.  */
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS2 (ldi,%A0,6) "\n1:\t"
                      AS1 (lsl,%B0)     CR_TAB
                      AS1 (dec,%A0)     CR_TAB
                      AS1 (brne,1b));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* <<14 via >>2 into the high byte through carry.  */
          *len = 6;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 15:
          /* Only bit 0 survives, rotated into bit 15.  */
          *len = 4;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (clr,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
                       AS1 (rol,%B0)),
                      insn, operands, len, 2);
  return "";
}
3342
3343
/* 32bit shift left ((long)x << i)

   Byte-aligned constant counts (8/16/24), >= 32, and 31 get direct
   sequences; everything else goes through the generic loop.  If LEN
   is non-NULL only the length is stored.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;     /* Remember caller's LEN to restore before fallthrough.  */

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift by 32 or more: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, (AS1 (clr,%D0) CR_TAB
                              AS1 (clr,%C0) CR_TAB
                              AS2 (movw,%A0,%C0));
          *len = 4;
          return (AS1 (clr,%D0) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 8:
          {
            /* Byte move: copy direction chosen so an overlapping
               source is never clobbered before it is read.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            if (reg0 >= reg1)
              return (AS2 (mov,%D0,%C1) CR_TAB
                      AS2 (mov,%C0,%B1) CR_TAB
                      AS2 (mov,%B0,%A1) CR_TAB
                      AS1 (clr,%A0));
            else
              return (AS1 (clr,%A0)     CR_TAB
                      AS2 (mov,%B0,%A1) CR_TAB
                      AS2 (mov,%C0,%B1) CR_TAB
                      AS2 (mov,%D0,%C1));
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* Source low word already sits in the dest high word.  */
            if (reg0 + 2 == reg1)
              return *len = 2, (AS1 (clr,%B0) CR_TAB
                                AS1 (clr,%A0));
            if (AVR_HAVE_MOVW)
              return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
                                AS1 (clr,%B0)      CR_TAB
                                AS1 (clr,%A0));
            else
              return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
                                AS2 (mov,%D0,%B1) CR_TAB
                                AS1 (clr,%B0)     CR_TAB
                                AS1 (clr,%A0));
          }

        case 24:
          *len = 4;
          return (AS2 (mov,%D0,%A1) CR_TAB
                  AS1 (clr,%C0)     CR_TAB
                  AS1 (clr,%B0)     CR_TAB
                  AS1 (clr,%A0));

        case 31:
          /* Only bit 0 survives, rotated into bit 31.  */
          *len = 6;
          return (AS1 (clr,%D0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%D0) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
                       AS1 (rol,%B0) CR_TAB
                       AS1 (rol,%C0) CR_TAB
                       AS1 (rol,%D0)),
                      insn, operands, len, 4);
  return "";
}
3433
/* 8bit arithmetic shift right ((signed char)x >> i)

   Counts 1-5 are plain asr chains; 6 and 7 use the carry/sign tricks
   below.  Counts >= 8 fall through to case 7, which replicates the
   sign bit into every position — the correct arithmetic result.
   Other operands go through the generic loop.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return AS1 (asr,%0);

        case 2:
          *len = 2;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 3:
          *len = 3;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 4:
          *len = 4;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 5:
          *len = 5;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 6:
          /* Save bit 6 in T, smear the sign via sbc, restore bit 6
             as the new bit 0.  */
          *len = 4;
          return (AS2 (bst,%0,6) CR_TAB
                  AS1 (lsl,%0)   CR_TAB
                  AS2 (sbc,%0,%0) CR_TAB
                  AS2 (bld,%0,0));

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* lsl puts the sign into carry; sbc replicates it to all bits.  */
          *len = 2;
          return (AS1 (lsl,%0) CR_TAB
                  AS2 (sbc,%0,%0));
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (asr,%0),
                      insn, operands, len, 1);
  return "";
}
3504
3505
/* 16bit arithmetic shift right ((signed short)x >> i)

   Hand-optimized sequences for constant counts >= 6 (smaller counts
   and variable counts use the generic asr/ror loop).  The recurring
   "lsl %B0; sbc %B0,%B0" idiom replicates the sign bit into a whole
   byte.  Counts >= 16 fall through to case 15, which fills both bytes
   with the sign.  LDI_OK/SCRATCH as in ashlhi3_out.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;     /* Remember caller's LEN to restore before fallthrough.  */

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift left by 2 into (%B0:%A0:__tmp_reg__), then take the
             upper two bytes — equivalent to >>6 with sign extension.  */
          *len = 8;
          return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
                  AS2 (mov,%A0,%B0)         CR_TAB
                  AS1 (lsl,__tmp_reg__)     CR_TAB
                  AS1 (rol,%A0)             CR_TAB
                  AS2 (sbc,%B0,%B0)         CR_TAB
                  AS1 (lsl,__tmp_reg__)     CR_TAB
                  AS1 (rol,%A0)             CR_TAB
                  AS1 (rol,%B0));

        case 7:
          /* One left shift, then the high byte becomes the low byte
             and sbc smears the sign into the high byte.  */
          *len = 4;
          return (AS1 (lsl,%A0)     CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (rol,%A0)     CR_TAB
                  AS2 (sbc,%B0,%B0));

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1)
              return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
                                AS1 (lsl,%B0)     CR_TAB
                                AS2 (sbc,%B0,%B0));
            else
              /* Distinct registers: sign-extend with sbrc/dec instead
                 of destroying the source high byte.  */
              return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
                                AS1 (clr,%B0)     CR_TAB
                                AS2 (sbrc,%A0,7)  CR_TAB
                                AS1 (dec,%B0));
          }

        case 9:
          *len = 4;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0));

        case 10:
          *len = 5;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0));

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Signed multiply by 0x20 == arithmetic shift by 5; the
                 wanted byte lands in r1, which must be re-cleared.  */
              *len = 5;
              return (AS2 (ldi,%A0,0x20) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1)   CR_TAB
                      AS2 (sbc,%B0,%B0)  CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0));

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x10) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1)   CR_TAB
                      AS2 (sbc,%B0,%B0)  CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0));

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x08) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1)   CR_TAB
                      AS2 (sbc,%B0,%B0)  CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0)     CR_TAB
                  AS1 (asr,%A0));

        case 14:
          /* Two bits of data left: build them via rotates through carry.  */
          *len = 5;
          return (AS1 (lsl,%B0)     CR_TAB
                  AS2 (sbc,%A0,%A0) CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (rol,%A0));

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Both bytes become the sign bit.  */
          return *len = 3, (AS1 (lsl,%B0)     CR_TAB
                            AS2 (sbc,%A0,%A0) CR_TAB
                            AS2 (mov,%B0,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
                       AS1 (ror,%A0)),
                      insn, operands, len, 2);
  return "";
}
3668
3669
/* 32bit arithmetic shift right ((signed long)x >> i)

   INSN is the shift insn; OPERANDS[0] is the SImode destination,
   which on entry already holds the value of OPERANDS[1]; OPERANDS[2]
   is the shift count.  Returns the assembler template string.  If LEN
   is non-NULL, no code is emitted and only the instruction count is
   stored through LEN.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 8:
	  /* Shift by a whole byte: shuffle bytes down one position and
	     sign-extend into the top byte.  The copy direction depends
	     on whether the register ranges overlap.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len=6;
	    if (reg0 <= reg1)
	      return (AS2 (mov,%A0,%B1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS1 (clr,%D0)     CR_TAB
		      AS2 (sbrc,%C0,7)  CR_TAB
		      AS1 (dec,%D0));
	    else
	      return (AS1 (clr,%D0)     CR_TAB
		      AS2 (sbrc,%D1,7)  CR_TAB
		      AS1 (dec,%D0)     CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%A0,%B1));
	  }

	case 16:
	  /* Shift by two bytes; %D0/%C0 get the sign extension
	     (0x00 or 0xff via COM of a cleared register).  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
				AS2 (sbrc,%B0,7)  CR_TAB
				AS1 (com,%D0)     CR_TAB
				AS2 (mov,%C0,%D0));
	    if (AVR_HAVE_MOVW)
	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
				AS1 (clr,%D0)      CR_TAB
				AS2 (sbrc,%B0,7)   CR_TAB
				AS1 (com,%D0)      CR_TAB
				AS2 (mov,%C0,%D0));
	    else
	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
				AS2 (mov,%A0,%C1) CR_TAB
				AS1 (clr,%D0)     CR_TAB
				AS2 (sbrc,%B0,7)  CR_TAB
				AS1 (com,%D0)     CR_TAB
				AS2 (mov,%C0,%D0));
	  }

	case 24:
	  /* Shift by three bytes: only the top byte survives; the
	     other three bytes become the sign extension.  */
	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
			    AS1 (clr,%D0)     CR_TAB
			    AS2 (sbrc,%A0,7)  CR_TAB
			    AS1 (com,%D0)     CR_TAB
			    AS2 (mov,%B0,%D0) CR_TAB
			    AS2 (mov,%C0,%D0));

	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* fall through */

	case 31:
	  /* Replicate the sign bit into all 32 bits: LSL moves it into
	     carry, SBC produces 0x00 or 0xff accordingly.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
			      AS2 (sbc,%A0,%A0) CR_TAB
			      AS2 (mov,%B0,%A0) CR_TAB
			      AS2 (movw,%C0,%A0));
	  else
	    return *len = 5, (AS1 (lsl,%D0)     CR_TAB
			      AS2 (sbc,%A0,%A0) CR_TAB
			      AS2 (mov,%B0,%A0) CR_TAB
			      AS2 (mov,%C0,%A0) CR_TAB
			      AS2 (mov,%D0,%A0));
	}
      len = t;
    }
  /* No special-cased count: emit a generic 4-byte shift loop.  */
  out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
		       AS1 (ror,%C0) CR_TAB
		       AS1 (ror,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 4);
  return "";
}
3767
/* 8bit logic shift right ((unsigned char)x >> i)

   Emits assembler for a logical right shift of the QImode value in
   OPERANDS[0] by the count in OPERANDS[2].  If LEN is non-NULL, only
   the instruction count is stored through it.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Count >= 8: result is zero.  */
	  *len = 1;
	  return AS1 (clr,%0);

	case 1:
	  *len = 1;
	  return AS1 (lsr,%0);

	case 2:
	  *len = 2;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));
	case 3:
	  *len = 3;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 4:
	  /* SWAP exchanges nibbles; masking the high nibble gives a
	     4-bit right shift in two insns (needs ANDI, i.e. an
	     upper register).  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len=2;
	      return (AS1 (swap,%0) CR_TAB
		      AS2 (andi,%0,0x0f));
	    }
	  *len = 4;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS2 (andi,%0,0x7));
	    }
	  *len = 5;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS2 (andi,%0,0x3));
	    }
	  *len = 6;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 7:
	  /* Only bit 7 survives: rotate it into carry, clear, rotate
	     the carry back in as bit 0.  */
	  *len = 3;
	  return (AS1 (rol,%0) CR_TAB
		  AS1 (clr,%0) CR_TAB
		  AS1 (rol,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsr,%0),
		      insn, operands, len, 1);
  return "";
}
3862
/* 16bit logic shift right ((unsigned short)x >> i)

   Emits assembler for a logical right shift of the HImode value in
   OPERANDS[0] by the count in OPERANDS[2].  A PARALLEL pattern means
   a scratch register is available as %3.  If LEN is non-NULL, only
   the instruction count is stored through it.  A `break' out of the
   switch falls back to the generic loop emitted at the bottom; the
   trailing comments give the resulting loop length.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Count >= 16: result is zero.  */
	  *len = 2;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 4:
	  /* Nibble shift via SWAP and a cross-byte mask/EOR dance.  */
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      *len = 6;
	      return (AS1 (swap,%B0)      CR_TAB
		      AS1 (swap,%A0)      CR_TAB
		      AS2 (andi,%A0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0)   CR_TAB
		      AS2 (andi,%B0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return (AS1 (swap,%B0)    CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      AS2 (and,%A0,%3)  CR_TAB
		      AS2 (eor,%A0,%B0) CR_TAB
		      AS2 (and,%B0,%3)  CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  /* One plain shift, then the 4-bit SWAP trick.  */
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      *len = 8;
	      return (AS1 (lsr,%B0)       CR_TAB
		      AS1 (ror,%A0)       CR_TAB
		      AS1 (swap,%B0)      CR_TAB
		      AS1 (swap,%A0)      CR_TAB
		      AS2 (andi,%A0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0)   CR_TAB
		      AS2 (andi,%B0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return (AS1 (lsr,%B0)     CR_TAB
		      AS1 (ror,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      AS2 (and,%A0,%3)  CR_TAB
		      AS2 (eor,%A0,%B0) CR_TAB
		      AS2 (and,%B0,%3)  CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  break;  /* 10 */

	case 6:
	  /* Shift LEFT twice through __tmp_reg__, then move bytes
	     down one position: (x << 2) >> 8 == x >> 6.  */
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  *len = 9;
	  return (AS1 (clr,__tmp_reg__) CR_TAB
		  AS1 (lsl,%A0)         CR_TAB
		  AS1 (rol,%B0)         CR_TAB
		  AS1 (rol,__tmp_reg__) CR_TAB
		  AS1 (lsl,%A0)         CR_TAB
		  AS1 (rol,%B0)         CR_TAB
		  AS1 (rol,__tmp_reg__) CR_TAB
		  AS2 (mov,%A0,%B0)     CR_TAB
		  AS2 (mov,%B0,__tmp_reg__));

	case 7:
	  /* One left shift, byte move, rotate carry in; %B0 ends up
	     as 0 or 1 via SBC/NEG.  */
	  *len = 5;
	  return (AS1 (lsl,%A0)     CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (rol,%A0)     CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (neg,%B0));

	case 8:
	  /* Whole-byte shift.  */
	  return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
			    AS1 (clr,%B0));

	case 9:
	  *len = 3;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0));

	case 10:
	  *len = 4;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 11:
	  *len = 5;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 12:
	  /* Byte move plus nibble shift.  */
	  if (ldi_ok)
	    {
	      *len = 4;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (andi,%A0,0x0f));
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      AS2 (and,%A0,%3));
	    }
	  *len = 6;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS1 (lsr,%A0)     CR_TAB
		      AS2 (andi,%A0,0x07));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      /* x >> 13 == high byte of (x * 8); MUL clobbers r1
	         (__zero_reg__), hence the final CLR.  */
	      *len = 5;
	      return (AS2 (ldi,%3,0x08) CR_TAB
		      AS2 (mul,%B0,%3)  CR_TAB
		      AS2 (mov,%A0,r1)  CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS1 (lsr,%A0)     CR_TAB
		      AS2 (ldi,%3,0x07) CR_TAB
		      AS2 (and,%A0,%3));
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the constant 8 in r1 with SET/BLD, since no
	         upper register is available for LDI.  */
	      *len = 6;
	      return ("set"            CR_TAB
		      AS2 (bld,r1,3)   CR_TAB
		      AS2 (mul,%B0,r1) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS1 (clr,%B0)    CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  *len = 7;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      /* x >> 14 == high byte of (x * 4).  */
	      *len = 5;
	      return (AS2 (ldi,%A0,0x04) CR_TAB
		      AS2 (mul,%B0,%A0)  CR_TAB
		      AS2 (mov,%A0,r1)   CR_TAB
		      AS1 (clr,%B0)      CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return (AS2 (ldi,%3,0x04) CR_TAB
		      AS2 (mul,%B0,%3)  CR_TAB
		      AS2 (mov,%A0,r1)  CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Compact counted loop: shift the moved byte 6 times.  */
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS2 (ldi,%B0,6) "\n1:\t"
		      AS1 (lsr,%A0)     CR_TAB
		      AS1 (dec,%B0)     CR_TAB
		      AS1 (brne,1b));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  /* Shift the top two bits left into %A0 instead.  */
	  *len = 6;
	  return (AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (clr,%B0));

	case 15:
	  /* Only bit 15 survives.  */
	  *len = 4;
	  return (AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (clr,%B0));
	}
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 2);
  return "";
}
4119
/* 32bit logic shift right ((unsigned long)x >> i)

   Note: 32 bits is `long' on AVR, where `int' is 16 bits.

   Emits assembler for a logical right shift of the SImode value in
   OPERANDS[0] by the count in OPERANDS[2].  If LEN is non-NULL, only
   the instruction count is stored through it.  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Count >= 32: result is zero.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, (AS1 (clr,%D0) CR_TAB
			      AS1 (clr,%C0) CR_TAB
			      AS2 (movw,%A0,%C0));
	  *len = 4;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 8:
	  /* Whole-byte shift; copy direction depends on overlap.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    if (reg0 <= reg1)
	      return (AS2 (mov,%A0,%B1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS1 (clr,%D0));
	    else
	      return (AS1 (clr,%D0)     CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%A0,%B1));
	  }

	case 16:
	  /* Two-byte shift.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      return *len = 2, (AS1 (clr,%C0) CR_TAB
				AS1 (clr,%D0));
	    if (AVR_HAVE_MOVW)
	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
				AS1 (clr,%C0)      CR_TAB
				AS1 (clr,%D0));
	    else
	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
				AS2 (mov,%A0,%C1) CR_TAB
				AS1 (clr,%C0)     CR_TAB
				AS1 (clr,%D0));
	  }

	case 24:
	  /* Three-byte shift: only the top byte survives.  */
	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
			    AS1 (clr,%B0)     CR_TAB
			    AS1 (clr,%C0)     CR_TAB
			    AS1 (clr,%D0));

	case 31:
	  /* Only bit 31 survives: extract it with SBRC/INC.  */
	  *len = 6;
	  return (AS1 (clr,%A0)    CR_TAB
		  AS2 (sbrc,%D0,7) CR_TAB
		  AS1 (inc,%A0)    CR_TAB
		  AS1 (clr,%B0)    CR_TAB
		  AS1 (clr,%C0)    CR_TAB
		  AS1 (clr,%D0));
	}
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
		       AS1 (ror,%C0) CR_TAB
		       AS1 (ror,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 4);
  return "";
}
4209
/* Modifies the length assigned to instruction INSN.
   LEN is the initially computed length of the insn.  Returns the
   corrected length.

   Re-runs the relevant output functions in "length only" mode (their
   LEN out-parameter) for insn shapes whose length depends on operand
   details: moves, cc0 tests, AND/IOR with constant masks, reloads
   with a clobber, and constant shifts.  */

int
adjust_insn_length (rtx insn, int len)
{
  rtx patt = PATTERN (insn);
  rtx set;

  if (GET_CODE (patt) == SET)
    {
      rtx op[10];
      op[1] = SET_SRC (patt);
      op[0] = SET_DEST (patt);
      if (general_operand (op[1], VOIDmode)
	  && general_operand (op[0], VOIDmode))
	{
	  /* Plain move: ask the mover for its exact length.  */
	  switch (GET_MODE (op[0]))
	    {
	    case QImode:
	      output_movqi (insn, op, &len);
	      break;
	    case HImode:
	      output_movhi (insn, op, &len);
	      break;
	    case SImode:
	    case SFmode:
	      output_movsisf (insn, op, &len);
	      break;
	    default:
	      break;
	    }
	}
      else if (op[0] == cc0_rtx && REG_P (op[1]))
	{
	  /* Compare-against-zero (test) insn.  */
	  switch (GET_MODE (op[1]))
	    {
	    case HImode: out_tsthi (insn,&len); break;
	    case SImode: out_tstsi (insn,&len); break;
	    default: break;
	    }
	}
      else if (GET_CODE (op[1]) == AND)
	{
	  /* ANDing with a constant needs one insn per byte whose mask
	     is not all-ones (those bytes are untouched).  */
	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
	    {
	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
	      if (GET_MODE (op[1]) == SImode)
		len = (((mask & 0xff) != 0xff)
		       + ((mask & 0xff00) != 0xff00)
		       + ((mask & 0xff0000L) != 0xff0000L)
		       + ((mask & 0xff000000L) != 0xff000000L));
	      else if (GET_MODE (op[1]) == HImode)
		len = (((mask & 0xff) != 0xff)
		       + ((mask & 0xff00) != 0xff00));
	    }
	}
      else if (GET_CODE (op[1]) == IOR)
	{
	  /* ORing with a constant needs one insn per nonzero byte.  */
	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
	    {
	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
	      if (GET_MODE (op[1]) == SImode)
		len = (((mask & 0xff) != 0)
		       + ((mask & 0xff00) != 0)
		       + ((mask & 0xff0000L) != 0)
		       + ((mask & 0xff000000L) != 0));
	      else if (GET_MODE (op[1]) == HImode)
		len = (((mask & 0xff) != 0)
		       + ((mask & 0xff00) != 0));
	    }
	}
    }
  set = single_set (insn);
  if (set)
    {
      rtx op[10];

      op[1] = SET_SRC (set);
      op[0] = SET_DEST (set);

      if (GET_CODE (patt) == PARALLEL
	  && general_operand (op[1], VOIDmode)
	  && general_operand (op[0], VOIDmode))
	{
	  /* Reload pattern with a clobbered scratch as op[2].  */
	  if (XVECLEN (patt, 0) == 2)
	    op[2] = XVECEXP (patt, 0, 1);

	  switch (GET_MODE (op[0]))
	    {
	    case QImode:
	      len = 2;
	      break;
	    case HImode:
	      output_reload_inhi (insn, op, &len);
	      break;
	    case SImode:
	    case SFmode:
	      output_reload_insisf (insn, op, &len);
	      break;
	    default:
	      break;
	    }
	}
      else if (GET_CODE (op[1]) == ASHIFT
	  || GET_CODE (op[1]) == ASHIFTRT
	  || GET_CODE (op[1]) == LSHIFTRT)
	{
	  /* Shift insn: dispatch on direction and mode; the *_out
	     functions store the exact length through &len.  */
	  rtx ops[10];
	  ops[0] = op[0];
	  ops[1] = XEXP (op[1],0);
	  ops[2] = XEXP (op[1],1);
	  switch (GET_CODE (op[1]))
	    {
	    case ASHIFT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: ashlqi3_out (insn,ops,&len); break;
		case HImode: ashlhi3_out (insn,ops,&len); break;
		case SImode: ashlsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    case ASHIFTRT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: ashrqi3_out (insn,ops,&len); break;
		case HImode: ashrhi3_out (insn,ops,&len); break;
		case SImode: ashrsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    case LSHIFTRT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: lshrqi3_out (insn,ops,&len); break;
		case HImode: lshrhi3_out (insn,ops,&len); break;
		case SImode: lshrsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    default:
	      break;
	    }
	}
    }
  return len;
}
4358
4359 /* Return nonzero if register REG dead after INSN. */
4360
4361 int
4362 reg_unused_after (rtx insn, rtx reg)
4363 {
4364 return (dead_or_set_p (insn, reg)
4365 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4366 }
4367
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.

   Scans forward from INSN: a later read of REG means "used" (return
   0); a full overwrite means "unused" (return 1); reaching the end
   of the insn stream also counts as unused.  */

int
_reg_unused_after (rtx insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 is dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      /* Conservative: the register could be live down any branch
	 target we cannot follow.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  int retval = 0;

	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	    {
	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
	      rtx set = single_set (this_insn);

	      if (GET_CODE (this_insn) == CALL_INSN)
		code = CALL_INSN;
	      else if (GET_CODE (this_insn) == JUMP_INSN)
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      /* A read anywhere in the sequence means REG is used.  */
	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* REG may be passed to the callee, or be a call-saved
	     register that survives the call.  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  return 1;
}
4472
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels: pointer-sized
   references to code are emitted as gs() expressions so the linker
   can generate stubs for addresses beyond 128 KiB.  Returns true if
   the object was emitted here, false to use the default.  */

static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
	  || GET_CODE (x) == LABEL_REF))
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);
      return true;
    }
  return default_assemble_integer (x, size, aligned_p);
}
4490
/* The routine used to output NUL terminated strings.  We use a special
   version of this for most svr4 targets because doing so makes the
   generated assembly code more compact (and thus faster to assemble)
   as well as more readable, especially for targets like the i386
   (where the only alternative is to output character sequences as
   comma separated lists of numbers).

   STR must be NUL-terminated; it is emitted to FILE as one quoted
   .string directive.  The ESCAPES table classifies each byte:
   0 = emit as-is, 1 = emit as octal escape, otherwise the table
   entry is the character to put after a backslash.  */

void
gas_output_limited_string(FILE *file, const char *str)
{
  const unsigned char *_limited_str = (const unsigned char *) str;
  unsigned ch;
  fprintf (file, "%s\"", STRING_ASM_OP);
  for (; (ch = *_limited_str); _limited_str++)
    {
      int escape;
      switch (escape = ESCAPES[ch])
	{
	case 0:
	  putc (ch, file);
	  break;
	case 1:
	  fprintf (file, "\\%03o", ch);
	  break;
	default:
	  putc ('\\', file);
	  putc (escape, file);
	  break;
	}
    }
  fprintf (file, "\"\n");
}
4523
/* The routine used to output sequences of byte values.  We use a special
   version of this for most svr4 targets because doing so makes the
   generated assembly code more compact (and thus faster to assemble)
   as well as more readable.  Note that if we find subparts of the
   character sequence which end with NUL (and which are shorter than
   STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.

   Emits LENGTH bytes of STR to FILE as .ascii directives, breaking
   the output into chunks of at most ~60 characters per line.  */

void
gas_output_ascii(FILE *file, const char *str, size_t length)
{
  const unsigned char *_ascii_bytes = (const unsigned char *) str;
  const unsigned char *limit = _ascii_bytes + length;
  unsigned bytes_in_chunk = 0;
  for (; _ascii_bytes < limit; _ascii_bytes++)
    {
      const unsigned char *p;
      /* Start a new line once the current one is long enough.  */
      if (bytes_in_chunk >= 60)
	{
	  fprintf (file, "\"\n");
	  bytes_in_chunk = 0;
	}
      /* Find the next NUL (or the end of the buffer).  */
      for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
	continue;
      if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
	{
	  /* Short NUL-terminated run: emit it as a .string.  */
	  if (bytes_in_chunk > 0)
	    {
	      fprintf (file, "\"\n");
	      bytes_in_chunk = 0;
	    }
	  gas_output_limited_string (file, (const char*)_ascii_bytes);
	  _ascii_bytes = p;
	}
      else
	{
	  /* Otherwise append the byte (escaped if needed) to the
	     current .ascii chunk.  */
	  int escape;
	  unsigned ch;
	  if (bytes_in_chunk == 0)
	    fprintf (file, "\t.ascii\t\"");
	  switch (escape = ESCAPES[ch = *_ascii_bytes])
	    {
	    case 0:
	      putc (ch, file);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      fprintf (file, "\\%03o", ch);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      putc ('\\', file);
	      putc (escape, file);
	      bytes_in_chunk += 2;
	      break;
	    }
	}
    }
  if (bytes_in_chunk > 0)
    fprintf (file, "\"\n");
}
4584
4585 /* Return value is nonzero if pseudos that have been
4586 assigned to registers of class CLASS would likely be spilled
4587 because registers of CLASS are needed for spill registers. */
4588
4589 enum reg_class
4590 class_likely_spilled_p (int c)
4591 {
4592 return (c != ALL_REGS && c != ADDW_REGS);
4593 }
4594
/* Valid attributes:
   progmem - put data to program memory;
   signal - make a function to be hardware interrupt. After function
   prologue interrupts are disabled;
   interrupt - make a function to be hardware interrupt. After function
   prologue interrupts are enabled;
   naked - don't generate function prologue/epilogue and `ret' command.

   Only `progmem' attribute valid for type.  */

const struct attribute_spec avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute },
  /* Sentinel entry: a NULL name terminates the table.  */
  { NULL,        0, 0, false, false, false, NULL }
};
4615
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.

   Accepts the attribute on initialized static/external variables;
   for a TYPE_DECL the attribute is migrated onto the type itself.
   Sets *NO_ADD_ATTRS when the attribute should not be attached.  */
static tree
avr_handle_progmem_attribute (tree *node, tree name,
			      tree args ATTRIBUTE_UNUSED,
			      int flags ATTRIBUTE_UNUSED,
			      bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	{
	  /* This is really a decl attribute, not a type attribute,
	     but try to handle it for GCC 3.0 backwards compatibility.  */

	  tree type = TREE_TYPE (*node);
	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
	  tree newtype = build_type_attribute_variant (type, attr);

	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
	  TREE_TYPE (*node) = newtype;
	  *no_add_attrs = true;
	}
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
	{
	  /* Flash data is copied nowhere at startup, so it must have
	     an initializer to be meaningful.  */
	  if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
	    {
	      warning (0, "only initialized variables can be placed into "
		       "program memory area");
	      *no_add_attrs = true;
	    }
	}
      else
	{
	  warning (OPT_Wattributes, "%qs attribute ignored",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
4658
/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
   struct attribute_spec.handler.

   Used for "signal" and "interrupt": rejects non-function decls and
   warns when the handler's assembler name does not begin with
   "__vector" (the naming scheme the AVR vector table relies on).  */

static tree
avr_handle_fndecl_attribute (tree *node, tree name,
			     tree args ATTRIBUTE_UNUSED,
			     int flags ATTRIBUTE_UNUSED,
			     bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
      const char *attr = IDENTIFIER_POINTER (name);

      /* If the function has the 'signal' or 'interrupt' attribute, test to
	 make sure that the name of the function is "__vector_NN" so as to
	 catch when the user misspells the interrupt vector name.  */

      if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
	{
	  if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
	    {
	      warning (0, "%qs appears to be a misspelled interrupt handler",
		       func_name);
	    }
	}
      else if (strncmp (attr, "signal", strlen ("signal")) == 0)
	{
	  if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
	    {
	      warning (0, "%qs appears to be a misspelled signal handler",
		       func_name);
	    }
	}
    }

  return NULL_TREE;
}
4703
4704 static tree
4705 avr_handle_fntype_attribute (tree *node, tree name,
4706 tree args ATTRIBUTE_UNUSED,
4707 int flags ATTRIBUTE_UNUSED,
4708 bool *no_add_attrs)
4709 {
4710 if (TREE_CODE (*node) != FUNCTION_TYPE)
4711 {
4712 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4713 IDENTIFIER_POINTER (name));
4714 *no_add_attrs = true;
4715 }
4716
4717 return NULL_TREE;
4718 }
4719
/* Look for attribute `progmem' in DECL
   if found return 1, otherwise 0.

   The attribute may be on the declaration itself (ATTRIBUTES) or on
   the element type of a (possibly multi-dimensional) array.  */

int
avr_progmem_p (tree decl, tree attributes)
{
  tree a;

  if (TREE_CODE (decl) != VAR_DECL)
    return 0;

  if (NULL_TREE
      != lookup_attribute ("progmem", attributes))
    return 1;

  /* Strip array layers to reach the element type.  */
  a=decl;
  do
    a = TREE_TYPE(a);
  while (TREE_CODE (a) == ARRAY_TYPE);

  if (a == error_mark_node)
    return 0;

  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
    return 1;

  return 0;
}
4748
/* Add the section attribute if the variable is in progmem.
   Implements TARGET_INSERT_ATTRIBUTES: progmem variables are forced
   into the ".progmem.data" section and marked read-only.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      static const char dsec[] = ".progmem.data";
      *attributes = tree_cons (get_identifier ("section"),
		build_tree_list (NULL, build_string (strlen (dsec), dsec)),
		*attributes);

      /* ??? This seems sketchy.  Why can't the user declare the
	 thing const in the first place?  */
      TREE_READONLY (node) = 1;
    }
}
4768
/* A get_unnamed_section callback for switching to progmem_section.
   Emits the .section directive (with flags depending on AVR_MEGA)
   followed by an alignment directive.  */

static void
avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
{
  fprintf (asm_out_file,
	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
	   AVR_MEGA ? "a" : "ax");
  /* Should already be aligned, this is just to be safe if it isn't.  */
  fprintf (asm_out_file, "\t.p2align 1\n");
}
4780
/* Implement TARGET_ASM_INIT_SECTIONS.
   Creates the progmem section and redirects read-only data into the
   normal data section (AVR cannot read flash with ordinary loads).  */

static void
avr_asm_init_sections (void)
{
  progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
					 avr_output_progmem_section_asm_op,
					 NULL);
  readonly_data_section = data_section;
}
4791
/* Implement TARGET_SECTION_TYPE_FLAGS.
   Sections named ".noinit*" get SECTION_BSS (emitted as @nobits) so
   their contents are not initialized at startup; placing an
   initialized variable there draws a warning.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (strncmp (name, ".noinit", 7) == 0)
    {
      if (decl && TREE_CODE (decl) == VAR_DECL
	  && DECL_INITIAL (decl) == NULL_TREE)
	flags |= SECTION_BSS;  /* @nobits */
      else
	warning (0, "only uninitialized variables can be placed in the "
		 ".noinit section");
    }

  return flags;
}
4809
/* Outputs some appropriate text to go at the start of an assembler
   file: symbolic names for the I/O registers SREG and the stack
   pointer, the fixed tmp/zero registers, and references that pull
   the startup copy/clear code in from libgcc.  */

static void
avr_file_start (void)
{
  if (avr_asm_only_p)
    error ("MCU %qs supported for assembler only", avr_mcu_name);

  default_file_start ();

  /*  fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
  fputs ("__SREG__ = 0x3f\n"
	 "__SP_H__ = 0x3e\n"
	 "__SP_L__ = 0x3d\n", asm_out_file);

  fputs ("__tmp_reg__ = 0\n"
	 "__zero_reg__ = 1\n", asm_out_file);

  /* FIXME: output these only if there is anything in the .data / .bss
     sections - some code size could be saved by not linking in the
     initialization code from libgcc if one or both sections are empty.  */
  fputs ("\t.global __do_copy_data\n", asm_out_file);
  fputs ("\t.global __do_clear_bss\n", asm_out_file);
}
4835
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.
   Nothing is needed on AVR; this exists to satisfy the target hook.  */

static void
avr_file_end (void)
{
}
4843
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.

   Three orders are provided, selected by the -morder1/-morder2
   options; all prefer the upper (LDI-capable) registers and defer
   the pointer registers (X/Y/Z) and the fixed r0/r1.  Entries 32-35
   are the virtual stack-pointer/flag registers.  */

void
order_regs_for_local_alloc (void)
{
  unsigned int i;
  static const int order_0[] = {
    24,25,
    18,19,
    20,21,
    22,23,
    30,31,
    26,27,
    28,29,
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    0,1,
    32,33,34,35
  };
  static const int order_1[] = {
    18,19,
    20,21,
    22,23,
    24,25,
    30,31,
    26,27,
    28,29,
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    0,1,
    32,33,34,35
  };
  static const int order_2[] = {
    25,24,
    23,22,
    21,20,
    19,18,
    30,31,
    26,27,
    28,29,
    17,16,
    15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    1,0,
    32,33,34,35
  };

  const int *order = (TARGET_ORDER_1 ? order_1 :
		      TARGET_ORDER_2 ? order_2 :
		      order_0);
  for (i=0; i < ARRAY_SIZE (order_0); ++i)
      reg_alloc_order[i] = order[i];
}
4899
4900
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.

   Registers are free; constants cost one insn per byte of MODE;
   anything else is costed recursively through avr_rtx_costs.  */

static int
avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    case REG:
    case SUBREG:
      return 0;

    case CONST_INT:
    case CONST_DOUBLE:
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  total = 0;
  avr_rtx_costs (x, code, outer, &total);
  return total;
}
4930
/* The AVR backend's rtx_cost function.  X is rtx expression whose cost
   is to be calculated.  Return true if the complete cost has been
   computed, and false if subexpressions should be scanned.  In either
   case, *TOTAL contains the cost result.

   All costs are expressed in COSTS_N_INSNS units; the per-mode and
   per-shift-count constants below encode hand-tuned instruction
   counts for the corresponding AVR insn output patterns (the exact
   values mirror the output templates elsewhere in this file --
   NOTE(review): keep them in sync with those templates).  */

static bool
avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
{
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Memory and symbolic operands cost one insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	case HImode:
	  *total = COSTS_N_INSNS (3);
	  break;

	case SImode:
	  *total = COSTS_N_INSNS (7);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case ABS:
      /* Only single-byte (and SFmode sign-bit) abs is cheap.  */
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case NOT:
      /* One COM insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case ZERO_EXTEND:
      /* One CLR per added byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case SIGN_EXTEND:
      /* Sign extension needs two extra insns to replicate the sign.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case PLUS:
      switch (mode)
	{
	case QImode:
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (2);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  /* ADIW/SBIW reach: small constants add in one insn.  */
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (2);
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (4);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (4);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case MINUS:
    case AND:
    case IOR:
      /* One insn per byte; a non-constant second operand is costed
	 separately (constants are folded into immediate forms).  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
      return true;

    case XOR:
      /* No immediate form of EOR: both operands always cost.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
      return true;

    case MULT:
      switch (mode)
	{
	case QImode:
	  if (AVR_HAVE_MUL)
	    *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
	  else if (optimize_size)
	    /* libcall: RCALL vs CALL depending on device.  */
	    *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
	  else
	    return false;
	  break;

	case HImode:
	  if (AVR_HAVE_MUL)
	    *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
	  else if (optimize_size)
	    *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
	  else
	    return false;
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* Always a libcall; only cost it as the call itself when
	 optimizing for size.  */
      if (optimize_size)
	*total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
      else
	return false;
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
      return true;

    case ASHIFT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      /* Variable shift: loop when small, unrolled otherwise.  */
	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    /* Per-count costs matching ashlhi3_out templates.  */
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 3:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
	      case 12:
		*total = COSTS_N_INSNS (5);
		break;
	      case 4:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 8);
		break;
	      case 6:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 9);
		break;
	      case 5:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 10);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 24:
		*total = COSTS_N_INSNS (3);
		break;
	      case 1:
	      case 8:
	      case 16:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case ASHIFTRT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 6)
		*total = COSTS_N_INSNS (4);
	      else if (val == 7)
		*total = COSTS_N_INSNS (2);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    /* Per-count costs matching ashrhi3_out templates.  */
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (2);
		break;
	      case 15:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 7:
	      case 8:
	      case 9:
		*total = COSTS_N_INSNS (4);
		break;
	      case 10:
	      case 14:
		*total = COSTS_N_INSNS (5);
		break;
	      case 11:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 6);
		break;
	      case 12:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 7);
		break;
	      case 6:
	      case 13:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
		break;
	      case 31:
		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case LSHIFTRT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    /* Per-count costs matching lshrhi3_out templates.  */
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
		*total = COSTS_N_INSNS (5);
		break;
	      case 3:
	      case 12:
	      case 13:
	      case 14:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 6);
		break;
	      case 4:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 7);
		break;
	      case 5:
	      case 6:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 9);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 2:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case COMPARE:
      /* Mode of the comparison comes from the first operand, since
	 the COMPARE itself has CCmode/VOIDmode.  Comparing against a
	 non-zero constant needs extra immediate-compare insns.  */
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	  break;

	case HImode:
	  *total = COSTS_N_INSNS (2);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (1);
	  break;

	case SImode:
	  *total = COSTS_N_INSNS (4);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (3);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    default:
      break;
    }
  return false;
}
5455
5456 /* Calculate the cost of a memory address. */
5457
5458 static int
5459 avr_address_cost (rtx x)
5460 {
5461 if (GET_CODE (x) == PLUS
5462 && GET_CODE (XEXP (x,1)) == CONST_INT
5463 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5464 && INTVAL (XEXP (x,1)) >= 61)
5465 return 18;
5466 if (CONSTANT_ADDRESS_P (x))
5467 {
5468 if (avr_io_address_p (x, 1))
5469 return 2;
5470 return 4;
5471 }
5472 return 4;
5473 }
5474
/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.

   X is a MEM rtx; returns 1 when its address is (plus REG const)
   where REG is a pseudo, Y, Z, or the frame/arg pointer, and the
   displacement fits the LD offset range for the access mode.
   NOTE(review): only the upper bound of the displacement is checked
   here; presumably negative displacements cannot reach this point --
   confirm against the callers/reload.  */

int
extra_constraint_Q (rtx x)
{
  if (GET_CODE (XEXP (x,0)) == PLUS
      && REG_P (XEXP (XEXP (x,0), 0))
      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x,0), 1))
	  <= MAX_LD_OFFSET (GET_MODE (x))))
    {
      rtx xx = XEXP (XEXP (x,0), 0);
      int regno = REGNO (xx);
      if (TARGET_ALL_DEBUG)
	{
	  fprintf (stderr, ("extra_constraint:\n"
			    "reload_completed: %d\n"
			    "reload_in_progress: %d\n"),
		   reload_completed, reload_in_progress);
	  debug_rtx (x);
	}
      if (regno >= FIRST_PSEUDO_REGISTER)
	return 1;		/* allocate pseudos */
      else if (regno == REG_Z || regno == REG_Y)
	return 1;		/* strictly check */
      else if (xx == frame_pointer_rtx
	       || xx == arg_pointer_rtx)
	return 1;		/* XXX frame & arg pointer checks */
    }
  return 0;
}
5507
5508 /* Convert condition code CONDITION to the valid AVR condition code. */
5509
5510 RTX_CODE
5511 avr_normalize_condition (RTX_CODE condition)
5512 {
5513 switch (condition)
5514 {
5515 case GT:
5516 return GE;
5517 case GTU:
5518 return GEU;
5519 case LE:
5520 return LT;
5521 case LEU:
5522 return LTU;
5523 default:
5524 gcc_unreachable ();
5525 }
5526 }
5527
/* This function optimizes conditional jumps.

   Machine-dependent reorg pass: walks all insns looking for cc0
   compare insns and rewrites them in place so the comparison can be
   output more cheaply -- swapping operands (and the following branch's
   condition) when both are registers, normalizing reg-vs-constant
   compares, and turning a plain tst into a compare against the negated
   source.  NOTE(review): next_real_insn is assumed to find a branch of
   the form (set pc (if_then_else ...)) after every cc0 setter --
   guaranteed by compare_diff_p, presumably; confirm.  */

static void
avr_reorg (void)
{
  rtx insn, pattern;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Only look at real single-set insns.  */
      if (! (GET_CODE (insn) == INSN
	     || GET_CODE (insn) == CALL_INSN
	     || GET_CODE (insn) == JUMP_INSN)
	  || !single_set (insn))
	continue;

      pattern = PATTERN (insn);

      if (GET_CODE (pattern) == PARALLEL)
	pattern = XVECEXP (pattern, 0, 0);
      if (GET_CODE (pattern) == SET
	  && SET_DEST (pattern) == cc0_rtx
	  && compare_diff_p (insn))
	{
	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
	    {
	      /* Now we work under compare insn.  */
	      
	      pattern = SET_SRC (pattern);
	      if (true_regnum (XEXP (pattern,0)) >= 0
		  && true_regnum (XEXP (pattern,1)) >= 0 )
		{
		  /* reg-reg compare: swap operands and reverse the
		     condition of the following branch.  */
		  rtx x = XEXP (pattern,0);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  PUT_CODE (t, swap_condition (GET_CODE (t)));
		  XEXP (pattern,0) = XEXP (pattern,1);
		  XEXP (pattern,1) = x;
		  INSN_CODE (next) = -1;
		}
	      else if (true_regnum (XEXP (pattern,0)) >= 0
		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
		{
		  /* reg-const compare: turn e.g. GT c into GE c+1 when
		     that is still representable in MODE.  */
		  rtx x = XEXP (pattern,1);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
		    {
		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
		      INSN_CODE (next) = -1;
		      INSN_CODE (insn) = -1;
		    }
		}
	    }
	  else if (true_regnum (SET_SRC (pattern)) >= 0)
	    {
	      /* This is a tst insn */
	      rtx next = next_real_insn (insn);
	      rtx pat = PATTERN (next);
	      rtx src = SET_SRC (pat);
	      rtx t = XEXP (src,0);

	      /* Compare against the negated value and reverse the
		 branch condition to match.  */
	      PUT_CODE (t, swap_condition (GET_CODE (t)));
	      SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
					       SET_SRC (pattern));
	      INSN_CODE (next) = -1;
	      INSN_CODE (insn) = -1;
	    }
	}
    }
}
5605
/* Returns register number for function return value.
   On AVR, values are returned starting at r24 (r25:r24 pair).  */

int
avr_ret_register (void)
{
  const int first_return_reg = 24;
  return first_return_reg;
}
5613
5614 /* Create an RTX representing the place where a
5615 library function returns a value of mode MODE. */
5616
5617 rtx
5618 avr_libcall_value (enum machine_mode mode)
5619 {
5620 int offs = GET_MODE_SIZE (mode);
5621 if (offs < 2)
5622 offs = 2;
5623 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5624 }
5625
5626 /* Create an RTX representing the place where a
5627 function returns a value of data type VALTYPE. */
5628
5629 rtx
5630 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5631 {
5632 unsigned int offs;
5633
5634 if (TYPE_MODE (type) != BLKmode)
5635 return avr_libcall_value (TYPE_MODE (type));
5636
5637 offs = int_size_in_bytes (type);
5638 if (offs < 2)
5639 offs = 2;
5640 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5641 offs = GET_MODE_SIZE (SImode);
5642 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5643 offs = GET_MODE_SIZE (DImode);
5644
5645 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5646 }
5647
/* Places additional restrictions on the register class to
   use when it is necessary to copy value X into a register
   in class CLASS.  The AVR port imposes no extra restriction
   and returns CLASS unchanged.  */

enum reg_class
preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
{
  return class;
}
5657
5658 int
5659 test_hard_reg_class (enum reg_class class, rtx x)
5660 {
5661 int regno = true_regnum (x);
5662 if (regno < 0)
5663 return 0;
5664
5665 if (TEST_HARD_REG_CLASS (class, regno))
5666 return 1;
5667
5668 return 0;
5669 }
5670
5671
/* Return nonzero if the jump INSN to DEST skips over exactly one
   insn, i.e. the target address is just past the insn following the
   jump.  DEST may be a LABEL_REF or the label itself.  Used to decide
   whether a conditional skip (sbrc/sbrs etc.) can replace a branch.
   Relies on INSN_ADDRESSES being computed (shorten_branches).  */

int
jump_over_one_insn_p (rtx insn, rtx dest)
{
  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
		      ? XEXP (dest, 0)
		      : dest);
  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
  int dest_addr = INSN_ADDRESSES (uid);
  /* +1: the skipped insn is one word long.  */
  return dest_addr - jump_addr == get_attr_length (insn) + 1;
}
5682
/* Returns 1 if a value of mode MODE can be stored starting with hard
   register number REGNO.  On the enhanced core, anything larger than
   1 byte must start in even numbered register for "movw" to work
   (this way we don't have to check for odd registers everywhere).  */

int
avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Disallow QImode in stack pointer regs.  */
  if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
    return 0;

  /* The only thing that can go into registers r28:r29 is a Pmode.  */
  if (regno == REG_Y && mode == Pmode)
    return 1;

  /* Otherwise disallow all regno/mode combinations that span r28:r29.
     NOTE(review): the condition rejects any multi-byte value whose
     byte range reaches into r29 -- confirm this matches the intended
     frame-pointer reservation.  */
  if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
    return 0;

  if (mode == QImode)
    return 1;

  /* Modes larger than QImode occupy consecutive registers.  */
  if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
    return 0;

  /* All modes larger than QImode should start in an even register.  */
  return !(regno & 1);
}
5713
5714 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5715 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5716 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
5717
5718 int
5719 avr_io_address_p (rtx x, int size)
5720 {
5721 return (optimize > 0 && GET_CODE (x) == CONST_INT
5722 && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
5723 }
5724
/* Output the insn sequence to reload a 16-bit constant into register
   pair %0 using scratch register %2.

   Operand 0: HImode destination register pair.
   Operand 1: the constant (or other) source.
   Operand 2: QImode scratch register for ldi.
   LEN, if non-NULL, receives the sequence length in insns instead of
   the caller using the returned template's length.

   Special-cases constants whose low byte, high byte, or both bytes
   are equal, saving one ldi each time.  */

const char *
output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  int tmp;
  if (!len)
    len = &tmp;
      
  if (GET_CODE (operands[1]) == CONST_INT)
    {
      int val = INTVAL (operands[1]);
      if ((val & 0xff) == 0)
	{
	  /* Low byte is zero: copy __zero_reg__, load only hi8.  */
	  *len = 3;
	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
		  AS2 (ldi,%2,hi8(%1))       CR_TAB
		  AS2 (mov,%B0,%2));
	}
      else if ((val & 0xff00) == 0)
	{
	  /* High byte is zero: load only lo8, copy __zero_reg__.  */
	  *len = 3;
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2)     CR_TAB
		  AS2 (mov,%B0,__zero_reg__));
	}
      else if ((val & 0xff) == ((val & 0xff00) >> 8))
	{
	  /* Both bytes identical: one ldi feeds both halves.  */
	  *len = 3;
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2)     CR_TAB
		  AS2 (mov,%B0,%2));
	}
    }
  /* General case: two ldi/mov pairs.  */
  *len = 4;
  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
	  AS2 (mov,%A0,%2)     CR_TAB
	  AS2 (ldi,%2,hi8(%1)) CR_TAB
	  AS2 (mov,%B0,%2));
}
5763
/* Output the insn sequence to reload a 32-bit (SImode/SFmode)
   constant or value into register quad %0 using scratch %2.

   Operand 0: 4-byte destination register group (%A0..%D0).
   Operand 1: source; zero bytes of a CONST_INT are copied from
	      __zero_reg__ instead of being loaded via ldi.
   Operand 2: QImode scratch register.
   When LEN is non-NULL, only the length (in insns) is computed and
   an empty template is returned; no assembly is emitted.  */

const char *
output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  rtx src = operands[1];
  int cnst = (GET_CODE (src) == CONST_INT);

  if (len)
    {
      /* Length: 4 movs, plus one ldi per non-zero byte (8 for
	 non-constant sources).  */
      if (cnst)
	*len = 4 + ((INTVAL (src) & 0xff) != 0)
		 + ((INTVAL (src) & 0xff00) != 0)
		 + ((INTVAL (src) & 0xff0000) != 0)
		 + ((INTVAL (src) & 0xff000000) != 0);
      else
	*len = 8;

      return "";
    }

  /* Byte 0 (lo8).  */
  if (cnst && ((INTVAL (src) & 0xff) == 0))
    output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
      output_asm_insn (AS2 (mov, %A0, %2), operands);
    }
  /* Byte 1 (hi8).  */
  if (cnst && ((INTVAL (src) & 0xff00) == 0))
    output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
      output_asm_insn (AS2 (mov, %B0, %2), operands);
    }
  /* Byte 2 (hlo8).  */
  if (cnst && ((INTVAL (src) & 0xff0000) == 0))
    output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
      output_asm_insn (AS2 (mov, %C0, %2), operands);
    }
  /* Byte 3 (hhi8).  */
  if (cnst && ((INTVAL (src) & 0xff000000) == 0))
    output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
      output_asm_insn (AS2 (mov, %D0, %2), operands);
    }
  return "";
}
5814
5815 void
5816 avr_output_bld (rtx operands[], int bit_nr)
5817 {
5818 static char s[] = "bld %A0,0";
5819
5820 s[5] = 'A' + (bit_nr >> 3);
5821 s[8] = '0' + (bit_nr & 7);
5822 output_asm_insn (s, operands);
5823 }
5824
5825 void
5826 avr_output_addr_vec_elt (FILE *stream, int value)
5827 {
5828 switch_to_section (progmem_section);
5829 if (AVR_HAVE_JMP_CALL)
5830 fprintf (stream, "\t.word gs(.L%d)\n", value);
5831 else
5832 fprintf (stream, "\trjmp .L%d\n", value);
5833 }
5834
5835 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5836 registers (for a define_peephole2) in the current function. */
5837
5838 int
5839 avr_peep2_scratch_safe (rtx scratch)
5840 {
5841 if ((interrupt_function_p (current_function_decl)
5842 || signal_function_p (current_function_decl))
5843 && leaf_function_p ())
5844 {
5845 int first_reg = true_regnum (scratch);
5846 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5847 int reg;
5848
5849 for (reg = first_reg; reg <= last_reg; reg++)
5850 {
5851 if (!df_regs_ever_live_p (reg))
5852 return 0;
5853 }
5854 }
5855 return 1;
5856 }
5857
/* Output a branch that tests a single bit of a register (QI, HI or SImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
   Operand 3: label to jump to if the test is true.

   Emits a skip insn (sbis/sbic/sbrs/sbrc) followed by the jump; for
   long jumps or when the branch can be turned into a skip over one
   insn, the condition is reversed so the skip jumps around the
   (possibly 2-word) jmp.  */

const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  int long_jump = (get_attr_length (insn) >= 4);
  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on the sign bit degenerate to EQ/NE of that bit.  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      /* I/O space operand; -0x20 maps data address to I/O address.  */
      if (INTVAL (operands[1]) < 0x40)
	{
	  /* Lower I/O half: bit-testable directly with sbis/sbic.  */
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
	  else
	    output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
	}
      else
	{
	  /* Upper I/O half: read into __tmp_reg__ and test there.  */
	  output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
	}
    }
  else  /* GET_CODE (operands[1]) == REG */
    {
      if (GET_MODE (operands[1]) == QImode)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
	}
      else  /* HImode or SImode */
	{
	  /* Operand 2 is a single-bit mask; find the byte and bit.  */
	  static char buf[] = "sbrc %A1,0";
	  int bit_nr = exact_log2 (INTVAL (operands[2])
				   & GET_MODE_MASK (GET_MODE (operands[1])));

	  buf[3] = (comp == EQ) ? 's' : 'c';
	  buf[6] = 'A' + (bit_nr >> 3);
	  buf[9] = '0' + (bit_nr & 7);
	  output_asm_insn (buf, operands);
	}
    }

  if (long_jump)
    /* Skip over the 2-word jmp to the target.  */
    return (AS1 (rjmp,.+4) CR_TAB
	    AS1 (jmp,%3));
  if (!reverse)
    return AS1 (rjmp,%3);
  /* Reversed skip over one insn: nothing more to emit.  */
  return "";
}
5928
/* Worker function for TARGET_ASM_CONSTRUCTOR.
   Pulls in the libgcc constructor-running code via the
   __do_global_ctors reference, then emits the default entry.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
5937
/* Worker function for TARGET_ASM_DESTRUCTOR.
   Pulls in the libgcc destructor-running code via the
   __do_global_dtors reference, then emits the default entry.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
5946
5947 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5948
5949 static bool
5950 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5951 {
5952 if (TYPE_MODE (type) == BLKmode)
5953 {
5954 HOST_WIDE_INT size = int_size_in_bytes (type);
5955 return (size == -1 || size > 8);
5956 }
5957 else
5958 return false;
5959 }
5960
5961 #include "gt-avr.h"