]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/config/avr/avr.c
aadfd244b68e92c9ffca9de69b1cfac7aa687655
[thirdparty/gcc.git] / gcc / config / avr / avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "diagnostic-core.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "optabs.h"
43 #include "ggc.h"
44 #include "langhooks.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "params.h"
49 #include "df.h"
50
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
53
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
66
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
86
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
106 const_tree, bool);
107 static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
108 const_tree, bool);
109 static void avr_help (void);
110 static bool avr_function_ok_for_sibcall (tree, tree);
111 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
112
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
115
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
118
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
121
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
124
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
127
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
130
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
133
134 section *progmem_section;
135
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
139
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table[] =
142 {
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
146 false },
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
148 false },
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
150 false },
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
152 false },
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
154 false },
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
156 false },
157 { NULL, 0, 0, false, false, false, NULL, false }
158 };
159
160 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
161 static const struct default_options avr_option_optimization_table[] =
162 {
163 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
164 { OPT_LEVELS_NONE, 0, NULL, 0 }
165 };
166 \f
167 /* Initialize the GCC target structure. */
168 #undef TARGET_ASM_ALIGNED_HI_OP
169 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
170 #undef TARGET_ASM_ALIGNED_SI_OP
171 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
172 #undef TARGET_ASM_UNALIGNED_HI_OP
173 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
174 #undef TARGET_ASM_UNALIGNED_SI_OP
175 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
176 #undef TARGET_ASM_INTEGER
177 #define TARGET_ASM_INTEGER avr_assemble_integer
178 #undef TARGET_ASM_FILE_START
179 #define TARGET_ASM_FILE_START avr_file_start
180 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
181 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
182 #undef TARGET_ASM_FILE_END
183 #define TARGET_ASM_FILE_END avr_file_end
184
185 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
186 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
187 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
188 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
189
190 #undef TARGET_FUNCTION_VALUE
191 #define TARGET_FUNCTION_VALUE avr_function_value
192 #undef TARGET_LIBCALL_VALUE
193 #define TARGET_LIBCALL_VALUE avr_libcall_value
194 #undef TARGET_FUNCTION_VALUE_REGNO_P
195 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
196
197 #undef TARGET_ATTRIBUTE_TABLE
198 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
199 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
200 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
201 #undef TARGET_INSERT_ATTRIBUTES
202 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
203 #undef TARGET_SECTION_TYPE_FLAGS
204 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
205
206 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
207
208 #undef TARGET_ASM_INIT_SECTIONS
209 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
210
211 #undef TARGET_REGISTER_MOVE_COST
212 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
213 #undef TARGET_MEMORY_MOVE_COST
214 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
215 #undef TARGET_RTX_COSTS
216 #define TARGET_RTX_COSTS avr_rtx_costs
217 #undef TARGET_ADDRESS_COST
218 #define TARGET_ADDRESS_COST avr_address_cost
219 #undef TARGET_MACHINE_DEPENDENT_REORG
220 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
221 #undef TARGET_FUNCTION_ARG
222 #define TARGET_FUNCTION_ARG avr_function_arg
223 #undef TARGET_FUNCTION_ARG_ADVANCE
224 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
225
226 #undef TARGET_LEGITIMIZE_ADDRESS
227 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
228
229 #undef TARGET_RETURN_IN_MEMORY
230 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
231
232 #undef TARGET_STRICT_ARGUMENT_NAMING
233 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
234
235 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
236 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
237
238 #undef TARGET_HARD_REGNO_SCRATCH_OK
239 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
240 #undef TARGET_CASE_VALUES_THRESHOLD
241 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
242
243 #undef TARGET_LEGITIMATE_ADDRESS_P
244 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
245
246 #undef TARGET_FRAME_POINTER_REQUIRED
247 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
248 #undef TARGET_CAN_ELIMINATE
249 #define TARGET_CAN_ELIMINATE avr_can_eliminate
250
251 #undef TARGET_CLASS_LIKELY_SPILLED_P
252 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
253
254 #undef TARGET_OPTION_OVERRIDE
255 #define TARGET_OPTION_OVERRIDE avr_option_override
256
257 #undef TARGET_OPTION_OPTIMIZATION_TABLE
258 #define TARGET_OPTION_OPTIMIZATION_TABLE avr_option_optimization_table
259
260 #undef TARGET_CANNOT_MODIFY_JUMPS_P
261 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
262
263 #undef TARGET_HELP
264 #define TARGET_HELP avr_help
265
266 #undef TARGET_EXCEPT_UNWIND_INFO
267 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
268
269 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
270 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
271
272 #undef TARGET_INIT_BUILTINS
273 #define TARGET_INIT_BUILTINS avr_init_builtins
274
275 #undef TARGET_EXPAND_BUILTIN
276 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
277
278
279 struct gcc_target targetm = TARGET_INITIALIZER;
280 \f
281 static void
282 avr_option_override (void)
283 {
284 const struct mcu_type_s *t;
285
286 flag_delete_null_pointer_checks = 0;
287
288 for (t = avr_mcu_types; t->name; t++)
289 if (strcmp (t->name, avr_mcu_name) == 0)
290 break;
291
292 if (!t->name)
293 {
294 error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name);
295 inform (input_location, "See --target-help for supported MCUs");
296 }
297
298 avr_current_device = t;
299 avr_current_arch = &avr_arch_types[avr_current_device->arch];
300 avr_extra_arch_macro = avr_current_device->macro;
301
302 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
303 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
304
305 init_machine_status = avr_init_machine_status;
306 }
307
308 /* Implement TARGET_HELP */
309 /* Report extra information for --target-help */
310
311 static void
312 avr_help (void)
313 {
314 const struct mcu_type_s *t;
315 const char * const indent = " ";
316 int len;
317
318 /* Give a list of MCUs that are accepted by -mmcu=* .
319 Note that MCUs supported by the compiler might differ from
320 MCUs supported by binutils. */
321
322 len = strlen (indent);
323 printf ("Known MCU names:\n%s", indent);
324
325 /* Print a blank-separated list of all supported MCUs */
326
327 for (t = avr_mcu_types; t->name; t++)
328 {
329 printf ("%s ", t->name);
330 len += 1 + strlen (t->name);
331
332 /* Break long lines */
333
334 if (len > 66 && (t+1)->name)
335 {
336 printf ("\n%s", indent);
337 len = strlen (indent);
338 }
339 }
340
341 printf ("\n\n");
342 }
343
/* Return register class from register number.  Lookup table indexed
   by hard register number 0..33: the 32 general registers r0-r31
   followed by the two stack pointer bytes (SPL, SPH).  Consumed by
   avr_regno_reg_class below.  */

static const enum reg_class reg_class_tab[]={
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS, /* r0 - r15 */
  LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
  LD_REGS,                      /* r16 - 23 */
  ADDW_REGS,ADDW_REGS,          /* r24,r25 */
  POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
  POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
  POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
  STACK_REG,STACK_REG           /* SPL,SPH */
};
359
/* Function to set up the backend function structure.  Installed as
   init_machine_status in avr_option_override, so cfun->machine is
   allocated (zero-initialized) lazily per function.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
367
368 /* Return register class for register R. */
369
370 enum reg_class
371 avr_regno_reg_class (int r)
372 {
373 if (r <= 33)
374 return reg_class_tab[r];
375 return ALL_REGS;
376 }
377
/* A helper for the subsequent function attributes used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE.
   Returns nonzero if FUNC carries attribute NAME, looking first at
   the decl's own attribute list and then at its type's.  */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      /* Not on the decl itself -- fall through to check its type.  */
      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}
399
/* Return nonzero if FUNC is a naked function (attribute "naked":
   no prologue/epilogue is emitted for it).  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
441
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */
      if (fixed_regs[reg])
        continue;

      /* Interrupt/signal handlers that make calls must save every
         call-used register; otherwise save call-saved registers that
         are actually live.  The frame pointer pair r28/r29 is handled
         separately by the prologue when frame_pointer_needed.  */
      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
483
484 /* Return true if register FROM can be eliminated via register TO. */
485
486 bool
487 avr_can_eliminate (const int from, const int to)
488 {
489 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
490 || ((from == FRAME_POINTER_REGNUM
491 || from == FRAME_POINTER_REGNUM + 1)
492 && !frame_pointer_needed));
493 }
494
495 /* Compute offset between arg_pointer and frame_pointer. */
496
497 int
498 avr_initial_elimination_offset (int from, int to)
499 {
500 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
501 return 0;
502 else
503 {
504 int offset = frame_pointer_needed ? 2 : 0;
505 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
506
507 offset += avr_regs_to_save (NULL);
508 return get_frame_size () + (avr_pc_size) + 1 + offset;
509 }
510 }
511
/* The actual start of the frame is virtual_stack_vars_rtx, which is
   offset from the frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating an add/sub of the offset in nonlocal goto and
   setjmp.  Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx avr_builtin_setjmp_frame_value (void)
{
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}
522
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is the return address of the current function.  TEM is the base
   register (frame pointer); the .L__stack_usage symbol referenced here
   is emitted by avr_asm_function_end_prologue.  */
rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Only the low two of the three PC bytes can be delivered.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* NOTE(review): the 8-bit rotate presumably swaps the two bytes of
     the loaded address into the expected byte order -- confirm against
     the call insn's push ordering.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
547
548 /* Return 1 if the function epilogue is just a single "ret". */
549
550 int
551 avr_simple_epilogue (void)
552 {
553 return (! frame_pointer_needed
554 && get_frame_size () == 0
555 && avr_regs_to_save (NULL) == 0
556 && ! interrupt_function_p (current_function_decl)
557 && ! signal_function_p (current_function_decl)
558 && ! avr_naked_function_p (current_function_decl)
559 && ! TREE_THIS_VOLATILE (current_function_decl));
560 }
561
/* This function checks the sequence of live registers among the
   call-saved registers r0..r17 plus the frame pointer pair r28/r29.
   Returns the number of live registers if they form one contiguous
   run (so the compact __prologue_saves__/__epilogue_restores__
   library sequences of -mcall-prologues can be used), otherwise 0.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq=0;  /* total live registers seen so far */
  int cur_seq=0;   /* length of the current contiguous run */

  for (reg = 0; reg < 18; ++reg)
    {
      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;  /* gap breaks the run */
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* With a frame pointer, r28/r29 are always saved and always
         extend the current run.  */
      cur_seq += 2;
      live_seq += 2;
    }
  /* Equal counts mean every live register was in one sequence.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
610
611 /* Obtain the length sequence of insns. */
612
613 int
614 get_sequence_length (rtx insns)
615 {
616 rtx insn;
617 int length;
618
619 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
620 length += get_attr_length (insn);
621
622 return length;
623 }
624
/* Implement INCOMING_RETURN_ADDR_RTX.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
}
634
/* Helper for expand_prologue.  Emit a push of byte register REGNO via
   post-decrement of SP and bump the function's recorded stack usage.
   FRAME_RELATED_P marks the insn for dwarf2 CFI generation.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* One byte pushed; accounted for the .L__stack_usage symbol.  */
  cfun->machine->stack_usage++;
}
652
653
/* Output function prologue.  Emits the RTL prologue: classifies the
   function from its attributes, saves SREG/RAMPZ and the temp/zero
   registers for interrupt handlers, then either calls the compact
   __prologue_saves__ library sequence (-mcall-prologues) or pushes
   the live registers individually and sets up the frame.  */

void
expand_prologue (void)
{
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size = get_frame_size();
  rtx insn;

  /* Init cfun->machine.  */
  cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
  cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
  cfun->machine->is_signal = signal_function_p (current_function_decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  /* The library save/restore sequence is only usable for plain
     functions whose live registers form one contiguous run.  */
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && live_seq);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
      emit_push_byte (TMP_REGNO, false);

      /* Push RAMPZ.  */
      /* ??? There's no dwarf2 column reserved for RAMPZ.  */
      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_move_insn (tmp_reg_rtx,
                          gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
          emit_push_byte (TMP_REGNO, false);
        }

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);
    }
  if (minimize && (frame_pointer_needed
                   || (AVR_2_BYTE_PC && live_seq > 6)
                   || live_seq > 7))
    {
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      insn = emit_insn (gen_call_prologue_saves
                        (gen_int_mode (live_seq, HImode),
                         gen_int_mode (size + live_seq, HImode)));
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */
      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode,
                                 (frame_pointer_needed
                                  ? frame_pointer_rtx : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */
      first_reg = 18 - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else
    {
      int reg;
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed)
        {
          if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
            {
              /* Push frame pointer.  Always be consistent about the
                 ordering of pushes -- epilogue_restores expects the
                 register pair to be pushed low byte first.  */
              emit_push_byte (REG_Y, true);
              emit_push_byte (REG_Y + 1, true);
            }

          if (!size)
            {
              insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /*  Creating a frame can be done by direct manipulation of the
                  stack or via the frame pointer. These two methods are:
                    fp=sp
                    fp-=size
                    sp=fp
                  OR
                    sp-=size
                    fp=sp
                  the optimum method depends on function type, stack and frame size.
                  To avoid a complex logic, both methods are tested and shortest
                  is selected.  */
              rtx myfp;
              rtx fp_plus_insns;

              if (AVR_HAVE_8BIT_SP)
                {
                  /* The high byte (r29) doesn't change.  Prefer 'subi'
                     (1 cycle) over 'sbiw' (2 cycles, same size).  */
                  myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
                }
              else
                {
                  /*  Normal sized addition.  */
                  myfp = frame_pointer_rtx;
                }

              /* Method 1-Adjust frame pointer.  */
              start_sequence ();

              /* Normally the dwarf2out frame-related-expr interpreter does
                 not expect to have the CFA change once the frame pointer is
                 set up.  Thus we avoid marking the move insn below and
                 instead indicate that the entire operation is complete after
                 the frame pointer subtraction is done.  */

              emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);

              insn = emit_move_insn (myfp, plus_constant (myfp, -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, frame_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));

              /* Copy to stack pointer.  Note that since we've already
                 changed the CFA to the frame pointer this operation
                 need not be annotated at all.  */
              if (AVR_HAVE_8BIT_SP)
                {
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }
              else if (TARGET_NO_INTERRUPTS
                       || cfun->machine->is_signal
                       || cfun->machine->is_OS_main)
                {
                  emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
                                                     frame_pointer_rtx));
                }
              else if (cfun->machine->is_interrupt)
                {
                  emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
                                                    frame_pointer_rtx));
                }
              else
                {
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }

              fp_plus_insns = get_insns ();
              end_sequence ();

              /* Method 2-Adjust Stack pointer.  */
              if (size <= 6)
                {
                  rtx sp_plus_insns;

                  start_sequence ();

                  insn = plus_constant (stack_pointer_rtx, -size);
                  insn = emit_move_insn (stack_pointer_rtx, insn);
                  RTX_FRAME_RELATED_P (insn) = 1;

                  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;

                  sp_plus_insns = get_insns ();
                  end_sequence ();

                  /* Use shortest method.  */
                  if (get_sequence_length (sp_plus_insns)
                      < get_sequence_length (fp_plus_insns))
                    emit_insn (sp_plus_insns);
                  else
                    emit_insn (fp_plus_insns);
                }
              else
                emit_insn (fp_plus_insns);

              cfun->machine->stack_usage += size;
            }
        }
    }

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
898
899 /* Output summary at end of function prologue. */
900
901 static void
902 avr_asm_function_end_prologue (FILE *file)
903 {
904 if (cfun->machine->is_naked)
905 {
906 fputs ("/* prologue: naked */\n", file);
907 }
908 else
909 {
910 if (cfun->machine->is_interrupt)
911 {
912 fputs ("/* prologue: Interrupt */\n", file);
913 }
914 else if (cfun->machine->is_signal)
915 {
916 fputs ("/* prologue: Signal */\n", file);
917 }
918 else
919 fputs ("/* prologue: function */\n", file);
920 }
921 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
922 get_frame_size());
923 fprintf (file, "/* stack size = %d */\n",
924 cfun->machine->stack_usage);
925 /* Create symbol stack offset here so all functions have it. Add 1 to stack
926 usage for offset so that SP + .L__stack_offset = return address. */
927 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
928 }
929
930
931 /* Implement EPILOGUE_USES. */
932
933 int
934 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
935 {
936 if (reload_completed
937 && cfun->machine
938 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
939 return 1;
940 return 0;
941 }
942
943 /* Helper for expand_epilogue. Emit a pop of a byte register. */
944
945 static void
946 emit_pop_byte (unsigned regno)
947 {
948 rtx mem, reg;
949
950 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
951 mem = gen_frame_mem (QImode, mem);
952 reg = gen_rtx_REG (QImode, regno);
953
954 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
955 }
956
/* Output RTL epilogue.  Mirrors expand_prologue: tears down the frame
   (via __epilogue_restores__ or individual pops), restores saved
   registers and -- for interrupt/signal handlers -- RAMPZ, SREG and
   the temp/zero registers.  SIBCALL_P suppresses the final return
   insn for sibling calls.  */

void
expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size = get_frame_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  /* Same eligibility test as in expand_prologue.  */
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && live_seq);

  if (minimize && (frame_pointer_needed || live_seq > 4))
    {
      if (frame_pointer_needed)
        {
          /*  Get rid of frame.  */
          emit_move_insn(frame_pointer_rtx,
                         gen_rtx_PLUS (HImode, frame_pointer_rtx,
                                       gen_int_mode (size, HImode)));
        }
      else
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
    }
  else
    {
      if (frame_pointer_needed)
        {
          if (size)
            {
              /* Try two methods to adjust stack and select shortest.  */
              rtx myfp;
              rtx fp_plus_insns;

              if (AVR_HAVE_8BIT_SP)
                {
                  /* The high byte (r29) doesn't change - prefer 'subi'
                     (1 cycle) over 'sbiw' (2 cycles, same size).  */
                  myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
                }
              else
                {
                  /* Normal sized addition.  */
                  myfp = frame_pointer_rtx;
                }

              /* Method 1-Adjust frame pointer.  */
              start_sequence ();

              emit_move_insn (myfp, plus_constant (myfp, size));

              /* Copy to stack pointer.  */
              if (AVR_HAVE_8BIT_SP)
                {
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }
              else if (TARGET_NO_INTERRUPTS
                       || cfun->machine->is_signal)
                {
                  emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
                                                     frame_pointer_rtx));
                }
              else if (cfun->machine->is_interrupt)
                {
                  emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
                                                    frame_pointer_rtx));
                }
              else
                {
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }

              fp_plus_insns = get_insns ();
              end_sequence ();

              /* Method 2-Adjust Stack pointer.  */
              if (size <= 5)
                {
                  rtx sp_plus_insns;

                  start_sequence ();

                  emit_move_insn (stack_pointer_rtx,
                                  plus_constant (stack_pointer_rtx, size));

                  sp_plus_insns = get_insns ();
                  end_sequence ();

                  /* Use shortest method.  */
                  if (get_sequence_length (sp_plus_insns)
                      < get_sequence_length (fp_plus_insns))
                    emit_insn (sp_plus_insns);
                  else
                    emit_insn (fp_plus_insns);
                }
              else
                emit_insn (fp_plus_insns);
            }
          if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
            {
              /* Restore previous frame_pointer.  See expand_prologue for
                 rationale for not using pophi.  */
              emit_pop_byte (REG_Y + 1);
              emit_pop_byte (REG_Y);
            }
        }

      /* Restore used registers, in reverse order of the pushes.  */
      for (reg = 31; reg >= 0; --reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_pop_byte (reg);

      if (cfun->machine->is_interrupt || cfun->machine->is_signal)
        {
          /* Restore RAMPZ using tmp reg as scratch.  */
          if (AVR_HAVE_RAMPZ
              && TEST_HARD_REG_BIT (set, REG_Z)
              && TEST_HARD_REG_BIT (set, REG_Z + 1))
            {
              emit_pop_byte (TMP_REGNO);
              emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
                              tmp_reg_rtx);
            }

          /* Restore SREG using tmp reg as scratch.  */
          emit_pop_byte (TMP_REGNO);

          emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
                          tmp_reg_rtx);

          /* Restore tmp REG.  */
          emit_pop_byte (TMP_REGNO);

          /* Restore zero REG.  */
          emit_pop_byte (ZERO_REGNO);
        }

      if (!sibcall_p)
        emit_jump_insn (gen_return ());
    }
}
1119
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  Emit a marker
   comment into the assembler output at the start of the epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
1127
1128
1129 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1130
1131 static bool
1132 avr_cannot_modify_jumps_p (void)
1133 {
1134
1135 /* Naked Functions must not have any instructions after
1136 their epilogue, see PR42240 */
1137
1138 if (reload_completed
1139 && cfun->machine
1140 && cfun->machine->is_naked)
1141 {
1142 return true;
1143 }
1144
1145 return false;
1146 }
1147
1148
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  STRICT means hard registers
   only; in the non-strict case pseudos are acceptable as bases as well.
   R accumulates the narrowest register class that can hold the base; the
   result is nonzero iff some class was found.  */

bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  enum reg_class r = NO_REGS;

  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, "mode: (%s) %s %s %s %s:",
               GET_MODE_NAME(mode),
               strict ? "(strict)": "",
               reload_completed ? "(reload_completed)": "",
               reload_in_progress ? "(reload_in_progress)": "",
               reg_renumber ? "(reg_renumber)" : "");
      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
          && reg_renumber
          )
        fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                 true_regnum (XEXP (x, 0)));
      debug_rtx (x);
    }
  /* Before reload, look through SUBREGs to the underlying pseudo.  */
  if (!strict && GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);
  if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
                    : REG_OK_FOR_BASE_NOSTRICT_P (x)))
    r = POINTER_REGS;
  else if (CONSTANT_ADDRESS_P (x))
    r = ALL_REGS;
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && GET_CODE (XEXP (x, 1)) == CONST_INT
           && INTVAL (XEXP (x, 1)) >= 0)
    {
      /* reg+disp: the displacement must fit the LD/ST offset range.  */
      int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
      if (fit)
        {
          if (! strict
              || REGNO (XEXP (x,0)) == REG_X
              || REGNO (XEXP (x,0)) == REG_Y
              || REGNO (XEXP (x,0)) == REG_Z)
            r = BASE_POINTER_REGS;
          if (XEXP (x,0) == frame_pointer_rtx
              || XEXP (x,0) == arg_pointer_rtx)
            r = BASE_POINTER_REGS;
        }
      else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
        /* Big frame offsets off Y can still be reloaded.  */
        r = POINTER_Y_REGS;
    }
  else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
           && REG_P (XEXP (x, 0))
           && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
               : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
    {
      r = POINTER_REGS;
    }
  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, " ret = %c\n", r + '0');
    }
  return r == NO_REGS ? 0 : (int)r;
}
1216
1217 /* Attempts to replace X with a valid
1218 memory address for an operand of mode MODE */
1219
1220 rtx
1221 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1222 {
1223 x = oldx;
1224 if (TARGET_ALL_DEBUG)
1225 {
1226 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1227 debug_rtx (oldx);
1228 }
1229
1230 if (GET_CODE (oldx) == PLUS
1231 && REG_P (XEXP (oldx,0)))
1232 {
1233 if (REG_P (XEXP (oldx,1)))
1234 x = force_reg (GET_MODE (oldx), oldx);
1235 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1236 {
1237 int offs = INTVAL (XEXP (oldx,1));
1238 if (frame_pointer_rtx != XEXP (oldx,0))
1239 if (offs > MAX_LD_OFFSET (mode))
1240 {
1241 if (TARGET_ALL_DEBUG)
1242 fprintf (stderr, "force_reg (big offset)\n");
1243 x = force_reg (GET_MODE (oldx), oldx);
1244 }
1245 }
1246 }
1247 return x;
1248 }
1249
1250
1251 /* Return a pointer register name as a string. */
1252
1253 static const char *
1254 ptrreg_to_str (int regno)
1255 {
1256 switch (regno)
1257 {
1258 case REG_X: return "X";
1259 case REG_Y: return "Y";
1260 case REG_Z: return "Z";
1261 default:
1262 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1263 }
1264 return NULL;
1265 }
1266
1267 /* Return the condition name as a string.
1268 Used in conditional jump constructing */
1269
1270 static const char *
1271 cond_string (enum rtx_code code)
1272 {
1273 switch (code)
1274 {
1275 case NE:
1276 return "ne";
1277 case EQ:
1278 return "eq";
1279 case GE:
1280 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1281 return "pl";
1282 else
1283 return "ge";
1284 case LT:
1285 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1286 return "mi";
1287 else
1288 return "lt";
1289 case GEU:
1290 return "sh";
1291 case LTU:
1292 return "lo";
1293 default:
1294 gcc_unreachable ();
1295 }
1296 }
1297
1298 /* Output ADDR to FILE as address. */
1299
1300 void
1301 print_operand_address (FILE *file, rtx addr)
1302 {
1303 switch (GET_CODE (addr))
1304 {
1305 case REG:
1306 fprintf (file, ptrreg_to_str (REGNO (addr)));
1307 break;
1308
1309 case PRE_DEC:
1310 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1311 break;
1312
1313 case POST_INC:
1314 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1315 break;
1316
1317 default:
1318 if (CONSTANT_ADDRESS_P (addr)
1319 && text_segment_operand (addr, VOIDmode))
1320 {
1321 rtx x = addr;
1322 if (GET_CODE (x) == CONST)
1323 x = XEXP (x, 0);
1324 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1325 {
1326 /* Assembler gs() will implant word address. Make offset
1327 a byte offset inside gs() for assembler. This is
1328 needed because the more logical (constant+gs(sym)) is not
1329 accepted by gas. For 128K and lower devices this is ok. For
1330 large devices it will create a Trampoline to offset from symbol
1331 which may not be what the user really wanted. */
1332 fprintf (file, "gs(");
1333 output_addr_const (file, XEXP (x,0));
1334 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1335 if (AVR_3_BYTE_PC)
1336 if (warning (0, "pointer offset from symbol maybe incorrect"))
1337 {
1338 output_addr_const (stderr, addr);
1339 fprintf(stderr,"\n");
1340 }
1341 }
1342 else
1343 {
1344 fprintf (file, "gs(");
1345 output_addr_const (file, addr);
1346 fprintf (file, ")");
1347 }
1348 }
1349 else
1350 output_addr_const (file, addr);
1351 }
1352 }
1353
1354
1355 /* Output X as assembler operand to file FILE. */
1356
1357 void
1358 print_operand (FILE *file, rtx x, int code)
1359 {
1360 int abcd = 0;
1361
1362 if (code >= 'A' && code <= 'D')
1363 abcd = code - 'A';
1364
1365 if (code == '~')
1366 {
1367 if (!AVR_HAVE_JMP_CALL)
1368 fputc ('r', file);
1369 }
1370 else if (code == '!')
1371 {
1372 if (AVR_HAVE_EIJMP_EICALL)
1373 fputc ('e', file);
1374 }
1375 else if (REG_P (x))
1376 {
1377 if (x == zero_reg_rtx)
1378 fprintf (file, "__zero_reg__");
1379 else
1380 fprintf (file, reg_names[true_regnum (x) + abcd]);
1381 }
1382 else if (GET_CODE (x) == CONST_INT)
1383 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1384 else if (GET_CODE (x) == MEM)
1385 {
1386 rtx addr = XEXP (x,0);
1387 if (code == 'm')
1388 {
1389 if (!CONSTANT_P (addr))
1390 fatal_insn ("bad address, not a constant):", addr);
1391 /* Assembler template with m-code is data - not progmem section */
1392 if (text_segment_operand (addr, VOIDmode))
1393 if (warning ( 0, "accessing data memory with program memory address"))
1394 {
1395 output_addr_const (stderr, addr);
1396 fprintf(stderr,"\n");
1397 }
1398 output_addr_const (file, addr);
1399 }
1400 else if (code == 'o')
1401 {
1402 if (GET_CODE (addr) != PLUS)
1403 fatal_insn ("bad address, not (reg+disp):", addr);
1404
1405 print_operand (file, XEXP (addr, 1), 0);
1406 }
1407 else if (code == 'p' || code == 'r')
1408 {
1409 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1410 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1411
1412 if (code == 'p')
1413 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1414 else
1415 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1416 }
1417 else if (GET_CODE (addr) == PLUS)
1418 {
1419 print_operand_address (file, XEXP (addr,0));
1420 if (REGNO (XEXP (addr, 0)) == REG_X)
1421 fatal_insn ("internal compiler error. Bad address:"
1422 ,addr);
1423 fputc ('+', file);
1424 print_operand (file, XEXP (addr,1), code);
1425 }
1426 else
1427 print_operand_address (file, addr);
1428 }
1429 else if (code == 'x')
1430 {
1431 /* Constant progmem address - like used in jmp or call */
1432 if (0 == text_segment_operand (x, VOIDmode))
1433 if (warning ( 0, "accessing program memory with data memory address"))
1434 {
1435 output_addr_const (stderr, x);
1436 fprintf(stderr,"\n");
1437 }
1438 /* Use normal symbol for direct address no linker trampoline needed */
1439 output_addr_const (file, x);
1440 }
1441 else if (GET_CODE (x) == CONST_DOUBLE)
1442 {
1443 long val;
1444 REAL_VALUE_TYPE rv;
1445 if (GET_MODE (x) != SFmode)
1446 fatal_insn ("internal compiler error. Unknown mode:", x);
1447 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1448 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1449 fprintf (file, "0x%lx", val);
1450 }
1451 else if (code == 'j')
1452 fputs (cond_string (GET_CODE (x)), file);
1453 else if (code == 'k')
1454 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1455 else
1456 print_operand_address (file, x);
1457 }
1458
/* Update the condition code in the INSN.  Dispatches on the insn's
   "cc" attribute and records in cc_status what the instruction left
   in the hardware flags, so that a following conditional branch can
   reuse them instead of emitting a compare.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;

  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          /* Z and N reflect the destination; V is known clear.  */
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        /* A compare: flags describe the source of the comparison.  */
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;

      /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
      set = single_set (insn);
      if (set)
        {
          rtx src = SET_SRC (set);

          if (GET_CODE (src) == ASHIFTRT
              && GET_MODE (src) == QImode)
            {
              rtx x = XEXP (src, 1);

              if (GET_CODE (x) == CONST_INT
                  && INTVAL (x) > 0
                  && INTVAL (x) != 6)
                {
                  cc_status.value1 = SET_DEST (set);
                  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
                }
            }
        }
      break;
    }
}
1533
1534 /* Return maximum number of consecutive registers of
1535 class CLASS needed to hold a value of mode MODE. */
1536
1537 int
1538 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1539 {
1540 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1541 }
1542
1543 /* Choose mode for jump insn:
1544 1 - relative jump in range -63 <= x <= 62 ;
1545 2 - relative jump in range -2046 <= x <= 2045 ;
1546 3 - absolute jump (only for ATmega[16]03). */
1547
1548 int
1549 avr_jump_mode (rtx x, rtx insn)
1550 {
1551 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1552 ? XEXP (x, 0) : x));
1553 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1554 int jump_distance = cur_addr - dest_addr;
1555
1556 if (-63 <= jump_distance && jump_distance <= 62)
1557 return 1;
1558 else if (-2046 <= jump_distance && jump_distance <= 2045)
1559 return 2;
1560 else if (AVR_HAVE_JMP_CALL)
1561 return 3;
1562
1563 return 2;
1564 }
1565
/* return an AVR condition jump commands.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function.
   if REVERSE nonzero then condition code in X must be reversed.
   GT/GTU/LE/LEU have no single AVR branch and are synthesized from
   breq plus a signed/unsigned branch; when the V flag is unusable the
   signed forms fall back to brpl/brmi.  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brmi,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brmi,.+4) CR_TAB
                 AS1 (jmp,%0)));

      else
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brlt,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brlt,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case GTU:
      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,%0)) :
              len == 2 ? (AS1 (breq,.+4) CR_TAB
                          AS1 (brlo,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+6) CR_TAB
               AS1 (brlo,.+4) CR_TAB
               AS1 (jmp,%0)));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brmi,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brpl,.+4) CR_TAB
                 AS1 (jmp,%0)));
      else
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brlt,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brge,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case LEU:
      return (len == 1 ? (AS1 (breq,%0) CR_TAB
                          AS1 (brlo,%0)) :
              len == 2 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+2) CR_TAB
               AS1 (brsh,.+4) CR_TAB
               AS1 (jmp,%0)));
    default:
      /* Directly-supported conditions: emit via the %j/%k operand
         modifiers (condition resp. reversed condition).  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return AS1 (br%k1,%0);
            case 2:
              return (AS1 (br%j1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%j1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return AS1 (br%j1,%0);
            case 2:
              return (AS1 (br%k1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%k1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
    }
  return "";
}
1667
1668 /* Predicate function for immediate operand which fits to byte (8bit) */
1669
1670 int
1671 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1672 {
1673 return (GET_CODE (op) == CONST_INT
1674 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1675 }
1676
1677 /* Output insn cost for next insn. */
1678
1679 void
1680 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1681 int num_operands ATTRIBUTE_UNUSED)
1682 {
1683 if (TARGET_ALL_DEBUG)
1684 {
1685 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1686 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1687 }
1688 }
1689
1690 /* Return 0 if undefined, 1 if always true or always false. */
1691
1692 int
1693 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1694 {
1695 unsigned int max = (mode == QImode ? 0xff :
1696 mode == HImode ? 0xffff :
1697 mode == SImode ? 0xffffffff : 0);
1698 if (max && op && GET_CODE (x) == CONST_INT)
1699 {
1700 if (unsigned_condition (op) != op)
1701 max >>= 1;
1702
1703 if (max != (INTVAL (x) & max)
1704 && INTVAL (x) != 0xff)
1705 return 1;
1706 }
1707 return 0;
1708 }
1709
1710
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR these are r8 through r25.  */

int
function_arg_regno_p(int r)
{
  return 8 <= r && r <= 25;
}
1719
/* Initializing the variable cum for the state at the beginning
   of the argument list.  Up to 18 bytes of arguments go in registers
   (r25 downwards); variadic functions pass everything on the stack,
   except for libcalls (LIBNAME non-null).  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                      tree fndecl ATTRIBUTE_UNUSED)
{
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  /* nregs == 0 forces all arguments of a varargs function onto the stack.  */
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called.  */

  cfun->machine->sibcall_fails = 0;
}
1736
1737 /* Returns the number of registers to allocate for a function argument. */
1738
1739 static int
1740 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1741 {
1742 int size;
1743
1744 if (mode == BLKmode)
1745 size = int_size_in_bytes (type);
1746 else
1747 size = GET_MODE_SIZE (mode);
1748
1749 /* Align all function arguments to start in even-numbered registers.
1750 Odd-sized arguments leave holes above them. */
1751
1752 return (size + 1) & ~1;
1753 }
1754
1755 /* Controls whether a function argument is passed
1756 in a register, and which register. */
1757
1758 static rtx
1759 avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1760 const_tree type, bool named ATTRIBUTE_UNUSED)
1761 {
1762 int bytes = avr_num_arg_regs (mode, type);
1763
1764 if (cum->nregs && bytes <= cum->nregs)
1765 return gen_rtx_REG (mode, cum->regno - bytes);
1766
1767 return NULL_RTX;
1768 }
1769
1770 /* Update the summarizer variable CUM to advance past an argument
1771 in the argument list. */
1772
1773 static void
1774 avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1775 const_tree type, bool named ATTRIBUTE_UNUSED)
1776 {
1777 int bytes = avr_num_arg_regs (mode, type);
1778
1779 cum->nregs -= bytes;
1780 cum->regno -= bytes;
1781
1782 /* A parameter is being passed in a call-saved register. As the original
1783 contents of these regs has to be restored before leaving the function,
1784 a function must not pass arguments in call-saved regs in order to get
1785 tail-called. */
1786
1787 if (cum->regno >= 8
1788 && cum->nregs >= 0
1789 && !call_used_regs[cum->regno])
1790 {
1791 /* FIXME: We ship info on failing tail-call in struct machine_function.
1792 This uses internals of calls.c:expand_call() and the way args_so_far
1793 is used. targetm.function_ok_for_sibcall() needs to be extended to
1794 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1795 dependent so that such an extension is not wanted. */
1796
1797 cfun->machine->sibcall_fails = 1;
1798 }
1799
1800 /* Test if all registers needed by the ABI are actually available. If the
1801 user has fixed a GPR needed to pass an argument, an (implicit) function
1802 call would clobber that fixed register. See PR45099 for an example. */
1803
1804 if (cum->regno >= 8
1805 && cum->nregs >= 0)
1806 {
1807 int regno;
1808
1809 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1810 if (fixed_regs[regno])
1811 error ("Register %s is needed to pass a parameter but is fixed",
1812 reg_names[regno]);
1813 }
1814
1815 if (cum->nregs <= 0)
1816 {
1817 cum->nregs = 0;
1818 cum->regno = FIRST_CUM_REG;
1819 }
1820 }
1821
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  Returns false whenever the caller's
   and callee's epilogues could differ (interrupt/signal/naked/OS_task/
   OS_main attributes) or when argument passing already ruled it out.  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* No declaration available: strip indirections until we reach
         the function/method type itself.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (interrupt_function_p (current_function_decl)
      || signal_function_p (current_function_decl)
      || avr_naked_function_p (decl_callee)
      || avr_naked_function_p (current_function_decl)
      /* FIXME: For OS_task and OS_main, we are over-conservative.
         This is due to missing documentation of these attributes
         and what they actually should do and should not do. */
      || (avr_OS_task_function_p (decl_callee)
          != avr_OS_task_function_p (current_function_decl))
      || (avr_OS_main_function_p (decl_callee)
          != avr_OS_main_function_p (current_function_decl)))
    {
      return false;
    }

  return true;
}
1880
/***********************************************************************
 Functions for outputting various mov's for a various modes
************************************************************************/

/* Output the assembler template for an 8-bit move INSN with OPERANDS.
   If L is non-NULL, only compute and store the instruction count there
   (no code is emitted through output_asm_insn in that case).  */
const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            return AS2 (out,%0,%1);
          else if (test_hard_reg_class (STACK_REG, src))
            return AS2 (in,%0,%1);

          return AS2 (mov,%0,%1);
        }
      else if (CONSTANT_P (src))
        {
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            return AS2 (ldi,%0,lo8(%1));

          if (GET_CODE (src) == CONST_INT)
            {
              if (src == const0_rtx) /* mov r,L */
                return AS1 (clr,%0);
              else if (src == const1_rtx)
                {
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (inc,%0));
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constants -1 to any register */
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (dec,%0));
                }
              else
                {
                  /* Single-bit constant: clear, set T, then bld.  */
                  int bit_nr = exact_log2 (INTVAL (src));

                  if (bit_nr >= 0)
                    {
                      *l = 3;
                      if (!real_l)
                        output_asm_insn ((AS1 (clr,%0) CR_TAB
                                          "set"), operands);
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }
            }

          /* Last resort, larger than loading from memory.  */
          *l = 4;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1)) CR_TAB
                  AS2 (mov,%0,r31) CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      /* Store of zero goes through the fixed zero register.  */
      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      templ = out_movqi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
    }
  return "";
}
1974
1975
/* Output the assembler template for a 16-bit move INSN with OPERANDS.
   If L is non-NULL, only compute and store the instruction count there.
   Writing the stack pointer may need interrupts disabled around the
   two OUTs to keep SP consistent.  */
const char *
output_movhi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  if (register_operand (dest, HImode))
    {
      if (register_operand (src, HImode)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              if (AVR_HAVE_8BIT_SP)
                return *l = 1, AS2 (out,__SP_L__,%A1);
              /* Use simple load of stack pointer if no interrupts are
                 used.  */
              else if (TARGET_NO_INTERRUPTS)
                return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
                                AS2 (out,__SP_L__,%A1));
              /* Otherwise save SREG, disable interrupts around the
                 SP update, then restore SREG.  */
              *l = 5;
              return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
                      "cli" CR_TAB
                      AS2 (out,__SP_H__,%B1) CR_TAB
                      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
                      AS2 (out,__SP_L__,%A1));
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              *l = 2;
              return (AS2 (in,%A0,__SP_L__) CR_TAB
                      AS2 (in,%B0,__SP_H__));
            }

          if (AVR_HAVE_MOVW)
            {
              *l = 1;
              return (AS2 (movw,%0,%1));
            }
          else
            {
              *l = 2;
              return (AS2 (mov,%A0,%A1) CR_TAB
                      AS2 (mov,%B0,%B1));
            }
        }
      else if (CONSTANT_P (src))
        {
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            {
              *l = 2;
              return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
                      AS2 (ldi,%B0,hi8(%1)));
            }

          if (GET_CODE (src) == CONST_INT)
            {
              if (src == const0_rtx) /* mov r,L */
                {
                  *l = 2;
                  return (AS1 (clr,%A0) CR_TAB
                          AS1 (clr,%B0));
                }
              else if (src == const1_rtx)
                {
                  *l = 3;
                  return (AS1 (clr,%A0) CR_TAB
                          AS1 (clr,%B0) CR_TAB
                          AS1 (inc,%A0));
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constants -1 to any register */
                  *l = 3;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (dec,%A0) CR_TAB
                          AS2 (mov,%B0,%A0));
                }
              else
                {
                  /* Single-bit constant: clear both bytes, set T, bld.  */
                  int bit_nr = exact_log2 (INTVAL (src));

                  if (bit_nr >= 0)
                    {
                      *l = 4;
                      if (!real_l)
                        output_asm_insn ((AS1 (clr,%A0) CR_TAB
                                          AS1 (clr,%B0) CR_TAB
                                          "set"), operands);
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }

              /* Only one byte non-zero: load it through r31.  */
              if ((INTVAL (src) & 0xff) == 0)
                {
                  *l = 5;
                  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                          AS1 (clr,%A0) CR_TAB
                          AS2 (ldi,r31,hi8(%1)) CR_TAB
                          AS2 (mov,%B0,r31) CR_TAB
                          AS2 (mov,r31,__tmp_reg__));
                }
              else if ((INTVAL (src) & 0xff00) == 0)
                {
                  *l = 5;
                  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                          AS2 (ldi,r31,lo8(%1)) CR_TAB
                          AS2 (mov,%A0,r31) CR_TAB
                          AS1 (clr,%B0) CR_TAB
                          AS2 (mov,r31,__tmp_reg__));
                }
            }

          /* Last resort, equal to loading from memory.  */
          *l = 6;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1)) CR_TAB
                  AS2 (mov,%A0,r31) CR_TAB
                  AS2 (ldi,r31,hi8(%1)) CR_TAB
                  AS2 (mov,%B0,r31) CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
      else if (GET_CODE (src) == MEM)
        return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      /* Store of zero goes through the fixed zero register.  */
      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      templ = out_movhi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
2126
/* Output the assembler template for an 8-bit load register <- memory.
   OP[0] is the destination register, OP[1] the MEM source.  If L is
   non-NULL, only store the instruction count there.  */
const char *
out_movqi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
        {
          *l = 1;
          return AS2 (in,%0,__SREG__);
        }
      if (optimize > 0 && io_address_operand (x, QImode))
        {
          /* I/O space: IN is smaller/faster than LDS.  */
          *l = 1;
          return AS2 (in,%0,%m1-0x20);
        }
      *l = 2;
      return AS2 (lds,%0,%m1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x,0))
           && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
        {
          /* Displacement out of LDD range: must be off Y; adjust the
             pointer temporarily.  */
          int disp = INTVAL (XEXP (x,1));
          if (REGNO (XEXP (x,0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
                            AS2 (ldd,%0,Y+63) CR_TAB
                            AS2 (sbiw,r28,%o1-63));

          return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%0,Y) CR_TAB
                          AS2 (subi,r28,lo8(%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */
          if (reg_overlap_mentioned_p (dest, XEXP (x,0))
              || reg_unused_after (insn, XEXP (x,0)))
            return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
                            AS2 (ld,%0,X));

          return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
                          AS2 (ld,%0,X) CR_TAB
                          AS2 (sbiw,r26,%o1));
        }
      *l = 1;
      return AS2 (ldd,%0,%1);
    }
  *l = 1;
  return AS2 (ld,%0,%1);
}
2194
/* Output the assembler template for a 16-bit load register <- memory.
   OP[0] is the destination register pair, OP[1] the MEM source.  If L
   is non-NULL, only store the instruction count there.  */
const char *
out_movhi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
        {
          /* Destination overlaps the base: route the low byte through
             the scratch register.  */
          *l = 3;
          return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
                  AS2 (ld,%B0,%1) CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }
      else if (reg_base == REG_X)        /* (R26) */
        {
          /* X has no displacement mode; use post-increment and undo it
             unless X is dead afterwards.  */
          if (reg_unused_after (insn, base))
            {
              *l = 2;
              return (AS2 (ld,%A0,X+) CR_TAB
                      AS2 (ld,%B0,X));
            }
          *l = 3;
          return (AS2 (ld,%A0,X+) CR_TAB
                  AS2 (ld,%B0,X) CR_TAB
                  AS2 (sbiw,r26,1));
        }
      else                      /* (R)  */
        {
          *l = 2;
          return (AS2 (ld,%A0,%1) CR_TAB
                  AS2 (ldd,%B0,%1+1));
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement out of LDD range: must be off Y; adjust the
             pointer temporarily.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
                            AS2 (ldd,%A0,Y+62) CR_TAB
                            AS2 (ldd,%B0,Y+63) CR_TAB
                            AS2 (sbiw,r28,%o1-62));

          return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%A0,Y) CR_TAB
                          AS2 (ldd,%B0,Y+1) CR_TAB
                          AS2 (subi,r28,lo8(%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      if (reg_base == REG_X)
        {
          /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal
             optimization options.  */

          *l = 4;
          if (reg_base == reg_dest)
            return (AS2 (adiw,r26,%o1) CR_TAB
                    AS2 (ld,__tmp_reg__,X+) CR_TAB
                    AS2 (ld,%B0,X) CR_TAB
                    AS2 (mov,%A0,__tmp_reg__));

          return (AS2 (adiw,r26,%o1) CR_TAB
                  AS2 (ld,%A0,X+) CR_TAB
                  AS2 (ld,%B0,X) CR_TAB
                  AS2 (sbiw,r26,%o1+1));
        }

      if (reg_base == reg_dest)
        {
          /* Destination overlaps the base: go through the scratch reg.  */
          *l = 3;
          return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
                  AS2 (ldd,%B0,%B1) CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }

      *l = 2;
      return (AS2 (ldd,%A0,%A1) CR_TAB
              AS2 (ldd,%B0,%B1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (mem_volatile_p)
        {
          /* Volatile: read low byte first (16-bit I/O registers).  */
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (sbiw,r26,2) CR_TAB
                      AS2 (ld,%A0,X+) CR_TAB
                      AS2 (ld,%B0,X) CR_TAB
                      AS2 (sbiw,r26,1));
            }
          else
            {
              *l = 3;
              return (AS2 (sbiw,%r1,2) CR_TAB
                      AS2 (ld,%A0,%p1) CR_TAB
                      AS2 (ldd,%B0,%p1+1));
            }
        }

      *l = 2;
      return (AS2 (ld,%B0,%1) CR_TAB
              AS2 (ld,%A0,%1));
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      *l = 2;
      return (AS2 (ld,%A0,%1) CR_TAB
              AS2 (ld,%B0,%1));
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (optimize > 0 && io_address_operand (base, HImode))
        {
          /* I/O space: IN is smaller/faster than LDS.  */
          *l = 2;
          return (AS2 (in,%A0,%m1-0x20) CR_TAB
                  AS2 (in,%B0,%m1+1-0x20));
        }
      *l = 4;
      return (AS2 (lds,%A0,%m1) CR_TAB
              AS2 (lds,%B0,%m1+1));
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
2347
/* Output the assembler for an SImode load from memory operand OP[1]
   into register operand OP[0].  INSN is the move insn itself; if L is
   non-NULL it receives the number of output instructions.  Returns the
   assembler template string (or "" after fatal_insn).  */

const char *
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  /* Point L at a dummy so *l assignments below are unconditional.  */
  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
	    /* "ld r26,-X" is undefined */
	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
			  AS2 (ld,r29,X)          CR_TAB
			  AS2 (ld,r28,-X)         CR_TAB
			  AS2 (ld,__tmp_reg__,-X) CR_TAB
			  AS2 (sbiw,r26,1)        CR_TAB
			  AS2 (ld,r26,X)          CR_TAB
			  AS2 (mov,r27,__tmp_reg__));
          else if (reg_dest == REG_X - 2)
            /* Destination overlaps the top of X; buffer byte 2 in
               __tmp_reg__ so the load of %D0 doesn't clobber X.  */
            return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
                          AS2 (ld,%B0,X+)          CR_TAB
                          AS2 (ld,__tmp_reg__,X+)  CR_TAB
                          AS2 (ld,%D0,X)           CR_TAB
                          AS2 (mov,%C0,__tmp_reg__));
          else if (reg_unused_after (insn, base))
            /* X is dead afterwards: no need to restore it.  */
            return *l=4, (AS2 (ld,%A0,X+) CR_TAB
                          AS2 (ld,%B0,X+) CR_TAB
                          AS2 (ld,%C0,X+) CR_TAB
                          AS2 (ld,%D0,X));
          else
            return *l=5, (AS2 (ld,%A0,X+) CR_TAB
                          AS2 (ld,%B0,X+) CR_TAB
                          AS2 (ld,%C0,X+) CR_TAB
                          AS2 (ld,%D0,X)  CR_TAB
                          AS2 (sbiw,r26,3));
        }
      else
        {
          if (reg_dest == reg_base)
            /* Load high-to-low, keeping byte 1 in __tmp_reg__ until the
               base register pair is no longer needed.  */
            return *l=5, (AS2 (ldd,%D0,%1+3)            CR_TAB
                          AS2 (ldd,%C0,%1+2)            CR_TAB
                          AS2 (ldd,__tmp_reg__,%1+1)    CR_TAB
                          AS2 (ld,%A0,%1)               CR_TAB
                          AS2 (mov,%B0,__tmp_reg__));
          else if (reg_base == reg_dest + 2)
            return *l=5, (AS2 (ld ,%A0,%1)              CR_TAB
                          AS2 (ldd,%B0,%1+1)            CR_TAB
                          AS2 (ldd,__tmp_reg__,%1+2)    CR_TAB
                          AS2 (ldd,%D0,%1+3)            CR_TAB
                          AS2 (mov,%C0,__tmp_reg__));
          else
            return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
                          AS2 (ldd,%B0,%1+1) CR_TAB
                          AS2 (ldd,%C0,%1+2) CR_TAB
                          AS2 (ldd,%D0,%1+3));
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  /* Only Y is expected as base for an out-of-range displacement;
	     anything else indicates a reload/legitimize bug.  */
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
			    AS2 (ldd,%A0,Y+60)    CR_TAB
			    AS2 (ldd,%B0,Y+61)    CR_TAB
			    AS2 (ldd,%C0,Y+62)    CR_TAB
			    AS2 (ldd,%D0,Y+63)    CR_TAB
			    AS2 (sbiw,r28,%o1-60));

	  /* Displacement too large even for adiw: add/subtract the full
	     16-bit offset around the loads.  */
	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%A0,Y)           CR_TAB
			  AS2 (ldd,%B0,Y+1)        CR_TAB
			  AS2 (ldd,%C0,Y+2)        CR_TAB
			  AS2 (ldd,%D0,Y+3)        CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
	{
	  /* R = (X + d) */
	  if (reg_dest == REG_X)
	    {
	      *l = 7;
	      /* "ld r26,-X" is undefined */
	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
		      AS2 (ld,r29,X)          CR_TAB
		      AS2 (ld,r28,-X)         CR_TAB
		      AS2 (ld,__tmp_reg__,-X) CR_TAB
		      AS2 (sbiw,r26,1)        CR_TAB
		      AS2 (ld,r26,X)          CR_TAB
		      AS2 (mov,r27,__tmp_reg__));
	    }
	  *l = 6;
	  if (reg_dest == REG_X - 2)
	    return (AS2 (adiw,r26,%o1)      CR_TAB
		    AS2 (ld,r24,X+)         CR_TAB
		    AS2 (ld,r25,X+)         CR_TAB
		    AS2 (ld,__tmp_reg__,X+) CR_TAB
		    AS2 (ld,r27,X)          CR_TAB
		    AS2 (mov,r26,__tmp_reg__));

	  return (AS2 (adiw,r26,%o1) CR_TAB
		  AS2 (ld,%A0,X+)    CR_TAB
		  AS2 (ld,%B0,X+)    CR_TAB
		  AS2 (ld,%C0,X+)    CR_TAB
		  AS2 (ld,%D0,X)     CR_TAB
		  AS2 (sbiw,r26,%o1+3));
	}
      if (reg_dest == reg_base)
        return *l=5, (AS2 (ldd,%D0,%D1)           CR_TAB
                      AS2 (ldd,%C0,%C1)           CR_TAB
                      AS2 (ldd,__tmp_reg__,%B1)   CR_TAB
                      AS2 (ldd,%A0,%A1)           CR_TAB
                      AS2 (mov,%B0,__tmp_reg__));
      else if (reg_dest == reg_base - 2)
        return *l=5, (AS2 (ldd,%A0,%A1)           CR_TAB
                      AS2 (ldd,%B0,%B1)           CR_TAB
                      AS2 (ldd,__tmp_reg__,%C1)   CR_TAB
                      AS2 (ldd,%D0,%D1)           CR_TAB
                      AS2 (mov,%C0,__tmp_reg__));
      return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
                    AS2 (ldd,%B0,%B1) CR_TAB
                    AS2 (ldd,%C0,%C1) CR_TAB
                    AS2 (ldd,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: bytes come out high-to-low.  */
    return *l=4, (AS2 (ld,%D0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%A0,%1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (ld,%A0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%D0,%1));
  else if (CONSTANT_ADDRESS_P (base))
    /* lds is a two-word instruction, hence length 8 for four loads.  */
    return *l=8, (AS2 (lds,%A0,%m1)   CR_TAB
		  AS2 (lds,%B0,%m1+1) CR_TAB
		  AS2 (lds,%C0,%m1+2) CR_TAB
		  AS2 (lds,%D0,%m1+3));

  fatal_insn ("unknown move insn:",insn);
  return "";
}
2508
/* Output the assembler for an SImode store of register operand OP[1]
   into memory operand OP[0].  INSN is the move insn; if L is non-NULL
   it receives the number of output instructions.  Returns the
   assembler template string (or "" after fatal_insn).  */

const char *
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  /* Point L at a dummy so *l assignments below are unconditional.  */
  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,(AS2 (sts,%m0,%A1)   CR_TAB
		 AS2 (sts,%m0+1,%B1) CR_TAB
		 AS2 (sts,%m0+2,%C1) CR_TAB
		 AS2 (sts,%m0+3,%D1));
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
	      /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X+,__tmp_reg__)   CR_TAB
			      AS2 (st,X+,r28)           CR_TAB
			      AS2 (st,X,r29));
              else
                return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X+,__tmp_reg__)   CR_TAB
			      AS2 (st,X+,r28)           CR_TAB
			      AS2 (st,X,r29)            CR_TAB
			      AS2 (sbiw,r26,3));
            }
          else if (reg_base == reg_src + 2)
            {
              /* The top half of SRC is X itself: park bytes C/D in
                 __zero_reg__/__tmp_reg__ before X is advanced, then
                 restore __zero_reg__ to 0.  */
              if (reg_unused_after (insn, base))
                return *l=7, (AS2 (mov,__zero_reg__,%C1)  CR_TAB
                              AS2 (mov,__tmp_reg__,%D1)   CR_TAB
                              AS2 (st,%0+,%A1)            CR_TAB
                              AS2 (st,%0+,%B1)            CR_TAB
                              AS2 (st,%0+,__zero_reg__)   CR_TAB
                              AS2 (st,%0,__tmp_reg__)     CR_TAB
                              AS1 (clr,__zero_reg__));
              else
                return *l=8, (AS2 (mov,__zero_reg__,%C1)  CR_TAB
                              AS2 (mov,__tmp_reg__,%D1)   CR_TAB
                              AS2 (st,%0+,%A1)            CR_TAB
                              AS2 (st,%0+,%B1)            CR_TAB
                              AS2 (st,%0+,__zero_reg__)   CR_TAB
                              AS2 (st,%0,__tmp_reg__)     CR_TAB
                              AS1 (clr,__zero_reg__)      CR_TAB
                              AS2 (sbiw,r26,3));
            }
          return *l=5, (AS2 (st,%0+,%A1) CR_TAB
                        AS2 (st,%0+,%B1) CR_TAB
                        AS2 (st,%0+,%C1) CR_TAB
                        AS2 (st,%0,%D1)  CR_TAB
                        AS2 (sbiw,r26,3));
        }
      else
        return *l=4, (AS2 (st,%0,%A1)    CR_TAB
		      AS2 (std,%0+1,%B1) CR_TAB
		      AS2 (std,%0+2,%C1) CR_TAB
		      AS2 (std,%0+3,%D1));
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  /* Only Y may carry an out-of-range displacement here.  */
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
			    AS2 (std,Y+60,%A1)    CR_TAB
			    AS2 (std,Y+61,%B1)    CR_TAB
			    AS2 (std,Y+62,%C1)    CR_TAB
			    AS2 (std,Y+63,%D1)    CR_TAB
			    AS2 (sbiw,r28,%o0-60));

	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (st,Y,%A1)           CR_TAB
			  AS2 (std,Y+1,%B1)        CR_TAB
			  AS2 (std,Y+2,%C1)        CR_TAB
			  AS2 (std,Y+3,%D1)        CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      /* SRC is X itself: save it in __tmp_reg__/__zero_reg__
		 before adiw clobbers it; restore __zero_reg__ after.  */
	      *l = 9;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0)         CR_TAB
		      AS2 (st,X+,__tmp_reg__)    CR_TAB
		      AS2 (st,X+,__zero_reg__)   CR_TAB
		      AS2 (st,X+,r28)            CR_TAB
		      AS2 (st,X,r29)             CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0+3));
	    }
	  else if (reg_src == REG_X - 2)
	    {
	      *l = 9;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0)         CR_TAB
		      AS2 (st,X+,r24)            CR_TAB
		      AS2 (st,X+,r25)            CR_TAB
		      AS2 (st,X+,__tmp_reg__)    CR_TAB
		      AS2 (st,X,__zero_reg__)    CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0+3));
	    }
	  *l = 6;
	  return (AS2 (adiw,r26,%o0) CR_TAB
		  AS2 (st,X+,%A1)    CR_TAB
		  AS2 (st,X+,%B1)    CR_TAB
		  AS2 (st,X+,%C1)    CR_TAB
		  AS2 (st,X,%D1)     CR_TAB
		  AS2 (sbiw,r26,%o0+3));
	}
      return *l=4, (AS2 (std,%A0,%A1) CR_TAB
		    AS2 (std,%B0,%B1) CR_TAB
		    AS2 (std,%C0,%C1) CR_TAB
		    AS2 (std,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: stores go high-to-low.  */
    return *l=4, (AS2 (st,%0,%D1) CR_TAB
		  AS2 (st,%0,%C1) CR_TAB
		  AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (st,%0,%A1) CR_TAB
		  AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%C1) CR_TAB
		  AS2 (st,%0,%D1));
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2663
2664 const char *
2665 output_movsisf(rtx insn, rtx operands[], int *l)
2666 {
2667 int dummy;
2668 rtx dest = operands[0];
2669 rtx src = operands[1];
2670 int *real_l = l;
2671
2672 if (!l)
2673 l = &dummy;
2674
2675 if (register_operand (dest, VOIDmode))
2676 {
2677 if (register_operand (src, VOIDmode)) /* mov r,r */
2678 {
2679 if (true_regnum (dest) > true_regnum (src))
2680 {
2681 if (AVR_HAVE_MOVW)
2682 {
2683 *l = 2;
2684 return (AS2 (movw,%C0,%C1) CR_TAB
2685 AS2 (movw,%A0,%A1));
2686 }
2687 *l = 4;
2688 return (AS2 (mov,%D0,%D1) CR_TAB
2689 AS2 (mov,%C0,%C1) CR_TAB
2690 AS2 (mov,%B0,%B1) CR_TAB
2691 AS2 (mov,%A0,%A1));
2692 }
2693 else
2694 {
2695 if (AVR_HAVE_MOVW)
2696 {
2697 *l = 2;
2698 return (AS2 (movw,%A0,%A1) CR_TAB
2699 AS2 (movw,%C0,%C1));
2700 }
2701 *l = 4;
2702 return (AS2 (mov,%A0,%A1) CR_TAB
2703 AS2 (mov,%B0,%B1) CR_TAB
2704 AS2 (mov,%C0,%C1) CR_TAB
2705 AS2 (mov,%D0,%D1));
2706 }
2707 }
2708 else if (CONSTANT_P (src))
2709 {
2710 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2711 {
2712 *l = 4;
2713 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2714 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2715 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2716 AS2 (ldi,%D0,hhi8(%1)));
2717 }
2718
2719 if (GET_CODE (src) == CONST_INT)
2720 {
2721 const char *const clr_op0 =
2722 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2723 AS1 (clr,%B0) CR_TAB
2724 AS2 (movw,%C0,%A0))
2725 : (AS1 (clr,%A0) CR_TAB
2726 AS1 (clr,%B0) CR_TAB
2727 AS1 (clr,%C0) CR_TAB
2728 AS1 (clr,%D0));
2729
2730 if (src == const0_rtx) /* mov r,L */
2731 {
2732 *l = AVR_HAVE_MOVW ? 3 : 4;
2733 return clr_op0;
2734 }
2735 else if (src == const1_rtx)
2736 {
2737 if (!real_l)
2738 output_asm_insn (clr_op0, operands);
2739 *l = AVR_HAVE_MOVW ? 4 : 5;
2740 return AS1 (inc,%A0);
2741 }
2742 else if (src == constm1_rtx)
2743 {
2744 /* Immediate constants -1 to any register */
2745 if (AVR_HAVE_MOVW)
2746 {
2747 *l = 4;
2748 return (AS1 (clr,%A0) CR_TAB
2749 AS1 (dec,%A0) CR_TAB
2750 AS2 (mov,%B0,%A0) CR_TAB
2751 AS2 (movw,%C0,%A0));
2752 }
2753 *l = 5;
2754 return (AS1 (clr,%A0) CR_TAB
2755 AS1 (dec,%A0) CR_TAB
2756 AS2 (mov,%B0,%A0) CR_TAB
2757 AS2 (mov,%C0,%A0) CR_TAB
2758 AS2 (mov,%D0,%A0));
2759 }
2760 else
2761 {
2762 int bit_nr = exact_log2 (INTVAL (src));
2763
2764 if (bit_nr >= 0)
2765 {
2766 *l = AVR_HAVE_MOVW ? 5 : 6;
2767 if (!real_l)
2768 {
2769 output_asm_insn (clr_op0, operands);
2770 output_asm_insn ("set", operands);
2771 }
2772 if (!real_l)
2773 avr_output_bld (operands, bit_nr);
2774
2775 return "";
2776 }
2777 }
2778 }
2779
2780 /* Last resort, better than loading from memory. */
2781 *l = 10;
2782 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2783 AS2 (ldi,r31,lo8(%1)) CR_TAB
2784 AS2 (mov,%A0,r31) CR_TAB
2785 AS2 (ldi,r31,hi8(%1)) CR_TAB
2786 AS2 (mov,%B0,r31) CR_TAB
2787 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2788 AS2 (mov,%C0,r31) CR_TAB
2789 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2790 AS2 (mov,%D0,r31) CR_TAB
2791 AS2 (mov,r31,__tmp_reg__));
2792 }
2793 else if (GET_CODE (src) == MEM)
2794 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2795 }
2796 else if (GET_CODE (dest) == MEM)
2797 {
2798 const char *templ;
2799
2800 if (src == const0_rtx)
2801 operands[1] = zero_reg_rtx;
2802
2803 templ = out_movsi_mr_r (insn, operands, real_l);
2804
2805 if (!real_l)
2806 output_asm_insn (templ, operands);
2807
2808 operands[1] = src;
2809 return "";
2810 }
2811 fatal_insn ("invalid insn:", insn);
2812 return "";
2813 }
2814
/* Output the assembler for a QImode store of operand OP[1] into memory
   operand OP[0].  INSN is the move insn; if L is non-NULL it receives
   the number of output instructions.  Returns the assembler template.  */

const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);
  int dummy;

  /* Point L at a dummy so *l assignments below are unconditional.  */
  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Writing SREG gets a dedicated "out" so the flags register is
	 always addressed symbolically.  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
	{
	  *l = 1;
	  return AS2 (out,__SREG__,%1);
	}
      /* I/O addresses can use the shorter/faster "out" instruction;
	 0x20 is the offset between data and I/O address spaces.  */
      if (optimize > 0 && io_address_operand (x, QImode))
	{
	  *l = 1;
	  return AS2 (out,%m0-0x20,%1);
	}
      *l = 2;
      return AS2 (sts,%m0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x,0))
	   && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
	{
	  int disp = INTVAL (XEXP (x,1));
	  /* Out-of-range displacements are only expected on Y.  */
	  if (REGNO (XEXP (x,0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
			    AS2 (std,Y+63,%1)     CR_TAB
			    AS2 (sbiw,r28,%o0-63));

	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (st,Y,%1)            CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      else if (REGNO (XEXP (x,0)) == REG_X)
	{
	  /* X has no displacement form; adjust X around the store.  If
	     SRC overlaps X, stash it in __tmp_reg__ first.  */
	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
				AS2 (adiw,r26,%o0)       CR_TAB
				AS2 (st,X,__tmp_reg__));

	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
			      AS2 (adiw,r26,%o0)       CR_TAB
			      AS2 (st,X,__tmp_reg__)   CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	  else
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
				AS2 (st,X,%1));

	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
			      AS2 (st,X,%1)      CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	}
      *l = 1;
      return AS2 (std,%0,%1);
    }
  *l = 1;
  return AS2 (st,%0,%1);
}
2894
/* Output the assembler for an HImode store of operand OP[1] into memory
   operand OP[0].  INSN is the move insn; if L is non-NULL it receives
   the number of output instructions.  Returns the assembler template.  */

const char *
out_movhi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);
  int tmp;

  /* Point L at a dummy so *l assignments below are unconditional.  */
  if (!l)
    l = &tmp;
  if (CONSTANT_ADDRESS_P (base))
    {
      if (optimize > 0 && io_address_operand (base, HImode))
	{
	  /* I/O space: use "out", high byte first (see above).  */
	  *l = 2;
	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
		  AS2 (out,%m0-0x20,%A1));
	}
      return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
		      AS2 (sts,%m0,%A1));
    }
  if (reg_base > 0)
    {
      if (reg_base == REG_X)
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" and "st -X,r26" are undefined.  */
              if (!mem_volatile_p && reg_unused_after (insn, src))
		return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X,__tmp_reg__));
              else
		return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X,__tmp_reg__)    CR_TAB
			      AS2 (sbiw,r26,1)          CR_TAB
			      AS2 (st,X,r26));
            }
          else
            {
              if (!mem_volatile_p && reg_unused_after (insn, base))
                return *l=2, (AS2 (st,X+,%A1) CR_TAB
                              AS2 (st,X,%B1));
              else
                /* Volatile or X still live: write B first via adiw,
                   then A via pre-decrement, leaving X unchanged.  */
                return *l=3, (AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,%B1)   CR_TAB
                              AS2 (st,-X,%A1));
            }
        }
      else
        return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
                      AS2 (st,%0,%A1));
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  /* Out-of-range displacements are only expected on Y.  */
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
			    AS2 (std,Y+63,%B1)    CR_TAB
			    AS2 (std,Y+62,%A1)    CR_TAB
			    AS2 (sbiw,r28,%o0-62));

	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (std,Y+1,%B1)        CR_TAB
			  AS2 (st,Y,%A1)           CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
            {
	      /* SRC is X itself: save it before adiw clobbers it.  */
	      *l = 7;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
                      AS2 (adiw,r26,%o0+1)       CR_TAB
		      AS2 (st,X,__zero_reg__)    CR_TAB
		      AS2 (st,-X,__tmp_reg__)    CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
                      AS2 (sbiw,r26,%o0));
	    }
	  *l = 4;
          return (AS2 (adiw,r26,%o0+1) CR_TAB
                  AS2 (st,X,%B1)       CR_TAB
                  AS2 (st,-X,%A1)      CR_TAB
                  AS2 (sbiw,r26,%o0));
	}
      return *l=2, (AS2 (std,%B0,%B1) CR_TAB
                    AS2 (std,%A0,%A1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=2, (AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (mem_volatile_p)
        {
          /* Volatile: must still write the high byte first, so emulate
             the post-increment with explicit pointer adjustments.  */
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (adiw,r26,1) CR_TAB
                      AS2 (st,X,%B1)   CR_TAB
                      AS2 (st,-X,%A1)  CR_TAB
                      AS2 (adiw,r26,2));
            }
          else
            {
              *l = 3;
              return (AS2 (std,%p0+1,%B1) CR_TAB
                      AS2 (st,%p0,%A1)    CR_TAB
                      AS2 (adiw,%r0,2));
            }
        }

      *l = 2;
      return (AS2 (st,%0,%A1) CR_TAB
              AS2 (st,%0,%B1));
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
3031
3032 /* Return 1 if frame pointer for current function required. */
3033
3034 bool
3035 avr_frame_pointer_required_p (void)
3036 {
3037 return (cfun->calls_alloca
3038 || crtl->args.info.nregs == 0
3039 || get_frame_size () > 0);
3040 }
3041
3042 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3043
3044 static RTX_CODE
3045 compare_condition (rtx insn)
3046 {
3047 rtx next = next_real_insn (insn);
3048 RTX_CODE cond = UNKNOWN;
3049 if (next && GET_CODE (next) == JUMP_INSN)
3050 {
3051 rtx pat = PATTERN (next);
3052 rtx src = SET_SRC (pat);
3053 rtx t = XEXP (src, 0);
3054 cond = GET_CODE (t);
3055 }
3056 return cond;
3057 }
3058
3059 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
3060
3061 static int
3062 compare_sign_p (rtx insn)
3063 {
3064 RTX_CODE cond = compare_condition (insn);
3065 return (cond == GE || cond == LT);
3066 }
3067
3068 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3069 that needs to be swapped (GT, GTU, LE, LEU). */
3070
3071 int
3072 compare_diff_p (rtx insn)
3073 {
3074 RTX_CODE cond = compare_condition (insn);
3075 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3076 }
3077
3078 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3079
3080 int
3081 compare_eq_p (rtx insn)
3082 {
3083 RTX_CODE cond = compare_condition (insn);
3084 return (cond == EQ || cond == NE);
3085 }
3086
3087
3088 /* Output test instruction for HImode. */
3089
3090 const char *
3091 out_tsthi (rtx insn, rtx op, int *l)
3092 {
3093 if (compare_sign_p (insn))
3094 {
3095 if (l) *l = 1;
3096 return AS1 (tst,%B0);
3097 }
3098 if (reg_unused_after (insn, op)
3099 && compare_eq_p (insn))
3100 {
3101 /* Faster than sbiw if we can clobber the operand. */
3102 if (l) *l = 1;
3103 return "or %A0,%B0";
3104 }
3105 if (test_hard_reg_class (ADDW_REGS, op))
3106 {
3107 if (l) *l = 1;
3108 return AS2 (sbiw,%0,0);
3109 }
3110 if (l) *l = 2;
3111 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3112 AS2 (cpc,%B0,__zero_reg__));
3113 }
3114
3115
3116 /* Output test instruction for SImode. */
3117
3118 const char *
3119 out_tstsi (rtx insn, rtx op, int *l)
3120 {
3121 if (compare_sign_p (insn))
3122 {
3123 if (l) *l = 1;
3124 return AS1 (tst,%D0);
3125 }
3126 if (test_hard_reg_class (ADDW_REGS, op))
3127 {
3128 if (l) *l = 3;
3129 return (AS2 (sbiw,%A0,0) CR_TAB
3130 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3131 AS2 (cpc,%D0,__zero_reg__));
3132 }
3133 if (l) *l = 4;
3134 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3135 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3136 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3137 AS2 (cpc,%D0,__zero_reg__));
3138 }
3139
3140
/* Generate asm equivalent for various shifts.
   Shift count is a CONST_INT, MEM or REG.
   This only handles cases that are not already
   carefully hand-optimized in ?sh??i3_out.

   TEMPL is the template for one shift step, T_LEN its length in
   instructions; OPERANDS are the insn operands (operands[3] may be a
   scratch register when the pattern is a PARALLEL).  If LEN is non-NULL
   only the instruction count is computed; otherwise the code is emitted
   with output_asm_insn.  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
		    int *len, int t_len)
{
  rtx op[10];
  char str[500];
  int second_label = 1;		/* Loop entry jumps to label 2 first.  */
  int saved_in_tmp = 0;		/* Counter register saved in __tmp_reg__.  */
  int use_zero_reg = 0;		/* __zero_reg__ abused as loop counter.  */

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];
  str[0] = 0;

  if (len)
    *len = 1;

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
	{
	  /* Shift by zero (or negative): nothing to emit.  */
	  if (len)
	    *len = 0;
	  return;
	}

      if (count < 8 && !scratch)
	use_zero_reg = 1;

      if (optimize_size)
	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
	{
	  /* Output shifts inline with no loop - faster.  */
	  if (len)
	    *len = t_len * count;
	  else
	    {
	      while (count-- > 0)
		output_asm_insn (templ, op);
	    }

	  return;
	}

      if (scratch)
	{
	  /* A scratch register from the pattern holds the count.  */
	  if (!len)
	    strcat (str, AS2 (ldi,%3,%2));
	}
      else if (use_zero_reg)
	{
	  /* Hack to save one word: use __zero_reg__ as loop counter.
	     Set one bit, then shift in a loop until it is 0 again.  */

	  op[3] = zero_reg_rtx;
	  if (len)
	    *len = 2;
	  else
	    strcat (str, ("set" CR_TAB
			  AS2 (bld,%3,%2-1)));
	}
      else
	{
	  /* No scratch register available, use one from LD_REGS (saved in
	     __tmp_reg__) that doesn't overlap with registers to shift.  */

	  op[3] = gen_rtx_REG (QImode,
			       ((true_regnum (operands[0]) - 1) & 15) + 16);
	  op[4] = tmp_reg_rtx;
	  saved_in_tmp = 1;

	  if (len)
	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
	  else
	    strcat (str, (AS2 (mov,%4,%3) CR_TAB
			  AS2 (ldi,%3,%2)));
	}

      /* Constant count is known nonzero: no need to test before looping.  */
      second_label = 0;
    }
  else if (GET_CODE (operands[2]) == MEM)
    {
      /* Count lives in memory: load it into __tmp_reg__ first.  */
      rtx op_mov[10];

      op[3] = op_mov[0] = tmp_reg_rtx;
      op_mov[1] = op[2];

      if (len)
	out_movqi_r_mr (insn, op_mov, len);
      else
	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
    }
  else if (register_operand (operands[2], QImode))
    {
      if (reg_unused_after (insn, operands[2]))
	op[3] = op[2];
      else
	{
	  /* Count register survives the insn: loop on a copy.  */
	  op[3] = tmp_reg_rtx;
	  if (!len)
	    strcat (str, (AS2 (mov,%3,%2) CR_TAB));
	}
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    {
      /* Count may be zero: jump past the shift to the test first.  */
      if (len)
	++*len;
      else
	strcat (str, AS1 (rjmp,2f));
    }

  if (len)
    *len += t_len + 2;  /* template + dec + brXX */
  else
    {
      strcat (str, "\n1:\t");
      strcat (str, templ);
      strcat (str, second_label ? "\n2:\t" : "\n\t");
      strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
      strcat (str, CR_TAB);
      strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
      if (saved_in_tmp)
	strcat (str, (CR_TAB AS2 (mov,%3,%4)));
      output_asm_insn (str, op);
    }
}
3283
3284
/* 8bit shift left ((char)x << i)

   Output the assembler for a QImode left shift.  OPERANDS[0] is the
   (in-place) value, OPERANDS[2] the shift count.  If LEN is non-NULL it
   receives the instruction count.  Constant counts get hand-optimized
   sequences; everything else falls through to out_shift_with_cnt.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  /* Counts >= 8 shift everything out: just clear.  Smaller
	     unhandled counts break out to the generic loop below.  */
	  if (INTVAL (operands[2]) < 8)
	    break;

	  *len = 1;
	  return AS1 (clr,%0);

	case 1:
	  *len = 1;
	  return AS1 (lsl,%0);

	case 2:
	  *len = 2;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 3:
	  *len = 3;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 4:
	  /* swap exchanges nibbles; mask off the low one.  Needs an
	     upper (ldi-capable) register for andi.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 2;
	      return (AS1 (swap,%0) CR_TAB
		      AS2 (andi,%0,0xf0));
	    }
	  *len = 4;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS2 (andi,%0,0xe0));
	    }
	  *len = 5;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS2 (andi,%0,0xc0));
	    }
	  *len = 6;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 7:
	  /* Rotate bit 0 into carry, clear, rotate carry into bit 7.  */
	  *len = 3;
	  return (AS1 (ror,%0) CR_TAB
		  AS1 (clr,%0) CR_TAB
		  AS1 (ror,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsl,%0),
		      insn, operands, len, 1);
  return "";
}
3380
3381
/* 16bit shift left ((short)x << i)

   Output the assembler for an HImode left shift.  OPERANDS[0] is the
   (in-place) value, OPERANDS[2] the shift count, OPERANDS[3] a scratch
   register when the pattern is a PARALLEL.  If LEN is non-NULL it
   receives the instruction count.  Constant counts get hand-optimized
   sequences; everything else falls back to out_shift_with_cnt.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means a scratch register (%3) is available.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  /* Counts >= 16 shift everything out: clear both bytes.
	     Smaller unhandled counts break out to the loop below.  */
	  if (INTVAL (operands[2]) < 16)
	    break;

	  *len = 2;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 4:
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      /* Nibble-swap both bytes, then merge with eor masking.  */
	      *len = 6;
	      return (AS1 (swap,%A0)      CR_TAB
		      AS1 (swap,%B0)      CR_TAB
		      AS2 (andi,%B0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0)   CR_TAB
		      AS2 (andi,%A0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return (AS1 (swap,%A0)    CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      "and %B0,%3"      CR_TAB
		      AS2 (eor,%B0,%A0) CR_TAB
		      "and %A0,%3"      CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      /* One plain shift, then the 4-bit trick from case 4.  */
	      *len = 8;
	      return (AS1 (lsl,%A0)       CR_TAB
		      AS1 (rol,%B0)       CR_TAB
		      AS1 (swap,%A0)      CR_TAB
		      AS1 (swap,%B0)      CR_TAB
		      AS2 (andi,%B0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0)   CR_TAB
		      AS2 (andi,%A0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return (AS1 (lsl,%A0)     CR_TAB
		      AS1 (rol,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      "and %B0,%3"      CR_TAB
		      AS2 (eor,%B0,%A0) CR_TAB
		      "and %A0,%3"      CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  break;  /* 10 */

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* Shift left by 6 == shift right by 2 plus a byte move.  */
	  *len = 9;
	  return (AS1 (clr,__tmp_reg__) CR_TAB
		  AS1 (lsr,%B0)         CR_TAB
		  AS1 (ror,%A0)         CR_TAB
		  AS1 (ror,__tmp_reg__) CR_TAB
		  AS1 (lsr,%B0)         CR_TAB
		  AS1 (ror,%A0)         CR_TAB
		  AS1 (ror,__tmp_reg__) CR_TAB
		  AS2 (mov,%B0,%A0)     CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));

	case 7:
	  *len = 5;
	  return (AS1 (lsr,%B0)     CR_TAB
		  AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (ror,%B0)     CR_TAB
		  AS1 (ror,%A0));

	case 8:
	  /* Whole-byte shift: move low byte up, clear low byte.  */
	  return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
			    AS1 (clr,%A0));

	case 9:
	  *len = 3;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0));

	case 10:
	  *len = 4;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0));

	case 11:
	  *len = 5;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0));

	case 12:
	  if (ldi_ok)
	    {
	      *len = 4;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS2 (andi,%B0,0xf0));
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      "and %B0,%3");
	    }
	  *len = 6;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0));

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS1 (lsl,%B0)     CR_TAB
		      AS2 (andi,%B0,0xe0));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      /* Multiply by 0x20 == shift left by 5 into the high byte;
		 the mul clobbers r1 (__zero_reg__), so clear it after.  */
	      *len = 5;
	      return (AS2 (ldi,%3,0x20) CR_TAB
		      AS2 (mul,%A0,%3)  CR_TAB
		      AS2 (mov,%B0,r0)  CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS1 (lsl,%B0)     CR_TAB
		      AS2 (ldi,%3,0xe0) CR_TAB
		      "and %B0,%3");
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* No scratch: build the 0x20 multiplier in r1 via set/bld.  */
	      *len = 6;
	      return ("set"            CR_TAB
		      AS2 (bld,r1,5)   CR_TAB
		      AS2 (mul,%A0,r1) CR_TAB
		      AS2 (mov,%B0,r0) CR_TAB
		      AS1 (clr,%A0)    CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  *len = 7;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0));

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      /* Multiply by 0x40 == shift left by 6 into the high byte.  */
	      *len = 5;
	      return (AS2 (ldi,%B0,0x40) CR_TAB
		      AS2 (mul,%A0,%B0)  CR_TAB
		      AS2 (mov,%B0,r0)   CR_TAB
		      AS1 (clr,%A0)      CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return (AS2 (ldi,%3,0x40) CR_TAB
		      AS2 (mul,%A0,%3)  CR_TAB
		      AS2 (mov,%B0,r0)  CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Small loop: %A0 doubles as the loop counter.  */
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS2 (ldi,%A0,6) "\n1:\t"
		      AS1 (lsl,%B0)     CR_TAB
		      AS1 (dec,%A0)     CR_TAB
		      AS1 (brne,1b));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 15:
	  /* Only bit 0 survives, rotated into bit 7 of the high byte.  */
	  *len = 4;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (clr,%A0));
	}
      /* Unhandled constant: restore caller's LEN for the generic path.  */
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
		       AS1 (rol,%B0)),
		      insn, operands, len, 2);
  return "";
}
3638
3639
3640 /* 32bit shift left ((long)x << i) */
3641
/* Output the assembler template for a 32-bit shift left by a constant,
   and/or compute its length in instructions.

   INSN is the shift insn; OPERANDS[0]/[1] are destination/source and
   OPERANDS[2] the shift count.  If LEN is non-NULL, the instruction
   count is stored through it (used by adjust_insn_length); the returned
   template string is built from the AS1/AS2/CR_TAB macros.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;		/* Dummy sink for the length count.  */

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;		/* No special case; use the loop below.  */

	  /* Shift count >= 32: result is all zeros.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, (AS1 (clr,%D0) CR_TAB
			      AS1 (clr,%C0) CR_TAB
			      AS2 (movw,%A0,%C0));
	  *len = 4;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    /* Byte-wise move up by one; copy order chosen so an
	       overlapping source is not clobbered before it is read.  */
	    if (reg0 >= reg1)
	      return (AS2 (mov,%D0,%C1) CR_TAB
		      AS2 (mov,%C0,%B1) CR_TAB
		      AS2 (mov,%B0,%A1) CR_TAB
		      AS1 (clr,%A0));
	    else
	      return (AS1 (clr,%A0) CR_TAB
		      AS2 (mov,%B0,%A1) CR_TAB
		      AS2 (mov,%C0,%B1) CR_TAB
		      AS2 (mov,%D0,%C1));
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    /* High word of dest already holds the source low word.  */
	    if (reg0 + 2 == reg1)
	      return *len = 2, (AS1 (clr,%B0) CR_TAB
				AS1 (clr,%A0));
	    if (AVR_HAVE_MOVW)
	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
				AS1 (clr,%B0) CR_TAB
				AS1 (clr,%A0));
	    else
	      return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
				AS2 (mov,%D0,%B1) CR_TAB
				AS1 (clr,%B0) CR_TAB
				AS1 (clr,%A0));
	  }

	case 24:
	  *len = 4;
	  return (AS2 (mov,%D0,%A1) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 31:
	  /* Only bit 0 survives; route it into bit 7 of the top byte
	     via the carry, then clear everything else.  */
	  *len = 6;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));
	}
      len = t;	/* Restore caller's LEN (possibly NULL) for the call below.  */
    }
  /* Generic case: emit a (possibly looping) 1-bit-per-step shift.  */
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
		       AS1 (rol,%B0) CR_TAB
		       AS1 (rol,%C0) CR_TAB
		       AS1 (rol,%D0)),
		      insn, operands, len, 4);
  return "";
}
3729
3730 /* 8bit arithmetic shift right ((signed char)x >> i) */
3731
/* Output the assembler template for an 8-bit arithmetic shift right
   ((signed char)x >> i), and/or compute its length in instructions.

   OPERANDS[0] is the (destination) register, OPERANDS[2] the shift
   count.  If LEN is non-NULL the instruction count is stored there.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;		/* Dummy sink for the length count.  */

      switch (INTVAL (operands[2]))
	{
	case 1:
	  *len = 1;
	  return AS1 (asr,%0);

	case 2:
	  *len = 2;
	  return (AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0));

	case 3:
	  *len = 3;
	  return (AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0));

	case 4:
	  *len = 4;
	  return (AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0));

	case 5:
	  *len = 5;
	  return (AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0));

	case 6:
	  /* Save bit 6, build a sign mask from bit 7 (sbc of a register
	     with itself yields 0x00/0xFF from the carry), then reinsert
	     the saved bit as bit 0.  */
	  *len = 4;
	  return (AS2 (bst,%0,6) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS2 (sbc,%0,%0) CR_TAB
		  AS2 (bld,%0,0));

	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* fall through */

	case 7:
	  /* Shift by 7 (or more) replicates the sign bit everywhere.  */
	  *len = 2;
	  return (AS1 (lsl,%0) CR_TAB
		  AS2 (sbc,%0,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Generic case: 1-bit-per-step shift, possibly in a loop.  */
  out_shift_with_cnt (AS1 (asr,%0),
		      insn, operands, len, 1);
  return "";
}
3800
3801
3802 /* 16bit arithmetic shift right ((signed short)x >> i) */
3803
/* Output the assembler template for a 16-bit arithmetic shift right
   ((signed short)x >> i), and/or compute its length in instructions.

   A PARALLEL pattern indicates a scratch register is available as
   operand 3 (referenced as %3 in the templates); LDI_OK says whether
   the destination lives in LD_REGS so immediate instructions like
   `andi'/`ldi' may target it directly.  If LEN is non-NULL the
   instruction count is stored there.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;		/* Dummy sink for the length count.  */

      switch (INTVAL (operands[2]))
	{
	case 4:
	case 5:
	  /* XXX try to optimize this too? */
	  break;

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  *len = 8;
	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,__tmp_reg__) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (lsl,__tmp_reg__) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (rol,%B0));

	case 7:
	  *len = 4;
	  return (AS1 (lsl,%A0) CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS2 (sbc,%B0,%B0));

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Move the high byte down and sign-extend into the high byte;
	       the non-identical-register variant avoids clobbering SREG
	       differently by using sbrc/dec.  */
	    if (reg0 == reg1)
	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
				AS1 (lsl,%B0) CR_TAB
				AS2 (sbc,%B0,%B0));
	    else
	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
				AS1 (clr,%B0) CR_TAB
				AS2 (sbrc,%A0,7) CR_TAB
				AS1 (dec,%B0));
	  }

	case 9:
	  *len = 4;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0));

	case 10:
	  *len = 5;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0));

	case 11:
	  /* With a hardware multiplier, a signed multiply by a power of
	     two performs the shift in constant time.  */
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (ldi,%A0,0x20) CR_TAB
		      AS2 (muls,%B0,%A0) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS2 (sbc,%B0,%B0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0));

	case 12:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (ldi,%A0,0x10) CR_TAB
		      AS2 (muls,%B0,%A0) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS2 (sbc,%B0,%B0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 7;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0));

	case 13:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (ldi,%A0,0x08) CR_TAB
		      AS2 (muls,%B0,%A0) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS2 (sbc,%B0,%B0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size)
	    break;  /* scratch ? 5 : 7 */
	  *len = 8;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0));

	case 14:
	  *len = 5;
	  return (AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%A0,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (rol,%A0));

	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* fall through */

	case 15:
	  /* Shift by 15 (or more) replicates the sign bit everywhere.  */
	  return *len = 3, (AS1 (lsl,%B0) CR_TAB
			    AS2 (sbc,%A0,%A0) CR_TAB
			    AS2 (mov,%B0,%A0));
	}
      len = t;	/* Restore caller's LEN (possibly NULL) for the call below.  */
    }
  out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 2);
  return "";
}
3964
3965
3966 /* 32bit arithmetic shift right ((signed long)x >> i) */
3967
/* Output the assembler template for a 32-bit arithmetic shift right
   ((signed long)x >> i), and/or compute its length in instructions.

   If LEN is non-NULL the instruction count is stored there.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;		/* Dummy sink for the length count.  */

      switch (INTVAL (operands[2]))
	{
	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len=6;
	    /* Byte-wise move down with sign extension into the top byte;
	       copy order depends on register overlap direction.  */
	    if (reg0 <= reg1)
	      return (AS2 (mov,%A0,%B1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS1 (clr,%D0)     CR_TAB
		      AS2 (sbrc,%C0,7)  CR_TAB
		      AS1 (dec,%D0));
	    else
	      return (AS1 (clr,%D0)     CR_TAB
		      AS2 (sbrc,%D1,7)  CR_TAB
		      AS1 (dec,%D0)     CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%A0,%B1));
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Low word of dest already holds the source high word.  */
	    if (reg0 == reg1 + 2)
	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
				AS2 (sbrc,%B0,7)  CR_TAB
				AS1 (com,%D0)     CR_TAB
				AS2 (mov,%C0,%D0));
	    if (AVR_HAVE_MOVW)
	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
				AS1 (clr,%D0)      CR_TAB
				AS2 (sbrc,%B0,7)   CR_TAB
				AS1 (com,%D0)      CR_TAB
				AS2 (mov,%C0,%D0));
	    else
	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
				AS2 (mov,%A0,%C1) CR_TAB
				AS1 (clr,%D0)     CR_TAB
				AS2 (sbrc,%B0,7)  CR_TAB
				AS1 (com,%D0)     CR_TAB
				AS2 (mov,%C0,%D0));
	  }

	case 24:
	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
			    AS1 (clr,%D0)     CR_TAB
			    AS2 (sbrc,%A0,7)  CR_TAB
			    AS1 (com,%D0)     CR_TAB
			    AS2 (mov,%B0,%D0) CR_TAB
			    AS2 (mov,%C0,%D0));

	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* fall through */

	case 31:
	  /* Shift by 31 (or more) replicates the sign bit everywhere.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
			      AS2 (sbc,%A0,%A0) CR_TAB
			      AS2 (mov,%B0,%A0) CR_TAB
			      AS2 (movw,%C0,%A0));
	  else
	    return *len = 5, (AS1 (lsl,%D0)     CR_TAB
			      AS2 (sbc,%A0,%A0) CR_TAB
			      AS2 (mov,%B0,%A0) CR_TAB
			      AS2 (mov,%C0,%A0) CR_TAB
			      AS2 (mov,%D0,%A0));
	}
      len = t;	/* Restore caller's LEN (possibly NULL) for the call below.  */
    }
  out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
		       AS1 (ror,%C0) CR_TAB
		       AS1 (ror,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 4);
  return "";
}
4063
4064 /* 8bit logic shift right ((unsigned char)x >> i) */
4065
/* Output the assembler template for an 8-bit logical shift right
   ((unsigned char)x >> i), and/or compute its length in instructions.

   When OPERANDS[0] is in LD_REGS, swap/andi sequences beat chains of
   single-bit shifts for counts >= 4.  If LEN is non-NULL the
   instruction count is stored there.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;		/* Dummy sink for the length count.  */

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Shift count >= 8: result is zero.  */
	  *len = 1;
	  return AS1 (clr,%0);

	case 1:
	  *len = 1;
	  return AS1 (lsr,%0);

	case 2:
	  *len = 2;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));
	case 3:
	  *len = 3;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 4:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len=2;
	      return (AS1 (swap,%0) CR_TAB
		      AS2 (andi,%0,0x0f));
	    }
	  *len = 4;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS2 (andi,%0,0x7));
	    }
	  *len = 5;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS2 (andi,%0,0x3));
	    }
	  *len = 6;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 7:
	  /* Only bit 7 survives; rotate it out and back into bit 0.  */
	  *len = 3;
	  return (AS1 (rol,%0) CR_TAB
		  AS1 (clr,%0) CR_TAB
		  AS1 (rol,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsr,%0),
		      insn, operands, len, 1);
  return "";
}
4158
4159 /* 16bit logic shift right ((unsigned short)x >> i) */
4160
/* Output the assembler template for a 16-bit logical shift right
   ((unsigned short)x >> i), and/or compute its length in instructions.

   A PARALLEL pattern indicates a scratch register is available as
   operand 3 (referenced as %3); LDI_OK says whether the destination is
   in LD_REGS so `andi'/`ldi' may target it directly.  If LEN is
   non-NULL the instruction count is stored there.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;		/* Dummy sink for the length count.  */

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Shift count >= 16: result is zero.  */
	  *len = 2;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 4:
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  /* Nibble-swap both bytes, then redistribute nibbles between
	     bytes with the eor/andi/eor masking trick.  */
	  if (ldi_ok)
	    {
	      *len = 6;
	      return (AS1 (swap,%B0)      CR_TAB
		      AS1 (swap,%A0)      CR_TAB
		      AS2 (andi,%A0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0)   CR_TAB
		      AS2 (andi,%B0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return (AS1 (swap,%B0)    CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      "and %A0,%3"      CR_TAB
		      AS2 (eor,%A0,%B0) CR_TAB
		      "and %B0,%3"      CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* One plain shift, then the same nibble trick as case 4.  */
	  if (ldi_ok)
	    {
	      *len = 8;
	      return (AS1 (lsr,%B0)       CR_TAB
		      AS1 (ror,%A0)       CR_TAB
		      AS1 (swap,%B0)      CR_TAB
		      AS1 (swap,%A0)      CR_TAB
		      AS2 (andi,%A0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0)   CR_TAB
		      AS2 (andi,%B0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return (AS1 (lsr,%B0)     CR_TAB
		      AS1 (ror,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      "and %A0,%3"      CR_TAB
		      AS2 (eor,%A0,%B0) CR_TAB
		      "and %B0,%3"      CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  break;  /* 10 */

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* Shift left twice into __tmp_reg__, then shuffle bytes down.  */
	  *len = 9;
	  return (AS1 (clr,__tmp_reg__) CR_TAB
		  AS1 (lsl,%A0)         CR_TAB
		  AS1 (rol,%B0)         CR_TAB
		  AS1 (rol,__tmp_reg__) CR_TAB
		  AS1 (lsl,%A0)         CR_TAB
		  AS1 (rol,%B0)         CR_TAB
		  AS1 (rol,__tmp_reg__) CR_TAB
		  AS2 (mov,%A0,%B0)     CR_TAB
		  AS2 (mov,%B0,__tmp_reg__));

	case 7:
	  *len = 5;
	  return (AS1 (lsl,%A0)     CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (rol,%A0)     CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (neg,%B0));

	case 8:
	  return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
			    AS1 (clr,%B0));

	case 9:
	  *len = 3;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0));

	case 10:
	  *len = 4;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 11:
	  *len = 5;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 12:
	  if (ldi_ok)
	    {
	      *len = 4;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (andi,%A0,0x0f));
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      "and %A0,%3");
	    }
	  *len = 6;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS1 (lsr,%A0)     CR_TAB
		      AS2 (andi,%A0,0x07));
	    }
	  /* With a hardware multiplier, multiply by 8 to get the high
	     bits into r1.  */
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return (AS2 (ldi,%3,0x08) CR_TAB
		      AS2 (mul,%B0,%3)  CR_TAB
		      AS2 (mov,%A0,r1)  CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS1 (lsr,%A0)     CR_TAB
		      AS2 (ldi,%3,0x07) CR_TAB
		      "and %A0,%3");
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the 0x08 multiplier in r1 via set/bld to avoid
		 needing an immediate-capable register.  */
	      *len = 6;
	      return ("set"            CR_TAB
		      AS2 (bld,r1,3)   CR_TAB
		      AS2 (mul,%B0,r1) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS1 (clr,%B0)    CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  *len = 7;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (ldi,%A0,0x04) CR_TAB
		      AS2 (mul,%B0,%A0)  CR_TAB
		      AS2 (mov,%A0,r1)   CR_TAB
		      AS1 (clr,%B0)      CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return (AS2 (ldi,%3,0x04) CR_TAB
		      AS2 (mul,%B0,%3)  CR_TAB
		      AS2 (mov,%A0,r1)  CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  /* Size-optimized 6-iteration loop (label 1: / brne 1b).  */
	  if (optimize_size && ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS2 (ldi,%B0,6) "\n1:\t"
		      AS1 (lsr,%A0)     CR_TAB
		      AS1 (dec,%B0)     CR_TAB
		      AS1 (brne,1b));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return (AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (clr,%B0));

	case 15:
	  /* Only bit 15 survives; rotate it into bit 0 via the carry.  */
	  *len = 4;
	  return (AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (clr,%B0));
	}
      len = t;	/* Restore caller's LEN (possibly NULL) for the call below.  */
    }
  out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 2);
  return "";
}
4415
/* 32bit logic shift right ((unsigned long)x >> i) */
4417
/* Output the assembler template for a 32-bit logical shift right
   ((unsigned long)x >> i), and/or compute its length in instructions.

   If LEN is non-NULL the instruction count is stored there.  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;		/* Dummy sink for the length count.  */

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Shift count >= 32: result is zero.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, (AS1 (clr,%D0) CR_TAB
			      AS1 (clr,%C0) CR_TAB
			      AS2 (movw,%A0,%C0));
	  *len = 4;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    /* Byte-wise move down by one; copy order chosen so an
	       overlapping source is not clobbered before it is read.  */
	    if (reg0 <= reg1)
	      return (AS2 (mov,%A0,%B1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS1 (clr,%D0));
	    else
	      return (AS1 (clr,%D0)     CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%A0,%B1));
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Low word of dest already holds the source high word.  */
	    if (reg0 == reg1 + 2)
	      return *len = 2, (AS1 (clr,%C0) CR_TAB
				AS1 (clr,%D0));
	    if (AVR_HAVE_MOVW)
	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
				AS1 (clr,%C0)      CR_TAB
				AS1 (clr,%D0));
	    else
	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
				AS2 (mov,%A0,%C1) CR_TAB
				AS1 (clr,%C0)     CR_TAB
				AS1 (clr,%D0));
	  }

	case 24:
	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
			    AS1 (clr,%B0)     CR_TAB
			    AS1 (clr,%C0)     CR_TAB
			    AS1 (clr,%D0));

	case 31:
	  /* Only bit 31 survives; test it and set bit 0 accordingly.  */
	  *len = 6;
	  return (AS1 (clr,%A0)    CR_TAB
		  AS2 (sbrc,%D0,7) CR_TAB
		  AS1 (inc,%A0)    CR_TAB
		  AS1 (clr,%B0)    CR_TAB
		  AS1 (clr,%C0)    CR_TAB
		  AS1 (clr,%D0));
	}
      len = t;	/* Restore caller's LEN (possibly NULL) for the call below.  */
    }
  out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
		       AS1 (ror,%C0) CR_TAB
		       AS1 (ror,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 4);
  return "";
}
4505
4506 /* Create RTL split patterns for byte sized rotate expressions. This
4507 produces a series of move instructions and considers overlap situations.
4508 Overlapping non-HImode operands need a scratch register. */
4509
bool
avr_rotate_bytes (rtx operands[])
{
  int i, j;
  enum machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);	/* Rotate count in bits.  */
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  enum machine_mode move_mode = QImode;
  int move_size, offset, size;

  /* Rotates that are not a multiple of 16 bits must move single bytes.  */
  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (GET_MODE (scratch) == HImode && move_mode == QImode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* Himode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
	{
	  /* Classic three-XOR swap of the two bytes.  */
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	  emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	}
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  The array
	 has headroom beyond MAX_SIZE because deadlock breaking below
	 appends extra scratch moves at move[size].  */
      struct {
	rtx src, dst;
	int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
	{
	  int from = i;
	  int to = (from + offset) % size;
	  move[i].src = simplify_gen_subreg (move_mode, operands[1],
					     mode, from * move_size);
	  move[i].dst = simplify_gen_subreg (move_mode, operands[0],
					     mode, to * move_size);
	  move[i].links = -1;
	}
      /* Mark dependence where a dst of one move is the src of another move.
	 The first move is a conflict as it must wait until second is
	 performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
	for (i = 0; i < size; i++)
	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
	    for (j = 0; j < size; j++)
	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
		{
		  /* The dst of move i is the src of move j.  */
		  move[i].links = j;
		  break;
		}

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
	 non-overlapping move is made, it may remove other conflicts
	 so the process is repeated until no conflicts remain.  */
      do
	{
	  blocked = -1;
	  moves = 0;
	  /* Emit move where dst is not also a src or we have used that
	     src already.  */
	  for (i = 0; i < size; i++)
	    if (move[i].src != NULL_RTX)
	      {
		if (move[i].links == -1
		    || move[move[i].links].src == NULL_RTX)
		  {
		    moves++;
		    /* Ignore NOP moves to self.  */
		    if (!rtx_equal_p (move[i].dst, move[i].src))
		      emit_move_insn (move[i].dst, move[i].src);

		    /* Remove conflict from list.  */
		    move[i].src = NULL_RTX;
		  }
		else
		  blocked = i;
	      }

	  /* Check for deadlock.  This is when no moves occurred and we have
	     at least one blocked move.  */
	  if (moves == 0 && blocked != -1)
	    {
	      /* Need to use scratch register to break deadlock.
		 Add move to put dst of blocked move into scratch.
		 When this move occurs, it will break chain deadlock.
		 The scratch register is substituted for real move.  */

	      move[size].src = move[blocked].dst;
	      move[size].dst = scratch;
	      /* Scratch move is never blocked.  */
	      move[size].links = -1;
	      /* Make sure we have valid link.  */
	      gcc_assert (move[blocked].links != -1);
	      /* Replace src of blocking move with scratch reg.  */
	      move[move[blocked].links].src = scratch;
	      /* Make dependent on scratch move occurring.  */
	      move[blocked].links = size;
	      size=size+1;
	    }
	}
      while (blocked != -1);
    }
  return true;
}
4649
/* Modifies the length assigned to instruction INSN.
   LEN is the initially computed length of the insn.  */
4652
int
adjust_insn_length (rtx insn, int len)
{
  rtx patt = PATTERN (insn);
  rtx set;

  /* Plain SET patterns: recompute the length using the same output
     routines that emit the code, or by counting nontrivial bytes for
     AND/IOR with a constant mask.  */
  if (GET_CODE (patt) == SET)
    {
      rtx op[10];
      op[1] = SET_SRC (patt);
      op[0] = SET_DEST (patt);
      if (general_operand (op[1], VOIDmode)
	  && general_operand (op[0], VOIDmode))
	{
	  /* Register/memory moves: the output_mov* routines store the
	     real length through their LEN argument.  */
	  switch (GET_MODE (op[0]))
	    {
	    case QImode:
	      output_movqi (insn, op, &len);
	      break;
	    case HImode:
	      output_movhi (insn, op, &len);
	      break;
	    case SImode:
	    case SFmode:
	      output_movsisf (insn, op, &len);
	      break;
	    default:
	      break;
	    }
	}
      else if (op[0] == cc0_rtx && REG_P (op[1]))
	{
	  /* Compare-with-zero (test) insns.  */
	  switch (GET_MODE (op[1]))
	    {
	    case HImode: out_tsthi (insn, op[1], &len); break;
	    case SImode: out_tstsi (insn, op[1], &len); break;
	    default: break;
	    }
	}
      else if (GET_CODE (op[1]) == AND)
	{
	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
	    {
	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
	      /* One instruction per byte whose mask is not all-ones
		 (all-ones bytes need no code).  */
	      if (GET_MODE (op[1]) == SImode)
		len = (((mask & 0xff) != 0xff)
		       + ((mask & 0xff00) != 0xff00)
		       + ((mask & 0xff0000L) != 0xff0000L)
		       + ((mask & 0xff000000L) != 0xff000000L));
	      else if (GET_MODE (op[1]) == HImode)
		len = (((mask & 0xff) != 0xff)
		       + ((mask & 0xff00) != 0xff00));
	    }
	}
      else if (GET_CODE (op[1]) == IOR)
	{
	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
	    {
	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
	      /* One instruction per byte whose mask is nonzero
		 (all-zero bytes need no code).  */
	      if (GET_MODE (op[1]) == SImode)
		len = (((mask & 0xff) != 0)
		       + ((mask & 0xff00) != 0)
		       + ((mask & 0xff0000L) != 0)
		       + ((mask & 0xff000000L) != 0));
	      else if (GET_MODE (op[1]) == HImode)
		len = (((mask & 0xff) != 0)
		       + ((mask & 0xff00) != 0));
	    }
	}
    }
  set = single_set (insn);
  if (set)
    {
      rtx op[10];

      op[1] = SET_SRC (set);
      op[0] = SET_DEST (set);

      /* PARALLEL of a move plus (up to one) extra element — e.g. a
	 clobbered scratch register, exposed to the output routines as
	 op[2].  */
      if (GET_CODE (patt) == PARALLEL
	  && general_operand (op[1], VOIDmode)
	  && general_operand (op[0], VOIDmode))
	{
	  if (XVECLEN (patt, 0) == 2)
	    op[2] = XVECEXP (patt, 0, 1);

	  switch (GET_MODE (op[0]))
	    {
	    case QImode:
	      len = 2;
	      break;
	    case HImode:
	      output_reload_inhi (insn, op, &len);
	      break;
	    case SImode:
	    case SFmode:
	      output_reload_insisf (insn, op, &len);
	      break;
	    default:
	      break;
	    }
	}
      else if (GET_CODE (op[1]) == ASHIFT
	  || GET_CODE (op[1]) == ASHIFTRT
	  || GET_CODE (op[1]) == LSHIFTRT)
	{
	  /* Shift insns: reuse the shift output routines (with their
	     LEN argument set) to get the exact length.  */
	  rtx ops[10];
	  ops[0] = op[0];
	  ops[1] = XEXP (op[1],0);
	  ops[2] = XEXP (op[1],1);
	  switch (GET_CODE (op[1]))
	    {
	    case ASHIFT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: ashlqi3_out (insn,ops,&len); break;
		case HImode: ashlhi3_out (insn,ops,&len); break;
		case SImode: ashlsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    case ASHIFTRT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: ashrqi3_out (insn,ops,&len); break;
		case HImode: ashrhi3_out (insn,ops,&len); break;
		case SImode: ashrsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    case LSHIFTRT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: lshrqi3_out (insn,ops,&len); break;
		case HImode: lshrhi3_out (insn,ops,&len); break;
		case SImode: lshrsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    default:
	      break;
	    }
	}
    }
  return len;
}
4798
/* Return nonzero if register REG is dead after INSN.  */
4800
4801 int
4802 reg_unused_after (rtx insn, rtx reg)
4803 {
4804 return (dead_or_set_p (insn, reg)
4805 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4806 }
4807
4808 /* Return nonzero if REG is not used after INSN.
4809 We assume REG is a reload reg, and therefore does
4810 not live past labels. It may live past calls or jumps though. */
4811
int
_reg_unused_after (rtx insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Scan forward through the remaining insns.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;	/* Shadows the outer SET for the per-insn analysis.  */
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      /* A jump means control may leave; conservatively assume REG is
	 still live on the target path.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  int retval = 0;

	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	    {
	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
	      rtx set = single_set (this_insn);

	      if (GET_CODE (this_insn) == CALL_INSN)
		code = CALL_INSN;
	      else if (GET_CODE (this_insn) == JUMP_INSN)
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      /* Any use of REG as a source means it is live.  */
	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  /* A register set kills REG; a memory store means the
		     overlap was in the address, i.e. a use.  */
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* REG passed to the call (a USE in the call usage list) means
	     it is live; otherwise a call-clobbered REG dies here.  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  /* Fell off the end of the insn chain: REG is unused.  */
  return 1;
}
4912
4913 /* Target hook for assembling integer objects. The AVR version needs
4914 special handling for references to certain labels. */
4915
4916 static bool
4917 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4918 {
4919 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4920 && text_segment_operand (x, VOIDmode) )
4921 {
4922 fputs ("\t.word\tgs(", asm_out_file);
4923 output_addr_const (asm_out_file, x);
4924 fputs (")\n", asm_out_file);
4925 return true;
4926 }
4927 return default_assemble_integer (x, size, aligned_p);
4928 }
4929
4930 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4931
4932 void
4933 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4934 {
4935
4936 /* If the function has the 'signal' or 'interrupt' attribute, test to
4937 make sure that the name of the function is "__vector_NN" so as to
4938 catch when the user misspells the interrupt vector name. */
4939
4940 if (cfun->machine->is_interrupt)
4941 {
4942 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4943 {
4944 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4945 "%qs appears to be a misspelled interrupt handler",
4946 name);
4947 }
4948 }
4949 else if (cfun->machine->is_signal)
4950 {
4951 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4952 {
4953 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4954 "%qs appears to be a misspelled signal handler",
4955 name);
4956 }
4957 }
4958
4959 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4960 ASM_OUTPUT_LABEL (file, name);
4961 }
4962
4963 /* The routine used to output NUL terminated strings. We use a special
4964 version of this for most svr4 targets because doing so makes the
4965 generated assembly code more compact (and thus faster to assemble)
4966 as well as more readable, especially for targets like the i386
4967 (where the only alternative is to output character sequences as
4968 comma separated lists of numbers). */
4969
4970 void
4971 gas_output_limited_string(FILE *file, const char *str)
4972 {
4973 const unsigned char *_limited_str = (const unsigned char *) str;
4974 unsigned ch;
4975 fprintf (file, "%s\"", STRING_ASM_OP);
4976 for (; (ch = *_limited_str); _limited_str++)
4977 {
4978 int escape;
4979 switch (escape = ESCAPES[ch])
4980 {
4981 case 0:
4982 putc (ch, file);
4983 break;
4984 case 1:
4985 fprintf (file, "\\%03o", ch);
4986 break;
4987 default:
4988 putc ('\\', file);
4989 putc (escape, file);
4990 break;
4991 }
4992 }
4993 fprintf (file, "\"\n");
4994 }
4995
4996 /* The routine used to output sequences of byte values. We use a special
4997 version of this for most svr4 targets because doing so makes the
4998 generated assembly code more compact (and thus faster to assemble)
4999 as well as more readable. Note that if we find subparts of the
5000 character sequence which end with NUL (and which are shorter than
5001 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
5002
void
gas_output_ascii(FILE *file, const char *str, size_t length)
{
  /* Output LENGTH raw bytes of STR as assembler data.  Short
     NUL-terminated substrings are emitted with
     gas_output_limited_string (a .string directive); everything else
     is emitted via .ascii with escapes, split into lines of roughly
     60 output characters.  */
  const unsigned char *_ascii_bytes = (const unsigned char *) str;
  const unsigned char *limit = _ascii_bytes + length;
  unsigned bytes_in_chunk = 0;	/* chars written on the open .ascii line */
  for (; _ascii_bytes < limit; _ascii_bytes++)
    {
      const unsigned char *p;
      /* Close an over-long .ascii line and start a new one.  */
      if (bytes_in_chunk >= 60)
	{
	  fprintf (file, "\"\n");
	  bytes_in_chunk = 0;
	}
      /* Scan ahead to the next NUL byte (or the end of the data).  */
      for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
	continue;
      if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
	{
	  /* Short NUL-terminated run: flush any open .ascii chunk and
	     emit the run with .string, which supplies the terminating
	     NUL itself.  P points at the NUL, so the outer loop's
	     increment skips past it.  */
	  if (bytes_in_chunk > 0)
	    {
	      fprintf (file, "\"\n");
	      bytes_in_chunk = 0;
	    }
	  gas_output_limited_string (file, (const char*)_ascii_bytes);
	  _ascii_bytes = p;
	}
      else
	{
	  int escape;
	  unsigned ch;
	  if (bytes_in_chunk == 0)
	    fprintf (file, "\t.ascii\t\"");
	  /* ESCAPES classifies the byte: 0 = literal (1 output char),
	     1 = octal escape (4 chars), else backslash escape (2).  */
	  switch (escape = ESCAPES[ch = *_ascii_bytes])
	    {
	    case 0:
	      putc (ch, file);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      fprintf (file, "\\%03o", ch);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      putc ('\\', file);
	      putc (escape, file);
	      bytes_in_chunk += 2;
	      break;
	    }
	}
    }
  /* Close a trailing open .ascii line, if any.  */
  if (bytes_in_chunk > 0)
    fprintf (file, "\"\n");
}
5056
5057 /* Return value is nonzero if pseudos that have been
5058 assigned to registers of class CLASS would likely be spilled
5059 because registers of CLASS are needed for spill registers. */
5060
5061 static bool
5062 avr_class_likely_spilled_p (reg_class_t c)
5063 {
5064 return (c != ALL_REGS && c != ADDW_REGS);
5065 }
5066
5067 /* Valid attributes:
5068 progmem - put data to program memory;
5069 signal - make a function to be hardware interrupt. After function
5070 prologue interrupts are disabled;
5071 interrupt - make a function to be hardware interrupt. After function
5072 prologue interrupts are enabled;
5073 naked - don't generate function prologue/epilogue and `ret' command.
5074
5075 Only `progmem' attribute valid for type. */
5076
5077 /* Handle a "progmem" attribute; arguments as in
5078 struct attribute_spec.handler. */
5079 static tree
5080 avr_handle_progmem_attribute (tree *node, tree name,
5081 tree args ATTRIBUTE_UNUSED,
5082 int flags ATTRIBUTE_UNUSED,
5083 bool *no_add_attrs)
5084 {
5085 if (DECL_P (*node))
5086 {
5087 if (TREE_CODE (*node) == TYPE_DECL)
5088 {
5089 /* This is really a decl attribute, not a type attribute,
5090 but try to handle it for GCC 3.0 backwards compatibility. */
5091
5092 tree type = TREE_TYPE (*node);
5093 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5094 tree newtype = build_type_attribute_variant (type, attr);
5095
5096 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5097 TREE_TYPE (*node) = newtype;
5098 *no_add_attrs = true;
5099 }
5100 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5101 {
5102 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
5103 {
5104 warning (0, "only initialized variables can be placed into "
5105 "program memory area");
5106 *no_add_attrs = true;
5107 }
5108 }
5109 else
5110 {
5111 warning (OPT_Wattributes, "%qE attribute ignored",
5112 name);
5113 *no_add_attrs = true;
5114 }
5115 }
5116
5117 return NULL_TREE;
5118 }
5119
5120 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5121 struct attribute_spec.handler. */
5122
5123 static tree
5124 avr_handle_fndecl_attribute (tree *node, tree name,
5125 tree args ATTRIBUTE_UNUSED,
5126 int flags ATTRIBUTE_UNUSED,
5127 bool *no_add_attrs)
5128 {
5129 if (TREE_CODE (*node) != FUNCTION_DECL)
5130 {
5131 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5132 name);
5133 *no_add_attrs = true;
5134 }
5135
5136 return NULL_TREE;
5137 }
5138
5139 static tree
5140 avr_handle_fntype_attribute (tree *node, tree name,
5141 tree args ATTRIBUTE_UNUSED,
5142 int flags ATTRIBUTE_UNUSED,
5143 bool *no_add_attrs)
5144 {
5145 if (TREE_CODE (*node) != FUNCTION_TYPE)
5146 {
5147 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5148 name);
5149 *no_add_attrs = true;
5150 }
5151
5152 return NULL_TREE;
5153 }
5154
5155 /* Look for attribute `progmem' in DECL
5156 if found return 1, otherwise 0. */
5157
5158 int
5159 avr_progmem_p (tree decl, tree attributes)
5160 {
5161 tree a;
5162
5163 if (TREE_CODE (decl) != VAR_DECL)
5164 return 0;
5165
5166 if (NULL_TREE
5167 != lookup_attribute ("progmem", attributes))
5168 return 1;
5169
5170 a=decl;
5171 do
5172 a = TREE_TYPE(a);
5173 while (TREE_CODE (a) == ARRAY_TYPE);
5174
5175 if (a == error_mark_node)
5176 return 0;
5177
5178 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5179 return 1;
5180
5181 return 0;
5182 }
5183
5184 /* Add the section attribute if the variable is in progmem. */
5185
5186 static void
5187 avr_insert_attributes (tree node, tree *attributes)
5188 {
5189 if (TREE_CODE (node) == VAR_DECL
5190 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5191 && avr_progmem_p (node, *attributes))
5192 {
5193 if (TREE_READONLY (node))
5194 {
5195 static const char dsec[] = ".progmem.data";
5196
5197 *attributes = tree_cons (get_identifier ("section"),
5198 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5199 *attributes);
5200 }
5201 else
5202 {
5203 error ("variable %q+D must be const in order to be put into"
5204 " read-only section by means of %<__attribute__((progmem))%>",
5205 node);
5206 }
5207 }
5208 }
5209
5210 /* A get_unnamed_section callback for switching to progmem_section. */
5211
5212 static void
5213 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5214 {
5215 fprintf (asm_out_file,
5216 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5217 AVR_HAVE_JMP_CALL ? "a" : "ax");
5218 /* Should already be aligned, this is just to be safe if it isn't. */
5219 fprintf (asm_out_file, "\t.p2align 1\n");
5220 }
5221
5222
5223 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5224 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5225 /* Track need of __do_clear_bss. */
5226
5227 void
5228 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5229 const char *name, unsigned HOST_WIDE_INT size,
5230 unsigned int align, bool local_p)
5231 {
5232 avr_need_clear_bss_p = true;
5233
5234 if (local_p)
5235 {
5236 fputs ("\t.local\t", stream);
5237 assemble_name (stream, name);
5238 fputs ("\n", stream);
5239 }
5240
5241 fputs ("\t.comm\t", stream);
5242 assemble_name (stream, name);
5243 fprintf (stream,
5244 "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
5245 size, align / BITS_PER_UNIT);
5246 }
5247
5248
5249 /* Unnamed section callback for data_section
5250 to track need of __do_copy_data. */
5251
5252 static void
5253 avr_output_data_section_asm_op (const void *data)
5254 {
5255 avr_need_copy_data_p = true;
5256
5257 /* Dispatch to default. */
5258 output_section_asm_op (data);
5259 }
5260
5261
5262 /* Unnamed section callback for bss_section
5263 to track need of __do_clear_bss. */
5264
5265 static void
5266 avr_output_bss_section_asm_op (const void *data)
5267 {
5268 avr_need_clear_bss_p = true;
5269
5270 /* Dispatch to default. */
5271 output_section_asm_op (data);
5272 }
5273
5274
5275 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5276
5277 static void
5278 avr_asm_init_sections (void)
5279 {
5280 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5281 avr_output_progmem_section_asm_op,
5282 NULL);
5283 readonly_data_section = data_section;
5284
5285 data_section->unnamed.callback = avr_output_data_section_asm_op;
5286 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5287 }
5288
5289
5290 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5291 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5292
5293 void
5294 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5295 {
5296 if (!avr_need_copy_data_p)
5297 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5298 || 0 == strncmp (name, ".rodata", 7)
5299 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5300
5301 if (!avr_need_clear_bss_p)
5302 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5303
5304 default_elf_asm_named_section (name, flags, decl);
5305 }
5306
5307 static unsigned int
5308 avr_section_type_flags (tree decl, const char *name, int reloc)
5309 {
5310 unsigned int flags = default_section_type_flags (decl, name, reloc);
5311
5312 if (strncmp (name, ".noinit", 7) == 0)
5313 {
5314 if (decl && TREE_CODE (decl) == VAR_DECL
5315 && DECL_INITIAL (decl) == NULL_TREE)
5316 flags |= SECTION_BSS; /* @nobits */
5317 else
5318 warning (0, "only uninitialized variables can be placed in the "
5319 ".noinit section");
5320 }
5321
5322 return flags;
5323 }
5324
5325
5326 /* Implement `TARGET_ASM_FILE_START'. */
5327 /* Outputs some appropriate text to go at the start of an assembler
5328 file. */
5329
5330 static void
5331 avr_file_start (void)
5332 {
5333 if (avr_current_arch->asm_only)
5334 error ("MCU %qs supported for assembler only", avr_mcu_name);
5335
5336 default_file_start ();
5337
5338 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
5339 fputs ("__SREG__ = 0x3f\n"
5340 "__SP_H__ = 0x3e\n"
5341 "__SP_L__ = 0x3d\n", asm_out_file);
5342
5343 fputs ("__tmp_reg__ = 0\n"
5344 "__zero_reg__ = 1\n", asm_out_file);
5345 }
5346
5347
5348 /* Implement `TARGET_ASM_FILE_END'. */
5349 /* Outputs to the stdio stream FILE some
5350 appropriate text to go at the end of an assembler file. */
5351
5352 static void
5353 avr_file_end (void)
5354 {
5355 /* Output these only if there is anything in the
5356 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5357 input section(s) - some code size can be saved by not
5358 linking in the initialization code from libgcc if resp.
5359 sections are empty. */
5360
5361 if (avr_need_copy_data_p)
5362 fputs (".global __do_copy_data\n", asm_out_file);
5363
5364 if (avr_need_clear_bss_p)
5365 fputs (".global __do_clear_bss\n", asm_out_file);
5366 }
5367
5368 /* Choose the order in which to allocate hard registers for
5369 pseudo-registers local to a basic block.
5370
5371 Store the desired register order in the array `reg_alloc_order'.
5372 Element 0 should be the register to allocate first; element 1, the
5373 next register; and so on. */
5374
5375 void
5376 order_regs_for_local_alloc (void)
5377 {
5378 unsigned int i;
5379 static const int order_0[] = {
5380 24,25,
5381 18,19,
5382 20,21,
5383 22,23,
5384 30,31,
5385 26,27,
5386 28,29,
5387 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5388 0,1,
5389 32,33,34,35
5390 };
5391 static const int order_1[] = {
5392 18,19,
5393 20,21,
5394 22,23,
5395 24,25,
5396 30,31,
5397 26,27,
5398 28,29,
5399 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5400 0,1,
5401 32,33,34,35
5402 };
5403 static const int order_2[] = {
5404 25,24,
5405 23,22,
5406 21,20,
5407 19,18,
5408 30,31,
5409 26,27,
5410 28,29,
5411 17,16,
5412 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5413 1,0,
5414 32,33,34,35
5415 };
5416
5417 const int *order = (TARGET_ORDER_1 ? order_1 :
5418 TARGET_ORDER_2 ? order_2 :
5419 order_0);
5420 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5421 reg_alloc_order[i] = order[i];
5422 }
5423
5424
5425 /* Implement `TARGET_REGISTER_MOVE_COST' */
5426
5427 static int
5428 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5429 reg_class_t from, reg_class_t to)
5430 {
5431 return (from == STACK_REG ? 6
5432 : to == STACK_REG ? 12
5433 : 2);
5434 }
5435
5436
5437 /* Implement `TARGET_MEMORY_MOVE_COST' */
5438
5439 static int
5440 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5441 bool in ATTRIBUTE_UNUSED)
5442 {
5443 return (mode == QImode ? 2
5444 : mode == HImode ? 4
5445 : mode == SImode ? 8
5446 : mode == SFmode ? 8
5447 : 16);
5448 }
5449
5450
5451 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5452 cost of an RTX operand given its context. X is the rtx of the
5453 operand, MODE is its mode, and OUTER is the rtx_code of this
5454 operand's parent operator. */
5455
5456 static int
5457 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5458 bool speed)
5459 {
5460 enum rtx_code code = GET_CODE (x);
5461 int total;
5462
5463 switch (code)
5464 {
5465 case REG:
5466 case SUBREG:
5467 return 0;
5468
5469 case CONST_INT:
5470 case CONST_DOUBLE:
5471 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5472
5473 default:
5474 break;
5475 }
5476
5477 total = 0;
5478 avr_rtx_costs (x, code, outer, &total, speed);
5479 return total;
5480 }
5481
5482 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5483 is to be calculated. Return true if the complete cost has been
5484 computed, and false if subexpressions should be scanned. In either
5485 case, *TOTAL contains the cost result. */
5486
static bool
avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
	       bool speed)
{
  enum rtx_code code = (enum rtx_code) codearg;
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  /* Costs below are expressed in instruction counts via
     COSTS_N_INSNS; the hand-tuned shift tables reflect the exact
     insn sequences the output templates emit for each shift count.
     !speed (size) costs assume the out-of-line/loop variants.  */

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Loads/symbolic operands: roughly one insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	case HImode:
	  *total = COSTS_N_INSNS (3);
	  break;

	case SImode:
	  *total = COSTS_N_INSNS (7);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case ABS:
      /* Only single-register modes have a cheap abs sequence.  */
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case NOT:
      /* One COM insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case ZERO_EXTEND:
      /* One CLR per added byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case SIGN_EXTEND:
      /* Sign replication needs two extra insns on top of the byte
	 copies.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case PLUS:
      switch (mode)
	{
	case QImode:
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  break;

	case HImode:
	  /* Small constants (-63..63) fit a single adjust insn.  */
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (2);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (2);
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (4);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (4);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case MINUS:
    case AND:
    case IOR:
      /* One insn per byte; a constant second operand is free since it
	 folds into immediate forms.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      return true;

    case XOR:
      /* No immediate form for EOR, so both operands are costed.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      return true;

    case MULT:
      /* With a hardware multiplier use the inline sequence cost;
	 otherwise, when optimizing for size, assume a libcall
	 (RCALL/CALL), and for speed let the expander decide.  */
      switch (mode)
	{
	case QImode:
	  if (AVR_HAVE_MUL)
	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
	  else if (!speed)
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    return false;
	  break;

	case HImode:
	  if (AVR_HAVE_MUL)
	    *total = COSTS_N_INSNS (!speed ? 7 : 10);
	  else if (!speed)
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    return false;
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* Division is always a libcall: cost only the call insn when
	 optimizing for size.  */
      if (!speed)
	*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
	return false;
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      return true;

    case ROTATE:
      /* Only the byte/word-swap rotate amounts have cheap special
	 sequences.  */
      switch (mode)
	{
	case QImode:
	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
	    *total = COSTS_N_INSNS (1);

	  break;

	case HImode:
	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
	    *total = COSTS_N_INSNS (3);

	  break;

	case SImode:
	  if (CONST_INT_P (XEXP (x, 1)))
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 8:
	      case 24:
		*total = COSTS_N_INSNS (5);
		break;
	      case 16:
		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
		break;
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case ASHIFT:
      /* Left shift: per-count costs mirror the exact sequences the
	 shift output templates generate.  Variable shift counts use
	 a loop (size) or worst case (speed).  */
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 3:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
	      case 12:
		*total = COSTS_N_INSNS (5);
		break;
	      case 4:
		*total = COSTS_N_INSNS (!speed ? 5 : 8);
		break;
	      case 6:
		*total = COSTS_N_INSNS (!speed ? 5 : 9);
		break;
	      case 5:
		*total = COSTS_N_INSNS (!speed ? 5 : 10);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 24:
		*total = COSTS_N_INSNS (3);
		break;
	      case 1:
	      case 8:
	      case 16:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case ASHIFTRT:
      /* Arithmetic right shift: same structure as ASHIFT with its own
	 per-count table.  */
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 6)
		*total = COSTS_N_INSNS (4);
	      else if (val == 7)
		*total = COSTS_N_INSNS (2);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (2);
		break;
	      case 15:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 7:
	      case 8:
	      case 9:
		*total = COSTS_N_INSNS (4);
		break;
	      case 10:
	      case 14:
		*total = COSTS_N_INSNS (5);
		break;
	      case 11:
		*total = COSTS_N_INSNS (!speed ? 5 : 6);
		break;
	      case 12:
		*total = COSTS_N_INSNS (!speed ? 5 : 7);
		break;
	      case 6:
	      case 13:
		*total = COSTS_N_INSNS (!speed ? 5 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      case 31:
		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case LSHIFTRT:
      /* Logical right shift: same structure again.  */
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
		*total = COSTS_N_INSNS (5);
		break;
	      case 3:
	      case 12:
	      case 13:
	      case 14:
		*total = COSTS_N_INSNS (!speed ? 5 : 6);
		break;
	      case 4:
		*total = COSTS_N_INSNS (!speed ? 5 : 7);
		break;
	      case 5:
	      case 6:
		*total = COSTS_N_INSNS (!speed ? 5 : 9);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case COMPARE:
      /* One compare insn per byte; comparing against a non-zero
	 constant needs extra insns for the upper bytes.  */
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  break;

	case HImode:
	  *total = COSTS_N_INSNS (2);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (1);
	  break;

	case SImode:
	  *total = COSTS_N_INSNS (4);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (3);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    default:
      break;
    }
  return false;
}
6043
6044 /* Calculate the cost of a memory address. */
6045
6046 static int
6047 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6048 {
6049 if (GET_CODE (x) == PLUS
6050 && GET_CODE (XEXP (x,1)) == CONST_INT
6051 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6052 && INTVAL (XEXP (x,1)) >= 61)
6053 return 18;
6054 if (CONSTANT_ADDRESS_P (x))
6055 {
6056 if (optimize > 0 && io_address_operand (x, QImode))
6057 return 2;
6058 return 4;
6059 }
6060 return 4;
6061 }
6062
6063 /* Test for extra memory constraint 'Q'.
6064 It's a memory address based on Y or Z pointer with valid displacement. */
6065
6066 int
6067 extra_constraint_Q (rtx x)
6068 {
6069 if (GET_CODE (XEXP (x,0)) == PLUS
6070 && REG_P (XEXP (XEXP (x,0), 0))
6071 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6072 && (INTVAL (XEXP (XEXP (x,0), 1))
6073 <= MAX_LD_OFFSET (GET_MODE (x))))
6074 {
6075 rtx xx = XEXP (XEXP (x,0), 0);
6076 int regno = REGNO (xx);
6077 if (TARGET_ALL_DEBUG)
6078 {
6079 fprintf (stderr, ("extra_constraint:\n"
6080 "reload_completed: %d\n"
6081 "reload_in_progress: %d\n"),
6082 reload_completed, reload_in_progress);
6083 debug_rtx (x);
6084 }
6085 if (regno >= FIRST_PSEUDO_REGISTER)
6086 return 1; /* allocate pseudos */
6087 else if (regno == REG_Z || regno == REG_Y)
6088 return 1; /* strictly check */
6089 else if (xx == frame_pointer_rtx
6090 || xx == arg_pointer_rtx)
6091 return 1; /* XXX frame & arg pointer checks */
6092 }
6093 return 0;
6094 }
6095
6096 /* Convert condition code CONDITION to the valid AVR condition code. */
6097
6098 RTX_CODE
6099 avr_normalize_condition (RTX_CODE condition)
6100 {
6101 switch (condition)
6102 {
6103 case GT:
6104 return GE;
6105 case GTU:
6106 return GEU;
6107 case LE:
6108 return LT;
6109 case LEU:
6110 return LTU;
6111 default:
6112 gcc_unreachable ();
6113 }
6114 }
6115
6116 /* This function optimizes conditional jumps. */
6117
static void
avr_reorg (void)
{
  /* Machine-dependent reorg: rewrite cc0 compare insns so the
     following conditional branch can use cheaper AVR conditions.
     Three transforms:
       - reg-reg compare: swap the operands and the branch condition;
       - compare against zero (tst): reverse it the same way;
       - strict compare against a constant: bump the constant by one
	 and use the non-strict condition (avr_normalize_condition).  */
  rtx insn, pattern;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Only single-set insns, calls and jumps are of interest.  */
      if (! (GET_CODE (insn) == INSN
	     || GET_CODE (insn) == CALL_INSN
	     || GET_CODE (insn) == JUMP_INSN)
	  || !single_set (insn))
	continue;

      pattern = PATTERN (insn);

      if (GET_CODE (pattern) == PARALLEL)
	pattern = XVECEXP (pattern, 0, 0);
      if (GET_CODE (pattern) == SET
	  && SET_DEST (pattern) == cc0_rtx
	  && compare_diff_p (insn))
	{
	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
	    {
	      /* Now we work under compare insn. */

	      pattern = SET_SRC (pattern);
	      /* NOTE(review): the branches below assume next_real_insn
		 returns a conditional-branch insn, never NULL -
		 presumably guaranteed by compare_diff_p; confirm.  */
	      if (true_regnum (XEXP (pattern,0)) >= 0
		  && true_regnum (XEXP (pattern,1)) >= 0 )
		{
		  /* Reg-reg compare: swap the operands and the
		     condition of the following branch.  */
		  rtx x = XEXP (pattern,0);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  PUT_CODE (t, swap_condition (GET_CODE (t)));
		  XEXP (pattern,0) = XEXP (pattern,1);
		  XEXP (pattern,1) = x;
		  INSN_CODE (next) = -1;
		}
	      else if (true_regnum (XEXP (pattern, 0)) >= 0
		       && XEXP (pattern, 1) == const0_rtx)
	        {
	          /* This is a tst insn, we can reverse it.  */
	          rtx next = next_real_insn (insn);
	          rtx pat = PATTERN (next);
	          rtx src = SET_SRC (pat);
	          rtx t = XEXP (src,0);

	          PUT_CODE (t, swap_condition (GET_CODE (t)));
	          XEXP (pattern, 1) = XEXP (pattern, 0);
	          XEXP (pattern, 0) = const0_rtx;
	          INSN_CODE (next) = -1;
	          INSN_CODE (insn) = -1;
	        }
	      else if (true_regnum (XEXP (pattern,0)) >= 0
		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
		{
		  rtx x = XEXP (pattern,1);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

		  /* E.g. "x > 5" becomes "x >= 6": the constant is
		     incremented and the strict condition replaced by
		     its non-strict counterpart.  */
		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
		    {
		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
		      INSN_CODE (next) = -1;
		      INSN_CODE (insn) = -1;
		    }
		}
	    }
	}
    }
}
6194
/* Hard register number holding (the low end of) a function's return
   value on AVR: r24.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
6202
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   Only the return register (r24) may hold a function value.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return regno == avr_ret_register ();
}
6210
6211 /* Create an RTX representing the place where a
6212 library function returns a value of mode MODE. */
6213
6214 static rtx
6215 avr_libcall_value (enum machine_mode mode,
6216 const_rtx func ATTRIBUTE_UNUSED)
6217 {
6218 int offs = GET_MODE_SIZE (mode);
6219 if (offs < 2)
6220 offs = 2;
6221 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6222 }
6223
6224 /* Create an RTX representing the place where a
6225 function returns a value of data type VALTYPE. */
6226
6227 static rtx
6228 avr_function_value (const_tree type,
6229 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6230 bool outgoing ATTRIBUTE_UNUSED)
6231 {
6232 unsigned int offs;
6233
6234 if (TYPE_MODE (type) != BLKmode)
6235 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6236
6237 offs = int_size_in_bytes (type);
6238 if (offs < 2)
6239 offs = 2;
6240 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6241 offs = GET_MODE_SIZE (SImode);
6242 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6243 offs = GET_MODE_SIZE (DImode);
6244
6245 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
6246 }
6247
6248 int
6249 test_hard_reg_class (enum reg_class rclass, rtx x)
6250 {
6251 int regno = true_regnum (x);
6252 if (regno < 0)
6253 return 0;
6254
6255 if (TEST_HARD_REG_CLASS (rclass, regno))
6256 return 1;
6257
6258 return 0;
6259 }
6260
6261
6262 int
6263 jump_over_one_insn_p (rtx insn, rtx dest)
6264 {
6265 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6266 ? XEXP (dest, 0)
6267 : dest);
6268 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6269 int dest_addr = INSN_ADDRESSES (uid);
6270 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6271 }
6272
6273 /* Returns 1 if a value of mode MODE can be stored starting with hard
6274 register number REGNO. On the enhanced core, anything larger than
6275 1 byte must start in even numbered register for "movw" to work
6276 (this way we don't have to check for odd registers everywhere). */
6277
6278 int
6279 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6280 {
6281 /* Disallow QImode in stack pointer regs. */
6282 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6283 return 0;
6284
6285 /* The only thing that can go into registers r28:r29 is a Pmode. */
6286 if (regno == REG_Y && mode == Pmode)
6287 return 1;
6288
6289 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6290 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6291 return 0;
6292
6293 if (mode == QImode)
6294 return 1;
6295
6296 /* Modes larger than QImode occupy consecutive registers. */
6297 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6298 return 0;
6299
6300 /* All modes larger than QImode should start in an even register. */
6301 return !(regno & 1);
6302 }
6303
6304 const char *
6305 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6306 {
6307 int tmp;
6308 if (!len)
6309 len = &tmp;
6310
6311 if (GET_CODE (operands[1]) == CONST_INT)
6312 {
6313 int val = INTVAL (operands[1]);
6314 if ((val & 0xff) == 0)
6315 {
6316 *len = 3;
6317 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6318 AS2 (ldi,%2,hi8(%1)) CR_TAB
6319 AS2 (mov,%B0,%2));
6320 }
6321 else if ((val & 0xff00) == 0)
6322 {
6323 *len = 3;
6324 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6325 AS2 (mov,%A0,%2) CR_TAB
6326 AS2 (mov,%B0,__zero_reg__));
6327 }
6328 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6329 {
6330 *len = 3;
6331 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6332 AS2 (mov,%A0,%2) CR_TAB
6333 AS2 (mov,%B0,%2));
6334 }
6335 }
6336 *len = 4;
6337 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6338 AS2 (mov,%A0,%2) CR_TAB
6339 AS2 (ldi,%2,hi8(%1)) CR_TAB
6340 AS2 (mov,%B0,%2));
6341 }
6342
6343
/* Output the reload of a 32-bit (SImode/SFmode) value OPERANDS[1]
   into register OPERANDS[0], using OPERANDS[2] as a clobbered
   LDI-capable scratch register.

   If LEN is non-NULL, emit nothing; store the instruction count into
   *LEN instead and return "".  Each byte costs an ldi+mov pair (2
   insns), except that a constant byte of zero costs only a single
   mov from __zero_reg__ -- hence 4 + (number of nonzero bytes) for a
   constant, and 8 for a non-constant source.  */

const char *
output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  rtx src = operands[1];
  int cnst = (GET_CODE (src) == CONST_INT);

  if (len)
    {
      if (cnst)
	*len = 4 + ((INTVAL (src) & 0xff) != 0)
	  + ((INTVAL (src) & 0xff00) != 0)
	  + ((INTVAL (src) & 0xff0000) != 0)
	  + ((INTVAL (src) & 0xff000000) != 0);
      else
	*len = 8;

      return "";
    }

  /* Byte 0 (%A0).  */
  if (cnst && ((INTVAL (src) & 0xff) == 0))
    output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
      output_asm_insn (AS2 (mov, %A0, %2), operands);
    }
  /* Byte 1 (%B0).  */
  if (cnst && ((INTVAL (src) & 0xff00) == 0))
    output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
      output_asm_insn (AS2 (mov, %B0, %2), operands);
    }
  /* Byte 2 (%C0).  */
  if (cnst && ((INTVAL (src) & 0xff0000) == 0))
    output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
      output_asm_insn (AS2 (mov, %C0, %2), operands);
    }
  /* Byte 3 (%D0).  */
  if (cnst && ((INTVAL (src) & 0xff000000) == 0))
    output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
      output_asm_insn (AS2 (mov, %D0, %2), operands);
    }
  return "";
}
6393
6394 void
6395 avr_output_bld (rtx operands[], int bit_nr)
6396 {
6397 static char s[] = "bld %A0,0";
6398
6399 s[5] = 'A' + (bit_nr >> 3);
6400 s[8] = '0' + (bit_nr & 7);
6401 output_asm_insn (s, operands);
6402 }
6403
/* Output one jump-table (ADDR_VEC) element for label number VALUE
   into the program memory section.  Devices with JMP/CALL store a
   word with the gs()-relocated label address; smaller devices emit
   an RJMP to the label instead.  */

void
avr_output_addr_vec_elt (FILE *stream, int value)
{
  switch_to_section (progmem_section);
  if (AVR_HAVE_JMP_CALL)
    fprintf (stream, "\t.word gs(.L%d)\n", value);
  else
    fprintf (stream, "\trjmp .L%d\n", value);
}
6413
6414 /* Returns true if SCRATCH are safe to be allocated as a scratch
6415 registers (for a define_peephole2) in the current function. */
6416
6417 bool
6418 avr_hard_regno_scratch_ok (unsigned int regno)
6419 {
6420 /* Interrupt functions can only use registers that have already been saved
6421 by the prologue, even if they would normally be call-clobbered. */
6422
6423 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6424 && !df_regs_ever_live_p (regno))
6425 return false;
6426
6427 return true;
6428 }
6429
6430 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6431
6432 int
6433 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6434 unsigned int new_reg)
6435 {
6436 /* Interrupt functions can only use registers that have already been
6437 saved by the prologue, even if they would normally be
6438 call-clobbered. */
6439
6440 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6441 && !df_regs_ever_live_p (new_reg))
6442 return 0;
6443
6444 return 1;
6445 }
6446
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  /* A length of >= 4 means the target needs a 2-word JMP.  */
  int long_jump = (get_attr_length (insn) >= 4);
  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* Map GE/LT onto EQ/NE (presumably these arrive from sign-bit
     tests -- NOTE(review): confirm against the insn patterns using
     this routine).  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  /* When we skip over the jump instead of jumping directly, the
     sense of the bit test must be inverted.  */
  if (reverse)
    comp = reverse_condition (comp);

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      /* I/O-space address.  Below 0x40 the single-insn SBIS/SBIC
         skip instructions can test the bit directly; higher
         addresses must be read into __tmp_reg__ first and tested
         with SBRS/SBRC.  */
      if (INTVAL (operands[1]) < 0x40)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
	  else
	    output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
	}
      else
	{
	  output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
	}
    }
  else  /* GET_CODE (operands[1]) == REG */
    {
      if (GET_MODE (operands[1]) == QImode)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
	}
      else  /* HImode or SImode */
	{
	  /* Patch the template in place: buf[3] selects the
	     skip-if-set ('s') or skip-if-clear ('c') variant, buf[6]
	     the byte letter %A..%D, buf[9] the bit within the byte.  */
	  static char buf[] = "sbrc %A1,0";
	  int bit_nr = INTVAL (operands[2]);
	  buf[3] = (comp == EQ) ? 's' : 'c';
	  buf[6] = 'A' + (bit_nr >> 3);
	  buf[9] = '0' + (bit_nr & 7);
	  output_asm_insn (buf, operands);
	}
    }

  if (long_jump)
    /* Skip the 2-word JMP when the bit test says "don't branch".  */
    return (AS1 (rjmp,.+4) CR_TAB
	    AS1 (jmp,%x3));
  if (!reverse)
    return AS1 (rjmp,%x3);
  return "";
}
6515
/* Worker function for TARGET_ASM_CONSTRUCTOR.  Emit a .global
   reference to __do_global_ctors (so the linker pulls in the libgcc
   constructor-running code), then use the default ctor-section
   handling for SYMBOL at PRIORITY.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
6524
/* Worker function for TARGET_ASM_DESTRUCTOR.  Emit a .global
   reference to __do_global_dtors (so the linker pulls in the libgcc
   destructor-running code), then use the default dtor-section
   handling for SYMBOL at PRIORITY.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
6533
6534 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6535
6536 static bool
6537 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6538 {
6539 if (TYPE_MODE (type) == BLKmode)
6540 {
6541 HOST_WIDE_INT size = int_size_in_bytes (type);
6542 return (size == -1 || size > 8);
6543 }
6544 else
6545 return false;
6546 }
6547
6548 /* Worker function for CASE_VALUES_THRESHOLD. */
6549
6550 unsigned int avr_case_values_threshold (void)
6551 {
6552 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6553 }
6554
/* Helper for __builtin_avr_delay_cycles.  OPERANDS0 is a CONST_INT
   holding the number of CPU cycles to burn.  Emit a cascade of delay
   loops with successively smaller counters, then trailing nops, so
   the total matches exactly.  The magic numbers encode each loop's
   cost: e.g. the 32-bit loop's arithmetic assumes 6 cycles per
   iteration plus 9 cycles of overhead.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit counter loop: 6 cycles/iteration + 9 overhead.  No clamp
     needed -- the largest possible count still fits in SImode.  */
  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 24-bit counter loop: 5 cycles/iteration + 7 overhead; counter
     clamped to its 24-bit maximum.  */
  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
	loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 16-bit counter loop: 4 cycles/iteration + 5 overhead.  */
  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
	loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
      cycles -= cycles_used;
    }

  /* 8-bit counter loop: 3 cycles/iteration.  */
  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
	loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
      cycles -= cycles_used;
    }

  /* Mop up the remainder with 2-cycle and 1-cycle nop sequences.  */
  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
6614
/* IDs for all the AVR builtins.  Each value is passed to
   DEF_BUILTIN below and dispatched on in avr_expand_builtin.  */

enum avr_builtin_id
{
  AVR_BUILTIN_NOP,          /* __builtin_avr_nop */
  AVR_BUILTIN_SEI,          /* __builtin_avr_sei: enable interrupts */
  AVR_BUILTIN_CLI,          /* __builtin_avr_cli: disable interrupts */
  AVR_BUILTIN_WDR,          /* __builtin_avr_wdr: watchdog reset */
  AVR_BUILTIN_SLEEP,        /* __builtin_avr_sleep */
  AVR_BUILTIN_SWAP,         /* __builtin_avr_swap: nibble swap */
  AVR_BUILTIN_FMUL,         /* __builtin_avr_fmul */
  AVR_BUILTIN_FMULS,        /* __builtin_avr_fmuls */
  AVR_BUILTIN_FMULSU,       /* __builtin_avr_fmulsu */
  AVR_BUILTIN_DELAY_CYCLES  /* __builtin_avr_delay_cycles */
};
6630
/* Register one AVR builtin: NAME is the user-visible function name,
   TYPE its tree function type, CODE the avr_builtin_id later
   dispatched on in avr_expand_builtin.  */
#define DEF_BUILTIN(NAME, TYPE, CODE) \
  do \
    { \
      add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
                            NULL, NULL_TREE); \
    } while (0)
6637
6638
6639 /* Implement `TARGET_INIT_BUILTINS' */
6640 /* Set up all builtin functions for this target. */
6641
6642 static void
6643 avr_init_builtins (void)
6644 {
6645 tree void_ftype_void
6646 = build_function_type_list (void_type_node, NULL_TREE);
6647 tree uchar_ftype_uchar
6648 = build_function_type_list (unsigned_char_type_node,
6649 unsigned_char_type_node,
6650 NULL_TREE);
6651 tree uint_ftype_uchar_uchar
6652 = build_function_type_list (unsigned_type_node,
6653 unsigned_char_type_node,
6654 unsigned_char_type_node,
6655 NULL_TREE);
6656 tree int_ftype_char_char
6657 = build_function_type_list (integer_type_node,
6658 char_type_node,
6659 char_type_node,
6660 NULL_TREE);
6661 tree int_ftype_char_uchar
6662 = build_function_type_list (integer_type_node,
6663 char_type_node,
6664 unsigned_char_type_node,
6665 NULL_TREE);
6666 tree void_ftype_ulong
6667 = build_function_type_list (void_type_node,
6668 long_unsigned_type_node,
6669 NULL_TREE);
6670
6671 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6672 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6673 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6674 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6675 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6676 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6677 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6678 AVR_BUILTIN_DELAY_CYCLES);
6679
6680 if (AVR_HAVE_MUL)
6681 {
6682 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6683 in libgcc. For fmul and fmuls this is straight forward with
6684 upcoming fixed point support. */
6685
6686 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6687 AVR_BUILTIN_FMUL);
6688 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6689 AVR_BUILTIN_FMULS);
6690 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6691 AVR_BUILTIN_FMULSU);
6692 }
6693 }
6694
6695 #undef DEF_BUILTIN
6696
/* Descriptor tying a builtin that expands 1:1 to an insn pattern:
   the insn code to emit, the user-visible name, and the
   avr_builtin_id used for dispatch in avr_expand_builtin.  */

struct avr_builtin_description
{
  const enum insn_code icode;
  const char *const name;
  const enum avr_builtin_id id;
};
6703
/* Table of one-operand builtins expanded via avr_expand_unop_builtin.  */

static const struct avr_builtin_description
bdesc_1arg[] =
  {
    { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
  };
6709
/* Table of two-operand builtins expanded via avr_expand_binop_builtin.  */

static const struct avr_builtin_description
bdesc_2arg[] =
  {
    { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
    { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
    { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
  };
6717
6718 /* Subroutine of avr_expand_builtin to take care of unop insns. */
6719
6720 static rtx
6721 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6722 rtx target)
6723 {
6724 rtx pat;
6725 tree arg0 = CALL_EXPR_ARG (exp, 0);
6726 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6727 enum machine_mode op0mode = GET_MODE (op0);
6728 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6729 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6730
6731 if (! target
6732 || GET_MODE (target) != tmode
6733 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6734 {
6735 target = gen_reg_rtx (tmode);
6736 }
6737
6738 if (op0mode == SImode && mode0 == HImode)
6739 {
6740 op0mode = HImode;
6741 op0 = gen_lowpart (HImode, op0);
6742 }
6743
6744 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6745
6746 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6747 op0 = copy_to_mode_reg (mode0, op0);
6748
6749 pat = GEN_FCN (icode) (target, op0);
6750 if (! pat)
6751 return 0;
6752
6753 emit_insn (pat);
6754
6755 return target;
6756 }
6757
6758
/* Subroutine of avr_expand_builtin to take care of binop insns.
   Expand the two call arguments of EXP, coerce each into the mode
   the pattern ICODE expects (narrowing SImode to HImode where
   needed), emit the insn and return TARGET (or a fresh register if
   TARGET is unsuitable); return 0 if no pattern was generated.  */

static rtx
avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Make sure TARGET is a register the pattern accepts.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  /* Narrow SImode (or mode-less constant) operands to HImode where
     the pattern asks for it.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }

  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort. */

  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
              && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);

  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
6814
6815
6816 /* Expand an expression EXP that calls a built-in function,
6817 with result going to TARGET if that's convenient
6818 (and in mode MODE if that's convenient).
6819 SUBTARGET may be used as the target for computing one of EXP's operands.
6820 IGNORE is nonzero if the value is to be ignored. */
6821
6822 static rtx
6823 avr_expand_builtin (tree exp, rtx target,
6824 rtx subtarget ATTRIBUTE_UNUSED,
6825 enum machine_mode mode ATTRIBUTE_UNUSED,
6826 int ignore ATTRIBUTE_UNUSED)
6827 {
6828 size_t i;
6829 const struct avr_builtin_description *d;
6830 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6831 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6832 tree arg0;
6833 rtx op0;
6834
6835 switch (id)
6836 {
6837 case AVR_BUILTIN_NOP:
6838 emit_insn (gen_nopv (GEN_INT(1)));
6839 return 0;
6840
6841 case AVR_BUILTIN_SEI:
6842 emit_insn (gen_enable_interrupt ());
6843 return 0;
6844
6845 case AVR_BUILTIN_CLI:
6846 emit_insn (gen_disable_interrupt ());
6847 return 0;
6848
6849 case AVR_BUILTIN_WDR:
6850 emit_insn (gen_wdr ());
6851 return 0;
6852
6853 case AVR_BUILTIN_SLEEP:
6854 emit_insn (gen_sleep ());
6855 return 0;
6856
6857 case AVR_BUILTIN_DELAY_CYCLES:
6858 {
6859 arg0 = CALL_EXPR_ARG (exp, 0);
6860 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6861
6862 if (! CONST_INT_P (op0))
6863 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6864
6865 avr_expand_delay_cycles (op0);
6866 return 0;
6867 }
6868 }
6869
6870 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6871 if (d->id == id)
6872 return avr_expand_unop_builtin (d->icode, exp, target);
6873
6874 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6875 if (d->id == id)
6876 return avr_expand_binop_builtin (d->icode, exp, target);
6877
6878 gcc_unreachable ();
6879 }
6880
6881
6882 #include "gt-avr.h"