1 /* Subroutines for insn-output.c for ATMEL AVR microcontrollers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "diagnostic-core.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "optabs.h"
43 #include "ggc.h"
44 #include "langhooks.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "params.h"
49 #include "df.h"
50
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
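/* For instance, MAX_LD_OFFSET (QImode) is 63 and MAX_LD_OFFSET (SImode)
   is 60: the LDD displacement may be at most 63, and the last byte of a
   multi-byte access lands at offset + GET_MODE_SIZE - 1.  */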
53
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
66
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
86
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
106 const_tree, bool);
107 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
108 const_tree, bool);
109 static void avr_help (void);
110 static bool avr_function_ok_for_sibcall (tree, tree);
111 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
112
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
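/* Argument registers are handed out downwards from here: an argument of
   BYTES bytes lands in regs FIRST_CUM_REG - BYTES .. FIRST_CUM_REG - 1
   (see avr_function_arg below), so e.g. a 2-byte int arrives in r24/r25
   and the next 2-byte argument in r22/r23.  */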
115
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
118
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
121
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
124
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
127
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
130
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
133
134 section *progmem_section;
135
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
139
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table[] =
142 {
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
146 false },
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
148 false },
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
150 false },
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
152 false },
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
154 false },
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
156 false },
157 { NULL, 0, 0, false, false, false, NULL, false }
158 };
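/* Illustrative use of these attributes in user code (a sketch, not part
   of this file):

     const char msg[] __attribute__((progmem)) = "hello";
     void __vector_1 (void) __attribute__((signal));

   "progmem" places the object in flash, "signal"/"interrupt" mark
   interrupt handlers, and "naked" suppresses prologue/epilogue
   generation.  */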
159 \f
160 /* Initialize the GCC target structure. */
161 #undef TARGET_ASM_ALIGNED_HI_OP
162 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
163 #undef TARGET_ASM_ALIGNED_SI_OP
164 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
165 #undef TARGET_ASM_UNALIGNED_HI_OP
166 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
167 #undef TARGET_ASM_UNALIGNED_SI_OP
168 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
169 #undef TARGET_ASM_INTEGER
170 #define TARGET_ASM_INTEGER avr_assemble_integer
171 #undef TARGET_ASM_FILE_START
172 #define TARGET_ASM_FILE_START avr_file_start
173 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
174 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
175 #undef TARGET_ASM_FILE_END
176 #define TARGET_ASM_FILE_END avr_file_end
177
178 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
179 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
180 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
181 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
182
183 #undef TARGET_FUNCTION_VALUE
184 #define TARGET_FUNCTION_VALUE avr_function_value
185 #undef TARGET_LIBCALL_VALUE
186 #define TARGET_LIBCALL_VALUE avr_libcall_value
187 #undef TARGET_FUNCTION_VALUE_REGNO_P
188 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
189
190 #undef TARGET_ATTRIBUTE_TABLE
191 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
192 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
193 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
194 #undef TARGET_INSERT_ATTRIBUTES
195 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
196 #undef TARGET_SECTION_TYPE_FLAGS
197 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
198
199 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
200
201 #undef TARGET_ASM_INIT_SECTIONS
202 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
203
204 #undef TARGET_REGISTER_MOVE_COST
205 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
206 #undef TARGET_MEMORY_MOVE_COST
207 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
208 #undef TARGET_RTX_COSTS
209 #define TARGET_RTX_COSTS avr_rtx_costs
210 #undef TARGET_ADDRESS_COST
211 #define TARGET_ADDRESS_COST avr_address_cost
212 #undef TARGET_MACHINE_DEPENDENT_REORG
213 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
214 #undef TARGET_FUNCTION_ARG
215 #define TARGET_FUNCTION_ARG avr_function_arg
216 #undef TARGET_FUNCTION_ARG_ADVANCE
217 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
218
219 #undef TARGET_LEGITIMIZE_ADDRESS
220 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
221
222 #undef TARGET_RETURN_IN_MEMORY
223 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
224
225 #undef TARGET_STRICT_ARGUMENT_NAMING
226 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
227
228 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
229 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
230
231 #undef TARGET_HARD_REGNO_SCRATCH_OK
232 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
233 #undef TARGET_CASE_VALUES_THRESHOLD
234 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
235
236 #undef TARGET_LEGITIMATE_ADDRESS_P
237 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
238
239 #undef TARGET_FRAME_POINTER_REQUIRED
240 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
241 #undef TARGET_CAN_ELIMINATE
242 #define TARGET_CAN_ELIMINATE avr_can_eliminate
243
244 #undef TARGET_CLASS_LIKELY_SPILLED_P
245 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
246
247 #undef TARGET_OPTION_OVERRIDE
248 #define TARGET_OPTION_OVERRIDE avr_option_override
249
250 #undef TARGET_CANNOT_MODIFY_JUMPS_P
251 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
252
253 #undef TARGET_HELP
254 #define TARGET_HELP avr_help
255
256 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
257 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
258
259 #undef TARGET_INIT_BUILTINS
260 #define TARGET_INIT_BUILTINS avr_init_builtins
261
262 #undef TARGET_EXPAND_BUILTIN
263 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
264
265
266 struct gcc_target targetm = TARGET_INITIALIZER;
267 \f
268 static void
269 avr_option_override (void)
270 {
271 const struct mcu_type_s *t;
272
273 flag_delete_null_pointer_checks = 0;
274
275 for (t = avr_mcu_types; t->name; t++)
276 if (strcmp (t->name, avr_mcu_name) == 0)
277 break;
278
279 if (!t->name)
280 {
281 error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name);
282 inform (input_location, "See --target-help for supported MCUs");
283 }
284
285 avr_current_device = t;
286 avr_current_arch = &avr_arch_types[avr_current_device->arch];
287 avr_extra_arch_macro = avr_current_device->macro;
288
289 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
290 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
291
292 init_machine_status = avr_init_machine_status;
293 }
294
295 /* Implement TARGET_HELP */
296 /* Report extra information for --target-help */
297
298 static void
299 avr_help (void)
300 {
301 const struct mcu_type_s *t;
302 const char * const indent = " ";
303 int len;
304
305    /* Give a list of MCUs that are accepted by -mmcu=*.
306 Note that MCUs supported by the compiler might differ from
307 MCUs supported by binutils. */
308
309 len = strlen (indent);
310 printf ("Known MCU names:\n%s", indent);
311
312 /* Print a blank-separated list of all supported MCUs */
313
314 for (t = avr_mcu_types; t->name; t++)
315 {
316 printf ("%s ", t->name);
317 len += 1 + strlen (t->name);
318
319 /* Break long lines */
320
321 if (len > 66 && (t+1)->name)
322 {
323 printf ("\n%s", indent);
324 len = strlen (indent);
325 }
326 }
327
328 printf ("\n\n");
329 }
330
331 /* Return the register class for a register number.  */
332
333 static const enum reg_class reg_class_tab[]={
334 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
335 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
336 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
337 GENERAL_REGS, /* r0 - r15 */
338 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
339   LD_REGS,                      /* r16 - r23 */
340   ADDW_REGS,ADDW_REGS,          /* r24,r25 */
341   POINTER_X_REGS,POINTER_X_REGS, /* r26,r27 */
342 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
343 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
344 STACK_REG,STACK_REG /* SPL,SPH */
345 };
346
347 /* Function to set up the backend function structure. */
348
349 static struct machine_function *
350 avr_init_machine_status (void)
351 {
352 return ggc_alloc_cleared_machine_function ();
353 }
354
355 /* Return register class for register R. */
356
357 enum reg_class
358 avr_regno_reg_class (int r)
359 {
360 if (r <= 33)
361 return reg_class_tab[r];
362 return ALL_REGS;
363 }
364
365 /* A helper for the function attribute predicates below: look up
366    attribute NAME in a FUNCTION_DECL or FUNCTION_TYPE node.  */
367
368 static inline int
369 avr_lookup_function_attribute1 (const_tree func, const char *name)
370 {
371 if (FUNCTION_DECL == TREE_CODE (func))
372 {
373 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
374 {
375 return true;
376 }
377
378 func = TREE_TYPE (func);
379 }
380
381 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
382 || TREE_CODE (func) == METHOD_TYPE);
383
384 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
385 }
386
387 /* Return nonzero if FUNC is a naked function. */
388
389 static int
390 avr_naked_function_p (tree func)
391 {
392 return avr_lookup_function_attribute1 (func, "naked");
393 }
394
395 /* Return nonzero if FUNC is an interrupt function as specified
396 by the "interrupt" attribute. */
397
398 static int
399 interrupt_function_p (tree func)
400 {
401 return avr_lookup_function_attribute1 (func, "interrupt");
402 }
403
404 /* Return nonzero if FUNC is a signal function as specified
405 by the "signal" attribute. */
406
407 static int
408 signal_function_p (tree func)
409 {
410 return avr_lookup_function_attribute1 (func, "signal");
411 }
412
413 /* Return nonzero if FUNC is an OS_task function.  */
414
415 static int
416 avr_OS_task_function_p (tree func)
417 {
418 return avr_lookup_function_attribute1 (func, "OS_task");
419 }
420
421 /* Return nonzero if FUNC is an OS_main function.  */
422
423 static int
424 avr_OS_main_function_p (tree func)
425 {
426 return avr_lookup_function_attribute1 (func, "OS_main");
427 }
428
429 /* Return the number of hard registers to push/pop in the prologue/epilogue
430 of the current function, and optionally store these registers in SET. */
431
432 static int
433 avr_regs_to_save (HARD_REG_SET *set)
434 {
435 int reg, count;
436 int int_or_sig_p = (interrupt_function_p (current_function_decl)
437 || signal_function_p (current_function_decl));
438
439 if (set)
440 CLEAR_HARD_REG_SET (*set);
441 count = 0;
442
443   /* No need to save any registers if the function never returns or
444      has the "OS_task" or "OS_main" attribute.  */
445 if (TREE_THIS_VOLATILE (current_function_decl)
446 || cfun->machine->is_OS_task
447 || cfun->machine->is_OS_main)
448 return 0;
449
450 for (reg = 0; reg < 32; reg++)
451 {
452 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
453 any global register variables. */
454 if (fixed_regs[reg])
455 continue;
456
457 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
458 || (df_regs_ever_live_p (reg)
459 && (int_or_sig_p || !call_used_regs[reg])
460 && !(frame_pointer_needed
461 && (reg == REG_Y || reg == (REG_Y+1)))))
462 {
463 if (set)
464 SET_HARD_REG_BIT (*set, reg);
465 count++;
466 }
467 }
468 return count;
469 }
470
471 /* Return true if register FROM can be eliminated via register TO. */
472
473 bool
474 avr_can_eliminate (const int from, const int to)
475 {
476 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
477 || ((from == FRAME_POINTER_REGNUM
478 || from == FRAME_POINTER_REGNUM + 1)
479 && !frame_pointer_needed));
480 }
481
482 /* Compute offset between arg_pointer and frame_pointer. */
483
484 int
485 avr_initial_elimination_offset (int from, int to)
486 {
487 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
488 return 0;
489 else
490 {
491 int offset = frame_pointer_needed ? 2 : 0;
492 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
493
494 offset += avr_regs_to_save (NULL);
495 return get_frame_size () + (avr_pc_size) + 1 + offset;
496 }
497 }
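/* Worked example (assuming a 2-byte PC): with a 4-byte frame, three
   saved registers and a saved frame pointer, the offset is
   4 + 2 + 1 + (3 + 2) = 12 bytes.  */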
498
499 /* The actual start of the frame is virtual_stack_vars_rtx, which is
500    offset from the frame pointer by +STARTING_FRAME_OFFSET.
501    Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
502    avoids creating an add/sub of the offset in nonlocal goto and setjmp.  */
503
504 rtx avr_builtin_setjmp_frame_value (void)
505 {
506 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
507 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
508 }
509
510 /* Return the contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
511    This is the return address of the function.  */
512 rtx
513 avr_return_addr_rtx (int count, rtx tem)
514 {
515 rtx r;
516
517   /* Can only return this function's return address. Others not supported.  */
518 if (count)
519 return NULL;
520
521 if (AVR_3_BYTE_PC)
522 {
523 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
524 warning (0, "'builtin_return_address' contains only 2 bytes of address");
525 }
526 else
527 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
528
529 r = gen_rtx_PLUS (Pmode, tem, r);
530 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
531 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
532 return r;
533 }
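/* Note: the final ROTATE by 8 swaps the two bytes of the HImode value,
   presumably to compensate for the byte order in which the hardware
   pushes the return address.  */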
534
535 /* Return 1 if the function epilogue is just a single "ret". */
536
537 int
538 avr_simple_epilogue (void)
539 {
540 return (! frame_pointer_needed
541 && get_frame_size () == 0
542 && avr_regs_to_save (NULL) == 0
543 && ! interrupt_function_p (current_function_decl)
544 && ! signal_function_p (current_function_decl)
545 && ! avr_naked_function_p (current_function_decl)
546 && ! TREE_THIS_VOLATILE (current_function_decl));
547 }
548
549 /* Check that the live call-saved registers form one contiguous sequence; return its length, else 0.  */
550
551 static int
552 sequent_regs_live (void)
553 {
554 int reg;
555 int live_seq=0;
556 int cur_seq=0;
557
558 for (reg = 0; reg < 18; ++reg)
559 {
560 if (!call_used_regs[reg])
561 {
562 if (df_regs_ever_live_p (reg))
563 {
564 ++live_seq;
565 ++cur_seq;
566 }
567 else
568 cur_seq = 0;
569 }
570 }
571
572 if (!frame_pointer_needed)
573 {
574 if (df_regs_ever_live_p (REG_Y))
575 {
576 ++live_seq;
577 ++cur_seq;
578 }
579 else
580 cur_seq = 0;
581
582 if (df_regs_ever_live_p (REG_Y+1))
583 {
584 ++live_seq;
585 ++cur_seq;
586 }
587 else
588 cur_seq = 0;
589 }
590 else
591 {
592 cur_seq += 2;
593 live_seq += 2;
594 }
595 return (cur_seq == live_seq) ? live_seq : 0;
596 }
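/* Example: if r14..r17 plus the Y pair r28/r29 are the only registers
   ever live, this returns 6.  Any gap in the sequence leaves
   cur_seq != live_seq and 0 is returned, which disables the
   -mcall-prologues shortcut in expand_prologue.  */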
597
598 /* Return the total length of the sequence of insns INSNS.  */
599
600 int
601 get_sequence_length (rtx insns)
602 {
603 rtx insn;
604 int length;
605
606 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
607 length += get_attr_length (insn);
608
609 return length;
610 }
611
612 /* Implement INCOMING_RETURN_ADDR_RTX. */
613
614 rtx
615 avr_incoming_return_addr_rtx (void)
616 {
617 /* The return address is at the top of the stack. Note that the push
618 was via post-decrement, which means the actual address is off by one. */
619 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
620 }
621
622 /* Helper for expand_prologue. Emit a push of a byte register. */
623
624 static void
625 emit_push_byte (unsigned regno, bool frame_related_p)
626 {
627 rtx mem, reg, insn;
628
629 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
630 mem = gen_frame_mem (QImode, mem);
631 reg = gen_rtx_REG (QImode, regno);
632
633 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
634 if (frame_related_p)
635 RTX_FRAME_RELATED_P (insn) = 1;
636
637 cfun->machine->stack_usage++;
638 }
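/* Each push expands to (set (mem:QI (post_dec:HI SP)) (reg:QI REGNO)),
   i.e. a single PUSH instruction, and is counted in
   cfun->machine->stack_usage.  */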
639
640
641 /* Output function prologue. */
642
643 void
644 expand_prologue (void)
645 {
646 int live_seq;
647 HARD_REG_SET set;
648 int minimize;
649 HOST_WIDE_INT size = get_frame_size();
650 rtx insn;
651
652 /* Init cfun->machine. */
653 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
654 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
655 cfun->machine->is_signal = signal_function_p (current_function_decl);
656 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
657 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
658 cfun->machine->stack_usage = 0;
659
660 /* Prologue: naked. */
661 if (cfun->machine->is_naked)
662 {
663 return;
664 }
665
666 avr_regs_to_save (&set);
667 live_seq = sequent_regs_live ();
668 minimize = (TARGET_CALL_PROLOGUES
669 && !cfun->machine->is_interrupt
670 && !cfun->machine->is_signal
671 && !cfun->machine->is_OS_task
672 && !cfun->machine->is_OS_main
673 && live_seq);
674
675 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
676 {
677 /* Enable interrupts. */
678 if (cfun->machine->is_interrupt)
679 emit_insn (gen_enable_interrupt ());
680
681 /* Push zero reg. */
682 emit_push_byte (ZERO_REGNO, true);
683
684 /* Push tmp reg. */
685 emit_push_byte (TMP_REGNO, true);
686
687 /* Push SREG. */
688 /* ??? There's no dwarf2 column reserved for SREG. */
689 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
690 emit_push_byte (TMP_REGNO, false);
691
692 /* Push RAMPZ. */
693 /* ??? There's no dwarf2 column reserved for RAMPZ. */
694 if (AVR_HAVE_RAMPZ
695 && TEST_HARD_REG_BIT (set, REG_Z)
696 && TEST_HARD_REG_BIT (set, REG_Z + 1))
697 {
698 emit_move_insn (tmp_reg_rtx,
699 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
700 emit_push_byte (TMP_REGNO, false);
701 }
702
703 /* Clear zero reg. */
704 emit_move_insn (zero_reg_rtx, const0_rtx);
705
706 /* Prevent any attempt to delete the setting of ZERO_REG! */
707 emit_use (zero_reg_rtx);
708 }
709 if (minimize && (frame_pointer_needed
710 || (AVR_2_BYTE_PC && live_seq > 6)
711 || live_seq > 7))
712 {
713 int first_reg, reg, offset;
714
715 emit_move_insn (gen_rtx_REG (HImode, REG_X),
716 gen_int_mode (size, HImode));
717
718 insn = emit_insn (gen_call_prologue_saves
719 (gen_int_mode (live_seq, HImode),
720 gen_int_mode (size + live_seq, HImode)));
721 RTX_FRAME_RELATED_P (insn) = 1;
722
723 /* Describe the effect of the unspec_volatile call to prologue_saves.
724 Note that this formulation assumes that add_reg_note pushes the
725 notes to the front. Thus we build them in the reverse order of
726 how we want dwarf2out to process them. */
727
728 /* The function does always set frame_pointer_rtx, but whether that
729 is going to be permanent in the function is frame_pointer_needed. */
730 add_reg_note (insn, REG_CFA_ADJUST_CFA,
731 gen_rtx_SET (VOIDmode,
732 (frame_pointer_needed
733 ? frame_pointer_rtx : stack_pointer_rtx),
734 plus_constant (stack_pointer_rtx,
735 -(size + live_seq))));
736
737 /* Note that live_seq always contains r28+r29, but the other
738 registers to be saved are all below 18. */
739 first_reg = 18 - (live_seq - 2);
740
741 for (reg = 29, offset = -live_seq + 1;
742 reg >= first_reg;
743 reg = (reg == 28 ? 17 : reg - 1), ++offset)
744 {
745 rtx m, r;
746
747 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
748 r = gen_rtx_REG (QImode, reg);
749 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
750 }
751
752 cfun->machine->stack_usage += size + live_seq;
753 }
754 else
755 {
756 int reg;
757 for (reg = 0; reg < 32; ++reg)
758 if (TEST_HARD_REG_BIT (set, reg))
759 emit_push_byte (reg, true);
760
761 if (frame_pointer_needed)
762 {
763 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
764 {
765 /* Push frame pointer. Always be consistent about the
766 ordering of pushes -- epilogue_restores expects the
767 register pair to be pushed low byte first. */
768 emit_push_byte (REG_Y, true);
769 emit_push_byte (REG_Y + 1, true);
770 }
771
772 if (!size)
773 {
774 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
775 RTX_FRAME_RELATED_P (insn) = 1;
776 }
777 else
778 {
779 /* Creating a frame can be done by direct manipulation of the
780 stack or via the frame pointer. These two methods are:
781 fp=sp
782 fp-=size
783 sp=fp
784 OR
785 sp-=size
786 fp=sp
787              the optimal method depends on the function type and on the stack
788              and frame size. To avoid complex logic, both methods are tried
789              and the shorter sequence is selected.  */
790 rtx myfp;
791 rtx fp_plus_insns;
792
793 if (AVR_HAVE_8BIT_SP)
794 {
795 /* The high byte (r29) doesn't change. Prefer 'subi'
796 (1 cycle) over 'sbiw' (2 cycles, same size). */
797 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
798 }
799 else
800 {
801 /* Normal sized addition. */
802 myfp = frame_pointer_rtx;
803 }
804
805 /* Method 1-Adjust frame pointer. */
806 start_sequence ();
807
808 /* Normally the dwarf2out frame-related-expr interpreter does
809 not expect to have the CFA change once the frame pointer is
810 set up. Thus we avoid marking the move insn below and
811 instead indicate that the entire operation is complete after
812 the frame pointer subtraction is done. */
813
814 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
815
816 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
817 RTX_FRAME_RELATED_P (insn) = 1;
818 add_reg_note (insn, REG_CFA_ADJUST_CFA,
819 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
820 plus_constant (stack_pointer_rtx,
821 -size)));
822
823 /* Copy to stack pointer. Note that since we've already
824 changed the CFA to the frame pointer this operation
825 need not be annotated at all. */
826 if (AVR_HAVE_8BIT_SP)
827 {
828 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
829 }
830 else if (TARGET_NO_INTERRUPTS
831 || cfun->machine->is_signal
832 || cfun->machine->is_OS_main)
833 {
834 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
835 frame_pointer_rtx));
836 }
837 else if (cfun->machine->is_interrupt)
838 {
839 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
840 frame_pointer_rtx));
841 }
842 else
843 {
844 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
845 }
846
847 fp_plus_insns = get_insns ();
848 end_sequence ();
849
850 /* Method 2-Adjust Stack pointer. */
851 if (size <= 6)
852 {
853 rtx sp_plus_insns;
854
855 start_sequence ();
856
857 insn = plus_constant (stack_pointer_rtx, -size);
858 insn = emit_move_insn (stack_pointer_rtx, insn);
859 RTX_FRAME_RELATED_P (insn) = 1;
860
861 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
862 RTX_FRAME_RELATED_P (insn) = 1;
863
864 sp_plus_insns = get_insns ();
865 end_sequence ();
866
867 /* Use shortest method. */
868 if (get_sequence_length (sp_plus_insns)
869 < get_sequence_length (fp_plus_insns))
870 emit_insn (sp_plus_insns);
871 else
872 emit_insn (fp_plus_insns);
873 }
874 else
875 emit_insn (fp_plus_insns);
876
877 cfun->machine->stack_usage += size;
878 }
879 }
880 }
881
882 if (flag_stack_usage_info)
883 current_function_static_stack_size = cfun->machine->stack_usage;
884 }
885
886 /* Output summary at end of function prologue. */
887
888 static void
889 avr_asm_function_end_prologue (FILE *file)
890 {
891 if (cfun->machine->is_naked)
892 {
893 fputs ("/* prologue: naked */\n", file);
894 }
895 else
896 {
897 if (cfun->machine->is_interrupt)
898 {
899 fputs ("/* prologue: Interrupt */\n", file);
900 }
901 else if (cfun->machine->is_signal)
902 {
903 fputs ("/* prologue: Signal */\n", file);
904 }
905 else
906 fputs ("/* prologue: function */\n", file);
907 }
908 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
909 get_frame_size());
910 fprintf (file, "/* stack size = %d */\n",
911 cfun->machine->stack_usage);
912   /* Create the stack-offset symbol here so all functions have it. Add 1 to
913      stack usage for the offset so that SP + .L__stack_usage = return address.  */
914 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
915 }
916
917
918 /* Implement EPILOGUE_USES. */
919
920 int
921 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
922 {
923 if (reload_completed
924 && cfun->machine
925 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
926 return 1;
927 return 0;
928 }
929
930 /* Helper for expand_epilogue. Emit a pop of a byte register. */
931
932 static void
933 emit_pop_byte (unsigned regno)
934 {
935 rtx mem, reg;
936
937 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
938 mem = gen_frame_mem (QImode, mem);
939 reg = gen_rtx_REG (QImode, regno);
940
941 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
942 }
943
944 /* Output RTL epilogue. */
945
946 void
947 expand_epilogue (bool sibcall_p)
948 {
949 int reg;
950 int live_seq;
951 HARD_REG_SET set;
952 int minimize;
953 HOST_WIDE_INT size = get_frame_size();
954
955 /* epilogue: naked */
956 if (cfun->machine->is_naked)
957 {
958 gcc_assert (!sibcall_p);
959
960 emit_jump_insn (gen_return ());
961 return;
962 }
963
964 avr_regs_to_save (&set);
965 live_seq = sequent_regs_live ();
966 minimize = (TARGET_CALL_PROLOGUES
967 && !cfun->machine->is_interrupt
968 && !cfun->machine->is_signal
969 && !cfun->machine->is_OS_task
970 && !cfun->machine->is_OS_main
971 && live_seq);
972
973 if (minimize && (frame_pointer_needed || live_seq > 4))
974 {
975 if (frame_pointer_needed)
976 {
977 /* Get rid of frame. */
978 emit_move_insn(frame_pointer_rtx,
979 gen_rtx_PLUS (HImode, frame_pointer_rtx,
980 gen_int_mode (size, HImode)));
981 }
982 else
983 {
984 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
985 }
986
987 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
988 }
989 else
990 {
991 if (frame_pointer_needed)
992 {
993 if (size)
994 {
995 /* Try two methods to adjust stack and select shortest. */
996 rtx myfp;
997 rtx fp_plus_insns;
998
999 if (AVR_HAVE_8BIT_SP)
1000 {
1001 /* The high byte (r29) doesn't change - prefer 'subi'
1002 (1 cycle) over 'sbiw' (2 cycles, same size). */
1003 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1004 }
1005 else
1006 {
1007 /* Normal sized addition. */
1008 myfp = frame_pointer_rtx;
1009 }
1010
1011 /* Method 1-Adjust frame pointer. */
1012 start_sequence ();
1013
1014 emit_move_insn (myfp, plus_constant (myfp, size));
1015
1016 /* Copy to stack pointer. */
1017 if (AVR_HAVE_8BIT_SP)
1018 {
1019 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1020 }
1021 else if (TARGET_NO_INTERRUPTS
1022 || cfun->machine->is_signal)
1023 {
1024 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1025 frame_pointer_rtx));
1026 }
1027 else if (cfun->machine->is_interrupt)
1028 {
1029 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1030 frame_pointer_rtx));
1031 }
1032 else
1033 {
1034 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1035 }
1036
1037 fp_plus_insns = get_insns ();
1038 end_sequence ();
1039
1040 /* Method 2-Adjust Stack pointer. */
1041 if (size <= 5)
1042 {
1043 rtx sp_plus_insns;
1044
1045 start_sequence ();
1046
1047 emit_move_insn (stack_pointer_rtx,
1048 plus_constant (stack_pointer_rtx, size));
1049
1050 sp_plus_insns = get_insns ();
1051 end_sequence ();
1052
1053 /* Use shortest method. */
1054 if (get_sequence_length (sp_plus_insns)
1055 < get_sequence_length (fp_plus_insns))
1056 emit_insn (sp_plus_insns);
1057 else
1058 emit_insn (fp_plus_insns);
1059 }
1060 else
1061 emit_insn (fp_plus_insns);
1062 }
1063 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1064 {
1065 /* Restore previous frame_pointer. See expand_prologue for
1066 rationale for not using pophi. */
1067 emit_pop_byte (REG_Y + 1);
1068 emit_pop_byte (REG_Y);
1069 }
1070 }
1071
1072 /* Restore used registers. */
1073 for (reg = 31; reg >= 0; --reg)
1074 if (TEST_HARD_REG_BIT (set, reg))
1075 emit_pop_byte (reg);
1076
1077 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1078 {
1079 /* Restore RAMPZ using tmp reg as scratch. */
1080 if (AVR_HAVE_RAMPZ
1081 && TEST_HARD_REG_BIT (set, REG_Z)
1082 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1083 {
1084 emit_pop_byte (TMP_REGNO);
1085 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1086 tmp_reg_rtx);
1087 }
1088
1089 /* Restore SREG using tmp reg as scratch. */
1090 emit_pop_byte (TMP_REGNO);
1091
1092 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1093 tmp_reg_rtx);
1094
1095 /* Restore tmp REG. */
1096 emit_pop_byte (TMP_REGNO);
1097
1098 /* Restore zero REG. */
1099 emit_pop_byte (ZERO_REGNO);
1100 }
1101
1102 if (!sibcall_p)
1103 emit_jump_insn (gen_return ());
1104 }
1105 }
1106
1107 /* Output summary messages at beginning of function epilogue. */
1108
1109 static void
1110 avr_asm_function_begin_epilogue (FILE *file)
1111 {
1112 fprintf (file, "/* epilogue start */\n");
1113 }
1114
1115
1116 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */
1117
1118 static bool
1119 avr_cannot_modify_jumps_p (void)
1120 {
1121
1122   /* Naked functions must not have any instructions after
1123      their epilogue; see PR42240.  */
1124
1125 if (reload_completed
1126 && cfun->machine
1127 && cfun->machine->is_naked)
1128 {
1129 return true;
1130 }
1131
1132 return false;
1133 }
1134
1135
1136 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1137 machine for a memory operand of mode MODE. */
1138
1139 bool
1140 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1141 {
1142 enum reg_class r = NO_REGS;
1143
1144 if (TARGET_ALL_DEBUG)
1145 {
1146 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1147 GET_MODE_NAME(mode),
1148 strict ? "(strict)": "",
1149 reload_completed ? "(reload_completed)": "",
1150 reload_in_progress ? "(reload_in_progress)": "",
1151 reg_renumber ? "(reg_renumber)" : "");
1152 if (GET_CODE (x) == PLUS
1153 && REG_P (XEXP (x, 0))
1154 && GET_CODE (XEXP (x, 1)) == CONST_INT
1155 && INTVAL (XEXP (x, 1)) >= 0
1156 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1157 && reg_renumber
1158 )
1159 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1160 true_regnum (XEXP (x, 0)));
1161 debug_rtx (x);
1162 }
1163 if (!strict && GET_CODE (x) == SUBREG)
1164 x = SUBREG_REG (x);
1165 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1166 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1167 r = POINTER_REGS;
1168 else if (CONSTANT_ADDRESS_P (x))
1169 r = ALL_REGS;
1170 else if (GET_CODE (x) == PLUS
1171 && REG_P (XEXP (x, 0))
1172 && GET_CODE (XEXP (x, 1)) == CONST_INT
1173 && INTVAL (XEXP (x, 1)) >= 0)
1174 {
1175 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1176 if (fit)
1177 {
1178 if (! strict
1179 || REGNO (XEXP (x,0)) == REG_X
1180 || REGNO (XEXP (x,0)) == REG_Y
1181 || REGNO (XEXP (x,0)) == REG_Z)
1182 r = BASE_POINTER_REGS;
1183 if (XEXP (x,0) == frame_pointer_rtx
1184 || XEXP (x,0) == arg_pointer_rtx)
1185 r = BASE_POINTER_REGS;
1186 }
1187 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1188 r = POINTER_Y_REGS;
1189 }
1190 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1191 && REG_P (XEXP (x, 0))
1192 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1193 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1194 {
1195 r = POINTER_REGS;
1196 }
1197 if (TARGET_ALL_DEBUG)
1198 {
1199 fprintf (stderr, " ret = %c\n", r + '0');
1200 }
1201 return r == NO_REGS ? 0 : (int)r;
1202 }
1203
1204 /* Attempt to replace X with a valid
1205    memory address for an operand of mode MODE.  */
1206
1207 rtx
1208 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1209 {
1210 x = oldx;
1211 if (TARGET_ALL_DEBUG)
1212 {
1213 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1214 debug_rtx (oldx);
1215 }
1216
1217 if (GET_CODE (oldx) == PLUS
1218 && REG_P (XEXP (oldx,0)))
1219 {
1220 if (REG_P (XEXP (oldx,1)))
1221 x = force_reg (GET_MODE (oldx), oldx);
1222 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1223 {
1224 int offs = INTVAL (XEXP (oldx,1));
1225 if (frame_pointer_rtx != XEXP (oldx,0))
1226 if (offs > MAX_LD_OFFSET (mode))
1227 {
1228 if (TARGET_ALL_DEBUG)
1229 fprintf (stderr, "force_reg (big offset)\n");
1230 x = force_reg (GET_MODE (oldx), oldx);
1231 }
1232 }
1233 }
1234 return x;
1235 }
1236
1237
1238 /* Return a pointer register name as a string. */
1239
1240 static const char *
1241 ptrreg_to_str (int regno)
1242 {
1243 switch (regno)
1244 {
1245 case REG_X: return "X";
1246 case REG_Y: return "Y";
1247 case REG_Z: return "Z";
1248 default:
1249 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1250 }
1251 return NULL;
1252 }
1253
1254 /* Return the condition name as a string.
1255    Used when constructing conditional jumps.  */
1256
1257 static const char *
1258 cond_string (enum rtx_code code)
1259 {
1260 switch (code)
1261 {
1262 case NE:
1263 return "ne";
1264 case EQ:
1265 return "eq";
1266 case GE:
1267 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1268 return "pl";
1269 else
1270 return "ge";
1271 case LT:
1272 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1273 return "mi";
1274 else
1275 return "lt";
1276 case GEU:
1277 return "sh";
1278 case LTU:
1279 return "lo";
1280 default:
1281 gcc_unreachable ();
1282 }
1283 }
1284
1285 /* Output ADDR to FILE as address. */
1286
1287 void
1288 print_operand_address (FILE *file, rtx addr)
1289 {
1290 switch (GET_CODE (addr))
1291 {
1292 case REG:
1293 fprintf (file, ptrreg_to_str (REGNO (addr)));
1294 break;
1295
1296 case PRE_DEC:
1297 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1298 break;
1299
1300 case POST_INC:
1301 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1302 break;
1303
1304 default:
1305 if (CONSTANT_ADDRESS_P (addr)
1306 && text_segment_operand (addr, VOIDmode))
1307 {
1308 rtx x = addr;
1309 if (GET_CODE (x) == CONST)
1310 x = XEXP (x, 0);
1311 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1312 {
1313             /* The assembler's gs() expands to a word address. Make the offset
1314                a byte offset inside gs() for the assembler, because the more
1315                logical (constant+gs(sym)) is not accepted by gas. For devices
1316                with 128K of flash or less this is fine. For larger devices it
1317                will create a trampoline to offset from the symbol, which may
1318                not be what the user really wanted.  */
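            /* For example, (const (plus (symbol_ref FOO) (const_int 1))) is
               printed as gs(FOO+2): the word offset 1 becomes the byte
               offset 2 inside gs().  */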
1319 fprintf (file, "gs(");
1320 output_addr_const (file, XEXP (x,0));
1321 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1322 if (AVR_3_BYTE_PC)
1323             if (warning (0, "pointer offset from symbol may be incorrect"))
1324 {
1325 output_addr_const (stderr, addr);
1326 fprintf(stderr,"\n");
1327 }
1328 }
1329 else
1330 {
1331 fprintf (file, "gs(");
1332 output_addr_const (file, addr);
1333 fprintf (file, ")");
1334 }
1335 }
1336 else
1337 output_addr_const (file, addr);
1338 }
1339 }
1340
1341
1342 /* Output X as assembler operand to file FILE. */
1343
1344 void
1345 print_operand (FILE *file, rtx x, int code)
1346 {
1347 int abcd = 0;
1348
1349 if (code >= 'A' && code <= 'D')
1350 abcd = code - 'A';
1351
1352 if (code == '~')
1353 {
1354 if (!AVR_HAVE_JMP_CALL)
1355 fputc ('r', file);
1356 }
1357 else if (code == '!')
1358 {
1359 if (AVR_HAVE_EIJMP_EICALL)
1360 fputc ('e', file);
1361 }
1362 else if (REG_P (x))
1363 {
1364 if (x == zero_reg_rtx)
1365 fprintf (file, "__zero_reg__");
1366 else
1367 fprintf (file, reg_names[true_regnum (x) + abcd]);
1368 }
1369 else if (GET_CODE (x) == CONST_INT)
1370 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1371 else if (GET_CODE (x) == MEM)
1372 {
1373 rtx addr = XEXP (x,0);
1374 if (code == 'm')
1375 {
1376 if (!CONSTANT_P (addr))
1377             fatal_insn ("bad address, not a constant:", addr);
1378           /* An assembler template with the 'm' code refers to data, not the progmem section.  */
1379 if (text_segment_operand (addr, VOIDmode))
1380 if (warning ( 0, "accessing data memory with program memory address"))
1381 {
1382 output_addr_const (stderr, addr);
1383 fprintf(stderr,"\n");
1384 }
1385 output_addr_const (file, addr);
1386 }
1387 else if (code == 'o')
1388 {
1389 if (GET_CODE (addr) != PLUS)
1390 fatal_insn ("bad address, not (reg+disp):", addr);
1391
1392 print_operand (file, XEXP (addr, 1), 0);
1393 }
1394 else if (code == 'p' || code == 'r')
1395 {
1396 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1397 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1398
1399 if (code == 'p')
1400 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1401 else
1402 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1403 }
1404 else if (GET_CODE (addr) == PLUS)
1405 {
1406 print_operand_address (file, XEXP (addr,0));
1407 if (REGNO (XEXP (addr, 0)) == REG_X)
1408 fatal_insn ("internal compiler error. Bad address:"
1409 ,addr);
1410 fputc ('+', file);
1411 print_operand (file, XEXP (addr,1), code);
1412 }
1413 else
1414 print_operand_address (file, addr);
1415 }
1416 else if (code == 'x')
1417 {
1418 /* Constant progmem address - like used in jmp or call */
1419 if (0 == text_segment_operand (x, VOIDmode))
1420 if (warning ( 0, "accessing program memory with data memory address"))
1421 {
1422 output_addr_const (stderr, x);
1423 fprintf(stderr,"\n");
1424 }
1425       /* Use a normal symbol for a direct address; no linker trampoline needed.  */
1426 output_addr_const (file, x);
1427 }
1428 else if (GET_CODE (x) == CONST_DOUBLE)
1429 {
1430 long val;
1431 REAL_VALUE_TYPE rv;
1432 if (GET_MODE (x) != SFmode)
1433 fatal_insn ("internal compiler error. Unknown mode:", x);
1434 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1435 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1436 fprintf (file, "0x%lx", val);
1437 }
1438 else if (code == 'j')
1439 fputs (cond_string (GET_CODE (x)), file);
1440 else if (code == 'k')
1441 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1442 else
1443 print_operand_address (file, x);
1444 }
1445
1446 /* Update the condition code in the INSN. */
1447
1448 void
1449 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1450 {
1451 rtx set;
1452
1453 switch (get_attr_cc (insn))
1454 {
1455 case CC_NONE:
1456 /* Insn does not affect CC at all. */
1457 break;
1458
1459 case CC_SET_N:
1460 CC_STATUS_INIT;
1461 break;
1462
1463 case CC_SET_ZN:
1464 set = single_set (insn);
1465 CC_STATUS_INIT;
1466 if (set)
1467 {
1468 cc_status.flags |= CC_NO_OVERFLOW;
1469 cc_status.value1 = SET_DEST (set);
1470 }
1471 break;
1472
1473 case CC_SET_CZN:
1474 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1475 The V flag may or may not be known but that's ok because
1476 alter_cond will change tests to use EQ/NE. */
1477 set = single_set (insn);
1478 CC_STATUS_INIT;
1479 if (set)
1480 {
1481 cc_status.value1 = SET_DEST (set);
1482 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1483 }
1484 break;
1485
1486 case CC_COMPARE:
1487 set = single_set (insn);
1488 CC_STATUS_INIT;
1489 if (set)
1490 cc_status.value1 = SET_SRC (set);
1491 break;
1492
1493 case CC_CLOBBER:
1494 /* Insn doesn't leave CC in a usable state. */
1495 CC_STATUS_INIT;
1496
1497       /* Correct CC for ashrqi3 when the shift count is a CONST_INT other than 6.  */
1498 set = single_set (insn);
1499 if (set)
1500 {
1501 rtx src = SET_SRC (set);
1502
1503 if (GET_CODE (src) == ASHIFTRT
1504 && GET_MODE (src) == QImode)
1505 {
1506 rtx x = XEXP (src, 1);
1507
1508 if (GET_CODE (x) == CONST_INT
1509 && INTVAL (x) > 0
1510 && INTVAL (x) != 6)
1511 {
1512 cc_status.value1 = SET_DEST (set);
1513 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1514 }
1515 }
1516 }
1517 break;
1518 }
1519 }
1520
1521 /* Return maximum number of consecutive registers of
1522 class CLASS needed to hold a value of mode MODE. */
1523
1524 int
1525 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1526 {
1527 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1528 }
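/* On AVR, UNITS_PER_WORD is 1 (registers are 8 bits wide), so this is
   just the mode size in bytes: HImode needs 2 consecutive registers,
   SImode needs 4.  */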
1529
1530 /* Choose mode for jump insn:
1531    1 - relative jump in range -63 <= x <= 62;
1532    2 - relative jump in range -2046 <= x <= 2045;
1533 3 - absolute jump (only for ATmega[16]03). */
1534
1535 int
1536 avr_jump_mode (rtx x, rtx insn)
1537 {
1538 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1539 ? XEXP (x, 0) : x));
1540 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1541 int jump_distance = cur_addr - dest_addr;
1542
1543 if (-63 <= jump_distance && jump_distance <= 62)
1544 return 1;
1545 else if (-2046 <= jump_distance && jump_distance <= 2045)
1546 return 2;
1547 else if (AVR_HAVE_JMP_CALL)
1548 return 3;
1549
1550 return 2;
1551 }
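/* The value computed here is used as LEN by ret_cond_branch below:
   1 selects a single conditional branch, 2 a reversed branch around an
   rjmp, and 3 a reversed branch around a jmp.  */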
1552
1553 /* Return an AVR conditional branch command.
1554    X is a comparison RTX.
1555    LEN is a length value returned by the avr_jump_mode function.
1556    If REVERSE is nonzero, the condition code in X must be reversed.  */
1557
1558 const char *
1559 ret_cond_branch (rtx x, int len, int reverse)
1560 {
1561 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1562
1563 switch (cond)
1564 {
1565 case GT:
1566 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1567 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1568 AS1 (brpl,%0)) :
1569 len == 2 ? (AS1 (breq,.+4) CR_TAB
1570 AS1 (brmi,.+2) CR_TAB
1571 AS1 (rjmp,%0)) :
1572 (AS1 (breq,.+6) CR_TAB
1573 AS1 (brmi,.+4) CR_TAB
1574 AS1 (jmp,%0)));
1575
1576 else
1577 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1578 AS1 (brge,%0)) :
1579 len == 2 ? (AS1 (breq,.+4) CR_TAB
1580 AS1 (brlt,.+2) CR_TAB
1581 AS1 (rjmp,%0)) :
1582 (AS1 (breq,.+6) CR_TAB
1583 AS1 (brlt,.+4) CR_TAB
1584 AS1 (jmp,%0)));
1585 case GTU:
1586 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1587 AS1 (brsh,%0)) :
1588 len == 2 ? (AS1 (breq,.+4) CR_TAB
1589 AS1 (brlo,.+2) CR_TAB
1590 AS1 (rjmp,%0)) :
1591 (AS1 (breq,.+6) CR_TAB
1592 AS1 (brlo,.+4) CR_TAB
1593 AS1 (jmp,%0)));
1594 case LE:
1595 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1596 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1597 AS1 (brmi,%0)) :
1598 len == 2 ? (AS1 (breq,.+2) CR_TAB
1599 AS1 (brpl,.+2) CR_TAB
1600 AS1 (rjmp,%0)) :
1601 (AS1 (breq,.+2) CR_TAB
1602 AS1 (brpl,.+4) CR_TAB
1603 AS1 (jmp,%0)));
1604 else
1605 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1606 AS1 (brlt,%0)) :
1607 len == 2 ? (AS1 (breq,.+2) CR_TAB
1608 AS1 (brge,.+2) CR_TAB
1609 AS1 (rjmp,%0)) :
1610 (AS1 (breq,.+2) CR_TAB
1611 AS1 (brge,.+4) CR_TAB
1612 AS1 (jmp,%0)));
1613 case LEU:
1614 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1615 AS1 (brlo,%0)) :
1616 len == 2 ? (AS1 (breq,.+2) CR_TAB
1617 AS1 (brsh,.+2) CR_TAB
1618 AS1 (rjmp,%0)) :
1619 (AS1 (breq,.+2) CR_TAB
1620 AS1 (brsh,.+4) CR_TAB
1621 AS1 (jmp,%0)));
1622 default:
1623 if (reverse)
1624 {
1625 switch (len)
1626 {
1627 case 1:
1628 return AS1 (br%k1,%0);
1629 case 2:
1630 return (AS1 (br%j1,.+2) CR_TAB
1631 AS1 (rjmp,%0));
1632 default:
1633 return (AS1 (br%j1,.+4) CR_TAB
1634 AS1 (jmp,%0));
1635 }
1636 }
1637 else
1638 {
1639 switch (len)
1640 {
1641 case 1:
1642 return AS1 (br%j1,%0);
1643 case 2:
1644 return (AS1 (br%k1,.+2) CR_TAB
1645 AS1 (rjmp,%0));
1646 default:
1647 return (AS1 (br%k1,.+4) CR_TAB
1648 AS1 (jmp,%0));
1649 }
1650 }
1651 }
1652 return "";
1653 }
1654
1655 /* Predicate for an immediate operand that fits into a byte (8 bits).  */
1656
1657 int
1658 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1659 {
1660 return (GET_CODE (op) == CONST_INT
1661 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1662 }
1663
1664 /* Output insn cost for next insn. */
1665
1666 void
1667 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1668 int num_operands ATTRIBUTE_UNUSED)
1669 {
1670 if (TARGET_ALL_DEBUG)
1671 {
1672 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1673 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1674 }
1675 }
1676
1677 /* Return 0 if undefined, 1 if always true or always false. */
1678
1679 int
1680 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1681 {
1682 unsigned int max = (mode == QImode ? 0xff :
1683 mode == HImode ? 0xffff :
1684 mode == SImode ? 0xffffffff : 0);
1685 if (max && op && GET_CODE (x) == CONST_INT)
1686 {
1687 if (unsigned_condition (op) != op)
1688 max >>= 1;
1689
1690 if (max != (INTVAL (x) & max)
1691 && INTVAL (x) != 0xff)
1692 return 1;
1693 }
1694 return 0;
1695 }
1696
1697
1698 /* Returns nonzero if REGNO is the number of a hard
1699 register in which function arguments are sometimes passed. */
1700
1701 int
1702 function_arg_regno_p(int r)
1703 {
1704 return (r >= 8 && r <= 25);
1705 }
1706
1707 /* Initialize the variable CUM to the state at the beginning
1708    of the argument list.  */
1709
1710 void
1711 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1712 tree fndecl ATTRIBUTE_UNUSED)
1713 {
1714 cum->nregs = 18;
1715 cum->regno = FIRST_CUM_REG;
1716 if (!libname && stdarg_p (fntype))
1717 cum->nregs = 0;
1718
1719   /* Assume the callee may be tail-called.  */
1720
1721 cfun->machine->sibcall_fails = 0;
1722 }
1723
1724 /* Returns the number of registers to allocate for a function argument. */
1725
1726 static int
1727 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1728 {
1729 int size;
1730
1731 if (mode == BLKmode)
1732 size = int_size_in_bytes (type);
1733 else
1734 size = GET_MODE_SIZE (mode);
1735
1736 /* Align all function arguments to start in even-numbered registers.
1737 Odd-sized arguments leave holes above them. */
1738
1739 return (size + 1) & ~1;
1740 }
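/* Example: a char argument is rounded up to 2 bytes and occupies an
   even-aligned register pair with a hole above it; a 3-byte aggregate
   is rounded up to 4.  */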
1741
1742 /* Controls whether a function argument is passed
1743 in a register, and which register. */
1744
1745 static rtx
1746 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1747 const_tree type, bool named ATTRIBUTE_UNUSED)
1748 {
1749 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1750 int bytes = avr_num_arg_regs (mode, type);
1751
1752 if (cum->nregs && bytes <= cum->nregs)
1753 return gen_rtx_REG (mode, cum->regno - bytes);
1754
1755 return NULL_RTX;
1756 }
1757
1758 /* Update the summarizer variable CUM to advance past an argument
1759 in the argument list. */
1760
1761 static void
1762 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1763 const_tree type, bool named ATTRIBUTE_UNUSED)
1764 {
1765 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1766 int bytes = avr_num_arg_regs (mode, type);
1767
1768 cum->nregs -= bytes;
1769 cum->regno -= bytes;
1770
1771 /* A parameter is being passed in a call-saved register. As the original
1772      contents of these regs have to be restored before leaving the function,
1773 a function must not pass arguments in call-saved regs in order to get
1774 tail-called. */
1775
1776 if (cum->regno >= 8
1777 && cum->nregs >= 0
1778 && !call_used_regs[cum->regno])
1779 {
1780 /* FIXME: We ship info on failing tail-call in struct machine_function.
1781 This uses internals of calls.c:expand_call() and the way args_so_far
1782 is used. targetm.function_ok_for_sibcall() needs to be extended to
1783 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1784 dependent so that such an extension is not wanted. */
1785
1786 cfun->machine->sibcall_fails = 1;
1787 }
1788
1789 /* Test if all registers needed by the ABI are actually available. If the
1790 user has fixed a GPR needed to pass an argument, an (implicit) function
1791 call would clobber that fixed register. See PR45099 for an example. */
1792
1793 if (cum->regno >= 8
1794 && cum->nregs >= 0)
1795 {
1796 int regno;
1797
1798 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1799 if (fixed_regs[regno])
1800         error ("register %s is needed to pass a parameter but is fixed",
1801 reg_names[regno]);
1802 }
1803
1804 if (cum->nregs <= 0)
1805 {
1806 cum->nregs = 0;
1807 cum->regno = FIRST_CUM_REG;
1808 }
1809 }
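/* Example of the restriction above: a call to g (long, long, long)
   passes the third argument in r14..r17, which are call-saved, so
   sibcall_fails is set and the call cannot become a tail call.  */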
1810
1811 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1812 /* Decide whether we can make a sibling call to a function. DECL is the
1813 declaration of the function being targeted by the call and EXP is the
1814 CALL_EXPR representing the call. */
1815
1816 static bool
1817 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1818 {
1819 tree fntype_callee;
1820
1821 /* Tail-calling must fail if callee-saved regs are used to pass
1822 function args. We must not tail-call when `epilogue_restores'
1823 is used. Unfortunately, we cannot tell at this point if that
1824 actually will happen or not, and we cannot step back from
1825 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1826
1827 if (cfun->machine->sibcall_fails
1828 || TARGET_CALL_PROLOGUES)
1829 {
1830 return false;
1831 }
1832
1833 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1834
1835 if (decl_callee)
1836 {
1837 decl_callee = TREE_TYPE (decl_callee);
1838 }
1839 else
1840 {
1841 decl_callee = fntype_callee;
1842
1843 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1844 && METHOD_TYPE != TREE_CODE (decl_callee))
1845 {
1846 decl_callee = TREE_TYPE (decl_callee);
1847 }
1848 }
1849
1850 /* Ensure that caller and callee have compatible epilogues */
1851
1852 if (interrupt_function_p (current_function_decl)
1853 || signal_function_p (current_function_decl)
1854 || avr_naked_function_p (decl_callee)
1855 || avr_naked_function_p (current_function_decl)
1856 /* FIXME: For OS_task and OS_main, we are over-conservative.
1857 This is due to missing documentation of these attributes
1858 and what they actually should do and should not do. */
1859 || (avr_OS_task_function_p (decl_callee)
1860 != avr_OS_task_function_p (current_function_decl))
1861 || (avr_OS_main_function_p (decl_callee)
1862 != avr_OS_main_function_p (current_function_decl)))
1863 {
1864 return false;
1865 }
1866
1867 return true;
1868 }
1869
1870 /***********************************************************************
1871  Functions for outputting various mov's for various modes
1872 ************************************************************************/
1873 const char *
1874 output_movqi (rtx insn, rtx operands[], int *l)
1875 {
1876 int dummy;
1877 rtx dest = operands[0];
1878 rtx src = operands[1];
1879 int *real_l = l;
1880
1881 if (!l)
1882 l = &dummy;
1883
1884 *l = 1;
1885
1886 if (register_operand (dest, QImode))
1887 {
1888 if (register_operand (src, QImode)) /* mov r,r */
1889 {
1890 if (test_hard_reg_class (STACK_REG, dest))
1891 return AS2 (out,%0,%1);
1892 else if (test_hard_reg_class (STACK_REG, src))
1893 return AS2 (in,%0,%1);
1894
1895 return AS2 (mov,%0,%1);
1896 }
1897 else if (CONSTANT_P (src))
1898 {
1899 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1900 return AS2 (ldi,%0,lo8(%1));
1901
1902 if (GET_CODE (src) == CONST_INT)
1903 {
1904 if (src == const0_rtx) /* mov r,L */
1905 return AS1 (clr,%0);
1906 else if (src == const1_rtx)
1907 {
1908 *l = 2;
1909 return (AS1 (clr,%0) CR_TAB
1910 AS1 (inc,%0));
1911 }
1912 else if (src == constm1_rtx)
1913 {
1914               /* Load the immediate constant -1 into any register.  */
1915 *l = 2;
1916 return (AS1 (clr,%0) CR_TAB
1917 AS1 (dec,%0));
1918 }
1919 else
1920 {
1921 int bit_nr = exact_log2 (INTVAL (src));
1922
1923 if (bit_nr >= 0)
1924 {
1925 *l = 3;
1926 if (!real_l)
1927 output_asm_insn ((AS1 (clr,%0) CR_TAB
1928 "set"), operands);
1929 if (!real_l)
1930 avr_output_bld (operands, bit_nr);
1931
1932 return "";
1933 }
1934 }
1935 }
1936
1937 /* Last resort, larger than loading from memory. */
1938 *l = 4;
1939 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1940 AS2 (ldi,r31,lo8(%1)) CR_TAB
1941 AS2 (mov,%0,r31) CR_TAB
1942 AS2 (mov,r31,__tmp_reg__));
1943 }
1944 else if (GET_CODE (src) == MEM)
1945 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1946 }
1947 else if (GET_CODE (dest) == MEM)
1948 {
1949 const char *templ;
1950
1951 if (src == const0_rtx)
1952 operands[1] = zero_reg_rtx;
1953
1954 templ = out_movqi_mr_r (insn, operands, real_l);
1955
1956 if (!real_l)
1957 output_asm_insn (templ, operands);
1958
1959 operands[1] = src;
1960 }
1961 return "";
1962 }
1963
1964
1965 const char *
1966 output_movhi (rtx insn, rtx operands[], int *l)
1967 {
1968 int dummy;
1969 rtx dest = operands[0];
1970 rtx src = operands[1];
1971 int *real_l = l;
1972
1973 if (!l)
1974 l = &dummy;
1975
1976 if (register_operand (dest, HImode))
1977 {
1978 if (register_operand (src, HImode)) /* mov r,r */
1979 {
1980 if (test_hard_reg_class (STACK_REG, dest))
1981 {
1982 if (AVR_HAVE_8BIT_SP)
1983 return *l = 1, AS2 (out,__SP_L__,%A1);
1984 /* Use simple load of stack pointer if no interrupts are
1985 used. */
1986 else if (TARGET_NO_INTERRUPTS)
1987 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1988 AS2 (out,__SP_L__,%A1));
1989 *l = 5;
1990 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1991 "cli" CR_TAB
1992 AS2 (out,__SP_H__,%B1) CR_TAB
1993 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1994 AS2 (out,__SP_L__,%A1));
1995 }
1996 else if (test_hard_reg_class (STACK_REG, src))
1997 {
1998 *l = 2;
1999 return (AS2 (in,%A0,__SP_L__) CR_TAB
2000 AS2 (in,%B0,__SP_H__));
2001 }
2002
2003 if (AVR_HAVE_MOVW)
2004 {
2005 *l = 1;
2006 return (AS2 (movw,%0,%1));
2007 }
2008 else
2009 {
2010 *l = 2;
2011 return (AS2 (mov,%A0,%A1) CR_TAB
2012 AS2 (mov,%B0,%B1));
2013 }
2014 }
2015 else if (CONSTANT_P (src))
2016 {
2017 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2018 {
2019 *l = 2;
2020 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2021 AS2 (ldi,%B0,hi8(%1)));
2022 }
2023
2024 if (GET_CODE (src) == CONST_INT)
2025 {
2026 if (src == const0_rtx) /* mov r,L */
2027 {
2028 *l = 2;
2029 return (AS1 (clr,%A0) CR_TAB
2030 AS1 (clr,%B0));
2031 }
2032 else if (src == const1_rtx)
2033 {
2034 *l = 3;
2035 return (AS1 (clr,%A0) CR_TAB
2036 AS1 (clr,%B0) CR_TAB
2037 AS1 (inc,%A0));
2038 }
2039 else if (src == constm1_rtx)
2040 {
2041 /* Load the immediate constant -1 into any register. */
2042 *l = 3;
2043 return (AS1 (clr,%0) CR_TAB
2044 AS1 (dec,%A0) CR_TAB
2045 AS2 (mov,%B0,%A0));
2046 }
2047 else
2048 {
2049 int bit_nr = exact_log2 (INTVAL (src));
2050
2051 if (bit_nr >= 0)
2052 {
2053 *l = 4;
2054 if (!real_l)
2055 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2056 AS1 (clr,%B0) CR_TAB
2057 "set"), operands);
2058 if (!real_l)
2059 avr_output_bld (operands, bit_nr);
2060
2061 return "";
2062 }
2063 }
2064
2065 if ((INTVAL (src) & 0xff) == 0)
2066 {
2067 *l = 5;
2068 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2069 AS1 (clr,%A0) CR_TAB
2070 AS2 (ldi,r31,hi8(%1)) CR_TAB
2071 AS2 (mov,%B0,r31) CR_TAB
2072 AS2 (mov,r31,__tmp_reg__));
2073 }
2074 else if ((INTVAL (src) & 0xff00) == 0)
2075 {
2076 *l = 5;
2077 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2078 AS2 (ldi,r31,lo8(%1)) CR_TAB
2079 AS2 (mov,%A0,r31) CR_TAB
2080 AS1 (clr,%B0) CR_TAB
2081 AS2 (mov,r31,__tmp_reg__));
2082 }
2083 }
2084
2085 /* Last resort, equal to loading from memory. */
2086 *l = 6;
2087 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2088 AS2 (ldi,r31,lo8(%1)) CR_TAB
2089 AS2 (mov,%A0,r31) CR_TAB
2090 AS2 (ldi,r31,hi8(%1)) CR_TAB
2091 AS2 (mov,%B0,r31) CR_TAB
2092 AS2 (mov,r31,__tmp_reg__));
2093 }
2094 else if (GET_CODE (src) == MEM)
2095 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2096 }
2097 else if (GET_CODE (dest) == MEM)
2098 {
2099 const char *templ;
2100
2101 if (src == const0_rtx)
2102 operands[1] = zero_reg_rtx;
2103
2104 templ = out_movhi_mr_r (insn, operands, real_l);
2105
2106 if (!real_l)
2107 output_asm_insn (templ, operands);
2108
2109 operands[1] = src;
2110 return "";
2111 }
2112 fatal_insn ("invalid insn:", insn);
2113 return "";
2114 }
2115
2116 const char *
2117 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2118 {
2119 rtx dest = op[0];
2120 rtx src = op[1];
2121 rtx x = XEXP (src, 0);
2122 int dummy;
2123
2124 if (!l)
2125 l = &dummy;
2126
2127 if (CONSTANT_ADDRESS_P (x))
2128 {
2129 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2130 {
2131 *l = 1;
2132 return AS2 (in,%0,__SREG__);
2133 }
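      /* I/O registers are visible in the data address space at an
         offset of 0x20, so the operand of IN/OUT is the RAM address
         minus 0x20.  */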
2134 if (optimize > 0 && io_address_operand (x, QImode))
2135 {
2136 *l = 1;
2137 return AS2 (in,%0,%m1-0x20);
2138 }
2139 *l = 2;
2140 return AS2 (lds,%0,%m1);
2141 }
2142 /* memory access by reg+disp */
2143 else if (GET_CODE (x) == PLUS
2144 && REG_P (XEXP (x,0))
2145 && GET_CODE (XEXP (x,1)) == CONST_INT)
2146 {
2147 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2148 {
2149 int disp = INTVAL (XEXP (x,1));
2150 if (REGNO (XEXP (x,0)) != REG_Y)
2151 fatal_insn ("incorrect insn:",insn);
2152
2153 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2154 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2155 AS2 (ldd,%0,Y+63) CR_TAB
2156 AS2 (sbiw,r28,%o1-63));
2157
2158 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2159 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2160 AS2 (ld,%0,Y) CR_TAB
2161 AS2 (subi,r28,lo8(%o1)) CR_TAB
2162 AS2 (sbci,r29,hi8(%o1)));
2163 }
2164 else if (REGNO (XEXP (x,0)) == REG_X)
2165 {
2166 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude it,
2167 but this situation can still arise with extreme optimization options. */
2168 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2169 || reg_unused_after (insn, XEXP (x,0)))
2170 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2171 AS2 (ld,%0,X));
2172
2173 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2174 AS2 (ld,%0,X) CR_TAB
2175 AS2 (sbiw,r26,%o1));
2176 }
2177 *l = 1;
2178 return AS2 (ldd,%0,%1);
2179 }
2180 *l = 1;
2181 return AS2 (ld,%0,%1);
2182 }
2183
2184 const char *
2185 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2186 {
2187 rtx dest = op[0];
2188 rtx src = op[1];
2189 rtx base = XEXP (src, 0);
2190 int reg_dest = true_regnum (dest);
2191 int reg_base = true_regnum (base);
2192 /* "volatile" forces reading low byte first, even if less efficient,
2193 for correct operation with 16-bit I/O registers. */
2194 int mem_volatile_p = MEM_VOLATILE_P (src);
2195 int tmp;
2196
2197 if (!l)
2198 l = &tmp;
2199
2200 if (reg_base > 0)
2201 {
2202 if (reg_dest == reg_base) /* R = (R) */
2203 {
2204 *l = 3;
2205 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2206 AS2 (ld,%B0,%1) CR_TAB
2207 AS2 (mov,%A0,__tmp_reg__));
2208 }
2209 else if (reg_base == REG_X) /* (R26) */
2210 {
2211 if (reg_unused_after (insn, base))
2212 {
2213 *l = 2;
2214 return (AS2 (ld,%A0,X+) CR_TAB
2215 AS2 (ld,%B0,X));
2216 }
2217 *l = 3;
2218 return (AS2 (ld,%A0,X+) CR_TAB
2219 AS2 (ld,%B0,X) CR_TAB
2220 AS2 (sbiw,r26,1));
2221 }
2222 else /* (R) */
2223 {
2224 *l = 2;
2225 return (AS2 (ld,%A0,%1) CR_TAB
2226 AS2 (ldd,%B0,%1+1));
2227 }
2228 }
2229 else if (GET_CODE (base) == PLUS) /* (R + i) */
2230 {
2231 int disp = INTVAL (XEXP (base, 1));
2232 int reg_base = true_regnum (XEXP (base, 0));
2233
2234 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2235 {
2236 if (REGNO (XEXP (base, 0)) != REG_Y)
2237 fatal_insn ("incorrect insn:",insn);
2238
2239 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2240 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2241 AS2 (ldd,%A0,Y+62) CR_TAB
2242 AS2 (ldd,%B0,Y+63) CR_TAB
2243 AS2 (sbiw,r28,%o1-62));
2244
2245 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2246 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2247 AS2 (ld,%A0,Y) CR_TAB
2248 AS2 (ldd,%B0,Y+1) CR_TAB
2249 AS2 (subi,r28,lo8(%o1)) CR_TAB
2250 AS2 (sbci,r29,hi8(%o1)));
2251 }
2252 if (reg_base == REG_X)
2253 {
2254 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
2255 it, but this situation can still arise with extreme
2256 optimization options. */
2257
2258 *l = 4;
2259 if (reg_base == reg_dest)
2260 return (AS2 (adiw,r26,%o1) CR_TAB
2261 AS2 (ld,__tmp_reg__,X+) CR_TAB
2262 AS2 (ld,%B0,X) CR_TAB
2263 AS2 (mov,%A0,__tmp_reg__));
2264
2265 return (AS2 (adiw,r26,%o1) CR_TAB
2266 AS2 (ld,%A0,X+) CR_TAB
2267 AS2 (ld,%B0,X) CR_TAB
2268 AS2 (sbiw,r26,%o1+1));
2269 }
2270
2271 if (reg_base == reg_dest)
2272 {
2273 *l = 3;
2274 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2275 AS2 (ldd,%B0,%B1) CR_TAB
2276 AS2 (mov,%A0,__tmp_reg__));
2277 }
2278
2279 *l = 2;
2280 return (AS2 (ldd,%A0,%A1) CR_TAB
2281 AS2 (ldd,%B0,%B1));
2282 }
2283 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2284 {
2285 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2286 fatal_insn ("incorrect insn:", insn);
2287
2288 if (mem_volatile_p)
2289 {
2290 if (REGNO (XEXP (base, 0)) == REG_X)
2291 {
2292 *l = 4;
2293 return (AS2 (sbiw,r26,2) CR_TAB
2294 AS2 (ld,%A0,X+) CR_TAB
2295 AS2 (ld,%B0,X) CR_TAB
2296 AS2 (sbiw,r26,1));
2297 }
2298 else
2299 {
2300 *l = 3;
2301 return (AS2 (sbiw,%r1,2) CR_TAB
2302 AS2 (ld,%A0,%p1) CR_TAB
2303 AS2 (ldd,%B0,%p1+1));
2304 }
2305 }
2306
2307 *l = 2;
2308 return (AS2 (ld,%B0,%1) CR_TAB
2309 AS2 (ld,%A0,%1));
2310 }
2311 else if (GET_CODE (base) == POST_INC) /* (R++) */
2312 {
2313 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2314 fatal_insn ("incorrect insn:", insn);
2315
2316 *l = 2;
2317 return (AS2 (ld,%A0,%1) CR_TAB
2318 AS2 (ld,%B0,%1));
2319 }
2320 else if (CONSTANT_ADDRESS_P (base))
2321 {
2322 if (optimize > 0 && io_address_operand (base, HImode))
2323 {
2324 *l = 2;
2325 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2326 AS2 (in,%B0,%m1+1-0x20));
2327 }
2328 *l = 4;
2329 return (AS2 (lds,%A0,%m1) CR_TAB
2330 AS2 (lds,%B0,%m1+1));
2331 }
2332
2333 fatal_insn ("unknown move insn:",insn);
2334 return "";
2335 }
2336
2337 const char *
2338 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2339 {
2340 rtx dest = op[0];
2341 rtx src = op[1];
2342 rtx base = XEXP (src, 0);
2343 int reg_dest = true_regnum (dest);
2344 int reg_base = true_regnum (base);
2345 int tmp;
2346
2347 if (!l)
2348 l = &tmp;
2349
2350 if (reg_base > 0)
2351 {
2352 if (reg_base == REG_X) /* (R26) */
2353 {
2354 if (reg_dest == REG_X)
2355 /* "ld r26,-X" is undefined */
2356 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2357 AS2 (ld,r29,X) CR_TAB
2358 AS2 (ld,r28,-X) CR_TAB
2359 AS2 (ld,__tmp_reg__,-X) CR_TAB
2360 AS2 (sbiw,r26,1) CR_TAB
2361 AS2 (ld,r26,X) CR_TAB
2362 AS2 (mov,r27,__tmp_reg__));
2363 else if (reg_dest == REG_X - 2)
2364 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2365 AS2 (ld,%B0,X+) CR_TAB
2366 AS2 (ld,__tmp_reg__,X+) CR_TAB
2367 AS2 (ld,%D0,X) CR_TAB
2368 AS2 (mov,%C0,__tmp_reg__));
2369 else if (reg_unused_after (insn, base))
2370 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2371 AS2 (ld,%B0,X+) CR_TAB
2372 AS2 (ld,%C0,X+) CR_TAB
2373 AS2 (ld,%D0,X));
2374 else
2375 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2376 AS2 (ld,%B0,X+) CR_TAB
2377 AS2 (ld,%C0,X+) CR_TAB
2378 AS2 (ld,%D0,X) CR_TAB
2379 AS2 (sbiw,r26,3));
2380 }
2381 else
2382 {
2383 if (reg_dest == reg_base)
2384 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2385 AS2 (ldd,%C0,%1+2) CR_TAB
2386 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2387 AS2 (ld,%A0,%1) CR_TAB
2388 AS2 (mov,%B0,__tmp_reg__));
2389 else if (reg_base == reg_dest + 2)
2390 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2391 AS2 (ldd,%B0,%1+1) CR_TAB
2392 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2393 AS2 (ldd,%D0,%1+3) CR_TAB
2394 AS2 (mov,%C0,__tmp_reg__));
2395 else
2396 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2397 AS2 (ldd,%B0,%1+1) CR_TAB
2398 AS2 (ldd,%C0,%1+2) CR_TAB
2399 AS2 (ldd,%D0,%1+3));
2400 }
2401 }
2402 else if (GET_CODE (base) == PLUS) /* (R + i) */
2403 {
2404 int disp = INTVAL (XEXP (base, 1));
2405
2406 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2407 {
2408 if (REGNO (XEXP (base, 0)) != REG_Y)
2409 fatal_insn ("incorrect insn:",insn);
2410
2411 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2412 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2413 AS2 (ldd,%A0,Y+60) CR_TAB
2414 AS2 (ldd,%B0,Y+61) CR_TAB
2415 AS2 (ldd,%C0,Y+62) CR_TAB
2416 AS2 (ldd,%D0,Y+63) CR_TAB
2417 AS2 (sbiw,r28,%o1-60));
2418
2419 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2420 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2421 AS2 (ld,%A0,Y) CR_TAB
2422 AS2 (ldd,%B0,Y+1) CR_TAB
2423 AS2 (ldd,%C0,Y+2) CR_TAB
2424 AS2 (ldd,%D0,Y+3) CR_TAB
2425 AS2 (subi,r28,lo8(%o1)) CR_TAB
2426 AS2 (sbci,r29,hi8(%o1)));
2427 }
2428
2429 reg_base = true_regnum (XEXP (base, 0));
2430 if (reg_base == REG_X)
2431 {
2432 /* R = (X + d) */
2433 if (reg_dest == REG_X)
2434 {
2435 *l = 7;
2436 /* "ld r26,-X" is undefined */
2437 return (AS2 (adiw,r26,%o1+3) CR_TAB
2438 AS2 (ld,r29,X) CR_TAB
2439 AS2 (ld,r28,-X) CR_TAB
2440 AS2 (ld,__tmp_reg__,-X) CR_TAB
2441 AS2 (sbiw,r26,1) CR_TAB
2442 AS2 (ld,r26,X) CR_TAB
2443 AS2 (mov,r27,__tmp_reg__));
2444 }
2445 *l = 6;
2446 if (reg_dest == REG_X - 2)
2447 return (AS2 (adiw,r26,%o1) CR_TAB
2448 AS2 (ld,r24,X+) CR_TAB
2449 AS2 (ld,r25,X+) CR_TAB
2450 AS2 (ld,__tmp_reg__,X+) CR_TAB
2451 AS2 (ld,r27,X) CR_TAB
2452 AS2 (mov,r26,__tmp_reg__));
2453
2454 return (AS2 (adiw,r26,%o1) CR_TAB
2455 AS2 (ld,%A0,X+) CR_TAB
2456 AS2 (ld,%B0,X+) CR_TAB
2457 AS2 (ld,%C0,X+) CR_TAB
2458 AS2 (ld,%D0,X) CR_TAB
2459 AS2 (sbiw,r26,%o1+3));
2460 }
2461 if (reg_dest == reg_base)
2462 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2463 AS2 (ldd,%C0,%C1) CR_TAB
2464 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2465 AS2 (ldd,%A0,%A1) CR_TAB
2466 AS2 (mov,%B0,__tmp_reg__));
2467 else if (reg_dest == reg_base - 2)
2468 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2469 AS2 (ldd,%B0,%B1) CR_TAB
2470 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2471 AS2 (ldd,%D0,%D1) CR_TAB
2472 AS2 (mov,%C0,__tmp_reg__));
2473 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2474 AS2 (ldd,%B0,%B1) CR_TAB
2475 AS2 (ldd,%C0,%C1) CR_TAB
2476 AS2 (ldd,%D0,%D1));
2477 }
2478 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2479 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2480 AS2 (ld,%C0,%1) CR_TAB
2481 AS2 (ld,%B0,%1) CR_TAB
2482 AS2 (ld,%A0,%1));
2483 else if (GET_CODE (base) == POST_INC) /* (R++) */
2484 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2485 AS2 (ld,%B0,%1) CR_TAB
2486 AS2 (ld,%C0,%1) CR_TAB
2487 AS2 (ld,%D0,%1));
2488 else if (CONSTANT_ADDRESS_P (base))
2489 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2490 AS2 (lds,%B0,%m1+1) CR_TAB
2491 AS2 (lds,%C0,%m1+2) CR_TAB
2492 AS2 (lds,%D0,%m1+3));
2493
2494 fatal_insn ("unknown move insn:",insn);
2495 return "";
2496 }
2497
2498 const char *
2499 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2500 {
2501 rtx dest = op[0];
2502 rtx src = op[1];
2503 rtx base = XEXP (dest, 0);
2504 int reg_base = true_regnum (base);
2505 int reg_src = true_regnum (src);
2506 int tmp;
2507
2508 if (!l)
2509 l = &tmp;
2510
2511 if (CONSTANT_ADDRESS_P (base))
2512 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2513 AS2 (sts,%m0+1,%B1) CR_TAB
2514 AS2 (sts,%m0+2,%C1) CR_TAB
2515 AS2 (sts,%m0+3,%D1));
2516 if (reg_base > 0) /* (r) */
2517 {
2518 if (reg_base == REG_X) /* (R26) */
2519 {
2520 if (reg_src == REG_X)
2521 {
2522 /* "st X+,r26" is undefined */
2523 if (reg_unused_after (insn, base))
2524 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2525 AS2 (st,X,r26) CR_TAB
2526 AS2 (adiw,r26,1) CR_TAB
2527 AS2 (st,X+,__tmp_reg__) CR_TAB
2528 AS2 (st,X+,r28) CR_TAB
2529 AS2 (st,X,r29));
2530 else
2531 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2532 AS2 (st,X,r26) CR_TAB
2533 AS2 (adiw,r26,1) CR_TAB
2534 AS2 (st,X+,__tmp_reg__) CR_TAB
2535 AS2 (st,X+,r28) CR_TAB
2536 AS2 (st,X,r29) CR_TAB
2537 AS2 (sbiw,r26,3));
2538 }
2539 else if (reg_base == reg_src + 2)
2540 {
2541 if (reg_unused_after (insn, base))
2542 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2543 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2544 AS2 (st,%0+,%A1) CR_TAB
2545 AS2 (st,%0+,%B1) CR_TAB
2546 AS2 (st,%0+,__zero_reg__) CR_TAB
2547 AS2 (st,%0,__tmp_reg__) CR_TAB
2548 AS1 (clr,__zero_reg__));
2549 else
2550 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2551 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2552 AS2 (st,%0+,%A1) CR_TAB
2553 AS2 (st,%0+,%B1) CR_TAB
2554 AS2 (st,%0+,__zero_reg__) CR_TAB
2555 AS2 (st,%0,__tmp_reg__) CR_TAB
2556 AS1 (clr,__zero_reg__) CR_TAB
2557 AS2 (sbiw,r26,3));
2558 }
2559 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2560 AS2 (st,%0+,%B1) CR_TAB
2561 AS2 (st,%0+,%C1) CR_TAB
2562 AS2 (st,%0,%D1) CR_TAB
2563 AS2 (sbiw,r26,3));
2564 }
2565 else
2566 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2567 AS2 (std,%0+1,%B1) CR_TAB
2568 AS2 (std,%0+2,%C1) CR_TAB
2569 AS2 (std,%0+3,%D1));
2570 }
2571 else if (GET_CODE (base) == PLUS) /* (R + i) */
2572 {
2573 int disp = INTVAL (XEXP (base, 1));
2574 reg_base = REGNO (XEXP (base, 0));
2575 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2576 {
2577 if (reg_base != REG_Y)
2578 fatal_insn ("incorrect insn:",insn);
2579
2580 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2581 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2582 AS2 (std,Y+60,%A1) CR_TAB
2583 AS2 (std,Y+61,%B1) CR_TAB
2584 AS2 (std,Y+62,%C1) CR_TAB
2585 AS2 (std,Y+63,%D1) CR_TAB
2586 AS2 (sbiw,r28,%o0-60));
2587
2588 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2589 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2590 AS2 (st,Y,%A1) CR_TAB
2591 AS2 (std,Y+1,%B1) CR_TAB
2592 AS2 (std,Y+2,%C1) CR_TAB
2593 AS2 (std,Y+3,%D1) CR_TAB
2594 AS2 (subi,r28,lo8(%o0)) CR_TAB
2595 AS2 (sbci,r29,hi8(%o0)));
2596 }
2597 if (reg_base == REG_X)
2598 {
2599 /* (X + d) = R */
2600 if (reg_src == REG_X)
2601 {
2602 *l = 9;
2603 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2604 AS2 (mov,__zero_reg__,r27) CR_TAB
2605 AS2 (adiw,r26,%o0) CR_TAB
2606 AS2 (st,X+,__tmp_reg__) CR_TAB
2607 AS2 (st,X+,__zero_reg__) CR_TAB
2608 AS2 (st,X+,r28) CR_TAB
2609 AS2 (st,X,r29) CR_TAB
2610 AS1 (clr,__zero_reg__) CR_TAB
2611 AS2 (sbiw,r26,%o0+3));
2612 }
2613 else if (reg_src == REG_X - 2)
2614 {
2615 *l = 9;
2616 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2617 AS2 (mov,__zero_reg__,r27) CR_TAB
2618 AS2 (adiw,r26,%o0) CR_TAB
2619 AS2 (st,X+,r24) CR_TAB
2620 AS2 (st,X+,r25) CR_TAB
2621 AS2 (st,X+,__tmp_reg__) CR_TAB
2622 AS2 (st,X,__zero_reg__) CR_TAB
2623 AS1 (clr,__zero_reg__) CR_TAB
2624 AS2 (sbiw,r26,%o0+3));
2625 }
2626 *l = 6;
2627 return (AS2 (adiw,r26,%o0) CR_TAB
2628 AS2 (st,X+,%A1) CR_TAB
2629 AS2 (st,X+,%B1) CR_TAB
2630 AS2 (st,X+,%C1) CR_TAB
2631 AS2 (st,X,%D1) CR_TAB
2632 AS2 (sbiw,r26,%o0+3));
2633 }
2634 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2635 AS2 (std,%B0,%B1) CR_TAB
2636 AS2 (std,%C0,%C1) CR_TAB
2637 AS2 (std,%D0,%D1));
2638 }
2639 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2640 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2641 AS2 (st,%0,%C1) CR_TAB
2642 AS2 (st,%0,%B1) CR_TAB
2643 AS2 (st,%0,%A1));
2644 else if (GET_CODE (base) == POST_INC) /* (R++) */
2645 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2646 AS2 (st,%0,%B1) CR_TAB
2647 AS2 (st,%0,%C1) CR_TAB
2648 AS2 (st,%0,%D1));
2649 fatal_insn ("unknown move insn:",insn);
2650 return "";
2651 }
2652
2653 const char *
2654 output_movsisf (rtx insn, rtx operands[], int *l)
2655 {
2656 int dummy;
2657 rtx dest = operands[0];
2658 rtx src = operands[1];
2659 int *real_l = l;
2660
2661 if (!l)
2662 l = &dummy;
2663
2664 if (register_operand (dest, VOIDmode))
2665 {
2666 if (register_operand (src, VOIDmode)) /* mov r,r */
2667 {
2668 if (true_regnum (dest) > true_regnum (src))
2669 {
2670 if (AVR_HAVE_MOVW)
2671 {
2672 *l = 2;
2673 return (AS2 (movw,%C0,%C1) CR_TAB
2674 AS2 (movw,%A0,%A1));
2675 }
2676 *l = 4;
2677 return (AS2 (mov,%D0,%D1) CR_TAB
2678 AS2 (mov,%C0,%C1) CR_TAB
2679 AS2 (mov,%B0,%B1) CR_TAB
2680 AS2 (mov,%A0,%A1));
2681 }
2682 else
2683 {
2684 if (AVR_HAVE_MOVW)
2685 {
2686 *l = 2;
2687 return (AS2 (movw,%A0,%A1) CR_TAB
2688 AS2 (movw,%C0,%C1));
2689 }
2690 *l = 4;
2691 return (AS2 (mov,%A0,%A1) CR_TAB
2692 AS2 (mov,%B0,%B1) CR_TAB
2693 AS2 (mov,%C0,%C1) CR_TAB
2694 AS2 (mov,%D0,%D1));
2695 }
2696 }
2697 else if (CONSTANT_P (src))
2698 {
2699 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2700 {
2701 *l = 4;
2702 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2703 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2704 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2705 AS2 (ldi,%D0,hhi8(%1)));
2706 }
2707
2708 if (GET_CODE (src) == CONST_INT)
2709 {
2710 const char *const clr_op0 =
2711 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2712 AS1 (clr,%B0) CR_TAB
2713 AS2 (movw,%C0,%A0))
2714 : (AS1 (clr,%A0) CR_TAB
2715 AS1 (clr,%B0) CR_TAB
2716 AS1 (clr,%C0) CR_TAB
2717 AS1 (clr,%D0));
2718
2719 if (src == const0_rtx) /* mov r,L */
2720 {
2721 *l = AVR_HAVE_MOVW ? 3 : 4;
2722 return clr_op0;
2723 }
2724 else if (src == const1_rtx)
2725 {
2726 if (!real_l)
2727 output_asm_insn (clr_op0, operands);
2728 *l = AVR_HAVE_MOVW ? 4 : 5;
2729 return AS1 (inc,%A0);
2730 }
2731 else if (src == constm1_rtx)
2732 {
2733 /* Load the immediate constant -1 into any register. */
2734 if (AVR_HAVE_MOVW)
2735 {
2736 *l = 4;
2737 return (AS1 (clr,%A0) CR_TAB
2738 AS1 (dec,%A0) CR_TAB
2739 AS2 (mov,%B0,%A0) CR_TAB
2740 AS2 (movw,%C0,%A0));
2741 }
2742 *l = 5;
2743 return (AS1 (clr,%A0) CR_TAB
2744 AS1 (dec,%A0) CR_TAB
2745 AS2 (mov,%B0,%A0) CR_TAB
2746 AS2 (mov,%C0,%A0) CR_TAB
2747 AS2 (mov,%D0,%A0));
2748 }
2749 else
2750 {
2751 int bit_nr = exact_log2 (INTVAL (src));
2752
2753 if (bit_nr >= 0)
2754 {
2755 *l = AVR_HAVE_MOVW ? 5 : 6;
2756 if (!real_l)
2757 {
2758 output_asm_insn (clr_op0, operands);
2759 output_asm_insn ("set", operands);
2760 }
2761 if (!real_l)
2762 avr_output_bld (operands, bit_nr);
2763
2764 return "";
2765 }
2766 }
2767 }
2768
2769 /* Last resort, better than loading from memory. */
2770 *l = 10;
2771 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2772 AS2 (ldi,r31,lo8(%1)) CR_TAB
2773 AS2 (mov,%A0,r31) CR_TAB
2774 AS2 (ldi,r31,hi8(%1)) CR_TAB
2775 AS2 (mov,%B0,r31) CR_TAB
2776 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2777 AS2 (mov,%C0,r31) CR_TAB
2778 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2779 AS2 (mov,%D0,r31) CR_TAB
2780 AS2 (mov,r31,__tmp_reg__));
2781 }
2782 else if (GET_CODE (src) == MEM)
2783 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2784 }
2785 else if (GET_CODE (dest) == MEM)
2786 {
2787 const char *templ;
2788
2789 if (src == const0_rtx)
2790 operands[1] = zero_reg_rtx;
2791
2792 templ = out_movsi_mr_r (insn, operands, real_l);
2793
2794 if (!real_l)
2795 output_asm_insn (templ, operands);
2796
2797 operands[1] = src;
2798 return "";
2799 }
2800 fatal_insn ("invalid insn:", insn);
2801 return "";
2802 }
2803
2804 const char *
2805 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2806 {
2807 rtx dest = op[0];
2808 rtx src = op[1];
2809 rtx x = XEXP (dest, 0);
2810 int dummy;
2811
2812 if (!l)
2813 l = &dummy;
2814
2815 if (CONSTANT_ADDRESS_P (x))
2816 {
2817 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2818 {
2819 *l = 1;
2820 return AS2 (out,__SREG__,%1);
2821 }
2822 if (optimize > 0 && io_address_operand (x, QImode))
2823 {
2824 *l = 1;
2825 return AS2 (out,%m0-0x20,%1);
2826 }
2827 *l = 2;
2828 return AS2 (sts,%m0,%1);
2829 }
2830 /* memory access by reg+disp */
2831 else if (GET_CODE (x) == PLUS
2832 && REG_P (XEXP (x,0))
2833 && GET_CODE (XEXP (x,1)) == CONST_INT)
2834 {
2835 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2836 {
2837 int disp = INTVAL (XEXP (x,1));
2838 if (REGNO (XEXP (x,0)) != REG_Y)
2839 fatal_insn ("incorrect insn:",insn);
2840
2841 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2842 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2843 AS2 (std,Y+63,%1) CR_TAB
2844 AS2 (sbiw,r28,%o0-63));
2845
2846 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2847 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2848 AS2 (st,Y,%1) CR_TAB
2849 AS2 (subi,r28,lo8(%o0)) CR_TAB
2850 AS2 (sbci,r29,hi8(%o0)));
2851 }
2852 else if (REGNO (XEXP (x,0)) == REG_X)
2853 {
2854 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2855 {
2856 if (reg_unused_after (insn, XEXP (x,0)))
2857 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2858 AS2 (adiw,r26,%o0) CR_TAB
2859 AS2 (st,X,__tmp_reg__));
2860
2861 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2862 AS2 (adiw,r26,%o0) CR_TAB
2863 AS2 (st,X,__tmp_reg__) CR_TAB
2864 AS2 (sbiw,r26,%o0));
2865 }
2866 else
2867 {
2868 if (reg_unused_after (insn, XEXP (x,0)))
2869 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2870 AS2 (st,X,%1));
2871
2872 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2873 AS2 (st,X,%1) CR_TAB
2874 AS2 (sbiw,r26,%o0));
2875 }
2876 }
2877 *l = 1;
2878 return AS2 (std,%0,%1);
2879 }
2880 *l = 1;
2881 return AS2 (st,%0,%1);
2882 }
2883
2884 const char *
2885 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2886 {
2887 rtx dest = op[0];
2888 rtx src = op[1];
2889 rtx base = XEXP (dest, 0);
2890 int reg_base = true_regnum (base);
2891 int reg_src = true_regnum (src);
2892 /* "volatile" forces writing high byte first, even if less efficient,
2893 for correct operation with 16-bit I/O registers. */
2894 int mem_volatile_p = MEM_VOLATILE_P (dest);
2895 int tmp;
2896
2897 if (!l)
2898 l = &tmp;
2899 if (CONSTANT_ADDRESS_P (base))
2900 {
2901 if (optimize > 0 && io_address_operand (base, HImode))
2902 {
2903 *l = 2;
2904 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2905 AS2 (out,%m0-0x20,%A1));
2906 }
2907 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2908 AS2 (sts,%m0,%A1));
2909 }
2910 if (reg_base > 0)
2911 {
2912 if (reg_base == REG_X)
2913 {
2914 if (reg_src == REG_X)
2915 {
2916 /* "st X+,r26" and "st -X,r26" are undefined. */
2917 if (!mem_volatile_p && reg_unused_after (insn, src))
2918 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2919 AS2 (st,X,r26) CR_TAB
2920 AS2 (adiw,r26,1) CR_TAB
2921 AS2 (st,X,__tmp_reg__));
2922 else
2923 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2924 AS2 (adiw,r26,1) CR_TAB
2925 AS2 (st,X,__tmp_reg__) CR_TAB
2926 AS2 (sbiw,r26,1) CR_TAB
2927 AS2 (st,X,r26));
2928 }
2929 else
2930 {
2931 if (!mem_volatile_p && reg_unused_after (insn, base))
2932 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2933 AS2 (st,X,%B1));
2934 else
2935 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2936 AS2 (st,X,%B1) CR_TAB
2937 AS2 (st,-X,%A1));
2938 }
2939 }
2940 else
2941 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2942 AS2 (st,%0,%A1));
2943 }
2944 else if (GET_CODE (base) == PLUS)
2945 {
2946 int disp = INTVAL (XEXP (base, 1));
2947 reg_base = REGNO (XEXP (base, 0));
2948 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2949 {
2950 if (reg_base != REG_Y)
2951 fatal_insn ("incorrect insn:",insn);
2952
2953 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2954 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2955 AS2 (std,Y+63,%B1) CR_TAB
2956 AS2 (std,Y+62,%A1) CR_TAB
2957 AS2 (sbiw,r28,%o0-62));
2958
2959 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2960 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2961 AS2 (std,Y+1,%B1) CR_TAB
2962 AS2 (st,Y,%A1) CR_TAB
2963 AS2 (subi,r28,lo8(%o0)) CR_TAB
2964 AS2 (sbci,r29,hi8(%o0)));
2965 }
2966 if (reg_base == REG_X)
2967 {
2968 /* (X + d) = R */
2969 if (reg_src == REG_X)
2970 {
2971 *l = 7;
2972 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2973 AS2 (mov,__zero_reg__,r27) CR_TAB
2974 AS2 (adiw,r26,%o0+1) CR_TAB
2975 AS2 (st,X,__zero_reg__) CR_TAB
2976 AS2 (st,-X,__tmp_reg__) CR_TAB
2977 AS1 (clr,__zero_reg__) CR_TAB
2978 AS2 (sbiw,r26,%o0));
2979 }
2980 *l = 4;
2981 return (AS2 (adiw,r26,%o0+1) CR_TAB
2982 AS2 (st,X,%B1) CR_TAB
2983 AS2 (st,-X,%A1) CR_TAB
2984 AS2 (sbiw,r26,%o0));
2985 }
2986 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2987 AS2 (std,%A0,%A1));
2988 }
2989 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2990 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2991 AS2 (st,%0,%A1));
2992 else if (GET_CODE (base) == POST_INC) /* (R++) */
2993 {
2994 if (mem_volatile_p)
2995 {
2996 if (REGNO (XEXP (base, 0)) == REG_X)
2997 {
2998 *l = 4;
2999 return (AS2 (adiw,r26,1) CR_TAB
3000 AS2 (st,X,%B1) CR_TAB
3001 AS2 (st,-X,%A1) CR_TAB
3002 AS2 (adiw,r26,2));
3003 }
3004 else
3005 {
3006 *l = 3;
3007 return (AS2 (std,%p0+1,%B1) CR_TAB
3008 AS2 (st,%p0,%A1) CR_TAB
3009 AS2 (adiw,%r0,2));
3010 }
3011 }
3012
3013 *l = 2;
3014 return (AS2 (st,%0,%A1) CR_TAB
3015 AS2 (st,%0,%B1));
3016 }
3017 fatal_insn ("unknown move insn:",insn);
3018 return "";
3019 }
3020
3021 /* Return 1 if the current function requires a frame pointer. */
3022
3023 bool
3024 avr_frame_pointer_required_p (void)
3025 {
3026 return (cfun->calls_alloca
3027 || crtl->args.info.nregs == 0
3028 || get_frame_size () > 0);
3029 }
3030
3031 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3032
3033 static RTX_CODE
3034 compare_condition (rtx insn)
3035 {
3036 rtx next = next_real_insn (insn);
3037 RTX_CODE cond = UNKNOWN;
3038 if (next && GET_CODE (next) == JUMP_INSN)
3039 {
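      /* The jump pattern is expected to be
         (set (pc) (if_then_else (cond ...) ...)),
         so the condition code is the first operand of SET_SRC.  */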
3040 rtx pat = PATTERN (next);
3041 rtx src = SET_SRC (pat);
3042 rtx t = XEXP (src, 0);
3043 cond = GET_CODE (t);
3044 }
3045 return cond;
3046 }
3047
3048 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
3049
3050 static int
3051 compare_sign_p (rtx insn)
3052 {
3053 RTX_CODE cond = compare_condition (insn);
3054 return (cond == GE || cond == LT);
3055 }
3056
3057 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3058 that needs to be swapped (GT, GTU, LE, LEU). */
3059
3060 int
3061 compare_diff_p (rtx insn)
3062 {
3063 RTX_CODE cond = compare_condition (insn);
3064 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3065 }
3066
3067 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3068
3069 int
3070 compare_eq_p (rtx insn)
3071 {
3072 RTX_CODE cond = compare_condition (insn);
3073 return (cond == EQ || cond == NE);
3074 }
3075
3076
3077 /* Output test instruction for HImode. */
3078
3079 const char *
3080 out_tsthi (rtx insn, rtx op, int *l)
3081 {
3082 if (compare_sign_p (insn))
3083 {
3084 if (l) *l = 1;
3085 return AS1 (tst,%B0);
3086 }
3087 if (reg_unused_after (insn, op)
3088 && compare_eq_p (insn))
3089 {
3090 /* Faster than sbiw if we can clobber the operand. */
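      /* OR sets the Z flag exactly when both bytes are zero, which
         is all an EQ/NE test needs.  */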
3091 if (l) *l = 1;
3092 return "or %A0,%B0";
3093 }
3094 if (test_hard_reg_class (ADDW_REGS, op))
3095 {
3096 if (l) *l = 1;
3097 return AS2 (sbiw,%0,0);
3098 }
3099 if (l) *l = 2;
3100 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3101 AS2 (cpc,%B0,__zero_reg__));
3102 }
3103
3104
3105 /* Output test instruction for SImode. */
3106
3107 const char *
3108 out_tstsi (rtx insn, rtx op, int *l)
3109 {
3110 if (compare_sign_p (insn))
3111 {
3112 if (l) *l = 1;
3113 return AS1 (tst,%D0);
3114 }
3115 if (test_hard_reg_class (ADDW_REGS, op))
3116 {
3117 if (l) *l = 3;
3118 return (AS2 (sbiw,%A0,0) CR_TAB
3119 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3120 AS2 (cpc,%D0,__zero_reg__));
3121 }
3122 if (l) *l = 4;
3123 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3124 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3125 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3126 AS2 (cpc,%D0,__zero_reg__));
3127 }
3128
3129
3130 /* Generate asm equivalent for various shifts.
3131 Shift count is a CONST_INT, MEM or REG.
3132 This only handles cases that are not already
3133 carefully hand-optimized in ?sh??i3_out. */
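/* Roughly, a variable shift count in %3 expands to

       rjmp 2f
   1:  <template>
   2:  dec %3
       brpl 1b

   so the body runs COUNT times and a zero count falls straight through,
   while a constant count is either expanded inline or counted down with
   DEC/BRNE (or with the SET/BLD trick on __zero_reg__ used below).  */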
3134
3135 void
3136 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3137 int *len, int t_len)
3138 {
3139 rtx op[10];
3140 char str[500];
3141 int second_label = 1;
3142 int saved_in_tmp = 0;
3143 int use_zero_reg = 0;
3144
3145 op[0] = operands[0];
3146 op[1] = operands[1];
3147 op[2] = operands[2];
3148 op[3] = operands[3];
3149 str[0] = 0;
3150
3151 if (len)
3152 *len = 1;
3153
3154 if (GET_CODE (operands[2]) == CONST_INT)
3155 {
3156 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3157 int count = INTVAL (operands[2]);
3158 int max_len = 10; /* If larger than this, always use a loop. */
3159
3160 if (count <= 0)
3161 {
3162 if (len)
3163 *len = 0;
3164 return;
3165 }
3166
3167 if (count < 8 && !scratch)
3168 use_zero_reg = 1;
3169
3170 if (optimize_size)
3171 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3172
3173 if (t_len * count <= max_len)
3174 {
3175 /* Output shifts inline with no loop - faster. */
3176 if (len)
3177 *len = t_len * count;
3178 else
3179 {
3180 while (count-- > 0)
3181 output_asm_insn (templ, op);
3182 }
3183
3184 return;
3185 }
3186
3187 if (scratch)
3188 {
3189 if (!len)
3190 strcat (str, AS2 (ldi,%3,%2));
3191 }
3192 else if (use_zero_reg)
3193 {
3194 /* Hack to save one word: use __zero_reg__ as loop counter.
3195 Set one bit, then shift in a loop until it is 0 again. */
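          /* For example, a count of 6 becomes "set" and "bld %3,5"
             followed by the loop "1: <template> ; lsr %3 ; brne 1b";
             the lone bit reaches zero after exactly six shifts.  */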
3196
3197 op[3] = zero_reg_rtx;
3198 if (len)
3199 *len = 2;
3200 else
3201 strcat (str, ("set" CR_TAB
3202 AS2 (bld,%3,%2-1)));
3203 }
3204 else
3205 {
3206 /* No scratch register available, use one from LD_REGS (saved in
3207 __tmp_reg__) that doesn't overlap with registers to shift. */
3208
3209 op[3] = gen_rtx_REG (QImode,
3210 ((true_regnum (operands[0]) - 1) & 15) + 16);
3211 op[4] = tmp_reg_rtx;
3212 saved_in_tmp = 1;
3213
3214 if (len)
3215 *len = 3; /* Includes "mov %3,%4" after the loop. */
3216 else
3217 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3218 AS2 (ldi,%3,%2)));
3219 }
3220
3221 second_label = 0;
3222 }
3223 else if (GET_CODE (operands[2]) == MEM)
3224 {
3225 rtx op_mov[10];
3226
3227 op[3] = op_mov[0] = tmp_reg_rtx;
3228 op_mov[1] = op[2];
3229
3230 if (len)
3231 out_movqi_r_mr (insn, op_mov, len);
3232 else
3233 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3234 }
3235 else if (register_operand (operands[2], QImode))
3236 {
3237 if (reg_unused_after (insn, operands[2]))
3238 op[3] = op[2];
3239 else
3240 {
3241 op[3] = tmp_reg_rtx;
3242 if (!len)
3243 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3244 }
3245 }
3246 else
3247 fatal_insn ("bad shift insn:", insn);
3248
3249 if (second_label)
3250 {
3251 if (len)
3252 ++*len;
3253 else
3254 strcat (str, AS1 (rjmp,2f));
3255 }
3256
3257 if (len)
3258 *len += t_len + 2; /* template + dec + brXX */
3259 else
3260 {
3261 strcat (str, "\n1:\t");
3262 strcat (str, templ);
3263 strcat (str, second_label ? "\n2:\t" : "\n\t");
3264 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3265 strcat (str, CR_TAB);
3266 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3267 if (saved_in_tmp)
3268 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3269 output_asm_insn (str, op);
3270 }
3271 }
3272
3273
3274 /* 8bit shift left ((char)x << i) */
3275
3276 const char *
3277 ashlqi3_out (rtx insn, rtx operands[], int *len)
3278 {
3279 if (GET_CODE (operands[2]) == CONST_INT)
3280 {
3281 int k;
3282
3283 if (!len)
3284 len = &k;
3285
3286 switch (INTVAL (operands[2]))
3287 {
3288 default:
3289 if (INTVAL (operands[2]) < 8)
3290 break;
3291
3292 *len = 1;
3293 return AS1 (clr,%0);
3294
3295 case 1:
3296 *len = 1;
3297 return AS1 (lsl,%0);
3298
3299 case 2:
3300 *len = 2;
3301 return (AS1 (lsl,%0) CR_TAB
3302 AS1 (lsl,%0));
3303
3304 case 3:
3305 *len = 3;
3306 return (AS1 (lsl,%0) CR_TAB
3307 AS1 (lsl,%0) CR_TAB
3308 AS1 (lsl,%0));
3309
3310 case 4:
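          /* SWAP exchanges the two nibbles, i.e. rotates by 4; masking
             the low nibble with ANDI completes the shift.  ANDI only
             works on the upper registers, hence the LD_REGS test.  */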
3311 if (test_hard_reg_class (LD_REGS, operands[0]))
3312 {
3313 *len = 2;
3314 return (AS1 (swap,%0) CR_TAB
3315 AS2 (andi,%0,0xf0));
3316 }
3317 *len = 4;
3318 return (AS1 (lsl,%0) CR_TAB
3319 AS1 (lsl,%0) CR_TAB
3320 AS1 (lsl,%0) CR_TAB
3321 AS1 (lsl,%0));
3322
3323 case 5:
3324 if (test_hard_reg_class (LD_REGS, operands[0]))
3325 {
3326 *len = 3;
3327 return (AS1 (swap,%0) CR_TAB
3328 AS1 (lsl,%0) CR_TAB
3329 AS2 (andi,%0,0xe0));
3330 }
3331 *len = 5;
3332 return (AS1 (lsl,%0) CR_TAB
3333 AS1 (lsl,%0) CR_TAB
3334 AS1 (lsl,%0) CR_TAB
3335 AS1 (lsl,%0) CR_TAB
3336 AS1 (lsl,%0));
3337
3338 case 6:
3339 if (test_hard_reg_class (LD_REGS, operands[0]))
3340 {
3341 *len = 4;
3342 return (AS1 (swap,%0) CR_TAB
3343 AS1 (lsl,%0) CR_TAB
3344 AS1 (lsl,%0) CR_TAB
3345 AS2 (andi,%0,0xc0));
3346 }
3347 *len = 6;
3348 return (AS1 (lsl,%0) CR_TAB
3349 AS1 (lsl,%0) CR_TAB
3350 AS1 (lsl,%0) CR_TAB
3351 AS1 (lsl,%0) CR_TAB
3352 AS1 (lsl,%0) CR_TAB
3353 AS1 (lsl,%0));
3354
3355 case 7:
3356 *len = 3;
3357 return (AS1 (ror,%0) CR_TAB
3358 AS1 (clr,%0) CR_TAB
3359 AS1 (ror,%0));
3360 }
3361 }
3362 else if (CONSTANT_P (operands[2]))
3363 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3364
3365 out_shift_with_cnt (AS1 (lsl,%0),
3366 insn, operands, len, 1);
3367 return "";
3368 }
3369
3370
3371 /* 16bit shift left ((short)x << i) */
3372
3373 const char *
3374 ashlhi3_out (rtx insn, rtx operands[], int *len)
3375 {
3376 if (GET_CODE (operands[2]) == CONST_INT)
3377 {
3378 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3379 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3380 int k;
3381 int *t = len;
3382
3383 if (!len)
3384 len = &k;
3385
3386 switch (INTVAL (operands[2]))
3387 {
3388 default:
3389 if (INTVAL (operands[2]) < 16)
3390 break;
3391
3392 *len = 2;
3393 return (AS1 (clr,%B0) CR_TAB
3394 AS1 (clr,%A0));
3395
3396 case 4:
3397 if (optimize_size && scratch)
3398 break; /* 5 */
3399 if (ldi_ok)
3400 {
3401 *len = 6;
3402 return (AS1 (swap,%A0) CR_TAB
3403 AS1 (swap,%B0) CR_TAB
3404 AS2 (andi,%B0,0xf0) CR_TAB
3405 AS2 (eor,%B0,%A0) CR_TAB
3406 AS2 (andi,%A0,0xf0) CR_TAB
3407 AS2 (eor,%B0,%A0));
3408 }
3409 if (scratch)
3410 {
3411 *len = 7;
3412 return (AS1 (swap,%A0) CR_TAB
3413 AS1 (swap,%B0) CR_TAB
3414 AS2 (ldi,%3,0xf0) CR_TAB
3415 "and %B0,%3" CR_TAB
3416 AS2 (eor,%B0,%A0) CR_TAB
3417 "and %A0,%3" CR_TAB
3418 AS2 (eor,%B0,%A0));
3419 }
3420 break; /* optimize_size ? 6 : 8 */
3421
3422 case 5:
3423 if (optimize_size)
3424 break; /* scratch ? 5 : 6 */
3425 if (ldi_ok)
3426 {
3427 *len = 8;
3428 return (AS1 (lsl,%A0) CR_TAB
3429 AS1 (rol,%B0) CR_TAB
3430 AS1 (swap,%A0) CR_TAB
3431 AS1 (swap,%B0) CR_TAB
3432 AS2 (andi,%B0,0xf0) CR_TAB
3433 AS2 (eor,%B0,%A0) CR_TAB
3434 AS2 (andi,%A0,0xf0) CR_TAB
3435 AS2 (eor,%B0,%A0));
3436 }
3437 if (scratch)
3438 {
3439 *len = 9;
3440 return (AS1 (lsl,%A0) CR_TAB
3441 AS1 (rol,%B0) CR_TAB
3442 AS1 (swap,%A0) CR_TAB
3443 AS1 (swap,%B0) CR_TAB
3444 AS2 (ldi,%3,0xf0) CR_TAB
3445 "and %B0,%3" CR_TAB
3446 AS2 (eor,%B0,%A0) CR_TAB
3447 "and %A0,%3" CR_TAB
3448 AS2 (eor,%B0,%A0));
3449 }
3450 break; /* 10 */
3451
3452 case 6:
3453 if (optimize_size)
3454 break; /* scratch ? 5 : 6 */
3455 *len = 9;
3456 return (AS1 (clr,__tmp_reg__) CR_TAB
3457 AS1 (lsr,%B0) CR_TAB
3458 AS1 (ror,%A0) CR_TAB
3459 AS1 (ror,__tmp_reg__) CR_TAB
3460 AS1 (lsr,%B0) CR_TAB
3461 AS1 (ror,%A0) CR_TAB
3462 AS1 (ror,__tmp_reg__) CR_TAB
3463 AS2 (mov,%B0,%A0) CR_TAB
3464 AS2 (mov,%A0,__tmp_reg__));
3465
3466 case 7:
3467 *len = 5;
3468 return (AS1 (lsr,%B0) CR_TAB
3469 AS2 (mov,%B0,%A0) CR_TAB
3470 AS1 (clr,%A0) CR_TAB
3471 AS1 (ror,%B0) CR_TAB
3472 AS1 (ror,%A0));
3473
3474 case 8:
3475 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3476 AS1 (clr,%A0));
3477
3478 case 9:
3479 *len = 3;
3480 return (AS2 (mov,%B0,%A0) CR_TAB
3481 AS1 (clr,%A0) CR_TAB
3482 AS1 (lsl,%B0));
3483
3484 case 10:
3485 *len = 4;
3486 return (AS2 (mov,%B0,%A0) CR_TAB
3487 AS1 (clr,%A0) CR_TAB
3488 AS1 (lsl,%B0) CR_TAB
3489 AS1 (lsl,%B0));
3490
3491 case 11:
3492 *len = 5;
3493 return (AS2 (mov,%B0,%A0) CR_TAB
3494 AS1 (clr,%A0) CR_TAB
3495 AS1 (lsl,%B0) CR_TAB
3496 AS1 (lsl,%B0) CR_TAB
3497 AS1 (lsl,%B0));
3498
3499 case 12:
3500 if (ldi_ok)
3501 {
3502 *len = 4;
3503 return (AS2 (mov,%B0,%A0) CR_TAB
3504 AS1 (clr,%A0) CR_TAB
3505 AS1 (swap,%B0) CR_TAB
3506 AS2 (andi,%B0,0xf0));
3507 }
3508 if (scratch)
3509 {
3510 *len = 5;
3511 return (AS2 (mov,%B0,%A0) CR_TAB
3512 AS1 (clr,%A0) CR_TAB
3513 AS1 (swap,%B0) CR_TAB
3514 AS2 (ldi,%3,0xf0) CR_TAB
3515 "and %B0,%3");
3516 }
3517 *len = 6;
3518 return (AS2 (mov,%B0,%A0) CR_TAB
3519 AS1 (clr,%A0) CR_TAB
3520 AS1 (lsl,%B0) CR_TAB
3521 AS1 (lsl,%B0) CR_TAB
3522 AS1 (lsl,%B0) CR_TAB
3523 AS1 (lsl,%B0));
3524
3525 case 13:
3526 if (ldi_ok)
3527 {
3528 *len = 5;
3529 return (AS2 (mov,%B0,%A0) CR_TAB
3530 AS1 (clr,%A0) CR_TAB
3531 AS1 (swap,%B0) CR_TAB
3532 AS1 (lsl,%B0) CR_TAB
3533 AS2 (andi,%B0,0xe0));
3534 }
3535 if (AVR_HAVE_MUL && scratch)
3536 {
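              /* MUL by 0x20 leaves %A0 << 5 in the low product byte r0;
                 moving r0 to the high byte yields the full << 13.  The
                 high product byte lands in __zero_reg__ (r1), which must
                 be cleared again afterwards.  */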
3537 *len = 5;
3538 return (AS2 (ldi,%3,0x20) CR_TAB
3539 AS2 (mul,%A0,%3) CR_TAB
3540 AS2 (mov,%B0,r0) CR_TAB
3541 AS1 (clr,%A0) CR_TAB
3542 AS1 (clr,__zero_reg__));
3543 }
3544 if (optimize_size && scratch)
3545 break; /* 5 */
3546 if (scratch)
3547 {
3548 *len = 6;
3549 return (AS2 (mov,%B0,%A0) CR_TAB
3550 AS1 (clr,%A0) CR_TAB
3551 AS1 (swap,%B0) CR_TAB
3552 AS1 (lsl,%B0) CR_TAB
3553 AS2 (ldi,%3,0xe0) CR_TAB
3554 "and %B0,%3");
3555 }
3556 if (AVR_HAVE_MUL)
3557 {
3558 *len = 6;
3559 return ("set" CR_TAB
3560 AS2 (bld,r1,5) CR_TAB
3561 AS2 (mul,%A0,r1) CR_TAB
3562 AS2 (mov,%B0,r0) CR_TAB
3563 AS1 (clr,%A0) CR_TAB
3564 AS1 (clr,__zero_reg__));
3565 }
3566 *len = 7;
3567 return (AS2 (mov,%B0,%A0) CR_TAB
3568 AS1 (clr,%A0) CR_TAB
3569 AS1 (lsl,%B0) CR_TAB
3570 AS1 (lsl,%B0) CR_TAB
3571 AS1 (lsl,%B0) CR_TAB
3572 AS1 (lsl,%B0) CR_TAB
3573 AS1 (lsl,%B0));
3574
3575 case 14:
3576 if (AVR_HAVE_MUL && ldi_ok)
3577 {
3578 *len = 5;
3579 return (AS2 (ldi,%B0,0x40) CR_TAB
3580 AS2 (mul,%A0,%B0) CR_TAB
3581 AS2 (mov,%B0,r0) CR_TAB
3582 AS1 (clr,%A0) CR_TAB
3583 AS1 (clr,__zero_reg__));
3584 }
3585 if (AVR_HAVE_MUL && scratch)
3586 {
3587 *len = 5;
3588 return (AS2 (ldi,%3,0x40) CR_TAB
3589 AS2 (mul,%A0,%3) CR_TAB
3590 AS2 (mov,%B0,r0) CR_TAB
3591 AS1 (clr,%A0) CR_TAB
3592 AS1 (clr,__zero_reg__));
3593 }
3594 if (optimize_size && ldi_ok)
3595 {
3596 *len = 5;
3597 return (AS2 (mov,%B0,%A0) CR_TAB
3598 AS2 (ldi,%A0,6) "\n1:\t"
3599 AS1 (lsl,%B0) CR_TAB
3600 AS1 (dec,%A0) CR_TAB
3601 AS1 (brne,1b));
3602 }
3603 if (optimize_size && scratch)
3604 break; /* 5 */
3605 *len = 6;
3606 return (AS1 (clr,%B0) CR_TAB
3607 AS1 (lsr,%A0) CR_TAB
3608 AS1 (ror,%B0) CR_TAB
3609 AS1 (lsr,%A0) CR_TAB
3610 AS1 (ror,%B0) CR_TAB
3611 AS1 (clr,%A0));
3612
3613 case 15:
3614 *len = 4;
3615 return (AS1 (clr,%B0) CR_TAB
3616 AS1 (lsr,%A0) CR_TAB
3617 AS1 (ror,%B0) CR_TAB
3618 AS1 (clr,%A0));
3619 }
3620 len = t;
3621 }
3622 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3623 AS1 (rol,%B0)),
3624 insn, operands, len, 2);
3625 return "";
3626 }
3627
3628
3629 /* 32bit shift left ((long)x << i) */
3630
3631 const char *
3632 ashlsi3_out (rtx insn, rtx operands[], int *len)
3633 {
3634 if (GET_CODE (operands[2]) == CONST_INT)
3635 {
3636 int k;
3637 int *t = len;
3638
3639 if (!len)
3640 len = &k;
3641
3642 switch (INTVAL (operands[2]))
3643 {
3644 default:
3645 if (INTVAL (operands[2]) < 32)
3646 break;
3647
3648 if (AVR_HAVE_MOVW)
3649 return *len = 3, (AS1 (clr,%D0) CR_TAB
3650 AS1 (clr,%C0) CR_TAB
3651 AS2 (movw,%A0,%C0));
3652 *len = 4;
3653 return (AS1 (clr,%D0) CR_TAB
3654 AS1 (clr,%C0) CR_TAB
3655 AS1 (clr,%B0) CR_TAB
3656 AS1 (clr,%A0));
3657
3658 case 8:
3659 {
3660 int reg0 = true_regnum (operands[0]);
3661 int reg1 = true_regnum (operands[1]);
3662 *len = 4;
3663 if (reg0 >= reg1)
3664 return (AS2 (mov,%D0,%C1) CR_TAB
3665 AS2 (mov,%C0,%B1) CR_TAB
3666 AS2 (mov,%B0,%A1) CR_TAB
3667 AS1 (clr,%A0));
3668 else
3669 return (AS1 (clr,%A0) CR_TAB
3670 AS2 (mov,%B0,%A1) CR_TAB
3671 AS2 (mov,%C0,%B1) CR_TAB
3672 AS2 (mov,%D0,%C1));
3673 }
3674
3675 case 16:
3676 {
3677 int reg0 = true_regnum (operands[0]);
3678 int reg1 = true_regnum (operands[1]);
3679 if (reg0 + 2 == reg1)
3680 return *len = 2, (AS1 (clr,%B0) CR_TAB
3681 AS1 (clr,%A0));
3682 if (AVR_HAVE_MOVW)
3683 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3684 AS1 (clr,%B0) CR_TAB
3685 AS1 (clr,%A0));
3686 else
3687 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3688 AS2 (mov,%D0,%B1) CR_TAB
3689 AS1 (clr,%B0) CR_TAB
3690 AS1 (clr,%A0));
3691 }
3692
3693 case 24:
3694 *len = 4;
3695 return (AS2 (mov,%D0,%A1) CR_TAB
3696 AS1 (clr,%C0) CR_TAB
3697 AS1 (clr,%B0) CR_TAB
3698 AS1 (clr,%A0));
3699
3700 case 31:
3701 *len = 6;
3702 return (AS1 (clr,%D0) CR_TAB
3703 AS1 (lsr,%A0) CR_TAB
3704 AS1 (ror,%D0) CR_TAB
3705 AS1 (clr,%C0) CR_TAB
3706 AS1 (clr,%B0) CR_TAB
3707 AS1 (clr,%A0));
3708 }
3709 len = t;
3710 }
3711 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3712 AS1 (rol,%B0) CR_TAB
3713 AS1 (rol,%C0) CR_TAB
3714 AS1 (rol,%D0)),
3715 insn, operands, len, 4);
3716 return "";
3717 }
3718
3719 /* 8bit arithmetic shift right ((signed char)x >> i) */
3720
3721 const char *
3722 ashrqi3_out (rtx insn, rtx operands[], int *len)
3723 {
3724 if (GET_CODE (operands[2]) == CONST_INT)
3725 {
3726 int k;
3727
3728 if (!len)
3729 len = &k;
3730
3731 switch (INTVAL (operands[2]))
3732 {
3733 case 1:
3734 *len = 1;
3735 return AS1 (asr,%0);
3736
3737 case 2:
3738 *len = 2;
3739 return (AS1 (asr,%0) CR_TAB
3740 AS1 (asr,%0));
3741
3742 case 3:
3743 *len = 3;
3744 return (AS1 (asr,%0) CR_TAB
3745 AS1 (asr,%0) CR_TAB
3746 AS1 (asr,%0));
3747
3748 case 4:
3749 *len = 4;
3750 return (AS1 (asr,%0) CR_TAB
3751 AS1 (asr,%0) CR_TAB
3752 AS1 (asr,%0) CR_TAB
3753 AS1 (asr,%0));
3754
3755 case 5:
3756 *len = 5;
3757 return (AS1 (asr,%0) CR_TAB
3758 AS1 (asr,%0) CR_TAB
3759 AS1 (asr,%0) CR_TAB
3760 AS1 (asr,%0) CR_TAB
3761 AS1 (asr,%0));
3762
3763 case 6:
3764 *len = 4;
3765 return (AS2 (bst,%0,6) CR_TAB
3766 AS1 (lsl,%0) CR_TAB
3767 AS2 (sbc,%0,%0) CR_TAB
3768 AS2 (bld,%0,0));
3769
3770 default:
3771 if (INTVAL (operands[2]) < 8)
3772 break;
3773
3774 /* fall through */
3775
3776 case 7:
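          /* LSL copies the sign bit into the carry; SBC of a register
             with itself then gives 0x00 or 0xFF depending on carry, so
             every result bit is a copy of the original sign.  */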
3777 *len = 2;
3778 return (AS1 (lsl,%0) CR_TAB
3779 AS2 (sbc,%0,%0));
3780 }
3781 }
3782 else if (CONSTANT_P (operands[2]))
3783 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3784
3785 out_shift_with_cnt (AS1 (asr,%0),
3786 insn, operands, len, 1);
3787 return "";
3788 }
3789
3790
3791 /* 16bit arithmetic shift right ((signed short)x >> i) */
3792
3793 const char *
3794 ashrhi3_out (rtx insn, rtx operands[], int *len)
3795 {
3796 if (GET_CODE (operands[2]) == CONST_INT)
3797 {
3798 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3799 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3800 int k;
3801 int *t = len;
3802
3803 if (!len)
3804 len = &k;
3805
3806 switch (INTVAL (operands[2]))
3807 {
3808 case 4:
3809 case 5:
3810 /* XXX try to optimize this too? */
3811 break;
3812
3813 case 6:
3814 if (optimize_size)
3815 break; /* scratch ? 5 : 6 */
3816 *len = 8;
3817 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3818 AS2 (mov,%A0,%B0) CR_TAB
3819 AS1 (lsl,__tmp_reg__) CR_TAB
3820 AS1 (rol,%A0) CR_TAB
3821 AS2 (sbc,%B0,%B0) CR_TAB
3822 AS1 (lsl,__tmp_reg__) CR_TAB
3823 AS1 (rol,%A0) CR_TAB
3824 AS1 (rol,%B0));
3825
3826 case 7:
3827 *len = 4;
3828 return (AS1 (lsl,%A0) CR_TAB
3829 AS2 (mov,%A0,%B0) CR_TAB
3830 AS1 (rol,%A0) CR_TAB
3831 AS2 (sbc,%B0,%B0));
3832
3833 case 8:
3834 {
3835 int reg0 = true_regnum (operands[0]);
3836 int reg1 = true_regnum (operands[1]);
3837
3838 if (reg0 == reg1)
3839 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3840 AS1 (lsl,%B0) CR_TAB
3841 AS2 (sbc,%B0,%B0));
3842 else
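            /* CLR followed by a DEC guarded by SBRC turns %B0 into
               0x00 or 0xFF according to the sign bit of %A0.  */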
3843 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3844 AS1 (clr,%B0) CR_TAB
3845 AS2 (sbrc,%A0,7) CR_TAB
3846 AS1 (dec,%B0));
3847 }
3848
3849 case 9:
3850 *len = 4;
3851 return (AS2 (mov,%A0,%B0) CR_TAB
3852 AS1 (lsl,%B0) CR_TAB
3853 AS2 (sbc,%B0,%B0) CR_TAB
3854 AS1 (asr,%A0));
3855
3856 case 10:
3857 *len = 5;
3858 return (AS2 (mov,%A0,%B0) CR_TAB
3859 AS1 (lsl,%B0) CR_TAB
3860 AS2 (sbc,%B0,%B0) CR_TAB
3861 AS1 (asr,%A0) CR_TAB
3862 AS1 (asr,%A0));
3863
3864 case 11:
3865 if (AVR_HAVE_MUL && ldi_ok)
3866 {
3867 *len = 5;
3868 return (AS2 (ldi,%A0,0x20) CR_TAB
3869 AS2 (muls,%B0,%A0) CR_TAB
3870 AS2 (mov,%A0,r1) CR_TAB
3871 AS2 (sbc,%B0,%B0) CR_TAB
3872 AS1 (clr,__zero_reg__));
3873 }
3874 if (optimize_size && scratch)
3875 break; /* 5 */
3876 *len = 6;
3877 return (AS2 (mov,%A0,%B0) CR_TAB
3878 AS1 (lsl,%B0) CR_TAB
3879 AS2 (sbc,%B0,%B0) CR_TAB
3880 AS1 (asr,%A0) CR_TAB
3881 AS1 (asr,%A0) CR_TAB
3882 AS1 (asr,%A0));
3883
3884 case 12:
3885 if (AVR_HAVE_MUL && ldi_ok)
3886 {
3887 *len = 5;
3888 return (AS2 (ldi,%A0,0x10) CR_TAB
3889 AS2 (muls,%B0,%A0) CR_TAB
3890 AS2 (mov,%A0,r1) CR_TAB
3891 AS2 (sbc,%B0,%B0) CR_TAB
3892 AS1 (clr,__zero_reg__));
3893 }
3894 if (optimize_size && scratch)
3895 break; /* 5 */
3896 *len = 7;
3897 return (AS2 (mov,%A0,%B0) CR_TAB
3898 AS1 (lsl,%B0) CR_TAB
3899 AS2 (sbc,%B0,%B0) CR_TAB
3900 AS1 (asr,%A0) CR_TAB
3901 AS1 (asr,%A0) CR_TAB
3902 AS1 (asr,%A0) CR_TAB
3903 AS1 (asr,%A0));
3904
3905 case 13:
3906 if (AVR_HAVE_MUL && ldi_ok)
3907 {
3908 *len = 5;
3909 return (AS2 (ldi,%A0,0x08) CR_TAB
3910 AS2 (muls,%B0,%A0) CR_TAB
3911 AS2 (mov,%A0,r1) CR_TAB
3912 AS2 (sbc,%B0,%B0) CR_TAB
3913 AS1 (clr,__zero_reg__));
3914 }
3915 if (optimize_size)
3916 break; /* scratch ? 5 : 7 */
3917 *len = 8;
3918 return (AS2 (mov,%A0,%B0) CR_TAB
3919 AS1 (lsl,%B0) CR_TAB
3920 AS2 (sbc,%B0,%B0) CR_TAB
3921 AS1 (asr,%A0) CR_TAB
3922 AS1 (asr,%A0) CR_TAB
3923 AS1 (asr,%A0) CR_TAB
3924 AS1 (asr,%A0) CR_TAB
3925 AS1 (asr,%A0));
3926
3927 case 14:
3928 *len = 5;
3929 return (AS1 (lsl,%B0) CR_TAB
3930 AS2 (sbc,%A0,%A0) CR_TAB
3931 AS1 (lsl,%B0) CR_TAB
3932 AS2 (mov,%B0,%A0) CR_TAB
3933 AS1 (rol,%A0));
3934
3935 default:
3936 if (INTVAL (operands[2]) < 16)
3937 break;
3938
3939 /* fall through */
3940
3941 case 15:
3942 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3943 AS2 (sbc,%A0,%A0) CR_TAB
3944 AS2 (mov,%B0,%A0));
3945 }
3946 len = t;
3947 }
3948 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3949 AS1 (ror,%A0)),
3950 insn, operands, len, 2);
3951 return "";
3952 }
3953
3954
3955 /* 32bit arithmetic shift right ((signed long)x >> i) */
3956
3957 const char *
3958 ashrsi3_out (rtx insn, rtx operands[], int *len)
3959 {
3960 if (GET_CODE (operands[2]) == CONST_INT)
3961 {
3962 int k;
3963 int *t = len;
3964
3965 if (!len)
3966 len = &k;
3967
3968 switch (INTVAL (operands[2]))
3969 {
3970 case 8:
3971 {
3972 int reg0 = true_regnum (operands[0]);
3973 int reg1 = true_regnum (operands[1]);
3974 *len=6;
3975 if (reg0 <= reg1)
3976 return (AS2 (mov,%A0,%B1) CR_TAB
3977 AS2 (mov,%B0,%C1) CR_TAB
3978 AS2 (mov,%C0,%D1) CR_TAB
3979 AS1 (clr,%D0) CR_TAB
3980 AS2 (sbrc,%C0,7) CR_TAB
3981 AS1 (dec,%D0));
3982 else
3983 return (AS1 (clr,%D0) CR_TAB
3984 AS2 (sbrc,%D1,7) CR_TAB
3985 AS1 (dec,%D0) CR_TAB
3986 AS2 (mov,%C0,%D1) CR_TAB
3987 AS2 (mov,%B0,%C1) CR_TAB
3988 AS2 (mov,%A0,%B1));
3989 }
3990
3991 case 16:
3992 {
3993 int reg0 = true_regnum (operands[0]);
3994 int reg1 = true_regnum (operands[1]);
3995
3996 if (reg0 == reg1 + 2)
3997 return *len = 4, (AS1 (clr,%D0) CR_TAB
3998 AS2 (sbrc,%B0,7) CR_TAB
3999 AS1 (com,%D0) CR_TAB
4000 AS2 (mov,%C0,%D0));
4001 if (AVR_HAVE_MOVW)
4002 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4003 AS1 (clr,%D0) CR_TAB
4004 AS2 (sbrc,%B0,7) CR_TAB
4005 AS1 (com,%D0) CR_TAB
4006 AS2 (mov,%C0,%D0));
4007 else
4008 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4009 AS2 (mov,%A0,%C1) CR_TAB
4010 AS1 (clr,%D0) CR_TAB
4011 AS2 (sbrc,%B0,7) CR_TAB
4012 AS1 (com,%D0) CR_TAB
4013 AS2 (mov,%C0,%D0));
4014 }
4015
4016 case 24:
4017 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4018 AS1 (clr,%D0) CR_TAB
4019 AS2 (sbrc,%A0,7) CR_TAB
4020 AS1 (com,%D0) CR_TAB
4021 AS2 (mov,%B0,%D0) CR_TAB
4022 AS2 (mov,%C0,%D0));
4023
4024 default:
4025 if (INTVAL (operands[2]) < 32)
4026 break;
4027
4028 /* fall through */
4029
4030 case 31:
4031 if (AVR_HAVE_MOVW)
4032 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4033 AS2 (sbc,%A0,%A0) CR_TAB
4034 AS2 (mov,%B0,%A0) CR_TAB
4035 AS2 (movw,%C0,%A0));
4036 else
4037 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4038 AS2 (sbc,%A0,%A0) CR_TAB
4039 AS2 (mov,%B0,%A0) CR_TAB
4040 AS2 (mov,%C0,%A0) CR_TAB
4041 AS2 (mov,%D0,%A0));
4042 }
4043 len = t;
4044 }
4045 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4046 AS1 (ror,%C0) CR_TAB
4047 AS1 (ror,%B0) CR_TAB
4048 AS1 (ror,%A0)),
4049 insn, operands, len, 4);
4050 return "";
4051 }
4052
4053 /* 8bit logical shift right ((unsigned char)x >> i) */
4054
4055 const char *
4056 lshrqi3_out (rtx insn, rtx operands[], int *len)
4057 {
4058 if (GET_CODE (operands[2]) == CONST_INT)
4059 {
4060 int k;
4061
4062 if (!len)
4063 len = &k;
4064
4065 switch (INTVAL (operands[2]))
4066 {
4067 default:
4068 if (INTVAL (operands[2]) < 8)
4069 break;
4070
4071 *len = 1;
4072 return AS1 (clr,%0);
4073
4074 case 1:
4075 *len = 1;
4076 return AS1 (lsr,%0);
4077
4078 case 2:
4079 *len = 2;
4080 return (AS1 (lsr,%0) CR_TAB
4081 AS1 (lsr,%0));
4082 case 3:
4083 *len = 3;
4084 return (AS1 (lsr,%0) CR_TAB
4085 AS1 (lsr,%0) CR_TAB
4086 AS1 (lsr,%0));
4087
4088 case 4:
4089 if (test_hard_reg_class (LD_REGS, operands[0]))
4090 {
4091 *len=2;
4092 return (AS1 (swap,%0) CR_TAB
4093 AS2 (andi,%0,0x0f));
4094 }
4095 *len = 4;
4096 return (AS1 (lsr,%0) CR_TAB
4097 AS1 (lsr,%0) CR_TAB
4098 AS1 (lsr,%0) CR_TAB
4099 AS1 (lsr,%0));
4100
4101 case 5:
4102 if (test_hard_reg_class (LD_REGS, operands[0]))
4103 {
4104 *len = 3;
4105 return (AS1 (swap,%0) CR_TAB
4106 AS1 (lsr,%0) CR_TAB
4107 AS2 (andi,%0,0x7));
4108 }
4109 *len = 5;
4110 return (AS1 (lsr,%0) CR_TAB
4111 AS1 (lsr,%0) CR_TAB
4112 AS1 (lsr,%0) CR_TAB
4113 AS1 (lsr,%0) CR_TAB
4114 AS1 (lsr,%0));
4115
4116 case 6:
4117 if (test_hard_reg_class (LD_REGS, operands[0]))
4118 {
4119 *len = 4;
4120 return (AS1 (swap,%0) CR_TAB
4121 AS1 (lsr,%0) CR_TAB
4122 AS1 (lsr,%0) CR_TAB
4123 AS2 (andi,%0,0x3));
4124 }
4125 *len = 6;
4126 return (AS1 (lsr,%0) CR_TAB
4127 AS1 (lsr,%0) CR_TAB
4128 AS1 (lsr,%0) CR_TAB
4129 AS1 (lsr,%0) CR_TAB
4130 AS1 (lsr,%0) CR_TAB
4131 AS1 (lsr,%0));
4132
4133 case 7:
4134 *len = 3;
4135 return (AS1 (rol,%0) CR_TAB
4136 AS1 (clr,%0) CR_TAB
4137 AS1 (rol,%0));
4138 }
4139 }
4140 else if (CONSTANT_P (operands[2]))
4141 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4142
4143 out_shift_with_cnt (AS1 (lsr,%0),
4144 insn, operands, len, 1);
4145 return "";
4146 }
4147
4148 /* 16bit logical shift right ((unsigned short)x >> i) */
4149
4150 const char *
4151 lshrhi3_out (rtx insn, rtx operands[], int *len)
4152 {
4153 if (GET_CODE (operands[2]) == CONST_INT)
4154 {
4155 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4156 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4157 int k;
4158 int *t = len;
4159
4160 if (!len)
4161 len = &k;
4162
4163 switch (INTVAL (operands[2]))
4164 {
4165 default:
4166 if (INTVAL (operands[2]) < 16)
4167 break;
4168
4169 *len = 2;
4170 return (AS1 (clr,%B0) CR_TAB
4171 AS1 (clr,%A0));
4172
4173 case 4:
4174 if (optimize_size && scratch)
4175 break; /* 5 */
4176 if (ldi_ok)
4177 {
4178 *len = 6;
4179 return (AS1 (swap,%B0) CR_TAB
4180 AS1 (swap,%A0) CR_TAB
4181 AS2 (andi,%A0,0x0f) CR_TAB
4182 AS2 (eor,%A0,%B0) CR_TAB
4183 AS2 (andi,%B0,0x0f) CR_TAB
4184 AS2 (eor,%A0,%B0));
4185 }
4186 if (scratch)
4187 {
4188 *len = 7;
4189 return (AS1 (swap,%B0) CR_TAB
4190 AS1 (swap,%A0) CR_TAB
4191 AS2 (ldi,%3,0x0f) CR_TAB
4192 "and %A0,%3" CR_TAB
4193 AS2 (eor,%A0,%B0) CR_TAB
4194 "and %B0,%3" CR_TAB
4195 AS2 (eor,%A0,%B0));
4196 }
4197 break; /* optimize_size ? 6 : 8 */
4198
4199 case 5:
4200 if (optimize_size)
4201 break; /* scratch ? 5 : 6 */
4202 if (ldi_ok)
4203 {
4204 *len = 8;
4205 return (AS1 (lsr,%B0) CR_TAB
4206 AS1 (ror,%A0) CR_TAB
4207 AS1 (swap,%B0) CR_TAB
4208 AS1 (swap,%A0) CR_TAB
4209 AS2 (andi,%A0,0x0f) CR_TAB
4210 AS2 (eor,%A0,%B0) CR_TAB
4211 AS2 (andi,%B0,0x0f) CR_TAB
4212 AS2 (eor,%A0,%B0));
4213 }
4214 if (scratch)
4215 {
4216 *len = 9;
4217 return (AS1 (lsr,%B0) CR_TAB
4218 AS1 (ror,%A0) CR_TAB
4219 AS1 (swap,%B0) CR_TAB
4220 AS1 (swap,%A0) CR_TAB
4221 AS2 (ldi,%3,0x0f) CR_TAB
4222 "and %A0,%3" CR_TAB
4223 AS2 (eor,%A0,%B0) CR_TAB
4224 "and %B0,%3" CR_TAB
4225 AS2 (eor,%A0,%B0));
4226 }
4227 break; /* 10 */
4228
4229 case 6:
4230 if (optimize_size)
4231 break; /* scratch ? 5 : 6 */
4232 *len = 9;
4233 return (AS1 (clr,__tmp_reg__) CR_TAB
4234 AS1 (lsl,%A0) CR_TAB
4235 AS1 (rol,%B0) CR_TAB
4236 AS1 (rol,__tmp_reg__) CR_TAB
4237 AS1 (lsl,%A0) CR_TAB
4238 AS1 (rol,%B0) CR_TAB
4239 AS1 (rol,__tmp_reg__) CR_TAB
4240 AS2 (mov,%A0,%B0) CR_TAB
4241 AS2 (mov,%B0,__tmp_reg__));
4242
4243 case 7:
4244 *len = 5;
4245 return (AS1 (lsl,%A0) CR_TAB
4246 AS2 (mov,%A0,%B0) CR_TAB
4247 AS1 (rol,%A0) CR_TAB
4248 AS2 (sbc,%B0,%B0) CR_TAB
4249 AS1 (neg,%B0));
4250
4251 case 8:
4252 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4253 AS1 (clr,%B0));
4254
4255 case 9:
4256 *len = 3;
4257 return (AS2 (mov,%A0,%B0) CR_TAB
4258 AS1 (clr,%B0) CR_TAB
4259 AS1 (lsr,%A0));
4260
4261 case 10:
4262 *len = 4;
4263 return (AS2 (mov,%A0,%B0) CR_TAB
4264 AS1 (clr,%B0) CR_TAB
4265 AS1 (lsr,%A0) CR_TAB
4266 AS1 (lsr,%A0));
4267
4268 case 11:
4269 *len = 5;
4270 return (AS2 (mov,%A0,%B0) CR_TAB
4271 AS1 (clr,%B0) CR_TAB
4272 AS1 (lsr,%A0) CR_TAB
4273 AS1 (lsr,%A0) CR_TAB
4274 AS1 (lsr,%A0));
4275
4276 case 12:
4277 if (ldi_ok)
4278 {
4279 *len = 4;
4280 return (AS2 (mov,%A0,%B0) CR_TAB
4281 AS1 (clr,%B0) CR_TAB
4282 AS1 (swap,%A0) CR_TAB
4283 AS2 (andi,%A0,0x0f));
4284 }
4285 if (scratch)
4286 {
4287 *len = 5;
4288 return (AS2 (mov,%A0,%B0) CR_TAB
4289 AS1 (clr,%B0) CR_TAB
4290 AS1 (swap,%A0) CR_TAB
4291 AS2 (ldi,%3,0x0f) CR_TAB
4292 "and %A0,%3");
4293 }
4294 *len = 6;
4295 return (AS2 (mov,%A0,%B0) CR_TAB
4296 AS1 (clr,%B0) CR_TAB
4297 AS1 (lsr,%A0) CR_TAB
4298 AS1 (lsr,%A0) CR_TAB
4299 AS1 (lsr,%A0) CR_TAB
4300 AS1 (lsr,%A0));
4301
4302 case 13:
4303 if (ldi_ok)
4304 {
4305 *len = 5;
4306 return (AS2 (mov,%A0,%B0) CR_TAB
4307 AS1 (clr,%B0) CR_TAB
4308 AS1 (swap,%A0) CR_TAB
4309 AS1 (lsr,%A0) CR_TAB
4310 AS2 (andi,%A0,0x07));
4311 }
4312 if (AVR_HAVE_MUL && scratch)
4313 {
4314 *len = 5;
4315 return (AS2 (ldi,%3,0x08) CR_TAB
4316 AS2 (mul,%B0,%3) CR_TAB
4317 AS2 (mov,%A0,r1) CR_TAB
4318 AS1 (clr,%B0) CR_TAB
4319 AS1 (clr,__zero_reg__));
4320 }
4321 if (optimize_size && scratch)
4322 break; /* 5 */
4323 if (scratch)
4324 {
4325 *len = 6;
4326 return (AS2 (mov,%A0,%B0) CR_TAB
4327 AS1 (clr,%B0) CR_TAB
4328 AS1 (swap,%A0) CR_TAB
4329 AS1 (lsr,%A0) CR_TAB
4330 AS2 (ldi,%3,0x07) CR_TAB
4331 "and %A0,%3");
4332 }
4333 if (AVR_HAVE_MUL)
4334 {
4335 *len = 6;
4336 return ("set" CR_TAB
4337 AS2 (bld,r1,3) CR_TAB
4338 AS2 (mul,%B0,r1) CR_TAB
4339 AS2 (mov,%A0,r1) CR_TAB
4340 AS1 (clr,%B0) CR_TAB
4341 AS1 (clr,__zero_reg__));
4342 }
4343 *len = 7;
4344 return (AS2 (mov,%A0,%B0) CR_TAB
4345 AS1 (clr,%B0) CR_TAB
4346 AS1 (lsr,%A0) CR_TAB
4347 AS1 (lsr,%A0) CR_TAB
4348 AS1 (lsr,%A0) CR_TAB
4349 AS1 (lsr,%A0) CR_TAB
4350 AS1 (lsr,%A0));
4351
4352 case 14:
4353 if (AVR_HAVE_MUL && ldi_ok)
4354 {
4355 *len = 5;
4356 return (AS2 (ldi,%A0,0x04) CR_TAB
4357 AS2 (mul,%B0,%A0) CR_TAB
4358 AS2 (mov,%A0,r1) CR_TAB
4359 AS1 (clr,%B0) CR_TAB
4360 AS1 (clr,__zero_reg__));
4361 }
4362 if (AVR_HAVE_MUL && scratch)
4363 {
4364 *len = 5;
4365 return (AS2 (ldi,%3,0x04) CR_TAB
4366 AS2 (mul,%B0,%3) CR_TAB
4367 AS2 (mov,%A0,r1) CR_TAB
4368 AS1 (clr,%B0) CR_TAB
4369 AS1 (clr,__zero_reg__));
4370 }
4371 if (optimize_size && ldi_ok)
4372 {
4373 *len = 5;
4374 return (AS2 (mov,%A0,%B0) CR_TAB
4375 AS2 (ldi,%B0,6) "\n1:\t"
4376 AS1 (lsr,%A0) CR_TAB
4377 AS1 (dec,%B0) CR_TAB
4378 AS1 (brne,1b));
4379 }
4380 if (optimize_size && scratch)
4381 break; /* 5 */
4382 *len = 6;
4383 return (AS1 (clr,%A0) CR_TAB
4384 AS1 (lsl,%B0) CR_TAB
4385 AS1 (rol,%A0) CR_TAB
4386 AS1 (lsl,%B0) CR_TAB
4387 AS1 (rol,%A0) CR_TAB
4388 AS1 (clr,%B0));
4389
4390 case 15:
4391 *len = 4;
4392 return (AS1 (clr,%A0) CR_TAB
4393 AS1 (lsl,%B0) CR_TAB
4394 AS1 (rol,%A0) CR_TAB
4395 AS1 (clr,%B0));
4396 }
4397 len = t;
4398 }
4399 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4400 AS1 (ror,%A0)),
4401 insn, operands, len, 2);
4402 return "";
4403 }
4404
4405 /* 32-bit logical shift right ((unsigned long)x >> i) */
4406
4407 const char *
4408 lshrsi3_out (rtx insn, rtx operands[], int *len)
4409 {
4410 if (GET_CODE (operands[2]) == CONST_INT)
4411 {
4412 int k;
4413 int *t = len;
4414
4415 if (!len)
4416 len = &k;
4417
4418 switch (INTVAL (operands[2]))
4419 {
4420 default:
4421 if (INTVAL (operands[2]) < 32)
4422 break;
4423
4424 if (AVR_HAVE_MOVW)
4425 return *len = 3, (AS1 (clr,%D0) CR_TAB
4426 AS1 (clr,%C0) CR_TAB
4427 AS2 (movw,%A0,%C0));
4428 *len = 4;
4429 return (AS1 (clr,%D0) CR_TAB
4430 AS1 (clr,%C0) CR_TAB
4431 AS1 (clr,%B0) CR_TAB
4432 AS1 (clr,%A0));
4433
4434 case 8:
4435 {
4436 int reg0 = true_regnum (operands[0]);
4437 int reg1 = true_regnum (operands[1]);
4438 *len = 4;
4439 if (reg0 <= reg1)
4440 return (AS2 (mov,%A0,%B1) CR_TAB
4441 AS2 (mov,%B0,%C1) CR_TAB
4442 AS2 (mov,%C0,%D1) CR_TAB
4443 AS1 (clr,%D0));
4444 else
4445 return (AS1 (clr,%D0) CR_TAB
4446 AS2 (mov,%C0,%D1) CR_TAB
4447 AS2 (mov,%B0,%C1) CR_TAB
4448 AS2 (mov,%A0,%B1));
4449 }
4450
4451 case 16:
4452 {
4453 int reg0 = true_regnum (operands[0]);
4454 int reg1 = true_regnum (operands[1]);
4455
4456 if (reg0 == reg1 + 2)
4457 return *len = 2, (AS1 (clr,%C0) CR_TAB
4458 AS1 (clr,%D0));
4459 if (AVR_HAVE_MOVW)
4460 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4461 AS1 (clr,%C0) CR_TAB
4462 AS1 (clr,%D0));
4463 else
4464 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4465 AS2 (mov,%A0,%C1) CR_TAB
4466 AS1 (clr,%C0) CR_TAB
4467 AS1 (clr,%D0));
4468 }
4469
4470 case 24:
4471 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4472 AS1 (clr,%B0) CR_TAB
4473 AS1 (clr,%C0) CR_TAB
4474 AS1 (clr,%D0));
4475
4476 case 31:
4477 *len = 6;
4478 return (AS1 (clr,%A0) CR_TAB
4479 AS2 (sbrc,%D0,7) CR_TAB
4480 AS1 (inc,%A0) CR_TAB
4481 AS1 (clr,%B0) CR_TAB
4482 AS1 (clr,%C0) CR_TAB
4483 AS1 (clr,%D0));
4484 }
4485 len = t;
4486 }
4487 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4488 AS1 (ror,%C0) CR_TAB
4489 AS1 (ror,%B0) CR_TAB
4490 AS1 (ror,%A0)),
4491 insn, operands, len, 4);
4492 return "";
4493 }
4494
4495 /* Create RTL split patterns for byte-sized rotate expressions. This
4496 produces a series of move instructions and considers overlap situations.
4497 Overlapping non-HImode operands need a scratch register. */
4498
4499 bool
4500 avr_rotate_bytes (rtx operands[])
4501 {
4502 int i, j;
4503 enum machine_mode mode = GET_MODE (operands[0]);
4504 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4505 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4506 int num = INTVAL (operands[2]);
4507 rtx scratch = operands[3];
4508 /* Work out whether a byte or word move is needed. Odd-byte rotates need
4509 QImode; use a word move if no scratch is needed, else the scratch's size. */
4510 enum machine_mode move_mode = QImode;
4511 int move_size, offset, size;
4512
4513 if (num & 0xf)
4514 move_mode = QImode;
4515 else if ((mode == SImode && !same_reg) || !overlapped)
4516 move_mode = HImode;
4517 else
4518 move_mode = GET_MODE (scratch);
4519
4520 /* Force DI rotate to use QI moves since other DI moves are currently split
4521 into QI moves so forward propagation works better. */
4522 if (mode == DImode)
4523 move_mode = QImode;
4524 /* Make scratch smaller if needed. */
4525 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4526 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4527
4528 move_size = GET_MODE_SIZE (move_mode);
4529 /* Number of bytes/words to rotate. */
4530 offset = (num >> 3) / move_size;
4531 /* Number of moves needed. */
4532 size = GET_MODE_SIZE (mode) / move_size;
4533 /* HImode byte swap is a special case that avoids a scratch register. */
4534 if (mode == HImode && same_reg)
4535 {
4536 /* HImode byte swap, using XOR. This is as quick as using a scratch. */
4537 rtx src, dst;
4538 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4539 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4540 if (!rtx_equal_p (dst, src))
4541 {
4542 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4543 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4544 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
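/* The three XOR moves above are the classic temp-free swap; in plain C
   the same HImode byte swap would read (a sketch, with hi/lo standing
   for the two bytes):  hi ^= lo; lo ^= hi; hi ^= lo;  */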
4545 }
4546 }
4547 else
4548 {
4549 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4550 /* Create linked list of moves to determine move order. */
4551 struct {
4552 rtx src, dst;
4553 int links;
4554 } move[MAX_SIZE + 8];
4555 int blocked, moves;
4556
4557 gcc_assert (size <= MAX_SIZE);
4558 /* Generate list of subreg moves. */
4559 for (i = 0; i < size; i++)
4560 {
4561 int from = i;
4562 int to = (from + offset) % size;
4563 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4564 mode, from * move_size);
4565 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4566 mode, to * move_size);
4567 move[i].links = -1;
4568 }
4569 /* Mark a dependence where the dst of one move is the src of another.
4570 The first move is a conflict, as it must wait until the second is
4571 performed. Moves to self are ignored here; they are caught later. */
4572 if (overlapped)
4573 for (i = 0; i < size; i++)
4574 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4575 for (j = 0; j < size; j++)
4576 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4577 {
4578 /* The dst of move i is the src of move j. */
4579 move[i].links = j;
4580 break;
4581 }
4582
4583 blocked = -1;
4584 moves = 0;
4585 /* Go through move list and perform non-conflicting moves. As each
4586 non-overlapping move is made, it may remove other conflicts
4587 so the process is repeated until no conflicts remain. */
4588 do
4589 {
4590 blocked = -1;
4591 moves = 0;
4592 /* Emit a move where the dst is not also a src, or where that src
4593 has already been used. */
4594 for (i = 0; i < size; i++)
4595 if (move[i].src != NULL_RTX)
4596 {
4597 if (move[i].links == -1
4598 || move[move[i].links].src == NULL_RTX)
4599 {
4600 moves++;
4601 /* Ignore NOP moves to self. */
4602 if (!rtx_equal_p (move[i].dst, move[i].src))
4603 emit_move_insn (move[i].dst, move[i].src);
4604
4605 /* Remove conflict from list. */
4606 move[i].src = NULL_RTX;
4607 }
4608 else
4609 blocked = i;
4610 }
4611
4612 /* Check for deadlock. This is when no moves occurred and we have
4613 at least one blocked move. */
4614 if (moves == 0 && blocked != -1)
4615 {
4616 /* Need to use the scratch register to break the deadlock.
4617 Add a move that puts the dst of the blocked move into scratch.
4618 When this move occurs, it breaks the chain deadlock.
4619 The scratch register is then substituted into the real move. */
4620
4621 move[size].src = move[blocked].dst;
4622 move[size].dst = scratch;
4623 /* Scratch move is never blocked. */
4624 move[size].links = -1;
4625 /* Make sure we have a valid link. */
4626 gcc_assert (move[blocked].links != -1);
4627 /* Replace src of blocking move with scratch reg. */
4628 move[move[blocked].links].src = scratch;
4629 /* Make dependent on the scratch move occurring. */
4630 move[blocked].links = size;
4631 size++;
4632 }
4633 }
4634 while (blocked != -1);
4635 }
4636 return true;
4637 }
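/* A minimal plain-C sketch of the idea above (a hypothetical demo, not
   part of this file): rotating a 4-byte buffer in place forms the
   dependency cycle dst[(i+1)%4] = src[i], so one element is parked in
   a scratch first, just as the RTL code parks a blocked dst in SCRATCH. */
#if 0
static void
rotate_bytes_demo (unsigned char buf[4])
{
  unsigned char scratch = buf[3];   /* break the dependency cycle */
  buf[3] = buf[2];
  buf[2] = buf[1];
  buf[1] = buf[0];
  buf[0] = scratch;
}
#endif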
4638
4639 /* Modifies the length assigned to instruction INSN.
4640 LEN is the initially computed length of the insn. */
4641
4642 int
4643 adjust_insn_length (rtx insn, int len)
4644 {
4645 rtx patt = PATTERN (insn);
4646 rtx set;
4647
4648 if (GET_CODE (patt) == SET)
4649 {
4650 rtx op[10];
4651 op[1] = SET_SRC (patt);
4652 op[0] = SET_DEST (patt);
4653 if (general_operand (op[1], VOIDmode)
4654 && general_operand (op[0], VOIDmode))
4655 {
4656 switch (GET_MODE (op[0]))
4657 {
4658 case QImode:
4659 output_movqi (insn, op, &len);
4660 break;
4661 case HImode:
4662 output_movhi (insn, op, &len);
4663 break;
4664 case SImode:
4665 case SFmode:
4666 output_movsisf (insn, op, &len);
4667 break;
4668 default:
4669 break;
4670 }
4671 }
4672 else if (op[0] == cc0_rtx && REG_P (op[1]))
4673 {
4674 switch (GET_MODE (op[1]))
4675 {
4676 case HImode: out_tsthi (insn, op[1], &len); break;
4677 case SImode: out_tstsi (insn, op[1], &len); break;
4678 default: break;
4679 }
4680 }
4681 else if (GET_CODE (op[1]) == AND)
4682 {
4683 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4684 {
4685 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4686 if (GET_MODE (op[1]) == SImode)
4687 len = (((mask & 0xff) != 0xff)
4688 + ((mask & 0xff00) != 0xff00)
4689 + ((mask & 0xff0000L) != 0xff0000L)
4690 + ((mask & 0xff000000L) != 0xff000000L));
4691 else if (GET_MODE (op[1]) == HImode)
4692 len = (((mask & 0xff) != 0xff)
4693 + ((mask & 0xff00) != 0xff00));
4694 }
4695 }
4696 else if (GET_CODE (op[1]) == IOR)
4697 {
4698 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4699 {
4700 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4701 if (GET_MODE (op[1]) == SImode)
4702 len = (((mask & 0xff) != 0)
4703 + ((mask & 0xff00) != 0)
4704 + ((mask & 0xff0000L) != 0)
4705 + ((mask & 0xff000000L) != 0));
4706 else if (GET_MODE (op[1]) == HImode)
4707 len = (((mask & 0xff) != 0)
4708 + ((mask & 0xff00) != 0));
4709 }
4710 }
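/* Worked example for the two cases above: an SImode AND with mask
   0xFF00FFFF has exactly one byte differing from 0xff, so len = 1;
   an SImode IOR with mask 0x0000FF00 has exactly one nonzero byte,
   so len = 1 as well. */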
4711 }
4712 set = single_set (insn);
4713 if (set)
4714 {
4715 rtx op[10];
4716
4717 op[1] = SET_SRC (set);
4718 op[0] = SET_DEST (set);
4719
4720 if (GET_CODE (patt) == PARALLEL
4721 && general_operand (op[1], VOIDmode)
4722 && general_operand (op[0], VOIDmode))
4723 {
4724 if (XVECLEN (patt, 0) == 2)
4725 op[2] = XVECEXP (patt, 0, 1);
4726
4727 switch (GET_MODE (op[0]))
4728 {
4729 case QImode:
4730 len = 2;
4731 break;
4732 case HImode:
4733 output_reload_inhi (insn, op, &len);
4734 break;
4735 case SImode:
4736 case SFmode:
4737 output_reload_insisf (insn, op, &len);
4738 break;
4739 default:
4740 break;
4741 }
4742 }
4743 else if (GET_CODE (op[1]) == ASHIFT
4744 || GET_CODE (op[1]) == ASHIFTRT
4745 || GET_CODE (op[1]) == LSHIFTRT)
4746 {
4747 rtx ops[10];
4748 ops[0] = op[0];
4749 ops[1] = XEXP (op[1],0);
4750 ops[2] = XEXP (op[1],1);
4751 switch (GET_CODE (op[1]))
4752 {
4753 case ASHIFT:
4754 switch (GET_MODE (op[0]))
4755 {
4756 case QImode: ashlqi3_out (insn,ops,&len); break;
4757 case HImode: ashlhi3_out (insn,ops,&len); break;
4758 case SImode: ashlsi3_out (insn,ops,&len); break;
4759 default: break;
4760 }
4761 break;
4762 case ASHIFTRT:
4763 switch (GET_MODE (op[0]))
4764 {
4765 case QImode: ashrqi3_out (insn,ops,&len); break;
4766 case HImode: ashrhi3_out (insn,ops,&len); break;
4767 case SImode: ashrsi3_out (insn,ops,&len); break;
4768 default: break;
4769 }
4770 break;
4771 case LSHIFTRT:
4772 switch (GET_MODE (op[0]))
4773 {
4774 case QImode: lshrqi3_out (insn,ops,&len); break;
4775 case HImode: lshrhi3_out (insn,ops,&len); break;
4776 case SImode: lshrsi3_out (insn,ops,&len); break;
4777 default: break;
4778 }
4779 break;
4780 default:
4781 break;
4782 }
4783 }
4784 }
4785 return len;
4786 }
4787
4788 /* Return nonzero if register REG is dead after INSN. */
4789
4790 int
4791 reg_unused_after (rtx insn, rtx reg)
4792 {
4793 return (dead_or_set_p (insn, reg)
4794 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4795 }
4796
4797 /* Return nonzero if REG is not used after INSN.
4798 We assume REG is a reload reg, and therefore does
4799 not live past labels. It may live past calls or jumps though. */
4800
4801 int
4802 _reg_unused_after (rtx insn, rtx reg)
4803 {
4804 enum rtx_code code;
4805 rtx set;
4806
4807 /* If the reg is set by this instruction, then it is safe for our
4808 case. Disregard the case where this is a store to memory, since
4809 we are checking a register used in the store address. */
4810 set = single_set (insn);
4811 if (set && GET_CODE (SET_DEST (set)) != MEM
4812 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4813 return 1;
4814
4815 while ((insn = NEXT_INSN (insn)))
4816 {
4817 rtx set;
4818 code = GET_CODE (insn);
4819
4820 #if 0
4821 /* If this is a label that existed before reload, then the register
4822 is dead here. However, if this is a label added by reorg, then
4823 the register may still be live here. We can't tell the difference,
4824 so we just ignore labels completely. */
4825 if (code == CODE_LABEL)
4826 return 1;
4827 /* else */
4828 #endif
4829
4830 if (!INSN_P (insn))
4831 continue;
4832
4833 if (code == JUMP_INSN)
4834 return 0;
4835
4836 /* If this is a sequence, we must handle it all at once.
4837 We could have, for instance, a call that sets the target register,
4838 and an insn in a delay slot that uses the register. In this case,
4839 we must return 0. */
4840 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4841 {
4842 int i;
4843 int retval = 0;
4844
4845 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4846 {
4847 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4848 rtx set = single_set (this_insn);
4849
4850 if (GET_CODE (this_insn) == CALL_INSN)
4851 code = CALL_INSN;
4852 else if (GET_CODE (this_insn) == JUMP_INSN)
4853 {
4854 if (INSN_ANNULLED_BRANCH_P (this_insn))
4855 return 0;
4856 code = JUMP_INSN;
4857 }
4858
4859 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4860 return 0;
4861 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4862 {
4863 if (GET_CODE (SET_DEST (set)) != MEM)
4864 retval = 1;
4865 else
4866 return 0;
4867 }
4868 if (set == 0
4869 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4870 return 0;
4871 }
4872 if (retval == 1)
4873 return 1;
4874 else if (code == JUMP_INSN)
4875 return 0;
4876 }
4877
4878 if (code == CALL_INSN)
4879 {
4880 rtx tem;
4881 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4882 if (GET_CODE (XEXP (tem, 0)) == USE
4883 && REG_P (XEXP (XEXP (tem, 0), 0))
4884 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4885 return 0;
4886 if (call_used_regs[REGNO (reg)])
4887 return 1;
4888 }
4889
4890 set = single_set (insn);
4891
4892 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4893 return 0;
4894 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4895 return GET_CODE (SET_DEST (set)) != MEM;
4896 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4897 return 0;
4898 }
4899 return 1;
4900 }
4901
4902 /* Target hook for assembling integer objects. The AVR version needs
4903 special handling for references to certain labels. */
4904
4905 static bool
4906 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4907 {
4908 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4909 && text_segment_operand (x, VOIDmode) )
4910 {
4911 fputs ("\t.word\tgs(", asm_out_file);
4912 output_addr_const (asm_out_file, x);
4913 fputs (")\n", asm_out_file);
4914 return true;
4915 }
4916 return default_assemble_integer (x, size, aligned_p);
4917 }
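/* Example: for a statically initialized function pointer such as
   "void (*f)(void) = blink;" ("blink" being a hypothetical symbol),
   the code address is emitted as ".word gs(blink)" so the linker can
   insert a jump stub if the target lies beyond the 128 KiB reach. */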
4918
4919 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4920
4921 void
4922 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4923 {
4924
4925 /* If the function has the 'signal' or 'interrupt' attribute, test to
4926 make sure that the name of the function is "__vector_NN" so as to
4927 catch when the user misspells the interrupt vector name. */
4928
4929 if (cfun->machine->is_interrupt)
4930 {
4931 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4932 {
4933 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4934 "%qs appears to be a misspelled interrupt handler",
4935 name);
4936 }
4937 }
4938 else if (cfun->machine->is_signal)
4939 {
4940 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4941 {
4942 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4943 "%qs appears to be a misspelled signal handler",
4944 name);
4945 }
4946 }
4947
4948 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4949 ASM_OUTPUT_LABEL (file, name);
4950 }
4951
4952 /* The routine used to output NUL-terminated strings. We use a special
4953 version of this for most svr4 targets because doing so makes the
4954 generated assembly code more compact (and thus faster to assemble)
4955 as well as more readable, especially for targets like the i386
4956 (where the only alternative is to output character sequences as
4957 comma separated lists of numbers). */
4958
4959 void
4960 gas_output_limited_string(FILE *file, const char *str)
4961 {
4962 const unsigned char *_limited_str = (const unsigned char *) str;
4963 unsigned ch;
4964 fprintf (file, "%s\"", STRING_ASM_OP);
4965 for (; (ch = *_limited_str); _limited_str++)
4966 {
4967 int escape;
4968 switch (escape = ESCAPES[ch])
4969 {
4970 case 0:
4971 putc (ch, file);
4972 break;
4973 case 1:
4974 fprintf (file, "\\%03o", ch);
4975 break;
4976 default:
4977 putc ('\\', file);
4978 putc (escape, file);
4979 break;
4980 }
4981 }
4982 fprintf (file, "\"\n");
4983 }
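/* Example (assuming the usual ESCAPES table, where control bytes map
   to escape class 1): the bytes { 'h', 'i', 1, 0 } are emitted as
       .string "hi\001"
   while a double quote takes the default case and appears as \". */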
4984
4985 /* The routine used to output sequences of byte values. We use a special
4986 version of this for most svr4 targets because doing so makes the
4987 generated assembly code more compact (and thus faster to assemble)
4988 as well as more readable. Note that if we find subparts of the
4989 character sequence which end with NUL (and which are shorter than
4990 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4991
4992 void
4993 gas_output_ascii(FILE *file, const char *str, size_t length)
4994 {
4995 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4996 const unsigned char *limit = _ascii_bytes + length;
4997 unsigned bytes_in_chunk = 0;
4998 for (; _ascii_bytes < limit; _ascii_bytes++)
4999 {
5000 const unsigned char *p;
5001 if (bytes_in_chunk >= 60)
5002 {
5003 fprintf (file, "\"\n");
5004 bytes_in_chunk = 0;
5005 }
5006 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
5007 continue;
5008 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
5009 {
5010 if (bytes_in_chunk > 0)
5011 {
5012 fprintf (file, "\"\n");
5013 bytes_in_chunk = 0;
5014 }
5015 gas_output_limited_string (file, (const char*)_ascii_bytes);
5016 _ascii_bytes = p;
5017 }
5018 else
5019 {
5020 int escape;
5021 unsigned ch;
5022 if (bytes_in_chunk == 0)
5023 fprintf (file, "\t.ascii\t\"");
5024 switch (escape = ESCAPES[ch = *_ascii_bytes])
5025 {
5026 case 0:
5027 putc (ch, file);
5028 bytes_in_chunk++;
5029 break;
5030 case 1:
5031 fprintf (file, "\\%03o", ch);
5032 bytes_in_chunk += 4;
5033 break;
5034 default:
5035 putc ('\\', file);
5036 putc (escape, file);
5037 bytes_in_chunk += 2;
5038 break;
5039 }
5040 }
5041 }
5042 if (bytes_in_chunk > 0)
5043 fprintf (file, "\"\n");
5044 }
5045
5046 /* Return true if pseudos that have been
5047 assigned to registers of class C would likely be spilled
5048 because registers of C are needed for spill registers. */
5049
5050 static bool
5051 avr_class_likely_spilled_p (reg_class_t c)
5052 {
5053 return (c != ALL_REGS && c != ADDW_REGS);
5054 }
5055
5056 /* Valid attributes:
5057 progmem - put data into program memory;
5058 signal - make a function a hardware interrupt handler; after the
5059 function prologue, interrupts remain disabled;
5060 interrupt - make a function a hardware interrupt handler; after the
5061 function prologue, interrupts are enabled;
5062 naked - don't generate function prologue/epilogue and `ret' instruction.
5063 
5064 Only the `progmem' attribute is valid for a type. */
5065
5066 /* Handle a "progmem" attribute; arguments as in
5067 struct attribute_spec.handler. */
5068 static tree
5069 avr_handle_progmem_attribute (tree *node, tree name,
5070 tree args ATTRIBUTE_UNUSED,
5071 int flags ATTRIBUTE_UNUSED,
5072 bool *no_add_attrs)
5073 {
5074 if (DECL_P (*node))
5075 {
5076 if (TREE_CODE (*node) == TYPE_DECL)
5077 {
5078 /* This is really a decl attribute, not a type attribute,
5079 but try to handle it for GCC 3.0 backwards compatibility. */
5080
5081 tree type = TREE_TYPE (*node);
5082 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5083 tree newtype = build_type_attribute_variant (type, attr);
5084
5085 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5086 TREE_TYPE (*node) = newtype;
5087 *no_add_attrs = true;
5088 }
5089 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5090 {
5091 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
5092 {
5093 warning (0, "only initialized variables can be placed into "
5094 "program memory area");
5095 *no_add_attrs = true;
5096 }
5097 }
5098 else
5099 {
5100 warning (OPT_Wattributes, "%qE attribute ignored",
5101 name);
5102 *no_add_attrs = true;
5103 }
5104 }
5105
5106 return NULL_TREE;
5107 }
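/* Typical user-level usage of the attribute handled above (a sketch,
   not code from this file; "greeting" is an illustrative name):

       static const char greeting[] __attribute__ ((progmem)) = "hi";

   Being const and initialized, such a variable is later routed into
   the ".progmem.data" section by avr_insert_attributes below. */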
5108
5109 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5110 struct attribute_spec.handler. */
5111
5112 static tree
5113 avr_handle_fndecl_attribute (tree *node, tree name,
5114 tree args ATTRIBUTE_UNUSED,
5115 int flags ATTRIBUTE_UNUSED,
5116 bool *no_add_attrs)
5117 {
5118 if (TREE_CODE (*node) != FUNCTION_DECL)
5119 {
5120 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5121 name);
5122 *no_add_attrs = true;
5123 }
5124
5125 return NULL_TREE;
5126 }
5127
5128 static tree
5129 avr_handle_fntype_attribute (tree *node, tree name,
5130 tree args ATTRIBUTE_UNUSED,
5131 int flags ATTRIBUTE_UNUSED,
5132 bool *no_add_attrs)
5133 {
5134 if (TREE_CODE (*node) != FUNCTION_TYPE)
5135 {
5136 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5137 name);
5138 *no_add_attrs = true;
5139 }
5140
5141 return NULL_TREE;
5142 }
5143
5144 /* Look for the `progmem' attribute in DECL;
5145 if found return 1, otherwise 0. */
5146
5147 int
5148 avr_progmem_p (tree decl, tree attributes)
5149 {
5150 tree a;
5151
5152 if (TREE_CODE (decl) != VAR_DECL)
5153 return 0;
5154
5155 if (NULL_TREE
5156 != lookup_attribute ("progmem", attributes))
5157 return 1;
5158
5159 a = decl;
5160 do
5161 a = TREE_TYPE (a);
5162 while (TREE_CODE (a) == ARRAY_TYPE);
5163
5164 if (a == error_mark_node)
5165 return 0;
5166
5167 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5168 return 1;
5169
5170 return 0;
5171 }
5172
5173 /* Add the section attribute if the variable is in progmem. */
5174
5175 static void
5176 avr_insert_attributes (tree node, tree *attributes)
5177 {
5178 if (TREE_CODE (node) == VAR_DECL
5179 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5180 && avr_progmem_p (node, *attributes))
5181 {
5182 if (TREE_READONLY (node))
5183 {
5184 static const char dsec[] = ".progmem.data";
5185
5186 *attributes = tree_cons (get_identifier ("section"),
5187 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5188 *attributes);
5189 }
5190 else
5191 {
5192 error ("variable %q+D must be const in order to be put into"
5193 " read-only section by means of %<__attribute__((progmem))%>",
5194 node);
5195 }
5196 }
5197 }
5198
5199 /* A get_unnamed_section callback for switching to progmem_section. */
5200
5201 static void
5202 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5203 {
5204 fprintf (asm_out_file,
5205 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5206 AVR_HAVE_JMP_CALL ? "a" : "ax");
5207 /* Should already be aligned; this is just to be safe if it isn't. */
5208 fprintf (asm_out_file, "\t.p2align 1\n");
5209 }
5210
5211
5212 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5213 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5214 /* Track the need for __do_clear_bss. */
5215
5216 void
5217 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5218 const char *name, unsigned HOST_WIDE_INT size,
5219 unsigned int align, bool local_p)
5220 {
5221 avr_need_clear_bss_p = true;
5222
5223 if (local_p)
5224 {
5225 fputs ("\t.local\t", stream);
5226 assemble_name (stream, name);
5227 fputs ("\n", stream);
5228 }
5229
5230 fputs ("\t.comm\t", stream);
5231 assemble_name (stream, name);
5232 fprintf (stream,
5233 "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
5234 size, align / BITS_PER_UNIT);
5235 }
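/* Example output for a local, byte-aligned 4-byte object "buf"
   ("buf" is just an illustrative name):
       .local buf
       .comm buf,4,1  */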
5236
5237
5238 /* Unnamed section callback for data_section
5239 to track the need for __do_copy_data. */
5240
5241 static void
5242 avr_output_data_section_asm_op (const void *data)
5243 {
5244 avr_need_copy_data_p = true;
5245
5246 /* Dispatch to default. */
5247 output_section_asm_op (data);
5248 }
5249
5250
5251 /* Unnamed section callback for bss_section
5252 to track the need for __do_clear_bss. */
5253
5254 static void
5255 avr_output_bss_section_asm_op (const void *data)
5256 {
5257 avr_need_clear_bss_p = true;
5258
5259 /* Dispatch to default. */
5260 output_section_asm_op (data);
5261 }
5262
5263
5264 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5265
5266 static void
5267 avr_asm_init_sections (void)
5268 {
5269 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5270 avr_output_progmem_section_asm_op,
5271 NULL);
5272 readonly_data_section = data_section;
5273
5274 data_section->unnamed.callback = avr_output_data_section_asm_op;
5275 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5276 }
5277
5278
5279 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5280 /* Track the need for __do_clear_bss / __do_copy_data in named sections. */
5281
5282 void
5283 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5284 {
5285 if (!avr_need_copy_data_p)
5286 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5287 || 0 == strncmp (name, ".rodata", 7)
5288 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5289
5290 if (!avr_need_clear_bss_p)
5291 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5292
5293 default_elf_asm_named_section (name, flags, decl);
5294 }
5295
5296 static unsigned int
5297 avr_section_type_flags (tree decl, const char *name, int reloc)
5298 {
5299 unsigned int flags = default_section_type_flags (decl, name, reloc);
5300
5301 if (strncmp (name, ".noinit", 7) == 0)
5302 {
5303 if (decl && TREE_CODE (decl) == VAR_DECL
5304 && DECL_INITIAL (decl) == NULL_TREE)
5305 flags |= SECTION_BSS; /* @nobits */
5306 else
5307 warning (0, "only uninitialized variables can be placed in the "
5308 ".noinit section");
5309 }
5310
5311 return flags;
5312 }
5313
5314
5315 /* Implement `TARGET_ASM_FILE_START'. */
5316 /* Outputs some appropriate text to go at the start of an assembler
5317 file. */
5318
5319 static void
5320 avr_file_start (void)
5321 {
5322 if (avr_current_arch->asm_only)
5323 error ("MCU %qs supported for assembler only", avr_mcu_name);
5324
5325 default_file_start ();
5326
5327 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
5328 fputs ("__SREG__ = 0x3f\n"
5329 "__SP_H__ = 0x3e\n"
5330 "__SP_L__ = 0x3d\n", asm_out_file);
5331
5332 fputs ("__tmp_reg__ = 0\n"
5333 "__zero_reg__ = 1\n", asm_out_file);
5334 }
5335
5336
5337 /* Implement `TARGET_ASM_FILE_END'. */
5338 /* Outputs to the stdio stream FILE some
5339 appropriate text to go at the end of an assembler file. */
5340
5341 static void
5342 avr_file_end (void)
5343 {
5344 /* Output these only if there is anything in the
5345 .data* / .rodata* / .gnu.linkonce.* or .bss* input
5346 sections; some code size can be saved by not linking
5347 in the initialization code from libgcc when the
5348 corresponding sections are empty. */
5349
5350 if (avr_need_copy_data_p)
5351 fputs (".global __do_copy_data\n", asm_out_file);
5352
5353 if (avr_need_clear_bss_p)
5354 fputs (".global __do_clear_bss\n", asm_out_file);
5355 }
5356
5357 /* Choose the order in which to allocate hard registers for
5358 pseudo-registers local to a basic block.
5359
5360 Store the desired register order in the array `reg_alloc_order'.
5361 Element 0 should be the register to allocate first; element 1, the
5362 next register; and so on. */
5363
5364 void
5365 order_regs_for_local_alloc (void)
5366 {
5367 unsigned int i;
5368 static const int order_0[] = {
5369 24,25,
5370 18,19,
5371 20,21,
5372 22,23,
5373 30,31,
5374 26,27,
5375 28,29,
5376 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5377 0,1,
5378 32,33,34,35
5379 };
5380 static const int order_1[] = {
5381 18,19,
5382 20,21,
5383 22,23,
5384 24,25,
5385 30,31,
5386 26,27,
5387 28,29,
5388 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5389 0,1,
5390 32,33,34,35
5391 };
5392 static const int order_2[] = {
5393 25,24,
5394 23,22,
5395 21,20,
5396 19,18,
5397 30,31,
5398 26,27,
5399 28,29,
5400 17,16,
5401 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5402 1,0,
5403 32,33,34,35
5404 };
5405
5406 const int *order = (TARGET_ORDER_1 ? order_1 :
5407 TARGET_ORDER_2 ? order_2 :
5408 order_0);
5409 for (i = 0; i < ARRAY_SIZE (order_0); ++i)
5410 reg_alloc_order[i] = order[i];
5411 }
5412
5413
5414 /* Implement `TARGET_REGISTER_MOVE_COST' */
5415
5416 static int
5417 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5418 reg_class_t from, reg_class_t to)
5419 {
5420 return (from == STACK_REG ? 6
5421 : to == STACK_REG ? 12
5422 : 2);
5423 }
5424
5425
5426 /* Implement `TARGET_MEMORY_MOVE_COST' */
5427
5428 static int
5429 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5430 bool in ATTRIBUTE_UNUSED)
5431 {
5432 return (mode == QImode ? 2
5433 : mode == HImode ? 4
5434 : mode == SImode ? 8
5435 : mode == SFmode ? 8
5436 : 16);
5437 }
5438
5439
5440 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
5441 cost of an RTX operand given its context. X is the rtx of the
5442 operand, MODE is its mode, and OUTER is the rtx_code of this
5443 operand's parent operator. */
5444
5445 static int
5446 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5447 bool speed)
5448 {
5449 enum rtx_code code = GET_CODE (x);
5450 int total;
5451
5452 switch (code)
5453 {
5454 case REG:
5455 case SUBREG:
5456 return 0;
5457
5458 case CONST_INT:
5459 case CONST_DOUBLE:
5460 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5461
5462 default:
5463 break;
5464 }
5465
5466 total = 0;
5467 avr_rtx_costs (x, code, outer, &total, speed);
5468 return total;
5469 }
5470
5471 /* The AVR backend's rtx_cost function. X is the rtx expression whose cost
5472 is to be calculated. Return true if the complete cost has been
5473 computed, and false if subexpressions should be scanned. In either
5474 case, *TOTAL contains the cost result. */
5475
5476 static bool
5477 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5478 bool speed)
5479 {
5480 enum rtx_code code = (enum rtx_code) codearg;
5481 enum machine_mode mode = GET_MODE (x);
5482 HOST_WIDE_INT val;
5483
5484 switch (code)
5485 {
5486 case CONST_INT:
5487 case CONST_DOUBLE:
5488 /* Immediate constants are as cheap as registers. */
5489 *total = 0;
5490 return true;
5491
5492 case MEM:
5493 case CONST:
5494 case LABEL_REF:
5495 case SYMBOL_REF:
5496 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5497 return true;
5498
5499 case NEG:
5500 switch (mode)
5501 {
5502 case QImode:
5503 case SFmode:
5504 *total = COSTS_N_INSNS (1);
5505 break;
5506
5507 case HImode:
5508 *total = COSTS_N_INSNS (3);
5509 break;
5510
5511 case SImode:
5512 *total = COSTS_N_INSNS (7);
5513 break;
5514
5515 default:
5516 return false;
5517 }
5518 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5519 return true;
5520
5521 case ABS:
5522 switch (mode)
5523 {
5524 case QImode:
5525 case SFmode:
5526 *total = COSTS_N_INSNS (1);
5527 break;
5528
5529 default:
5530 return false;
5531 }
5532 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5533 return true;
5534
5535 case NOT:
5536 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5537 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5538 return true;
5539
5540 case ZERO_EXTEND:
5541 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5542 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5543 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5544 return true;
5545
5546 case SIGN_EXTEND:
5547 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5548 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5549 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5550 return true;
5551
5552 case PLUS:
5553 switch (mode)
5554 {
5555 case QImode:
5556 *total = COSTS_N_INSNS (1);
5557 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5558 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5559 break;
5560
5561 case HImode:
5562 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5563 {
5564 *total = COSTS_N_INSNS (2);
5565 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5566 }
5567 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5568 *total = COSTS_N_INSNS (1);
5569 else
5570 *total = COSTS_N_INSNS (2);
5571 break;
5572
5573 case SImode:
5574 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5575 {
5576 *total = COSTS_N_INSNS (4);
5577 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5578 }
5579 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5580 *total = COSTS_N_INSNS (1);
5581 else
5582 *total = COSTS_N_INSNS (4);
5583 break;
5584
5585 default:
5586 return false;
5587 }
5588 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5589 return true;
5590
5591 case MINUS:
5592 case AND:
5593 case IOR:
5594 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5595 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5596 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5597 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5598 return true;
5599
5600 case XOR:
5601 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5602 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5603 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5604 return true;
5605
5606 case MULT:
5607 switch (mode)
5608 {
5609 case QImode:
5610 if (AVR_HAVE_MUL)
5611 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5612 else if (!speed)
5613 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5614 else
5615 return false;
5616 break;
5617
5618 case HImode:
5619 if (AVR_HAVE_MUL)
5620 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5621 else if (!speed)
5622 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5623 else
5624 return false;
5625 break;
5626
5627 default:
5628 return false;
5629 }
5630 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5631 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5632 return true;
5633
5634 case DIV:
5635 case MOD:
5636 case UDIV:
5637 case UMOD:
5638 if (!speed)
5639 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5640 else
5641 return false;
5642 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5643 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5644 return true;
5645
5646 case ROTATE:
5647 switch (mode)
5648 {
5649 case QImode:
5650 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5651 *total = COSTS_N_INSNS (1);
5652
5653 break;
5654
5655 case HImode:
5656 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5657 *total = COSTS_N_INSNS (3);
5658
5659 break;
5660
5661 case SImode:
5662 if (CONST_INT_P (XEXP (x, 1)))
5663 switch (INTVAL (XEXP (x, 1)))
5664 {
5665 case 8:
5666 case 24:
5667 *total = COSTS_N_INSNS (5);
5668 break;
5669 case 16:
5670 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5671 break;
5672 }
5673 break;
5674
5675 default:
5676 return false;
5677 }
5678 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5679 return true;
5680
5681 case ASHIFT:
5682 switch (mode)
5683 {
5684 case QImode:
5685 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5686 {
5687 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5688 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5689 }
5690 else
5691 {
5692 val = INTVAL (XEXP (x, 1));
5693 if (val == 7)
5694 *total = COSTS_N_INSNS (3);
5695 else if (val >= 0 && val <= 7)
5696 *total = COSTS_N_INSNS (val);
5697 else
5698 *total = COSTS_N_INSNS (1);
5699 }
5700 break;
5701
5702 case HImode:
5703 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5704 {
5705 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5706 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5707 }
5708 else
5709 switch (INTVAL (XEXP (x, 1)))
5710 {
5711 case 0:
5712 *total = 0;
5713 break;
5714 case 1:
5715 case 8:
5716 *total = COSTS_N_INSNS (2);
5717 break;
5718 case 9:
5719 *total = COSTS_N_INSNS (3);
5720 break;
5721 case 2:
5722 case 3:
5723 case 10:
5724 case 15:
5725 *total = COSTS_N_INSNS (4);
5726 break;
5727 case 7:
5728 case 11:
5729 case 12:
5730 *total = COSTS_N_INSNS (5);
5731 break;
5732 case 4:
5733 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5734 break;
5735 case 6:
5736 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5737 break;
5738 case 5:
5739 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5740 break;
5741 default:
5742 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5743 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5744 }
5745 break;
5746
5747 case SImode:
5748 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5749 {
5750 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5751 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5752 }
5753 else
5754 switch (INTVAL (XEXP (x, 1)))
5755 {
5756 case 0:
5757 *total = 0;
5758 break;
5759 case 24:
5760 *total = COSTS_N_INSNS (3);
5761 break;
5762 case 1:
5763 case 8:
5764 case 16:
5765 *total = COSTS_N_INSNS (4);
5766 break;
5767 case 31:
5768 *total = COSTS_N_INSNS (6);
5769 break;
5770 case 2:
5771 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5772 break;
5773 default:
5774 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5775 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5776 }
5777 break;
5778
5779 default:
5780 return false;
5781 }
5782 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5783 return true;
5784
5785 case ASHIFTRT:
5786 switch (mode)
5787 {
5788 case QImode:
5789 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5790 {
5791 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5792 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5793 }
5794 else
5795 {
5796 val = INTVAL (XEXP (x, 1));
5797 if (val == 6)
5798 *total = COSTS_N_INSNS (4);
5799 else if (val == 7)
5800 *total = COSTS_N_INSNS (2);
5801 else if (val >= 0 && val <= 7)
5802 *total = COSTS_N_INSNS (val);
5803 else
5804 *total = COSTS_N_INSNS (1);
5805 }
5806 break;
5807
5808 case HImode:
5809 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5810 {
5811 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5812 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5813 }
5814 else
5815 switch (INTVAL (XEXP (x, 1)))
5816 {
5817 case 0:
5818 *total = 0;
5819 break;
5820 case 1:
5821 *total = COSTS_N_INSNS (2);
5822 break;
5823 case 15:
5824 *total = COSTS_N_INSNS (3);
5825 break;
5826 case 2:
5827 case 7:
5828 case 8:
5829 case 9:
5830 *total = COSTS_N_INSNS (4);
5831 break;
5832 case 10:
5833 case 14:
5834 *total = COSTS_N_INSNS (5);
5835 break;
5836 case 11:
5837 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5838 break;
5839 case 12:
5840 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5841 break;
5842 case 6:
5843 case 13:
5844 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5845 break;
5846 default:
5847 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5848 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5849 }
5850 break;
5851
5852 case SImode:
5853 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5854 {
5855 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5856 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5857 }
5858 else
5859 switch (INTVAL (XEXP (x, 1)))
5860 {
5861 case 0:
5862 *total = 0;
5863 break;
5864 case 1:
5865 *total = COSTS_N_INSNS (4);
5866 break;
5867 case 8:
5868 case 16:
5869 case 24:
5870 *total = COSTS_N_INSNS (6);
5871 break;
5872 case 2:
5873 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5874 break;
5875 case 31:
5876 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5877 break;
5878 default:
5879 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5880 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5881 }
5882 break;
5883
5884 default:
5885 return false;
5886 }
5887 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5888 return true;
5889
5890 case LSHIFTRT:
5891 switch (mode)
5892 {
5893 case QImode:
5894 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5895 {
5896 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5897 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5898 }
5899 else
5900 {
5901 val = INTVAL (XEXP (x, 1));
5902 if (val == 7)
5903 *total = COSTS_N_INSNS (3);
5904 else if (val >= 0 && val <= 7)
5905 *total = COSTS_N_INSNS (val);
5906 else
5907 *total = COSTS_N_INSNS (1);
5908 }
5909 break;
5910
5911 case HImode:
5912 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5913 {
5914 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5915 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5916 }
5917 else
5918 switch (INTVAL (XEXP (x, 1)))
5919 {
5920 case 0:
5921 *total = 0;
5922 break;
5923 case 1:
5924 case 8:
5925 *total = COSTS_N_INSNS (2);
5926 break;
5927 case 9:
5928 *total = COSTS_N_INSNS (3);
5929 break;
5930 case 2:
5931 case 10:
5932 case 15:
5933 *total = COSTS_N_INSNS (4);
5934 break;
5935 case 7:
5936 case 11:
5937 *total = COSTS_N_INSNS (5);
5938 break;
5939 case 3:
5940 case 12:
5941 case 13:
5942 case 14:
5943 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5944 break;
5945 case 4:
5946 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5947 break;
5948 case 5:
5949 case 6:
5950 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5951 break;
5952 default:
5953 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5954 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5955 }
5956 break;
5957
5958 case SImode:
5959 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5960 {
5961 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5962 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5963 }
5964 else
5965 switch (INTVAL (XEXP (x, 1)))
5966 {
5967 case 0:
5968 *total = 0;
5969 break;
5970 case 1:
5971 *total = COSTS_N_INSNS (4);
5972 break;
5973 case 2:
5974 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5975 break;
5976 case 8:
5977 case 16:
5978 case 24:
5979 *total = COSTS_N_INSNS (4);
5980 break;
5981 case 31:
5982 *total = COSTS_N_INSNS (6);
5983 break;
5984 default:
5985 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5986 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5987 }
5988 break;
5989
5990 default:
5991 return false;
5992 }
5993 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5994 return true;
5995
5996 case COMPARE:
5997 switch (GET_MODE (XEXP (x, 0)))
5998 {
5999 case QImode:
6000 *total = COSTS_N_INSNS (1);
6001 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6002 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6003 break;
6004
6005 case HImode:
6006 *total = COSTS_N_INSNS (2);
6007 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6008 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6009 else if (INTVAL (XEXP (x, 1)) != 0)
6010 *total += COSTS_N_INSNS (1);
6011 break;
6012
6013 case SImode:
6014 *total = COSTS_N_INSNS (4);
6015 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6016 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6017 else if (INTVAL (XEXP (x, 1)) != 0)
6018 *total += COSTS_N_INSNS (3);
6019 break;
6020
6021 default:
6022 return false;
6023 }
6024 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
6025 return true;
6026
6027 default:
6028 break;
6029 }
6030 return false;
6031 }
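/* Worked cost example: (plus:HI (reg) (const_int 10)) yields
   COSTS_N_INSNS (1) because the constant is within the -63..63 range
   handled by a single adiw/sbiw, while (plus:HI (reg) (const_int 1000))
   yields COSTS_N_INSNS (2), matching the subi/sbci pair needed on the
   low and high bytes. */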
6032
6033 /* Calculate the cost of a memory address. */
6034
6035 static int
6036 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6037 {
6038 if (GET_CODE (x) == PLUS
6039 && GET_CODE (XEXP (x,1)) == CONST_INT
6040 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6041 && INTVAL (XEXP (x,1)) >= 61)
6042 return 18;
6043 if (CONSTANT_ADDRESS_P (x))
6044 {
6045 if (optimize > 0 && io_address_operand (x, QImode))
6046 return 2;
6047 return 4;
6048 }
6049 return 4;
6050 }
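/* Example: (plus:HI (reg Y) (const_int 62)) costs 18, since any
   displacement of 61 or more may exceed the reach of "ldd" and force
   a pointer adjustment; smaller register-plus-offset addresses fall
   through to the default cost of 4. */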
6051
6052 /* Test for the extra memory constraint 'Q': a memory address based
6053 on the Y or Z pointer with a valid displacement. */
6054
6055 int
6056 extra_constraint_Q (rtx x)
6057 {
6058 if (GET_CODE (XEXP (x,0)) == PLUS
6059 && REG_P (XEXP (XEXP (x,0), 0))
6060 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6061 && (INTVAL (XEXP (XEXP (x,0), 1))
6062 <= MAX_LD_OFFSET (GET_MODE (x))))
6063 {
6064 rtx xx = XEXP (XEXP (x,0), 0);
6065 int regno = REGNO (xx);
6066 if (TARGET_ALL_DEBUG)
6067 {
6068 fprintf (stderr, ("extra_constraint:\n"
6069 "reload_completed: %d\n"
6070 "reload_in_progress: %d\n"),
6071 reload_completed, reload_in_progress);
6072 debug_rtx (x);
6073 }
6074 if (regno >= FIRST_PSEUDO_REGISTER)
6075 return 1; /* allocate pseudos */
6076 else if (regno == REG_Z || regno == REG_Y)
6077 return 1; /* strictly check */
6078 else if (xx == frame_pointer_rtx
6079 || xx == arg_pointer_rtx)
6080 return 1; /* XXX frame & arg pointer checks */
6081 }
6082 return 0;
6083 }
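/* Example: for a HImode access MAX_LD_OFFSET is 64 - 2 = 62, so
   (mem:HI (plus (reg Y) (const_int 60))) satisfies 'Q' while the same
   address with displacement 63 does not. */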
6084
6085 /* Convert condition code CONDITION to a condition code valid on AVR. */
6086
6087 RTX_CODE
6088 avr_normalize_condition (RTX_CODE condition)
6089 {
6090 switch (condition)
6091 {
6092 case GT:
6093 return GE;
6094 case GTU:
6095 return GEU;
6096 case LE:
6097 return LT;
6098 case LEU:
6099 return LTU;
6100 default:
6101 gcc_unreachable ();
6102 }
6103 }
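/* Example: GT is not directly encodable on AVR, so a comparison like
   "x > 5" is handled by avr_reorg below, which bumps the constant to 6
   and then calls this function to turn GT into GE ("x >= 6"). */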
6104
6105 /* This function optimizes conditional jumps. */
6106
6107 static void
6108 avr_reorg (void)
6109 {
6110 rtx insn, pattern;
6111
6112 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6113 {
6114 if (! (GET_CODE (insn) == INSN
6115 || GET_CODE (insn) == CALL_INSN
6116 || GET_CODE (insn) == JUMP_INSN)
6117 || !single_set (insn))
6118 continue;
6119
6120 pattern = PATTERN (insn);
6121
6122 if (GET_CODE (pattern) == PARALLEL)
6123 pattern = XVECEXP (pattern, 0, 0);
6124 if (GET_CODE (pattern) == SET
6125 && SET_DEST (pattern) == cc0_rtx
6126 && compare_diff_p (insn))
6127 {
6128 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
6129 {
6130 /* Now we are working on the compare insn. */
6131
6132 pattern = SET_SRC (pattern);
6133 if (true_regnum (XEXP (pattern,0)) >= 0
6134 && true_regnum (XEXP (pattern,1)) >= 0 )
6135 {
6136 rtx x = XEXP (pattern,0);
6137 rtx next = next_real_insn (insn);
6138 rtx pat = PATTERN (next);
6139 rtx src = SET_SRC (pat);
6140 rtx t = XEXP (src,0);
6141 PUT_CODE (t, swap_condition (GET_CODE (t)));
6142 XEXP (pattern,0) = XEXP (pattern,1);
6143 XEXP (pattern,1) = x;
6144 INSN_CODE (next) = -1;
6145 }
6146 else if (true_regnum (XEXP (pattern, 0)) >= 0
6147 && XEXP (pattern, 1) == const0_rtx)
6148 {
6149 /* This is a tst insn; we can reverse it. */
6150 rtx next = next_real_insn (insn);
6151 rtx pat = PATTERN (next);
6152 rtx src = SET_SRC (pat);
6153 rtx t = XEXP (src,0);
6154
6155 PUT_CODE (t, swap_condition (GET_CODE (t)));
6156 XEXP (pattern, 1) = XEXP (pattern, 0);
6157 XEXP (pattern, 0) = const0_rtx;
6158 INSN_CODE (next) = -1;
6159 INSN_CODE (insn) = -1;
6160 }
6161 else if (true_regnum (XEXP (pattern,0)) >= 0
6162 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6163 {
6164 rtx x = XEXP (pattern,1);
6165 rtx next = next_real_insn (insn);
6166 rtx pat = PATTERN (next);
6167 rtx src = SET_SRC (pat);
6168 rtx t = XEXP (src,0);
6169 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6170
6171 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6172 {
6173 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6174 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6175 INSN_CODE (next) = -1;
6176 INSN_CODE (insn) = -1;
6177 }
6178 }
6179 }
6180 }
6181 }
6182 }
6183
6184 /* Returns the register number for the function return value. */
6185
6186 static inline unsigned int
6187 avr_ret_register (void)
6188 {
6189 return 24;
6190 }
6191
6192 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6193
6194 static bool
6195 avr_function_value_regno_p (const unsigned int regno)
6196 {
6197 return (regno == avr_ret_register ());
6198 }
6199
6200 /* Create an RTX representing the place where a
6201 library function returns a value of mode MODE. */
6202
6203 static rtx
6204 avr_libcall_value (enum machine_mode mode,
6205 const_rtx func ATTRIBUTE_UNUSED)
6206 {
6207 int offs = GET_MODE_SIZE (mode);
6208 if (offs < 2)
6209 offs = 2;
6210 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6211 }
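/* Example: an SImode (4-byte) libcall result lives in r22..r25, since
   avr_ret_register () + 2 - 4 = 22; a QImode result still uses r24
   because the offset is forced to a minimum of 2. */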
6212
6213 /* Create an RTX representing the place where a
6214 function returns a value of data type VALTYPE. */
6215
6216 static rtx
6217 avr_function_value (const_tree type,
6218 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6219 bool outgoing ATTRIBUTE_UNUSED)
6220 {
6221 unsigned int offs;
6222
6223 if (TYPE_MODE (type) != BLKmode)
6224 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6225
6226 offs = int_size_in_bytes (type);
6227 if (offs < 2)
6228 offs = 2;
6229 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6230 offs = GET_MODE_SIZE (SImode);
6231 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6232 offs = GET_MODE_SIZE (DImode);
6233
6234 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
6235 }
6236
6237 int
6238 test_hard_reg_class (enum reg_class rclass, rtx x)
6239 {
6240 int regno = true_regnum (x);
6241 if (regno < 0)
6242 return 0;
6243
6244 if (TEST_HARD_REG_CLASS (rclass, regno))
6245 return 1;
6246
6247 return 0;
6248 }
6249
6250
6251 int
6252 jump_over_one_insn_p (rtx insn, rtx dest)
6253 {
6254 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6255 ? XEXP (dest, 0)
6256 : dest);
6257 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6258 int dest_addr = INSN_ADDRESSES (uid);
6259 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6260 }
6261
6262 /* Returns 1 if a value of mode MODE can be stored starting with hard
6263 register number REGNO. On the enhanced core, anything larger than
6264 1 byte must start in an even-numbered register for "movw" to work
6265 (this way we don't have to check for odd registers everywhere). */
6266
6267 int
6268 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6269 {
6270 /* Disallow QImode in stack pointer regs. */
6271 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6272 return 0;
6273
6274 /* The only thing that can go into registers r28:r29 is a Pmode value. */
6275 if (regno == REG_Y && mode == Pmode)
6276 return 1;
6277
6278 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6279 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6280 return 0;
6281
6282 if (mode == QImode)
6283 return 1;
6284
6285 /* Modes larger than QImode occupy consecutive registers. */
6286 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6287 return 0;
6288
6289 /* All modes larger than QImode should start in an even register. */
6290 return !(regno & 1);
6291 }
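/* Examples: HImode in r24 is OK (even-aligned and "movw" capable);
   HImode in r25 fails the final even-register test; SImode in r26
   is rejected because r26..r29 would span the r28:r29 pair. */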
6292
6293 const char *
6294 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6295 {
6296 int tmp;
6297 if (!len)
6298 len = &tmp;
6299
6300 if (GET_CODE (operands[1]) == CONST_INT)
6301 {
6302 int val = INTVAL (operands[1]);
6303 if ((val & 0xff) == 0)
6304 {
6305 *len = 3;
6306 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6307 AS2 (ldi,%2,hi8(%1)) CR_TAB
6308 AS2 (mov,%B0,%2));
6309 }
6310 else if ((val & 0xff00) == 0)
6311 {
6312 *len = 3;
6313 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6314 AS2 (mov,%A0,%2) CR_TAB
6315 AS2 (mov,%B0,__zero_reg__));
6316 }
6317 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6318 {
6319 *len = 3;
6320 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6321 AS2 (mov,%A0,%2) CR_TAB
6322 AS2 (mov,%B0,%2));
6323 }
6324 }
6325 *len = 4;
6326 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6327 AS2 (mov,%A0,%2) CR_TAB
6328 AS2 (ldi,%2,hi8(%1)) CR_TAB
6329 AS2 (mov,%B0,%2));
6330 }
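/* Worked example: loading the HImode constant 0x1200 hits the first
   special case (low byte zero) and takes three instructions:
   mov %A0,__zero_reg__ / ldi %2,hi8(0x1200) / mov %B0,%2, instead of
   the generic four-instruction sequence. */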
6331
6332
6333 const char *
6334 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6335 {
6336 rtx src = operands[1];
6337 int cnst = (GET_CODE (src) == CONST_INT);
6338
6339 if (len)
6340 {
6341 if (cnst)
6342 *len = 4 + ((INTVAL (src) & 0xff) != 0)
6343 + ((INTVAL (src) & 0xff00) != 0)
6344 + ((INTVAL (src) & 0xff0000) != 0)
6345 + ((INTVAL (src) & 0xff000000) != 0);
6346 else
6347 *len = 8;
6348
6349 return "";
6350 }
6351
6352 if (cnst && ((INTVAL (src) & 0xff) == 0))
6353 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6354 else
6355 {
6356 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6357 output_asm_insn (AS2 (mov, %A0, %2), operands);
6358 }
6359 if (cnst && ((INTVAL (src) & 0xff00) == 0))
6360 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6361 else
6362 {
6363 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6364 output_asm_insn (AS2 (mov, %B0, %2), operands);
6365 }
6366 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6367 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6368 else
6369 {
6370 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6371 output_asm_insn (AS2 (mov, %C0, %2), operands);
6372 }
6373 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6374 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6375 else
6376 {
6377 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6378 output_asm_insn (AS2 (mov, %D0, %2), operands);
6379 }
6380 return "";
6381 }
6382
6383 void
6384 avr_output_bld (rtx operands[], int bit_nr)
6385 {
6386 static char s[] = "bld %A0,0";
6387
6388 s[5] = 'A' + (bit_nr >> 3);
6389 s[8] = '0' + (bit_nr & 7);
6390 output_asm_insn (s, operands);
6391 }
6392
6393 void
6394 avr_output_addr_vec_elt (FILE *stream, int value)
6395 {
6396 switch_to_section (progmem_section);
6397 if (AVR_HAVE_JMP_CALL)
6398 fprintf (stream, "\t.word gs(.L%d)\n", value);
6399 else
6400 fprintf (stream, "\trjmp .L%d\n", value);
6401 }
6402
6403 /* Returns true if register number REGNO is safe to allocate as a
6404 scratch register (for a define_peephole2) in the current function. */
6405
6406 bool
6407 avr_hard_regno_scratch_ok (unsigned int regno)
6408 {
6409 /* Interrupt functions can only use registers that have already been saved
6410 by the prologue, even if they would normally be call-clobbered. */
6411
6412 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6413 && !df_regs_ever_live_p (regno))
6414 return false;
6415
6416 return true;
6417 }
6418
6419 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6420
6421 int
6422 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6423 unsigned int new_reg)
6424 {
6425 /* Interrupt functions can only use registers that have already been
6426 saved by the prologue, even if they would normally be
6427 call-clobbered. */
6428
6429 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6430 && !df_regs_ever_live_p (new_reg))
6431 return 0;
6432
6433 return 1;
6434 }
6435
6436 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6437 or memory location in the I/O space (QImode only).
6438
6439 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6440 Operand 1: register operand to test, or CONST_INT memory address.
6441 Operand 2: bit number.
6442 Operand 3: label to jump to if the test is true. */
6443
6444 const char *
6445 avr_out_sbxx_branch (rtx insn, rtx operands[])
6446 {
6447 enum rtx_code comp = GET_CODE (operands[0]);
6448 int long_jump = (get_attr_length (insn) >= 4);
6449 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6450
6451 if (comp == GE)
6452 comp = EQ;
6453 else if (comp == LT)
6454 comp = NE;
6455
6456 if (reverse)
6457 comp = reverse_condition (comp);
6458
6459 if (GET_CODE (operands[1]) == CONST_INT)
6460 {
6461 if (INTVAL (operands[1]) < 0x40)
6462 {
6463 if (comp == EQ)
6464 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6465 else
6466 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6467 }
6468 else
6469 {
6470 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6471 if (comp == EQ)
6472 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6473 else
6474 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6475 }
6476 }
6477 else /* GET_CODE (operands[1]) == REG */
6478 {
6479 if (GET_MODE (operands[1]) == QImode)
6480 {
6481 if (comp == EQ)
6482 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6483 else
6484 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6485 }
6486 else /* HImode or SImode */
6487 {
6488 static char buf[] = "sbrc %A1,0";
6489 int bit_nr = INTVAL (operands[2]);
6490 buf[3] = (comp == EQ) ? 's' : 'c';
6491 buf[6] = 'A' + (bit_nr >> 3);
6492 buf[9] = '0' + (bit_nr & 7);
6493 output_asm_insn (buf, operands);
6494 }
6495 }
6496
6497 if (long_jump)
6498 return (AS1 (rjmp,.+4) CR_TAB
6499 AS1 (jmp,%x3));
6500 if (!reverse)
6501 return AS1 (rjmp,%x3);
6502 return "";
6503 }
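/* Example (ignoring the long-jump and reversal adjustments): testing
   bit 3 of a QImode register with EQ emits "sbrs %1,3" followed by
   "rjmp %x3"; the skip falls over the rjmp when the bit is set, so
   the branch is taken exactly when the bit compares equal to zero. */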
6504
6505 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6506
6507 static void
6508 avr_asm_out_ctor (rtx symbol, int priority)
6509 {
6510 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6511 default_ctor_section_asm_out_constructor (symbol, priority);
6512 }
6513
6514 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6515
6516 static void
6517 avr_asm_out_dtor (rtx symbol, int priority)
6518 {
6519 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6520 default_dtor_section_asm_out_destructor (symbol, priority);
6521 }
6522
6523 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6524
6525 static bool
6526 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6527 {
6528 if (TYPE_MODE (type) == BLKmode)
6529 {
6530 HOST_WIDE_INT size = int_size_in_bytes (type);
6531 return (size == -1 || size > 8);
6532 }
6533 else
6534 return false;
6535 }
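/* (Annotation.)  Consequence of the 8-byte limit above, for
   illustration:

       struct { long a; long b; }    8 bytes  -> returned in registers
       struct { char c[9]; }         9 bytes  -> returned in memory

   Small aggregates come back in the return-value registers (typically
   r18..r25 for an 8-byte value on this port), larger ones through a
   hidden pointer supplied by the caller.  */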
6536
6537 /* Worker function for CASE_VALUES_THRESHOLD. */
6538
6539 unsigned int avr_case_values_threshold (void)
6540 {
6541 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6542 }
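/* (Annotation.)  CASE_VALUES_THRESHOLD is the smallest number of case
   labels for which a jump table is used instead of a chain of
   compares.  Example: a switch with 10 cases becomes a table when the
   threshold is 8 (devices without JMP/CALL, or with -mcall-prologues)
   but a compare chain when it is 17.  */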
6543
6544 /* Helper for __builtin_avr_delay_cycles.  */
6545
6546 static void
6547 avr_expand_delay_cycles (rtx operands0)
6548 {
6549 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6550 unsigned HOST_WIDE_INT cycles_used;
6551 unsigned HOST_WIDE_INT loop_count;
6552
6553 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6554 {
6555 loop_count = ((cycles - 9) / 6) + 1;
6556 cycles_used = ((loop_count - 1) * 6) + 9;
6557 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6558 cycles -= cycles_used;
6559 }
6560
6561 if (IN_RANGE (cycles, 262145, 83886081))
6562 {
6563 loop_count = ((cycles - 7) / 5) + 1;
6564 if (loop_count > 0xFFFFFF)
6565 loop_count = 0xFFFFFF;
6566 cycles_used = ((loop_count - 1) * 5) + 7;
6567 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6568 cycles -= cycles_used;
6569 }
6570
6571 if (IN_RANGE (cycles, 768, 262144))
6572 {
6573 loop_count = ((cycles - 5) / 4) + 1;
6574 if (loop_count > 0xFFFF)
6575 loop_count = 0xFFFF;
6576 cycles_used = ((loop_count - 1) * 4) + 5;
6577 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6578 cycles -= cycles_used;
6579 }
6580
6581 if (IN_RANGE (cycles, 6, 767))
6582 {
6583 loop_count = cycles / 3;
6584 if (loop_count > 255)
6585 loop_count = 255;
6586 cycles_used = loop_count * 3;
6587 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6588 cycles -= cycles_used;
6589 }
6590
6591 while (cycles >= 2)
6592 {
6593 emit_insn (gen_nopv (GEN_INT (2)));
6594 cycles -= 2;
6595 }
6596
6597 if (cycles == 1)
6598 {
6599 emit_insn (gen_nopv (GEN_INT (1)));
6600 cycles--;
6601 }
6602 }
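/* (Annotation.)  Worked example of the decomposition above for
   cycles = 1000, which falls into the 768..262144 range:

       loop_count  = (1000 - 5) / 4 + 1 = 249
       cycles_used = (249 - 1) * 4 + 5  = 997

   The remaining 3 cycles are below the next range's minimum of 6, so
   the tail emits one 2-cycle nop sequence and one single nop:
   997 + 2 + 1 = 1000 cycles in total.  */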
6603
6604 /* IDs for all the AVR builtins. */
6605
6606 enum avr_builtin_id
6607 {
6608 AVR_BUILTIN_NOP,
6609 AVR_BUILTIN_SEI,
6610 AVR_BUILTIN_CLI,
6611 AVR_BUILTIN_WDR,
6612 AVR_BUILTIN_SLEEP,
6613 AVR_BUILTIN_SWAP,
6614 AVR_BUILTIN_FMUL,
6615 AVR_BUILTIN_FMULS,
6616 AVR_BUILTIN_FMULSU,
6617 AVR_BUILTIN_DELAY_CYCLES
6618 };
6619
6620 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6621 do \
6622 { \
6623 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6624 NULL, NULL_TREE); \
6625 } while (0)
6626
6627
6628 /* Implement `TARGET_INIT_BUILTINS'.
6629 Set up all builtin functions for this target.  */
6630
6631 static void
6632 avr_init_builtins (void)
6633 {
6634 tree void_ftype_void
6635 = build_function_type_list (void_type_node, NULL_TREE);
6636 tree uchar_ftype_uchar
6637 = build_function_type_list (unsigned_char_type_node,
6638 unsigned_char_type_node,
6639 NULL_TREE);
6640 tree uint_ftype_uchar_uchar
6641 = build_function_type_list (unsigned_type_node,
6642 unsigned_char_type_node,
6643 unsigned_char_type_node,
6644 NULL_TREE);
6645 tree int_ftype_char_char
6646 = build_function_type_list (integer_type_node,
6647 char_type_node,
6648 char_type_node,
6649 NULL_TREE);
6650 tree int_ftype_char_uchar
6651 = build_function_type_list (integer_type_node,
6652 char_type_node,
6653 unsigned_char_type_node,
6654 NULL_TREE);
6655 tree void_ftype_ulong
6656 = build_function_type_list (void_type_node,
6657 long_unsigned_type_node,
6658 NULL_TREE);
6659
6660 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6661 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6662 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6663 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6664 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6665 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6666 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6667 AVR_BUILTIN_DELAY_CYCLES);
6668
6669 if (AVR_HAVE_MUL)
6670 {
6671 /* FIXME: If !AVR_HAVE_MUL, make the respective functions available
6672 in libgcc.  For fmul and fmuls this is straightforward with
6673 upcoming fixed-point support.  */
6674
6675 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6676 AVR_BUILTIN_FMUL);
6677 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6678 AVR_BUILTIN_FMULS);
6679 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6680 AVR_BUILTIN_FMULSU);
6681 }
6682 }
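/* (Annotation.)  Illustrative user-side C for the builtins registered
   above (not compiled as part of this file):

       __builtin_avr_cli ();                  disable interrupts
       __builtin_avr_wdr ();                  reset the watchdog
       __builtin_avr_delay_cycles (1000);     operand must fold to a
                                              compile-time constant
       __builtin_avr_sei ();                  re-enable interrupts

       unsigned char y = __builtin_avr_swap (0xAB);   nibbles -> 0xBA

   The fmul/fmuls/fmulsu builtins are registered only when the device
   has a hardware multiplier (AVR_HAVE_MUL), per the FIXME above.  */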
6683
6684 #undef DEF_BUILTIN
6685
6686 struct avr_builtin_description
6687 {
6688 const enum insn_code icode;
6689 const char *const name;
6690 const enum avr_builtin_id id;
6691 };
6692
6693 static const struct avr_builtin_description
6694 bdesc_1arg[] =
6695 {
6696 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
6697 };
6698
6699 static const struct avr_builtin_description
6700 bdesc_2arg[] =
6701 {
6702 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6703 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6704 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
6705 };
6706
6707 /* Subroutine of avr_expand_builtin to take care of unop insns. */
6708
6709 static rtx
6710 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6711 rtx target)
6712 {
6713 rtx pat;
6714 tree arg0 = CALL_EXPR_ARG (exp, 0);
6715 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6716 enum machine_mode op0mode = GET_MODE (op0);
6717 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6718 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6719
6720 if (! target
6721 || GET_MODE (target) != tmode
6722 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6723 {
6724 target = gen_reg_rtx (tmode);
6725 }
6726
6727 if (op0mode == SImode && mode0 == HImode)
6728 {
6729 op0mode = HImode;
6730 op0 = gen_lowpart (HImode, op0);
6731 }
6732
6733 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6734
6735 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6736 op0 = copy_to_mode_reg (mode0, op0);
6737
6738 pat = GEN_FCN (icode) (target, op0);
6739 if (! pat)
6740 return NULL_RTX;
6741
6742 emit_insn (pat);
6743
6744 return target;
6745 }
6746
6747
6748 /* Subroutine of avr_expand_builtin to take care of binop insns. */
6749
6750 static rtx
6751 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6752 {
6753 rtx pat;
6754 tree arg0 = CALL_EXPR_ARG (exp, 0);
6755 tree arg1 = CALL_EXPR_ARG (exp, 1);
6756 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6757 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6758 enum machine_mode op0mode = GET_MODE (op0);
6759 enum machine_mode op1mode = GET_MODE (op1);
6760 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6761 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6762 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6763
6764 if (! target
6765 || GET_MODE (target) != tmode
6766 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6767 {
6768 target = gen_reg_rtx (tmode);
6769 }
6770
6771 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6772 {
6773 op0mode = HImode;
6774 op0 = gen_lowpart (HImode, op0);
6775 }
6776
6777 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6778 {
6779 op1mode = HImode;
6780 op1 = gen_lowpart (HImode, op1);
6781 }
6782
6783 /* Abort if the insn expects its input operands in modes that
6784 differ from those of the expanded arguments.  */
6785
6786 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6787 && (op1mode == mode1 || op1mode == VOIDmode));
6788
6789 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6790 op0 = copy_to_mode_reg (mode0, op0);
6791
6792 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6793 op1 = copy_to_mode_reg (mode1, op1);
6794
6795 pat = GEN_FCN (icode) (target, op0, op1);
6796
6797 if (! pat)
6798 return NULL_RTX;
6799
6800 emit_insn (pat);
6801 return target;
6802 }
6803
6804
6805 /* Expand an expression EXP that calls a built-in function,
6806 with result going to TARGET if that's convenient
6807 (and in mode MODE if that's convenient).
6808 SUBTARGET may be used as the target for computing one of EXP's operands.
6809 IGNORE is nonzero if the value is to be ignored. */
6810
6811 static rtx
6812 avr_expand_builtin (tree exp, rtx target,
6813 rtx subtarget ATTRIBUTE_UNUSED,
6814 enum machine_mode mode ATTRIBUTE_UNUSED,
6815 int ignore ATTRIBUTE_UNUSED)
6816 {
6817 size_t i;
6818 const struct avr_builtin_description *d;
6819 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6820 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6821 tree arg0;
6822 rtx op0;
6823
6824 switch (id)
6825 {
6826 case AVR_BUILTIN_NOP:
6827 emit_insn (gen_nopv (GEN_INT (1)));
6828 return 0;
6829
6830 case AVR_BUILTIN_SEI:
6831 emit_insn (gen_enable_interrupt ());
6832 return 0;
6833
6834 case AVR_BUILTIN_CLI:
6835 emit_insn (gen_disable_interrupt ());
6836 return 0;
6837
6838 case AVR_BUILTIN_WDR:
6839 emit_insn (gen_wdr ());
6840 return 0;
6841
6842 case AVR_BUILTIN_SLEEP:
6843 emit_insn (gen_sleep ());
6844 return 0;
6845
6846 case AVR_BUILTIN_DELAY_CYCLES:
6847 {
6848 arg0 = CALL_EXPR_ARG (exp, 0);
6849 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6850
6851 if (! CONST_INT_P (op0))
6852 error ("__builtin_avr_delay_cycles expects a compile-time integer constant");
6853
6854 avr_expand_delay_cycles (op0);
6855 return 0;
6856 }
6857 }
6858
6859 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6860 if (d->id == id)
6861 return avr_expand_unop_builtin (d->icode, exp, target);
6862
6863 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6864 if (d->id == id)
6865 return avr_expand_binop_builtin (d->icode, exp, target);
6866
6867 gcc_unreachable ();
6868 }
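/* (Annotation.)  Dispatch traced for one example: a call to
   __builtin_avr_fmuls arrives with function code AVR_BUILTIN_FMULS,
   matches no case in the switch above, is absent from bdesc_1arg,
   and is found in bdesc_2arg, so it expands through
   avr_expand_binop_builtin using CODE_FOR_fmuls.  */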
6869
6870
6871 #include "gt-avr.h"