/* Subroutines for insn-output.c for ATMEL AVR micro controllers
   Copyright (C) 1998-2014 Free Software Foundation, Inc.
   Contributed by Denis Chertykov (chertykov@gmail.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "varasm.h"
#include "print-tree.h"
#include "calls.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "output.h"
#include "expr.h"
#include "c-family/c-common.h"
#include "diagnostic-core.h"
#include "obstack.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "recog.h"
#include "optabs.h"
#include "ggc.h"
#include "langhooks.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "params.h"
#include "df.h"
#include "builtins.h"

/* Maximal allowed offset for an address in the LD command.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))
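
/* Editor's illustration (not in the original source): LDD/STD accept
   displacements 0..63, and a MODE-sized access at offset O touches bytes
   O .. O + size - 1.  Hence MAX_LD_OFFSET (HImode) = 62, and

       ldd r24,Y+62   ; low byte
       ldd r25,Y+63   ; high byte

   is the largest in-range HImode access off the Y pointer.  */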

/* Return true if STR starts with PREFIX and false otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)

/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
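
/* Editor's usage sketch (assuming a SYMBOL_REF held in a variable `sym'):

       AVR_SYMBOL_SET_ADDR_SPACE (sym, ADDR_SPACE_FLASH1);
       gcc_assert (ADDR_SPACE_FLASH1 == AVR_SYMBOL_GET_ADDR_SPACE (sym));

   i.e. the address-space id is kept in the 4 machine-dependent flag bits.  */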

/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).  */
const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
  { ADDR_SPACE_RAM,    0, 2, "",         0, NULL },
  { ADDR_SPACE_FLASH,  1, 2, "__flash",  0, ".progmem.data" },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
  { ADDR_SPACE_MEMX,   1, 3, "__memx",   0, ".progmemx.data" },
};
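
/* Editor's note: judging from the initializers above, the columns are
   (id, located-in-flash flag, pointer size in bytes, user-visible name,
   64 KiB segment number, default section name); see avr_addrspace_t in
   avr.h for the authoritative field layout.  */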


/* RAM addresses of some SFRs that the compiler uses and that are unique
   across all devices of an architecture like 'avr4'.  */

typedef struct
{
  /* SREG: The processor status.  */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA.  */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM.  */
  int rampz;

  /* SP: The stack pointer and its low and high byte.  */
  int sp_l;
  int sp_h;
} avr_addr_t;

static avr_addr_t avr_addr;


/* Prototypes for local helper functions.  */

static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);

static int get_sequence_length (rtx_insn *insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, const_tree);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
                                 int, bool);
static void output_reload_in_const (rtx*, rtx, int*, bool);
static struct machine_function * avr_init_machine_status (void);


/* Prototypes for hook implementors if needed before their implementation.  */

static bool avr_rtx_costs (rtx, int, int, int, int*, bool);


/* Allocate registers from r25 to r8 for parameters for function calls.  */
#define FIRST_CUM_REG 26

/* Implicit target register of LPM instruction (R0).  */
extern GTY(()) rtx lpm_reg_rtx;
rtx lpm_reg_rtx;

/* (Implicit) address register of LPM instruction (R31:R30 = Z).  */
extern GTY(()) rtx lpm_addr_reg_rtx;
rtx lpm_addr_reg_rtx;

/* Temporary register RTX (reg:QI TMP_REGNO).  */
extern GTY(()) rtx tmp_reg_rtx;
rtx tmp_reg_rtx;

/* Zeroed register RTX (reg:QI ZERO_REGNO).  */
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;

/* RTXs for all general purpose registers as QImode.  */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];

/* SREG, the processor status.  */
extern GTY(()) rtx sreg_rtx;
rtx sreg_rtx;

/* RAMP* special function registers.  */
extern GTY(()) rtx rampd_rtx;
extern GTY(()) rtx rampx_rtx;
extern GTY(()) rtx rampy_rtx;
extern GTY(()) rtx rampz_rtx;
rtx rampd_rtx;
rtx rampx_rtx;
rtx rampy_rtx;
rtx rampz_rtx;

/* RTX containing the strings "" and "e", respectively.  */
static GTY(()) rtx xstring_empty;
static GTY(()) rtx xstring_e;

/* Current architecture.  */
const avr_arch_t *avr_current_arch;

/* Current device.  */
const avr_mcu_t *avr_current_device;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed sections associated to __attribute__((progmem)) aka PROGMEM
   or to address space __flash* or __memx.  Only used as singletons inside
   avr_asm_select_section, but it must not be local there because of GTY.  */
static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];

/* Condition for insns/expanders from avr-dimode.md.  */
bool avr_have_dimode = true;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;


/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO.  Return LO.  */

static char*
avr_tolower (char *lo, const char *up)
{
  char *lo0 = lo;

  for (; *up; up++, lo++)
    *lo = TOLOWER (*up);

  *lo = '\0';

  return lo0;
}


/* Count the number of set bits in VAL.  */

static inline int
avr_popcount (unsigned int val)
{
  int pop = 0;

  while (val)
    {
      val &= val-1;
      pop++;
    }

  return pop;
}
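
/* Editor's illustration: the loop clears the lowest set bit per iteration
   (Kernighan's method); avr_popcount (0xb3) walks
   0xb3 -> 0xb2 -> 0xb0 -> 0xa0 -> 0x80 -> 0 and returns 5.  */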


/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  enum machine_mode mode = GET_MODE (xval);

  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
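
/* Editor's illustration: with POP_MASK = (1 << 0) | (1 << 8) only the
   bytes 0x00 and 0xff are accepted, so

       avr_popcount_each_byte (gen_int_mode (0x00ff, HImode), 2,
                               (1 << 0) | (1 << 8))

   yields true, whereas 0x01ff is rejected (its high byte has popcount 1).  */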


/* Access some RTX as INT_MODE.  If X is a CONST_FIXED we can get
   the bit representation of X by "casting" it to CONST_INT.  */

rtx
avr_to_int_mode (rtx x)
{
  enum machine_mode mode = GET_MODE (x);

  return VOIDmode == mode
    ? x
    : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
}
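
/* Editor's illustration, assuming the usual s.15 bit layout of HQmode:
   a (const_fixed:HQ 0.5) is re-read bitwise as (const_int 16384),
   i.e. 0.5 * 2^15.  */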


/* Implement `TARGET_OPTION_OVERRIDE'.  */

static void
avr_option_override (void)
{
  /* Disable -fdelete-null-pointer-checks for the AVR target.  Under this
     option the compiler assumes that dereferencing a null pointer halts
     the program.  For AVR this assumption is not true: programs can
     safely dereference address zero, and code transformed under this
     option may thus misbehave.  So turn the option off.  */

  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save/restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no longer able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (flag_pie == 1)
    warning (OPT_fpie, "-fpie is not supported");
  if (flag_pie == 2)
    warning (OPT_fPIE, "-fPIE is not supported");

  /* Search for mcu arch.
     ??? We should probably just put the architecture-default device
     settings in the architecture struct and remove any notion of a current
     device from gcc.  */

  for (avr_current_device = avr_mcu_types; ; avr_current_device++)
    {
      if (!avr_current_device->name)
        fatal_error ("mcu not found");
      if (!avr_current_device->macro
          && avr_current_device->arch == avr_arch_index)
        break;
    }

  avr_current_arch = &avr_arch_types[avr_arch_index];
  if (avr_n_flash < 0)
    avr_n_flash = avr_current_device->n_flash;

  /* RAM addresses of some SFRs common to all devices in respective arch.  */

  /* SREG: Status Register containing flags like I (global IRQ).  */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM.  */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L).  */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;
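
  /* Editor's example: on classic AVRs the I/O space is mapped into RAM
     at offset 0x20, so e.g. an ATmega328P ends up with SREG at 0x5F and
     SP_L/SP_H at 0x5D/0x5E, matching the data sheet.  */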

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log ();
}

/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}


/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton.  */

void
avr_init_expanders (void)
{
  int regno;

  for (regno = 0; regno < 32; regno++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx  = all_regs_rtx[TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  sreg_rtx  = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
}


/* Implement `REGNO_REG_CLASS'.  */
/* Return register class for register R.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, r27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
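
/* Editor's illustration: avr_regno_reg_class (24) yields ADDW_REGS because
   ADIW/SBIW only operate on r24/r26/r28/r30, and regnos 32/33 (SPL/SPH)
   map to STACK_REG.  */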


/* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.  */

static bool
avr_scalar_mode_supported_p (enum machine_mode mode)
{
  if (ALL_FIXED_POINT_MODE_P (mode))
    return true;

  if (PSImode == mode)
    return true;

  return default_scalar_mode_supported_p (mode);
}


/* Return TRUE if DECL is a VAR_DECL located in flash and FALSE otherwise.  */

static bool
avr_decl_flash_p (tree decl)
{
  if (TREE_CODE (decl) != VAR_DECL
      || TREE_TYPE (decl) == error_mark_node)
    {
      return false;
    }

  return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
}


/* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
   address space and FALSE otherwise.  */

static bool
avr_decl_memx_p (tree decl)
{
  if (TREE_CODE (decl) != VAR_DECL
      || TREE_TYPE (decl) == error_mark_node)
    {
      return false;
    }

  return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
}


/* Return TRUE if X is a MEM rtx located in flash and FALSE otherwise.  */

bool
avr_mem_flash_p (rtx x)
{
  return (MEM_P (x)
          && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
}


/* Return TRUE if X is a MEM rtx located in the 24-bit flash
   address space and FALSE otherwise.  */

bool
avr_mem_memx_p (rtx x)
{
  return (MEM_P (x)
          && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
}


/* A helper for the subsequent function attribute tests used to dig for
   attribute NAME in a FUNCTION_DECL or FUNCTION_TYPE.  */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}

/* Return nonzero if FUNC is a naked function.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}


/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes.  */

static void
avr_set_current_function (tree decl)
{
  location_t loc;
  const char *isr;

  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  loc = DECL_SOURCE_LOCATION (decl);

  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);

  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting
     features.  */

  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
              " exclusive", "OS_task", "OS_main", isr);

  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */

  if (cfun->machine->is_naked
      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
                " no effect on %qs function", "OS_task", "OS_main", "naked");

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name;

      name = DECL_ASSEMBLER_NAME_SET_P (decl)
        ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
        : IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Skip a leading '*' that might still prefix the assembler name,
         e.g. in non-LTO runs.  */

      name = default_strip_name_encoding (name);

      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC
         started using this when it switched from SIGNAL and INTERRUPT
         to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* Interrupt handlers must be void __vector (void) functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
                    name, isr);
    }

  /* Don't print the above diagnostics more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
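
/* Editor's note: AVR-LibC's ISR() macro emits handlers of exactly this
   shape, roughly

       void __vector_16 (void) __attribute__ ((signal, used));
       void __vector_16 (void) { ... }

   (vector number 16 is just an example), so a handler whose name does not
   start with "__vector" is most likely a misspelled vector name, which is
   what the warning above catches.  */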


/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */

int
avr_accumulate_outgoing_args (void)
{
  if (!cfun)
    return TARGET_ACCUMULATE_OUTGOING_ARGS;

  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
     what offset is correct.  In some cases it is relative to
     virtual_outgoing_args_rtx and in others it is relative to
     virtual_stack_vars_rtx.  For example code see
         gcc.c-torture/execute/built-in-setjmp.c
         gcc.c-torture/execute/builtins/sprintf-chk.c  */

  return (TARGET_ACCUMULATE_OUTGOING_ARGS
          && !(cfun->calls_setjmp
               || cfun->has_nonlocal_label));
}


/* Report contribution of accumulated outgoing arguments to stack size.  */

static inline int
avr_outgoing_args_size (void)
{
  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
}


/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
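
/* Editor's note: the "+ 1" reflects that pushes post-decrement SP, so the
   frame pointer points one byte below the frame and the first local lives
   at Y + 1 when no outgoing arguments are accumulated.  */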


/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in the prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}


/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS'.  */

static bool
avr_allocate_stack_slots_for_args (void)
{
  return !cfun->machine->is_naked;
}


/* Return true if register FROM can be eliminated via register TO.  */

static bool
avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
          || !frame_pointer_needed);
}


/* Implement `TARGET_WARN_FUNC_RETURN'.  */

static bool
avr_warn_func_return (tree decl)
{
  /* Naked functions are implemented entirely in assembly, including the
     return sequence, so suppress warnings about this.  */

  return !avr_naked_function_p (decl);
}

/* Compute offset between arg_pointer and frame_pointer.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      int offset = frame_pointer_needed ? 2 : 0;
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size ()
              + avr_pc_size + 1 + offset);
    }
}


/* Helper for the function below.  */

static void
avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
{
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  TYPE_ALIGN (*node) = 8;
  SET_TYPE_MODE (*node, mode);

  layout_type (*node);
}


/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}


/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
/* The actual start of the frame is virtual_stack_vars_rtx, which is offset
   from the frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  rtx xval = gen_reg_rtx (Pmode);
  emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
                         gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
  return xval;
}


/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is the return address of the function.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address.  Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}

/* Return 1 if the function epilogue is just a single "ret".  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_outgoing_args_size () == 0
          && avr_regs_to_save (NULL) == 0
          && ! cfun->machine->is_interrupt
          && ! cfun->machine->is_signal
          && ! cfun->machine->is_naked
          && ! TREE_THIS_VOLATILE (current_function_decl));
}

/* Check the sequence of live registers.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }
  return (cur_seq == live_seq) ? live_seq : 0;
}
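
/* Editor's illustration: with r14...r17 live and a frame pointer needed,
   the loop finds live_seq = cur_seq = 4 and the frame-pointer branch adds
   2, so sequent_regs_live () returns 6 and the out-of-line prologue and
   epilogue helpers can save/restore those 6 registers.  */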

/* Obtain the length of a sequence of insns.  */

int
get_sequence_length (rtx_insn *insns)
{
  rtx_insn *insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}


/* Implement `INCOMING_RETURN_ADDR_RTX'.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
}

/* Helper for expand_prologue.  Emit a push of a byte register.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg;
  rtx_insn *insn;

  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}


/* Helper for expand_prologue.  Emit a push of an SFR via tmp_reg.
   SFR is a MEM representing the memory location of the SFR.
   If CLR_P then clear the SFR after the push using zero_reg.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx_insn *insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
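
/* Editor's note: for SREG with CLR_P the above amounts to, roughly,

       in   __tmp_reg__,__SREG__
       push __tmp_reg__
       out  __SREG__,__zero_reg__

   (shown for illustration; the real output is produced by the movqi and
   push patterns).  */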

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size () > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer.  These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             The optimum method depends on function type, stack and
             frame size.  To avoid complex logic, both methods are
             tested and the shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed.
             We use the X register as scratch.  This is safe because X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */
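
          /* Editor's sketch of the two methods on a classic device (not
             emitted verbatim; movhi_sp_r performs the IRQ-safe SP write):

                 ;; Method 1: via frame pointer    ;; Method 2: direct SP
                 in   r28,__SP_L__                 rcall .          ; SP -= 2
                 in   r29,__SP_H__                 push __tmp_reg__ ; SP -= 1
                 sbiw r28,<size>
                 out  __SP_H__,r29
                 out  __SP_L__,r28

             Method 2 only handles the small offsets accepted by
             avr_sp_immediate_operand, hence both sequences are measured
             and the shorter one is emitted.  */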

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into an ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If the user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************ Method 1: Adjust frame pointer ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (Pmode, fp,
                                                        -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************ Method 2: Adjust Stack pointer ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}


/* Output function prologue.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size () + avr_outgoing_args_size ();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and the low-part register
         is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    } /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}


/* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
/* Output summary at end of function prologue.  */

static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    fprintf (file, "/* outgoing args size = %d */\n",
             avr_outgoing_args_size ());

  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
           get_frame_size ());
  fprintf (file, "/* stack size = %d */\n",
           cfun->machine->stack_usage);
  /* Create the symbol .L__stack_usage here so all functions have it;
     avr_return_addr_rtx uses it (+1, or +2 with a 3-byte PC) to locate
     the return address.  */
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
}


/* Implement `EPILOGUE_USES'.  */

int
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
{
  if (reload_completed
      && cfun->machine
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
    return 1;
  return 0;
}

/* Helper for avr_expand_epilogue.  Emit a pop of a byte register.  */

static void
emit_pop_byte (unsigned regno)
{
  rtx mem, reg;

  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
}

/* Output RTL epilogue.  */

void
avr_expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size () + avr_outgoing_args_size ();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in the
         prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}


/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}


/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */

static bool
avr_cannot_modify_jumps_p (void)
{
  /* Naked functions must not have any instructions after
     their epilogue, see PR42240.  */

  if (reload_completed
      && cfun->machine
      && cfun->machine->is_naked)
    {
      return true;
    }

  return false;
}


/* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'.  */

static bool
avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
{
  /* FIXME: Non-generic addresses are not mode-dependent in themselves.
     This hook just serves to hack around PR rtl-optimization/52543 by
     claiming that non-generic addresses were mode-dependent so that
     lower-subreg.c will skip these addresses.  lower-subreg.c sets up fake
     RTXes to probe SET and MEM costs and assumes that MEM is always in the
     generic address space which is not true.  */

  return !ADDR_SPACE_GENERIC_P (as);
}


/* Helper function for `avr_legitimate_address_p'.  */

static inline bool
avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
                       RTX_CODE outer_code, bool strict)
{
  return (REG_P (reg)
          && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
                                                 as, outer_code, UNKNOWN)
              || (!strict
                  && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
}


/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}


/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
   now only a helper for avr_addr_space_legitimize_address.  */
/* Attempt to replace X with a valid
   memory address for an operand of mode MODE.  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}


/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
/* This will allow register R26/R27 to be used where it is no worse than
   the normal base pointers R28/R29 or R30/R31, e.g. if the base offset is
   greater than 63 bytes or for R++ or --R addressing.  */

rtx
avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  rtx x = *px;

  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  if (GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    {
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           1, (enum reload_type) addr_type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, (enum reload_type) type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
        }
      else if (! (frame_pointer_needed
                  && XEXP (x, 0) == frame_pointer_rtx))
        {
          push_reload (x, NULL_RTX, px, NULL,
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                       opnum, (enum reload_type) type);

          if (avr_log.legitimize_reload_address)
            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
                       POINTER_REGS, x, NULL_RTX);

          return x;
        }
    }

  return NULL_RTX;
}


/* Implement `TARGET_SECONDARY_RELOAD'.  */

static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      enum machine_mode mode, secondary_reload_info *sri)
{
  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch.  */

      switch (mode)
        {
        default:
          gcc_unreachable ();

        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  return NO_REGS;
}


/* Helper function to print assembler code or, alternatively, to track
   instruction sequence lengths.  Always return "".

   If PLEN == NULL:
       Output assembler code from template TPL with operands supplied
       by OPERANDS.  This is just forwarding to output_asm_insn.

   If PLEN != NULL:
       If N_WORDS >= 0  Add N_WORDS to *PLEN.
       If N_WORDS < 0   Set *PLEN to -N_WORDS.
       Don't output anything.  */

static const char*
avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
{
  if (NULL == plen)
    {
      output_asm_insn (tpl, operands);
    }
  else
    {
      if (n_words < 0)
        *plen = -n_words;
      else
        *plen += n_words;
    }

  return "";
}
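
/* Editor's usage sketch: the output functions below typically end with
   something like

       return avr_asm_len ("mov %A0,%A1" "\n\t"
                           "mov %B0,%B1", operands, plen, 2);

   which prints both insns when PLEN is NULL and otherwise merely accounts
   2 words of length.  */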
2004
2005
2006 /* Return a pointer register name as a string. */
2007
2008 static const char*
2009 ptrreg_to_str (int regno)
2010 {
2011 switch (regno)
2012 {
2013 case REG_X: return "X";
2014 case REG_Y: return "Y";
2015 case REG_Z: return "Z";
2016 default:
2017 output_operand_lossage ("address operand requires constraint for"
2018 " X, Y, or Z register");
2019 }
2020 return NULL;
2021 }
2022
2023 /* Return the condition name as a string.
2024 Used when constructing conditional jumps. */
2025
2026 static const char*
2027 cond_string (enum rtx_code code)
2028 {
2029 switch (code)
2030 {
2031 case NE:
2032 return "ne";
2033 case EQ:
2034 return "eq";
2035 case GE:
2036 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2037 return "pl";
2038 else
2039 return "ge";
2040 case LT:
2041 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2042 return "mi";
2043 else
2044 return "lt";
2045 case GEU:
2046 return "sh";
2047 case LTU:
2048 return "lo";
2049 default:
2050 gcc_unreachable ();
2051 }
2052
2053 return "";
2054 }
2055
2056
2057 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
2058 /* Output ADDR to FILE as an address. */
2059
2060 static void
2061 avr_print_operand_address (FILE *file, rtx addr)
2062 {
2063 switch (GET_CODE (addr))
2064 {
2065 case REG:
2066 fputs (ptrreg_to_str (REGNO (addr)), file);
2067 break;
2068
2069 case PRE_DEC:
2070 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2071 break;
2072
2073 case POST_INC:
2074 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2075 break;
2076
2077 default:
2078 if (CONSTANT_ADDRESS_P (addr)
2079 && text_segment_operand (addr, VOIDmode))
2080 {
2081 rtx x = addr;
2082 if (GET_CODE (x) == CONST)
2083 x = XEXP (x, 0);
2084 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2085 {
2086 /* The assembler's gs() operator emits a word address.  Make the offset
2087 a byte offset inside gs() for the assembler.  This is
2088 needed because the more logical (constant + gs(sym)) is not
2089 accepted by gas.  For devices with 128 KiB flash or less this is OK.
2090 For larger devices it will create a trampoline at an offset
2091 from the symbol, which may not be what the user really wanted. */
2092
2093 fprintf (file, "gs(");
2094 output_addr_const (file, XEXP (x,0));
2095 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2096 2 * INTVAL (XEXP (x, 1)));
2097 if (AVR_3_BYTE_PC)
2098 if (warning (0, "pointer offset from symbol maybe incorrect"))
2099 {
2100 output_addr_const (stderr, addr);
2101 fprintf (stderr, "\n");
2102 }
2103 }
2104 else
2105 {
2106 fprintf (file, "gs(");
2107 output_addr_const (file, addr);
2108 fprintf (file, ")");
2109 }
2110 }
2111 else
2112 output_addr_const (file, addr);
2113 }
2114 }
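
/* Example (editor's note): for an address like
   (const (plus (symbol_ref "foo") (const_int 2))) in the text segment,
   the code above prints "gs(foo+4)": gs() yields a word address, so the
   word offset 2 is doubled into the byte offset 4 inside gs(), since
   gas does not accept "2+gs(foo)".  */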
2115
2116
2117 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2118
2119 static bool
2120 avr_print_operand_punct_valid_p (unsigned char code)
2121 {
2122 return code == '~' || code == '!';
2123 }
2124
2125
2126 /* Implement `TARGET_PRINT_OPERAND'. */
2127 /* Output X as an assembler operand to file FILE.
2128 For a description of supported %-codes, see top of avr.md. */
2129
2130 static void
2131 avr_print_operand (FILE *file, rtx x, int code)
2132 {
2133 int abcd = 0;
2134
2135 if (code >= 'A' && code <= 'D')
2136 abcd = code - 'A';
2137
2138 if (code == '~')
2139 {
2140 if (!AVR_HAVE_JMP_CALL)
2141 fputc ('r', file);
2142 }
2143 else if (code == '!')
2144 {
2145 if (AVR_HAVE_EIJMP_EICALL)
2146 fputc ('e', file);
2147 }
2148 else if (code == 't'
2149 || code == 'T')
2150 {
2151 static int t_regno = -1;
2152 static int t_nbits = -1;
2153
2154 if (REG_P (x) && t_regno < 0 && code == 'T')
2155 {
2156 t_regno = REGNO (x);
2157 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2158 }
2159 else if (CONST_INT_P (x) && t_regno >= 0
2160 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2161 {
2162 int bpos = INTVAL (x);
2163
2164 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2165 if (code == 'T')
2166 fprintf (file, ",%d", bpos % 8);
2167
2168 t_regno = -1;
2169 }
2170 else
2171 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2172 }
2173 else if (REG_P (x))
2174 {
2175 if (x == zero_reg_rtx)
2176 fprintf (file, "__zero_reg__");
2177 else if (code == 'r' && REGNO (x) < 32)
2178 fprintf (file, "%d", (int) REGNO (x));
2179 else
2180 fputs (reg_names[REGNO (x) + abcd], file);
2181 }
2182 else if (CONST_INT_P (x))
2183 {
2184 HOST_WIDE_INT ival = INTVAL (x);
2185
2186 if ('i' != code)
2187 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2188 else if (low_io_address_operand (x, VOIDmode)
2189 || high_io_address_operand (x, VOIDmode))
2190 {
2191 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2192 fprintf (file, "__RAMPZ__");
2193 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2194 fprintf (file, "__RAMPY__");
2195 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2196 fprintf (file, "__RAMPX__");
2197 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2198 fprintf (file, "__RAMPD__");
2199 else if (AVR_XMEGA && ival == avr_addr.ccp)
2200 fprintf (file, "__CCP__");
2201 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2202 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2203 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2204 else
2205 {
2206 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2207 ival - avr_current_arch->sfr_offset);
2208 }
2209 }
2210 else
2211 fatal_insn ("bad address, not an I/O address:", x);
2212 }
2213 else if (MEM_P (x))
2214 {
2215 rtx addr = XEXP (x, 0);
2216
2217 if (code == 'm')
2218 {
2219 if (!CONSTANT_P (addr))
2220 fatal_insn ("bad address, not a constant:", addr);
2221 /* An assembler template with a %m-code refers to data memory, not to a progmem section. */
2222 if (text_segment_operand (addr, VOIDmode))
2223 if (warning (0, "accessing data memory with"
2224 " program memory address"))
2225 {
2226 output_addr_const (stderr, addr);
2227 fprintf (stderr, "\n");
2228 }
2229 output_addr_const (file, addr);
2230 }
2231 else if (code == 'i')
2232 {
2233 avr_print_operand (file, addr, 'i');
2234 }
2235 else if (code == 'o')
2236 {
2237 if (GET_CODE (addr) != PLUS)
2238 fatal_insn ("bad address, not (reg+disp):", addr);
2239
2240 avr_print_operand (file, XEXP (addr, 1), 0);
2241 }
2242 else if (code == 'p' || code == 'r')
2243 {
2244 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2245 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2246
2247 if (code == 'p')
2248 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2249 else
2250 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2251 }
2252 else if (GET_CODE (addr) == PLUS)
2253 {
2254 avr_print_operand_address (file, XEXP (addr,0));
2255 if (REGNO (XEXP (addr, 0)) == REG_X)
2256 fatal_insn ("internal compiler error. Bad address:"
2257 ,addr);
2258 fputc ('+', file);
2259 avr_print_operand (file, XEXP (addr,1), code);
2260 }
2261 else
2262 avr_print_operand_address (file, addr);
2263 }
2264 else if (code == 'i')
2265 {
2266 if (GET_CODE (x) == SYMBOL_REF && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
2267 avr_print_operand_address
2268 (file, plus_constant (HImode, x, -avr_current_arch->sfr_offset));
2269 else
2270 fatal_insn ("bad address, not an I/O address:", x);
2271 }
2272 else if (code == 'x')
2273 {
2274 /* Constant progmem address - like used in jmp or call */
2275 if (0 == text_segment_operand (x, VOIDmode))
2276 if (warning (0, "accessing program memory"
2277 " with data memory address"))
2278 {
2279 output_addr_const (stderr, x);
2280 fprintf (stderr, "\n");
2281 }
2282 /* Use a normal symbol for the direct address; no linker trampoline is needed. */
2283 output_addr_const (file, x);
2284 }
2285 else if (CONST_FIXED_P (x))
2286 {
2287 HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2288 if (code != 0)
2289 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2290 code);
2291 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2292 }
2293 else if (GET_CODE (x) == CONST_DOUBLE)
2294 {
2295 long val;
2296 REAL_VALUE_TYPE rv;
2297 if (GET_MODE (x) != SFmode)
2298 fatal_insn ("internal compiler error. Unknown mode:", x);
2299 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2300 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2301 fprintf (file, "0x%lx", val);
2302 }
2303 else if (GET_CODE (x) == CONST_STRING)
2304 fputs (XSTR (x, 0), file);
2305 else if (code == 'j')
2306 fputs (cond_string (GET_CODE (x)), file);
2307 else if (code == 'k')
2308 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2309 else
2310 avr_print_operand_address (file, x);
2311 }
2312
2313
2314 /* Worker function for `NOTICE_UPDATE_CC'. */
2315 /* Update the condition-code status after INSN. */
2316
2317 void
2318 avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
2319 {
2320 rtx set;
2321 enum attr_cc cc = get_attr_cc (insn);
2322
2323 switch (cc)
2324 {
2325 default:
2326 break;
2327
2328 case CC_PLUS:
2329 case CC_LDI:
2330 {
2331 rtx *op = recog_data.operand;
2332 int len_dummy, icc;
2333
2334 /* Extract insn's operands. */
2335 extract_constrain_insn_cached (insn);
2336
2337 switch (cc)
2338 {
2339 default:
2340 gcc_unreachable();
2341
2342 case CC_PLUS:
2343 avr_out_plus (insn, op, &len_dummy, &icc);
2344 cc = (enum attr_cc) icc;
2345 break;
2346
2347 case CC_LDI:
2348
2349 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2350 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2351 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2352 ? CC_CLOBBER
2353 /* Any other "r,rL" combination does not alter cc0. */
2354 : CC_NONE;
2355
2356 break;
2357 } /* inner switch */
2358
2359 break;
2360 }
2361 } /* outer switch */
2362
2363 switch (cc)
2364 {
2365 default:
2366 /* Special values like CC_OUT_PLUS from above have been
2367 mapped to "standard" CC_* values so we never come here. */
2368
2369 gcc_unreachable();
2370 break;
2371
2372 case CC_NONE:
2373 /* Insn does not affect CC at all. */
2374 break;
2375
2376 case CC_SET_N:
2377 CC_STATUS_INIT;
2378 break;
2379
2380 case CC_SET_ZN:
2381 set = single_set (insn);
2382 CC_STATUS_INIT;
2383 if (set)
2384 {
2385 cc_status.flags |= CC_NO_OVERFLOW;
2386 cc_status.value1 = SET_DEST (set);
2387 }
2388 break;
2389
2390 case CC_SET_VZN:
2391 /* Insns like INC, DEC, NEG that set Z, N and V.  We currently make no
2392 use of this combination, cf. also PR61055. */
2393 CC_STATUS_INIT;
2394 break;
2395
2396 case CC_SET_CZN:
2397 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2398 The V flag may or may not be known but that's ok because
2399 alter_cond will change tests to use EQ/NE. */
2400 set = single_set (insn);
2401 CC_STATUS_INIT;
2402 if (set)
2403 {
2404 cc_status.value1 = SET_DEST (set);
2405 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2406 }
2407 break;
2408
2409 case CC_COMPARE:
2410 set = single_set (insn);
2411 CC_STATUS_INIT;
2412 if (set)
2413 cc_status.value1 = SET_SRC (set);
2414 break;
2415
2416 case CC_CLOBBER:
2417 /* Insn doesn't leave CC in a usable state. */
2418 CC_STATUS_INIT;
2419 break;
2420 }
2421 }
2422
2423 /* Choose mode for jump insn:
2424 1 - relative jump in range -63 <= x <= 62;
2425 2 - relative jump in range -2046 <= x <= 2045;
2426 3 - absolute jump (only for devices with JMP / CALL). */
2427
2428 int
2429 avr_jump_mode (rtx x, rtx_insn *insn)
2430 {
2431 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2432 ? XEXP (x, 0) : x));
2433 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2434 int jump_distance = cur_addr - dest_addr;
2435
2436 if (-63 <= jump_distance && jump_distance <= 62)
2437 return 1;
2438 else if (-2046 <= jump_distance && jump_distance <= 2045)
2439 return 2;
2440 else if (AVR_HAVE_JMP_CALL)
2441 return 3;
2442
2443 return 2;
2444 }
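
/* Background (editor's note): the hardware ranges are -64...+63 words
   for conditional branches (BRxx) and -2048...+2047 words for RJMP;
   the slightly narrower ranges above presumably leave slack for the
   jump insn itself.  A branch over, say, 100 words thus gets mode 2
   and is emitted as an inverted branch around an RJMP, cf.
   ret_cond_branch below.  */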
2445
2446 /* Return AVR conditional jump commands.
2447 X is a comparison RTX.
2448 LEN is a number returned by the avr_jump_mode function.
2449 If REVERSE is nonzero, the condition code in X must be reversed. */
2450
2451 const char*
2452 ret_cond_branch (rtx x, int len, int reverse)
2453 {
2454 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2455
2456 switch (cond)
2457 {
2458 case GT:
2459 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2460 return (len == 1 ? ("breq .+2" CR_TAB
2461 "brpl %0") :
2462 len == 2 ? ("breq .+4" CR_TAB
2463 "brmi .+2" CR_TAB
2464 "rjmp %0") :
2465 ("breq .+6" CR_TAB
2466 "brmi .+4" CR_TAB
2467 "jmp %0"));
2468
2469 else
2470 return (len == 1 ? ("breq .+2" CR_TAB
2471 "brge %0") :
2472 len == 2 ? ("breq .+4" CR_TAB
2473 "brlt .+2" CR_TAB
2474 "rjmp %0") :
2475 ("breq .+6" CR_TAB
2476 "brlt .+4" CR_TAB
2477 "jmp %0"));
2478 case GTU:
2479 return (len == 1 ? ("breq .+2" CR_TAB
2480 "brsh %0") :
2481 len == 2 ? ("breq .+4" CR_TAB
2482 "brlo .+2" CR_TAB
2483 "rjmp %0") :
2484 ("breq .+6" CR_TAB
2485 "brlo .+4" CR_TAB
2486 "jmp %0"));
2487 case LE:
2488 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2489 return (len == 1 ? ("breq %0" CR_TAB
2490 "brmi %0") :
2491 len == 2 ? ("breq .+2" CR_TAB
2492 "brpl .+2" CR_TAB
2493 "rjmp %0") :
2494 ("breq .+2" CR_TAB
2495 "brpl .+4" CR_TAB
2496 "jmp %0"));
2497 else
2498 return (len == 1 ? ("breq %0" CR_TAB
2499 "brlt %0") :
2500 len == 2 ? ("breq .+2" CR_TAB
2501 "brge .+2" CR_TAB
2502 "rjmp %0") :
2503 ("breq .+2" CR_TAB
2504 "brge .+4" CR_TAB
2505 "jmp %0"));
2506 case LEU:
2507 return (len == 1 ? ("breq %0" CR_TAB
2508 "brlo %0") :
2509 len == 2 ? ("breq .+2" CR_TAB
2510 "brsh .+2" CR_TAB
2511 "rjmp %0") :
2512 ("breq .+2" CR_TAB
2513 "brsh .+4" CR_TAB
2514 "jmp %0"));
2515 default:
2516 if (reverse)
2517 {
2518 switch (len)
2519 {
2520 case 1:
2521 return "br%k1 %0";
2522 case 2:
2523 return ("br%j1 .+2" CR_TAB
2524 "rjmp %0");
2525 default:
2526 return ("br%j1 .+4" CR_TAB
2527 "jmp %0");
2528 }
2529 }
2530 else
2531 {
2532 switch (len)
2533 {
2534 case 1:
2535 return "br%j1 %0";
2536 case 2:
2537 return ("br%k1 .+2" CR_TAB
2538 "rjmp %0");
2539 default:
2540 return ("br%k1 .+4" CR_TAB
2541 "jmp %0");
2542 }
2543 }
2544 }
2545 return "";
2546 }
2547
2548
2549 /* Worker function for `FINAL_PRESCAN_INSN'. */
2550 /* Output the cost of the next insn as an assembler comment. */
2551
2552 void
2553 avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
2554 int num_operands ATTRIBUTE_UNUSED)
2555 {
2556 if (avr_log.rtx_costs)
2557 {
2558 rtx set = single_set (insn);
2559
2560 if (set)
2561 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2562 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2563 else
2564 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2565 rtx_cost (PATTERN (insn), INSN, 0,
2566 optimize_insn_for_speed_p()));
2567 }
2568 }
2569
2570 /* Return 0 if the result is unknown, 1 if the comparison is always true or always false. */
2571
2572 int
2573 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2574 {
2575 unsigned int max = (mode == QImode ? 0xff :
2576 mode == HImode ? 0xffff :
2577 mode == PSImode ? 0xffffff :
2578 mode == SImode ? 0xffffffff : 0);
2579 if (max && op && CONST_INT_P (x))
2580 {
2581 if (unsigned_condition (op) != op)
2582 max >>= 1;
2583
2584 if (max != (INTVAL (x) & max)
2585 && INTVAL (x) != 0xff)
2586 return 1;
2587 }
2588 return 0;
2589 }
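
/* Example (editor's note): for an unsigned QImode comparison against
   (const_int 0x100), MAX is 0xff and 0x100 & 0xff is 0, not 0xff, so
   the outcome is known at compile time and the function returns 1.
   For a signed condition, MAX is first halved to 0x7f.  */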
2590
2591
2592 /* Worker function for `FUNCTION_ARG_REGNO_P'. */
2593 /* Returns nonzero if REGNO is the number of a hard
2594 register in which function arguments are sometimes passed. */
2595
2596 int
2597 avr_function_arg_regno_p (int r)
2598 {
2599 return (r >= 8 && r <= 25);
2600 }
2601
2602
2603 /* Worker function for `INIT_CUMULATIVE_ARGS'. */
2604 /* Initialize CUM to the state at the beginning
2605 of the argument list. */
2606
2607 void
2608 avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2609 tree fndecl ATTRIBUTE_UNUSED)
2610 {
2611 cum->nregs = 18;
2612 cum->regno = FIRST_CUM_REG;
2613 if (!libname && stdarg_p (fntype))
2614 cum->nregs = 0;
2615
2616 /* Assume the callee may be tail-called. */
2617
2618 cfun->machine->sibcall_fails = 0;
2619 }
2620
2621 /* Returns the number of registers to allocate for a function argument. */
2622
2623 static int
2624 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2625 {
2626 int size;
2627
2628 if (mode == BLKmode)
2629 size = int_size_in_bytes (type);
2630 else
2631 size = GET_MODE_SIZE (mode);
2632
2633 /* Align all function arguments to start in even-numbered registers.
2634 Odd-sized arguments leave holes above them. */
2635
2636 return (size + 1) & ~1;
2637 }
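
/* Example (editor's note, assuming FIRST_CUM_REG is 26 as defined in
   avr.h): for a prototype

       void f (char a, long b);

   the char is rounded up to 2 registers, so A is passed in R24 and B
   in R20...R23, i.e. argument registers are allocated downwards from
   R25 in even-sized chunks.  */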
2638
2639
2640 /* Implement `TARGET_FUNCTION_ARG'. */
2641 /* Controls whether a function argument is passed
2642 in a register, and which register. */
2643
2644 static rtx
2645 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2646 const_tree type, bool named ATTRIBUTE_UNUSED)
2647 {
2648 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2649 int bytes = avr_num_arg_regs (mode, type);
2650
2651 if (cum->nregs && bytes <= cum->nregs)
2652 return gen_rtx_REG (mode, cum->regno - bytes);
2653
2654 return NULL_RTX;
2655 }
2656
2657
2658 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
2659 /* Update the summarizer variable CUM to advance past an argument
2660 in the argument list. */
2661
2662 static void
2663 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2664 const_tree type, bool named ATTRIBUTE_UNUSED)
2665 {
2666 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2667 int bytes = avr_num_arg_regs (mode, type);
2668
2669 cum->nregs -= bytes;
2670 cum->regno -= bytes;
2671
2672 /* A parameter is being passed in a call-saved register.  As the original
2673 contents of these regs have to be restored before leaving the function,
2674 a function must not pass arguments in call-saved regs in order to get
2675 tail-called. */
2676
2677 if (cum->regno >= 8
2678 && cum->nregs >= 0
2679 && !call_used_regs[cum->regno])
2680 {
2681 /* FIXME: We record info on a failing tail call in struct machine_function.
2682 This uses internals of calls.c:expand_call() and the way args_so_far
2683 is used.  targetm.function_ok_for_sibcall() needs to be extended to
2684 pass &args_so_far, too.  At present, CUMULATIVE_ARGS is
2685 target-dependent, so such an extension is not wanted. */
2686
2687 cfun->machine->sibcall_fails = 1;
2688 }
2689
2690 /* Test if all registers needed by the ABI are actually available. If the
2691 user has fixed a GPR needed to pass an argument, an (implicit) function
2692 call will clobber that fixed register. See PR45099 for an example. */
2693
2694 if (cum->regno >= 8
2695 && cum->nregs >= 0)
2696 {
2697 int regno;
2698
2699 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2700 if (fixed_regs[regno])
2701 warning (0, "fixed register %s used to pass parameter to function",
2702 reg_names[regno]);
2703 }
2704
2705 if (cum->nregs <= 0)
2706 {
2707 cum->nregs = 0;
2708 cum->regno = FIRST_CUM_REG;
2709 }
2710 }
2711
2712 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL'. */
2713 /* Decide whether we can make a sibling call to a function. DECL is the
2714 declaration of the function being targeted by the call and EXP is the
2715 CALL_EXPR representing the call. */
2716
2717 static bool
2718 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2719 {
2720 tree fntype_callee;
2721
2722 /* Tail-calling must fail if callee-saved regs are used to pass
2723 function args. We must not tail-call when `epilogue_restores'
2724 is used. Unfortunately, we cannot tell at this point if that
2725 actually will happen or not, and we cannot step back from
2726 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2727
2728 if (cfun->machine->sibcall_fails
2729 || TARGET_CALL_PROLOGUES)
2730 {
2731 return false;
2732 }
2733
2734 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2735
2736 if (decl_callee)
2737 {
2738 decl_callee = TREE_TYPE (decl_callee);
2739 }
2740 else
2741 {
2742 decl_callee = fntype_callee;
2743
2744 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2745 && METHOD_TYPE != TREE_CODE (decl_callee))
2746 {
2747 decl_callee = TREE_TYPE (decl_callee);
2748 }
2749 }
2750
2751 /* Ensure that caller and callee have compatible epilogues. */
2752
2753 if (cfun->machine->is_interrupt
2754 || cfun->machine->is_signal
2755 || cfun->machine->is_naked
2756 || avr_naked_function_p (decl_callee)
2757 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
2758 || (avr_OS_task_function_p (decl_callee)
2759 != cfun->machine->is_OS_task)
2760 || (avr_OS_main_function_p (decl_callee)
2761 != cfun->machine->is_OS_main))
2762 {
2763 return false;
2764 }
2765
2766 return true;
2767 }
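
/* Example (editor's note): given

       void h (long a, long b, char c);
       void f (long a, long b, char c) { h (a, b, c); }

   the char argument C lands in R16, a call-saved register, so
   avr_function_arg_advance above sets sibcall_fails and the call to h
   in f is not turned into a tail call.  */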
2768
2769 /***********************************************************************
2770 Functions for outputting various movs for various modes
2771 ************************************************************************/
2772
2773 /* Return true if a value of the mode of OP is read from flash by
2774 a __load_* function from libgcc. */
2775
2776 bool
2777 avr_load_libgcc_p (rtx op)
2778 {
2779 enum machine_mode mode = GET_MODE (op);
2780 int n_bytes = GET_MODE_SIZE (mode);
2781
2782 return (n_bytes > 2
2783 && !AVR_HAVE_LPMX
2784 && avr_mem_flash_p (op));
2785 }
2786
2787 /* Return true if a value of mode MODE is read by a __xload_* function. */
2788
2789 bool
2790 avr_xload_libgcc_p (enum machine_mode mode)
2791 {
2792 int n_bytes = GET_MODE_SIZE (mode);
2793
2794 return (n_bytes > 1
2795 || avr_n_flash > 1);
2796 }
2797
2798
2799 /* FIXME: This is a hack because secondary reloads don't work as expected.
2800
2801 Find an unused d-register to be used as scratch in INSN.
2802 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2803 is a register, skip all possible return values that overlap EXCLUDE.
2804 The policy for the returned register is similar to that of
2805 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2806 of INSN.
2807
2808 Return a QImode d-register or NULL_RTX if nothing found. */
2809
2810 static rtx
2811 avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
2812 {
2813 int regno;
2814 bool isr_p = (avr_interrupt_function_p (current_function_decl)
2815 || avr_signal_function_p (current_function_decl));
2816
2817 for (regno = 16; regno < 32; regno++)
2818 {
2819 rtx reg = all_regs_rtx[regno];
2820
2821 if ((exclude
2822 && reg_overlap_mentioned_p (exclude, reg))
2823 || fixed_regs[regno])
2824 {
2825 continue;
2826 }
2827
2828 /* Try a non-live register. */
2829
2830 if (!df_regs_ever_live_p (regno)
2831 && (TREE_THIS_VOLATILE (current_function_decl)
2832 || cfun->machine->is_OS_task
2833 || cfun->machine->is_OS_main
2834 || (!isr_p && call_used_regs[regno])))
2835 {
2836 return reg;
2837 }
2838
2839 /* Any live register can be used if it is unused after.
2840 Prologue/epilogue will care for it as needed. */
2841
2842 if (df_regs_ever_live_p (regno)
2843 && reg_unused_after (insn, reg))
2844 {
2845 return reg;
2846 }
2847 }
2848
2849 return NULL_RTX;
2850 }
2851
2852
2853 /* Helper function for the next function in the case where only the
2854 restricted version of the LPM instruction is available. */
2855
2856 static const char*
2857 avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
2858 {
2859 rtx dest = xop[0];
2860 rtx addr = xop[1];
2861 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2862 int regno_dest;
2863
2864 regno_dest = REGNO (dest);
2865
2866 /* The implicit target register of LPM. */
2867 xop[3] = lpm_reg_rtx;
2868
2869 switch (GET_CODE (addr))
2870 {
2871 default:
2872 gcc_unreachable();
2873
2874 case REG:
2875
2876 gcc_assert (REG_Z == REGNO (addr));
2877
2878 switch (n_bytes)
2879 {
2880 default:
2881 gcc_unreachable();
2882
2883 case 1:
2884 avr_asm_len ("%4lpm", xop, plen, 1);
2885
2886 if (regno_dest != LPM_REGNO)
2887 avr_asm_len ("mov %0,%3", xop, plen, 1);
2888
2889 return "";
2890
2891 case 2:
2892 if (REGNO (dest) == REG_Z)
2893 return avr_asm_len ("%4lpm" CR_TAB
2894 "push %3" CR_TAB
2895 "adiw %2,1" CR_TAB
2896 "%4lpm" CR_TAB
2897 "mov %B0,%3" CR_TAB
2898 "pop %A0", xop, plen, 6);
2899
2900 avr_asm_len ("%4lpm" CR_TAB
2901 "mov %A0,%3" CR_TAB
2902 "adiw %2,1" CR_TAB
2903 "%4lpm" CR_TAB
2904 "mov %B0,%3", xop, plen, 5);
2905
2906 if (!reg_unused_after (insn, addr))
2907 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2908
2909 break; /* 2 */
2910 }
2911
2912 break; /* REG */
2913
2914 case POST_INC:
2915
2916 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2917 && n_bytes <= 4);
2918
2919 if (regno_dest == LPM_REGNO)
2920 avr_asm_len ("%4lpm" CR_TAB
2921 "adiw %2,1", xop, plen, 2);
2922 else
2923 avr_asm_len ("%4lpm" CR_TAB
2924 "mov %A0,%3" CR_TAB
2925 "adiw %2,1", xop, plen, 3);
2926
2927 if (n_bytes >= 2)
2928 avr_asm_len ("%4lpm" CR_TAB
2929 "mov %B0,%3" CR_TAB
2930 "adiw %2,1", xop, plen, 3);
2931
2932 if (n_bytes >= 3)
2933 avr_asm_len ("%4lpm" CR_TAB
2934 "mov %C0,%3" CR_TAB
2935 "adiw %2,1", xop, plen, 3);
2936
2937 if (n_bytes >= 4)
2938 avr_asm_len ("%4lpm" CR_TAB
2939 "mov %D0,%3" CR_TAB
2940 "adiw %2,1", xop, plen, 3);
2941
2942 break; /* POST_INC */
2943
2944 } /* switch CODE (addr) */
2945
2946 return "";
2947 }
2948
2949
2950 /* If PLEN == NULL: Output instructions to load a value from a memory location
2951 OP[1] in address space AS1 to register OP[0].
2952 If PLEN != NULL, set *PLEN to the length in words of the instruction sequence.
2953 Return "". */
2954
2955 const char*
2956 avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
2957 {
2958 rtx xop[7];
2959 rtx dest = op[0];
2960 rtx src = SET_SRC (single_set (insn));
2961 rtx addr;
2962 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2963 int segment;
2964 RTX_CODE code;
2965 addr_space_t as = MEM_ADDR_SPACE (src);
2966
2967 if (plen)
2968 *plen = 0;
2969
2970 if (MEM_P (dest))
2971 {
2972 warning (0, "writing to address space %qs not supported",
2973 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2974
2975 return "";
2976 }
2977
2978 addr = XEXP (src, 0);
2979 code = GET_CODE (addr);
2980
2981 gcc_assert (REG_P (dest));
2982 gcc_assert (REG == code || POST_INC == code);
2983
2984 xop[0] = dest;
2985 xop[1] = addr;
2986 xop[2] = lpm_addr_reg_rtx;
2987 xop[4] = xstring_empty;
2988 xop[5] = tmp_reg_rtx;
2989 xop[6] = XEXP (rampz_rtx, 0);
2990
2991 segment = avr_addrspace[as].segment;
2992
2993 /* Set RAMPZ as needed. */
2994
2995 if (segment)
2996 {
2997 xop[4] = GEN_INT (segment);
2998 xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);
2999
3000 if (xop[3] != NULL_RTX)
3001 {
3002 avr_asm_len ("ldi %3,%4" CR_TAB
3003 "out %i6,%3", xop, plen, 2);
3004 }
3005 else if (segment == 1)
3006 {
3007 avr_asm_len ("clr %5" CR_TAB
3008 "inc %5" CR_TAB
3009 "out %i6,%5", xop, plen, 3);
3010 }
3011 else
3012 {
3013 avr_asm_len ("mov %5,%2" CR_TAB
3014 "ldi %2,%4" CR_TAB
3015 "out %i6,%2" CR_TAB
3016 "mov %2,%5", xop, plen, 4);
3017 }
3018
3019 xop[4] = xstring_e;
3020
3021 if (!AVR_HAVE_ELPMX)
3022 return avr_out_lpm_no_lpmx (insn, xop, plen);
3023 }
3024 else if (!AVR_HAVE_LPMX)
3025 {
3026 return avr_out_lpm_no_lpmx (insn, xop, plen);
3027 }
3028
3029 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
3030
3031 switch (GET_CODE (addr))
3032 {
3033 default:
3034 gcc_unreachable();
3035
3036 case REG:
3037
3038 gcc_assert (REG_Z == REGNO (addr));
3039
3040 switch (n_bytes)
3041 {
3042 default:
3043 gcc_unreachable();
3044
3045 case 1:
3046 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
3047
3048 case 2:
3049 if (REGNO (dest) == REG_Z)
3050 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3051 "%4lpm %B0,%a2" CR_TAB
3052 "mov %A0,%5", xop, plen, 3);
3053 else
3054 {
3055 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3056 "%4lpm %B0,%a2", xop, plen, 2);
3057
3058 if (!reg_unused_after (insn, addr))
3059 avr_asm_len ("sbiw %2,1", xop, plen, 1);
3060 }
3061
3062 break; /* 2 */
3063
3064 case 3:
3065
3066 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3067 "%4lpm %B0,%a2+" CR_TAB
3068 "%4lpm %C0,%a2", xop, plen, 3);
3069
3070 if (!reg_unused_after (insn, addr))
3071 avr_asm_len ("sbiw %2,2", xop, plen, 1);
3072
3073 break; /* 3 */
3074
3075 case 4:
3076
3077 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
3078 "%4lpm %B0,%a2+", xop, plen, 2);
3079
3080 if (REGNO (dest) == REG_Z - 2)
3081 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
3082 "%4lpm %C0,%a2" CR_TAB
3083 "mov %D0,%5", xop, plen, 3);
3084 else
3085 {
3086 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
3087 "%4lpm %D0,%a2", xop, plen, 2);
3088
3089 if (!reg_unused_after (insn, addr))
3090 avr_asm_len ("sbiw %2,3", xop, plen, 1);
3091 }
3092
3093 break; /* 4 */
3094 } /* n_bytes */
3095
3096 break; /* REG */
3097
3098 case POST_INC:
3099
3100 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
3101 && n_bytes <= 4);
3102
3103 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
3104 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
3105 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
3106 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
3107
3108 break; /* POST_INC */
3109
3110 } /* switch CODE (addr) */
3111
3112 if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
3113 {
3114 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM. */
3115
3116 xop[0] = zero_reg_rtx;
3117 avr_asm_len ("out %i6,%0", xop, plen, 1);
3118 }
3119
3120 return "";
3121 }
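
/* Example (editor's note, register numbers invented for the sketch):
   reading an HImode value from __flash1 through Z on a device with
   ELPMX may come out as

       ldi  r16,1          ; segment number into a free d-reg
       out  __RAMPZ__,r16
       elpm r24,Z+
       elpm r25,Z

   followed, on devices with RAMPD, by resetting RAMPZ from
   __zero_reg__ as done right above.  */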
3122
3123
3124 /* Worker function for xload_8 insn. */
3125
3126 const char*
3127 avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3128 {
3129 rtx xop[4];
3130
3131 xop[0] = op[0];
3132 xop[1] = op[1];
3133 xop[2] = lpm_addr_reg_rtx;
3134 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3135
3136 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);
3137
3138 avr_asm_len ("sbrc %1,7" CR_TAB
3139 "ld %3,%a2", xop, plen, 2);
3140
3141 if (REGNO (xop[0]) != REGNO (xop[3]))
3142 avr_asm_len ("mov %0,%3", xop, plen, 1);
3143
3144 return "";
3145 }
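
/* Background (editor's note): for the 24-bit __memx address space, RAM
   lives at 0x800000 and up, so bit 7 of the hh8 byte in %1 tells RAM
   from flash.  The code above loads from flash first and then, if the
   SBRC finds bit 7 set, overrides the result with an LD from RAM.  */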
3146
3147
3148 const char*
3149 output_movqi (rtx_insn *insn, rtx operands[], int *plen)
3150 {
3151 rtx dest = operands[0];
3152 rtx src = operands[1];
3153
3154 if (avr_mem_flash_p (src)
3155 || avr_mem_flash_p (dest))
3156 {
3157 return avr_out_lpm (insn, operands, plen);
3158 }
3159
3160 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3161
3162 if (REG_P (dest))
3163 {
3164 if (REG_P (src)) /* mov r,r */
3165 {
3166 if (test_hard_reg_class (STACK_REG, dest))
3167 return avr_asm_len ("out %0,%1", operands, plen, -1);
3168 else if (test_hard_reg_class (STACK_REG, src))
3169 return avr_asm_len ("in %0,%1", operands, plen, -1);
3170
3171 return avr_asm_len ("mov %0,%1", operands, plen, -1);
3172 }
3173 else if (CONSTANT_P (src))
3174 {
3175 output_reload_in_const (operands, NULL_RTX, plen, false);
3176 return "";
3177 }
3178 else if (MEM_P (src))
3179 return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3180 }
3181 else if (MEM_P (dest))
3182 {
3183 rtx xop[2];
3184
3185 xop[0] = dest;
3186 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3187
3188 return out_movqi_mr_r (insn, xop, plen);
3189 }
3190
3191 return "";
3192 }
3193
3194
3195 const char *
3196 output_movhi (rtx_insn *insn, rtx xop[], int *plen)
3197 {
3198 rtx dest = xop[0];
3199 rtx src = xop[1];
3200
3201 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3202
3203 if (avr_mem_flash_p (src)
3204 || avr_mem_flash_p (dest))
3205 {
3206 return avr_out_lpm (insn, xop, plen);
3207 }
3208
3209 gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3210
3211 if (REG_P (dest))
3212 {
3213 if (REG_P (src)) /* mov r,r */
3214 {
3215 if (test_hard_reg_class (STACK_REG, dest))
3216 {
3217 if (AVR_HAVE_8BIT_SP)
3218 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3219
3220 if (AVR_XMEGA)
3221 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3222 "out __SP_H__,%B1", xop, plen, -2);
3223
3224 /* Use simple load of SP if no interrupts are used. */
3225
3226 return TARGET_NO_INTERRUPTS
3227 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3228 "out __SP_L__,%A1", xop, plen, -2)
3229 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
3230 "cli" CR_TAB
3231 "out __SP_H__,%B1" CR_TAB
3232 "out __SREG__,__tmp_reg__" CR_TAB
3233 "out __SP_L__,%A1", xop, plen, -5);
3234 }
3235 else if (test_hard_reg_class (STACK_REG, src))
3236 {
3237 return !AVR_HAVE_SPH
3238 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3239 "clr %B0", xop, plen, -2)
3240
3241 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3242 "in %B0,__SP_H__", xop, plen, -2);
3243 }
3244
3245 return AVR_HAVE_MOVW
3246 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3247
3248 : avr_asm_len ("mov %A0,%A1" CR_TAB
3249 "mov %B0,%B1", xop, plen, -2);
3250 } /* REG_P (src) */
3251 else if (CONSTANT_P (src))
3252 {
3253 return output_reload_inhi (xop, NULL, plen);
3254 }
3255 else if (MEM_P (src))
3256 {
3257 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3258 }
3259 }
3260 else if (MEM_P (dest))
3261 {
3262 rtx xop[2];
3263
3264 xop[0] = dest;
3265 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3266
3267 return out_movhi_mr_r (insn, xop, plen);
3268 }
3269
3270 fatal_insn ("invalid insn:", insn);
3271
3272 return "";
3273 }
3274
3275 static const char*
3276 out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
3277 {
3278 rtx dest = op[0];
3279 rtx src = op[1];
3280 rtx x = XEXP (src, 0);
3281
3282 if (CONSTANT_ADDRESS_P (x))
3283 {
3284 return optimize > 0 && io_address_operand (x, QImode)
3285 ? avr_asm_len ("in %0,%i1", op, plen, -1)
3286 : avr_asm_len ("lds %0,%m1", op, plen, -2);
3287 }
3288 else if (GET_CODE (x) == PLUS
3289 && REG_P (XEXP (x, 0))
3290 && CONST_INT_P (XEXP (x, 1)))
3291 {
3292 /* memory access by reg+disp */
3293
3294 int disp = INTVAL (XEXP (x, 1));
3295
3296 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
3297 {
3298 if (REGNO (XEXP (x, 0)) != REG_Y)
3299 fatal_insn ("incorrect insn:",insn);
3300
3301 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3302 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
3303 "ldd %0,Y+63" CR_TAB
3304 "sbiw r28,%o1-63", op, plen, -3);
3305
3306 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3307 "sbci r29,hi8(-%o1)" CR_TAB
3308 "ld %0,Y" CR_TAB
3309 "subi r28,lo8(%o1)" CR_TAB
3310 "sbci r29,hi8(%o1)", op, plen, -5);
3311 }
3312 else if (REGNO (XEXP (x, 0)) == REG_X)
3313 {
3314 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
3315 it, but I have seen this situation with extreme optimization options. */
3316
3317 avr_asm_len ("adiw r26,%o1" CR_TAB
3318 "ld %0,X", op, plen, -2);
3319
3320 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3321 && !reg_unused_after (insn, XEXP (x,0)))
3322 {
3323 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3324 }
3325
3326 return "";
3327 }
3328
3329 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3330 }
3331
3332 return avr_asm_len ("ld %0,%1", op, plen, -1);
3333 }
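
/* Example (editor's note, destination register invented): LDD encodes
   displacements 0...63 only, so a QImode load from Y+100 comes out of
   the code above as

       adiw r28,37         ; 100 - 63
       ldd  r24,Y+63
       sbiw r28,37

   temporarily adjusting the frame pointer instead of using a scratch
   register.  */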
3334
3335 static const char*
3336 out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
3337 {
3338 rtx dest = op[0];
3339 rtx src = op[1];
3340 rtx base = XEXP (src, 0);
3341 int reg_dest = true_regnum (dest);
3342 int reg_base = true_regnum (base);
3343 /* "volatile" forces reading low byte first, even if less efficient,
3344 for correct operation with 16-bit I/O registers. */
3345 int mem_volatile_p = MEM_VOLATILE_P (src);
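
  /* Background (editor's note): 16-bit I/O registers such as TCNT1
     latch their high byte when the low byte is read, so a volatile
     HImode read must access the low byte first; this is why the
     PRE_DEC case below expands --R into explicit low-then-high
     accesses when the MEM is volatile.  */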
3346
3347 if (reg_base > 0)
3348 {
3349 if (reg_dest == reg_base) /* R = (R) */
3350 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3351 "ld %B0,%1" CR_TAB
3352 "mov %A0,__tmp_reg__", op, plen, -3);
3353
3354 if (reg_base != REG_X)
3355 return avr_asm_len ("ld %A0,%1" CR_TAB
3356 "ldd %B0,%1+1", op, plen, -2);
3357
3358 avr_asm_len ("ld %A0,X+" CR_TAB
3359 "ld %B0,X", op, plen, -2);
3360
3361 if (!reg_unused_after (insn, base))
3362 avr_asm_len ("sbiw r26,1", op, plen, 1);
3363
3364 return "";
3365 }
3366 else if (GET_CODE (base) == PLUS) /* (R + i) */
3367 {
3368 int disp = INTVAL (XEXP (base, 1));
3369 int reg_base = true_regnum (XEXP (base, 0));
3370
3371 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3372 {
3373 if (REGNO (XEXP (base, 0)) != REG_Y)
3374 fatal_insn ("incorrect insn:",insn);
3375
3376 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3377 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3378 "ldd %A0,Y+62" CR_TAB
3379 "ldd %B0,Y+63" CR_TAB
3380 "sbiw r28,%o1-62", op, plen, -4)
3381
3382 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3383 "sbci r29,hi8(-%o1)" CR_TAB
3384 "ld %A0,Y" CR_TAB
3385 "ldd %B0,Y+1" CR_TAB
3386 "subi r28,lo8(%o1)" CR_TAB
3387 "sbci r29,hi8(%o1)", op, plen, -6);
3388 }
3389
3390 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
3391 it, but I have seen this situation with extreme
3392 optimization options. */
3393
3394 if (reg_base == REG_X)
3395 return reg_base == reg_dest
3396 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3397 "ld __tmp_reg__,X+" CR_TAB
3398 "ld %B0,X" CR_TAB
3399 "mov %A0,__tmp_reg__", op, plen, -4)
3400
3401 : avr_asm_len ("adiw r26,%o1" CR_TAB
3402 "ld %A0,X+" CR_TAB
3403 "ld %B0,X" CR_TAB
3404 "sbiw r26,%o1+1", op, plen, -4);
3405
3406 return reg_base == reg_dest
3407 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3408 "ldd %B0,%B1" CR_TAB
3409 "mov %A0,__tmp_reg__", op, plen, -3)
3410
3411 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3412 "ldd %B0,%B1", op, plen, -2);
3413 }
3414 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3415 {
3416 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3417 fatal_insn ("incorrect insn:", insn);
3418
3419 if (!mem_volatile_p)
3420 return avr_asm_len ("ld %B0,%1" CR_TAB
3421 "ld %A0,%1", op, plen, -2);
3422
3423 return REGNO (XEXP (base, 0)) == REG_X
3424 ? avr_asm_len ("sbiw r26,2" CR_TAB
3425 "ld %A0,X+" CR_TAB
3426 "ld %B0,X" CR_TAB
3427 "sbiw r26,1", op, plen, -4)
3428
3429 : avr_asm_len ("sbiw %r1,2" CR_TAB
3430 "ld %A0,%p1" CR_TAB
3431 "ldd %B0,%p1+1", op, plen, -3);
3432 }
3433 else if (GET_CODE (base) == POST_INC) /* (R++) */
3434 {
3435 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3436 fatal_insn ("incorrect insn:", insn);
3437
3438 return avr_asm_len ("ld %A0,%1" CR_TAB
3439 "ld %B0,%1", op, plen, -2);
3440 }
3441 else if (CONSTANT_ADDRESS_P (base))
3442 {
3443 return optimize > 0 && io_address_operand (base, HImode)
3444 ? avr_asm_len ("in %A0,%i1" CR_TAB
3445 "in %B0,%i1+1", op, plen, -2)
3446
3447 : avr_asm_len ("lds %A0,%m1" CR_TAB
3448 "lds %B0,%m1+1", op, plen, -4);
3449 }
3450
3451 fatal_insn ("unknown move insn:",insn);
3452 return "";
3453 }
3454
3455 static const char*
3456 out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
3457 {
3458 rtx dest = op[0];
3459 rtx src = op[1];
3460 rtx base = XEXP (src, 0);
3461 int reg_dest = true_regnum (dest);
3462 int reg_base = true_regnum (base);
3463 int tmp;
3464
3465 if (!l)
3466 l = &tmp;
3467
3468 if (reg_base > 0)
3469 {
3470 if (reg_base == REG_X) /* (R26) */
3471 {
3472 if (reg_dest == REG_X)
3473 /* "ld r26,-X" is undefined */
3474 return *l=7, ("adiw r26,3" CR_TAB
3475 "ld r29,X" CR_TAB
3476 "ld r28,-X" CR_TAB
3477 "ld __tmp_reg__,-X" CR_TAB
3478 "sbiw r26,1" CR_TAB
3479 "ld r26,X" CR_TAB
3480 "mov r27,__tmp_reg__");
3481 else if (reg_dest == REG_X - 2)
3482 return *l=5, ("ld %A0,X+" CR_TAB
3483 "ld %B0,X+" CR_TAB
3484 "ld __tmp_reg__,X+" CR_TAB
3485 "ld %D0,X" CR_TAB
3486 "mov %C0,__tmp_reg__");
3487 else if (reg_unused_after (insn, base))
3488 return *l=4, ("ld %A0,X+" CR_TAB
3489 "ld %B0,X+" CR_TAB
3490 "ld %C0,X+" CR_TAB
3491 "ld %D0,X");
3492 else
3493 return *l=5, ("ld %A0,X+" CR_TAB
3494 "ld %B0,X+" CR_TAB
3495 "ld %C0,X+" CR_TAB
3496 "ld %D0,X" CR_TAB
3497 "sbiw r26,3");
3498 }
3499 else
3500 {
3501 if (reg_dest == reg_base)
3502 return *l=5, ("ldd %D0,%1+3" CR_TAB
3503 "ldd %C0,%1+2" CR_TAB
3504 "ldd __tmp_reg__,%1+1" CR_TAB
3505 "ld %A0,%1" CR_TAB
3506 "mov %B0,__tmp_reg__");
3507 else if (reg_base == reg_dest + 2)
3508 return *l=5, ("ld %A0,%1" CR_TAB
3509 "ldd %B0,%1+1" CR_TAB
3510 "ldd __tmp_reg__,%1+2" CR_TAB
3511 "ldd %D0,%1+3" CR_TAB
3512 "mov %C0,__tmp_reg__");
3513 else
3514 return *l=4, ("ld %A0,%1" CR_TAB
3515 "ldd %B0,%1+1" CR_TAB
3516 "ldd %C0,%1+2" CR_TAB
3517 "ldd %D0,%1+3");
3518 }
3519 }
3520 else if (GET_CODE (base) == PLUS) /* (R + i) */
3521 {
3522 int disp = INTVAL (XEXP (base, 1));
3523
3524 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3525 {
3526 if (REGNO (XEXP (base, 0)) != REG_Y)
3527 fatal_insn ("incorrect insn:",insn);
3528
3529 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3530 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3531 "ldd %A0,Y+60" CR_TAB
3532 "ldd %B0,Y+61" CR_TAB
3533 "ldd %C0,Y+62" CR_TAB
3534 "ldd %D0,Y+63" CR_TAB
3535 "sbiw r28,%o1-60");
3536
3537 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3538 "sbci r29,hi8(-%o1)" CR_TAB
3539 "ld %A0,Y" CR_TAB
3540 "ldd %B0,Y+1" CR_TAB
3541 "ldd %C0,Y+2" CR_TAB
3542 "ldd %D0,Y+3" CR_TAB
3543 "subi r28,lo8(%o1)" CR_TAB
3544 "sbci r29,hi8(%o1)");
3545 }
3546
3547 reg_base = true_regnum (XEXP (base, 0));
3548 if (reg_base == REG_X)
3549 {
3550 /* R = (X + d) */
3551 if (reg_dest == REG_X)
3552 {
3553 *l = 7;
3554 /* "ld r26,-X" is undefined */
3555 return ("adiw r26,%o1+3" CR_TAB
3556 "ld r29,X" CR_TAB
3557 "ld r28,-X" CR_TAB
3558 "ld __tmp_reg__,-X" CR_TAB
3559 "sbiw r26,1" CR_TAB
3560 "ld r26,X" CR_TAB
3561 "mov r27,__tmp_reg__");
3562 }
3563 *l = 6;
3564 if (reg_dest == REG_X - 2)
3565 return ("adiw r26,%o1" CR_TAB
3566 "ld r24,X+" CR_TAB
3567 "ld r25,X+" CR_TAB
3568 "ld __tmp_reg__,X+" CR_TAB
3569 "ld r27,X" CR_TAB
3570 "mov r26,__tmp_reg__");
3571
3572 return ("adiw r26,%o1" CR_TAB
3573 "ld %A0,X+" CR_TAB
3574 "ld %B0,X+" CR_TAB
3575 "ld %C0,X+" CR_TAB
3576 "ld %D0,X" CR_TAB
3577 "sbiw r26,%o1+3");
3578 }
3579 if (reg_dest == reg_base)
3580 return *l=5, ("ldd %D0,%D1" CR_TAB
3581 "ldd %C0,%C1" CR_TAB
3582 "ldd __tmp_reg__,%B1" CR_TAB
3583 "ldd %A0,%A1" CR_TAB
3584 "mov %B0,__tmp_reg__");
3585 else if (reg_dest == reg_base - 2)
3586 return *l=5, ("ldd %A0,%A1" CR_TAB
3587 "ldd %B0,%B1" CR_TAB
3588 "ldd __tmp_reg__,%C1" CR_TAB
3589 "ldd %D0,%D1" CR_TAB
3590 "mov %C0,__tmp_reg__");
3591 return *l=4, ("ldd %A0,%A1" CR_TAB
3592 "ldd %B0,%B1" CR_TAB
3593 "ldd %C0,%C1" CR_TAB
3594 "ldd %D0,%D1");
3595 }
3596 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3597 return *l=4, ("ld %D0,%1" CR_TAB
3598 "ld %C0,%1" CR_TAB
3599 "ld %B0,%1" CR_TAB
3600 "ld %A0,%1");
3601 else if (GET_CODE (base) == POST_INC) /* (R++) */
3602 return *l=4, ("ld %A0,%1" CR_TAB
3603 "ld %B0,%1" CR_TAB
3604 "ld %C0,%1" CR_TAB
3605 "ld %D0,%1");
3606 else if (CONSTANT_ADDRESS_P (base))
3607 return *l=8, ("lds %A0,%m1" CR_TAB
3608 "lds %B0,%m1+1" CR_TAB
3609 "lds %C0,%m1+2" CR_TAB
3610 "lds %D0,%m1+3");
3611
3612 fatal_insn ("unknown move insn:",insn);
3613 return "";
3614 }
3615
3616 static const char*
3617 out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
3618 {
3619 rtx dest = op[0];
3620 rtx src = op[1];
3621 rtx base = XEXP (dest, 0);
3622 int reg_base = true_regnum (base);
3623 int reg_src = true_regnum (src);
3624 int tmp;
3625
3626 if (!l)
3627 l = &tmp;
3628
3629 if (CONSTANT_ADDRESS_P (base))
3630 return *l=8,("sts %m0,%A1" CR_TAB
3631 "sts %m0+1,%B1" CR_TAB
3632 "sts %m0+2,%C1" CR_TAB
3633 "sts %m0+3,%D1");
3634 if (reg_base > 0) /* (r) */
3635 {
3636 if (reg_base == REG_X) /* (R26) */
3637 {
3638 if (reg_src == REG_X)
3639 {
3640 /* "st X+,r26" is undefined */
3641 if (reg_unused_after (insn, base))
3642 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3643 "st X,r26" CR_TAB
3644 "adiw r26,1" CR_TAB
3645 "st X+,__tmp_reg__" CR_TAB
3646 "st X+,r28" CR_TAB
3647 "st X,r29");
3648 else
3649 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3650 "st X,r26" CR_TAB
3651 "adiw r26,1" CR_TAB
3652 "st X+,__tmp_reg__" CR_TAB
3653 "st X+,r28" CR_TAB
3654 "st X,r29" CR_TAB
3655 "sbiw r26,3");
3656 }
3657 else if (reg_base == reg_src + 2)
3658 {
3659 if (reg_unused_after (insn, base))
3660 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3661 "mov __tmp_reg__,%D1" CR_TAB
3662 "st %0+,%A1" CR_TAB
3663 "st %0+,%B1" CR_TAB
3664 "st %0+,__zero_reg__" CR_TAB
3665 "st %0,__tmp_reg__" CR_TAB
3666 "clr __zero_reg__");
3667 else
3668 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3669 "mov __tmp_reg__,%D1" CR_TAB
3670 "st %0+,%A1" CR_TAB
3671 "st %0+,%B1" CR_TAB
3672 "st %0+,__zero_reg__" CR_TAB
3673 "st %0,__tmp_reg__" CR_TAB
3674 "clr __zero_reg__" CR_TAB
3675 "sbiw r26,3");
3676 }
3677 return *l=5, ("st %0+,%A1" CR_TAB
3678 "st %0+,%B1" CR_TAB
3679 "st %0+,%C1" CR_TAB
3680 "st %0,%D1" CR_TAB
3681 "sbiw r26,3");
3682 }
3683 else
3684 return *l=4, ("st %0,%A1" CR_TAB
3685 "std %0+1,%B1" CR_TAB
3686 "std %0+2,%C1" CR_TAB
3687 "std %0+3,%D1");
3688 }
3689 else if (GET_CODE (base) == PLUS) /* (R + i) */
3690 {
3691 int disp = INTVAL (XEXP (base, 1));
3692 reg_base = REGNO (XEXP (base, 0));
3693 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3694 {
3695 if (reg_base != REG_Y)
3696 fatal_insn ("incorrect insn:",insn);
3697
3698 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3699 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3700 "std Y+60,%A1" CR_TAB
3701 "std Y+61,%B1" CR_TAB
3702 "std Y+62,%C1" CR_TAB
3703 "std Y+63,%D1" CR_TAB
3704 "sbiw r28,%o0-60");
3705
3706 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3707 "sbci r29,hi8(-%o0)" CR_TAB
3708 "st Y,%A1" CR_TAB
3709 "std Y+1,%B1" CR_TAB
3710 "std Y+2,%C1" CR_TAB
3711 "std Y+3,%D1" CR_TAB
3712 "subi r28,lo8(%o0)" CR_TAB
3713 "sbci r29,hi8(%o0)");
3714 }
3715 if (reg_base == REG_X)
3716 {
3717 /* (X + d) = R */
3718 if (reg_src == REG_X)
3719 {
3720 *l = 9;
3721 return ("mov __tmp_reg__,r26" CR_TAB
3722 "mov __zero_reg__,r27" CR_TAB
3723 "adiw r26,%o0" CR_TAB
3724 "st X+,__tmp_reg__" CR_TAB
3725 "st X+,__zero_reg__" CR_TAB
3726 "st X+,r28" CR_TAB
3727 "st X,r29" CR_TAB
3728 "clr __zero_reg__" CR_TAB
3729 "sbiw r26,%o0+3");
3730 }
3731 else if (reg_src == REG_X - 2)
3732 {
3733 *l = 9;
3734 return ("mov __tmp_reg__,r26" CR_TAB
3735 "mov __zero_reg__,r27" CR_TAB
3736 "adiw r26,%o0" CR_TAB
3737 "st X+,r24" CR_TAB
3738 "st X+,r25" CR_TAB
3739 "st X+,__tmp_reg__" CR_TAB
3740 "st X,__zero_reg__" CR_TAB
3741 "clr __zero_reg__" CR_TAB
3742 "sbiw r26,%o0+3");
3743 }
3744 *l = 6;
3745 return ("adiw r26,%o0" CR_TAB
3746 "st X+,%A1" CR_TAB
3747 "st X+,%B1" CR_TAB
3748 "st X+,%C1" CR_TAB
3749 "st X,%D1" CR_TAB
3750 "sbiw r26,%o0+3");
3751 }
3752 return *l=4, ("std %A0,%A1" CR_TAB
3753 "std %B0,%B1" CR_TAB
3754 "std %C0,%C1" CR_TAB
3755 "std %D0,%D1");
3756 }
3757 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3758 return *l=4, ("st %0,%D1" CR_TAB
3759 "st %0,%C1" CR_TAB
3760 "st %0,%B1" CR_TAB
3761 "st %0,%A1");
3762 else if (GET_CODE (base) == POST_INC) /* (R++) */
3763 return *l=4, ("st %0,%A1" CR_TAB
3764 "st %0,%B1" CR_TAB
3765 "st %0,%C1" CR_TAB
3766 "st %0,%D1");
3767 fatal_insn ("unknown move insn:",insn);
3768 return "";
3769 }
3770
3771 const char *
3772 output_movsisf (rtx_insn *insn, rtx operands[], int *l)
3773 {
3774 int dummy;
3775 rtx dest = operands[0];
3776 rtx src = operands[1];
3777 int *real_l = l;
3778
3779 if (avr_mem_flash_p (src)
3780 || avr_mem_flash_p (dest))
3781 {
3782 return avr_out_lpm (insn, operands, real_l);
3783 }
3784
3785 if (!l)
3786 l = &dummy;
3787
3788 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
3789 if (REG_P (dest))
3790 {
3791 if (REG_P (src)) /* mov r,r */
3792 {
3793 if (true_regnum (dest) > true_regnum (src))
3794 {
3795 if (AVR_HAVE_MOVW)
3796 {
3797 *l = 2;
3798 return ("movw %C0,%C1" CR_TAB
3799 "movw %A0,%A1");
3800 }
3801 *l = 4;
3802 return ("mov %D0,%D1" CR_TAB
3803 "mov %C0,%C1" CR_TAB
3804 "mov %B0,%B1" CR_TAB
3805 "mov %A0,%A1");
3806 }
3807 else
3808 {
3809 if (AVR_HAVE_MOVW)
3810 {
3811 *l = 2;
3812 return ("movw %A0,%A1" CR_TAB
3813 "movw %C0,%C1");
3814 }
3815 *l = 4;
3816 return ("mov %A0,%A1" CR_TAB
3817 "mov %B0,%B1" CR_TAB
3818 "mov %C0,%C1" CR_TAB
3819 "mov %D0,%D1");
3820 }
3821 }
3822 else if (CONSTANT_P (src))
3823 {
3824 return output_reload_insisf (operands, NULL_RTX, real_l);
3825 }
3826 else if (MEM_P (src))
3827 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3828 }
3829 else if (MEM_P (dest))
3830 {
3831 const char *templ;
3832
3833 if (src == CONST0_RTX (GET_MODE (dest)))
3834 operands[1] = zero_reg_rtx;
3835
3836 templ = out_movsi_mr_r (insn, operands, real_l);
3837
3838 if (!real_l)
3839 output_asm_insn (templ, operands);
3840
3841 operands[1] = src;
3842 return "";
3843 }
3844 fatal_insn ("invalid insn:", insn);
3845 return "";
3846 }
3847
3848
3849 /* Handle loads of 24-bit types from memory to register. */
3850
3851 static const char*
3852 avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
3853 {
3854 rtx dest = op[0];
3855 rtx src = op[1];
3856 rtx base = XEXP (src, 0);
3857 int reg_dest = true_regnum (dest);
3858 int reg_base = true_regnum (base);
3859
3860 if (reg_base > 0)
3861 {
3862 if (reg_base == REG_X) /* (R26) */
3863 {
3864 if (reg_dest == REG_X)
3865 /* "ld r26,-X" is undefined */
3866 return avr_asm_len ("adiw r26,2" CR_TAB
3867 "ld r28,X" CR_TAB
3868 "ld __tmp_reg__,-X" CR_TAB
3869 "sbiw r26,1" CR_TAB
3870 "ld r26,X" CR_TAB
3871 "mov r27,__tmp_reg__", op, plen, -6);
3872 else
3873 {
3874 avr_asm_len ("ld %A0,X+" CR_TAB
3875 "ld %B0,X+" CR_TAB
3876 "ld %C0,X", op, plen, -3);
3877
3878 if (reg_dest != REG_X - 2
3879 && !reg_unused_after (insn, base))
3880 {
3881 avr_asm_len ("sbiw r26,2", op, plen, 1);
3882 }
3883
3884 return "";
3885 }
3886 }
3887 else /* reg_base != REG_X */
3888 {
3889 if (reg_dest == reg_base)
3890 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3891 "ldd __tmp_reg__,%1+1" CR_TAB
3892 "ld %A0,%1" CR_TAB
3893 "mov %B0,__tmp_reg__", op, plen, -4);
3894 else
3895 return avr_asm_len ("ld %A0,%1" CR_TAB
3896 "ldd %B0,%1+1" CR_TAB
3897 "ldd %C0,%1+2", op, plen, -3);
3898 }
3899 }
3900 else if (GET_CODE (base) == PLUS) /* (R + i) */
3901 {
3902 int disp = INTVAL (XEXP (base, 1));
3903
3904 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3905 {
3906 if (REGNO (XEXP (base, 0)) != REG_Y)
3907 fatal_insn ("incorrect insn:",insn);
3908
3909 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3910 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3911 "ldd %A0,Y+61" CR_TAB
3912 "ldd %B0,Y+62" CR_TAB
3913 "ldd %C0,Y+63" CR_TAB
3914 "sbiw r28,%o1-61", op, plen, -5);
3915
3916 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3917 "sbci r29,hi8(-%o1)" CR_TAB
3918 "ld %A0,Y" CR_TAB
3919 "ldd %B0,Y+1" CR_TAB
3920 "ldd %C0,Y+2" CR_TAB
3921 "subi r28,lo8(%o1)" CR_TAB
3922 "sbci r29,hi8(%o1)", op, plen, -7);
3923 }
3924
3925 reg_base = true_regnum (XEXP (base, 0));
3926 if (reg_base == REG_X)
3927 {
3928 /* R = (X + d) */
3929 if (reg_dest == REG_X)
3930 {
3931 /* "ld r26,-X" is undefined */
3932 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3933 "ld r28,X" CR_TAB
3934 "ld __tmp_reg__,-X" CR_TAB
3935 "sbiw r26,1" CR_TAB
3936 "ld r26,X" CR_TAB
3937 "mov r27,__tmp_reg__", op, plen, -6);
3938 }
3939
3940 avr_asm_len ("adiw r26,%o1" CR_TAB
3941 "ld %A0,X+" CR_TAB
3942 "ld %B0,X+" CR_TAB
3943 "ld %C0,X", op, plen, -4);
3944
3945 if (reg_dest != REG_W
3946 && !reg_unused_after (insn, XEXP (base, 0)))
3947 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3948
3949 return "";
3950 }
3951
3952 if (reg_dest == reg_base)
3953 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3954 "ldd __tmp_reg__,%B1" CR_TAB
3955 "ldd %A0,%A1" CR_TAB
3956 "mov %B0,__tmp_reg__", op, plen, -4);
3957
3958 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3959 "ldd %B0,%B1" CR_TAB
3960 "ldd %C0,%C1", op, plen, -3);
3961 }
3962 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3963 return avr_asm_len ("ld %C0,%1" CR_TAB
3964 "ld %B0,%1" CR_TAB
3965 "ld %A0,%1", op, plen, -3);
3966 else if (GET_CODE (base) == POST_INC) /* (R++) */
3967 return avr_asm_len ("ld %A0,%1" CR_TAB
3968 "ld %B0,%1" CR_TAB
3969 "ld %C0,%1", op, plen, -3);
3970
3971 else if (CONSTANT_ADDRESS_P (base))
3972 return avr_asm_len ("lds %A0,%m1" CR_TAB
3973 "lds %B0,%m1+1" CR_TAB
3974 "lds %C0,%m1+2", op, plen , -6);
3975
3976 fatal_insn ("unknown move insn:",insn);
3977 return "";
3978 }
3979
3980 /* Handle store of 24-bit type from register or zero to memory. */
3981
3982 static const char*
3983 avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
3984 {
3985 rtx dest = op[0];
3986 rtx src = op[1];
3987 rtx base = XEXP (dest, 0);
3988 int reg_base = true_regnum (base);
3989
3990 if (CONSTANT_ADDRESS_P (base))
3991 return avr_asm_len ("sts %m0,%A1" CR_TAB
3992 "sts %m0+1,%B1" CR_TAB
3993 "sts %m0+2,%C1", op, plen, -6);
3994
3995 if (reg_base > 0) /* (r) */
3996 {
3997 if (reg_base == REG_X) /* (R26) */
3998 {
3999 gcc_assert (!reg_overlap_mentioned_p (base, src));
4000
4001 avr_asm_len ("st %0+,%A1" CR_TAB
4002 "st %0+,%B1" CR_TAB
4003 "st %0,%C1", op, plen, -3);
4004
4005 if (!reg_unused_after (insn, base))
4006 avr_asm_len ("sbiw r26,2", op, plen, 1);
4007
4008 return "";
4009 }
4010 else
4011 return avr_asm_len ("st %0,%A1" CR_TAB
4012 "std %0+1,%B1" CR_TAB
4013 "std %0+2,%C1", op, plen, -3);
4014 }
4015 else if (GET_CODE (base) == PLUS) /* (R + i) */
4016 {
4017 int disp = INTVAL (XEXP (base, 1));
4018 reg_base = REGNO (XEXP (base, 0));
4019
4020 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4021 {
4022 if (reg_base != REG_Y)
4023 fatal_insn ("incorrect insn:",insn);
4024
4025 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4026 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
4027 "std Y+61,%A1" CR_TAB
4028 "std Y+62,%B1" CR_TAB
4029 "std Y+63,%C1" CR_TAB
4030 "sbiw r28,%o0-61", op, plen, -5);
4031
4032 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4033 "sbci r29,hi8(-%o0)" CR_TAB
4034 "st Y,%A1" CR_TAB
4035 "std Y+1,%B1" CR_TAB
4036 "std Y+2,%C1" CR_TAB
4037 "subi r28,lo8(%o0)" CR_TAB
4038 "sbci r29,hi8(%o0)", op, plen, -7);
4039 }
4040 if (reg_base == REG_X)
4041 {
4042 /* (X + d) = R */
4043 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
4044
4045 avr_asm_len ("adiw r26,%o0" CR_TAB
4046 "st X+,%A1" CR_TAB
4047 "st X+,%B1" CR_TAB
4048 "st X,%C1", op, plen, -4);
4049
4050 if (!reg_unused_after (insn, XEXP (base, 0)))
4051 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
4052
4053 return "";
4054 }
4055
4056 return avr_asm_len ("std %A0,%A1" CR_TAB
4057 "std %B0,%B1" CR_TAB
4058 "std %C0,%C1", op, plen, -3);
4059 }
4060 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4061 return avr_asm_len ("st %0,%C1" CR_TAB
4062 "st %0,%B1" CR_TAB
4063 "st %0,%A1", op, plen, -3);
4064 else if (GET_CODE (base) == POST_INC) /* (R++) */
4065 return avr_asm_len ("st %0,%A1" CR_TAB
4066 "st %0,%B1" CR_TAB
4067 "st %0,%C1", op, plen, -3);
4068
4069 fatal_insn ("unknown move insn:",insn);
4070 return "";
4071 }
4072
4073
4074 /* Move around 24-bit stuff. */
4075
4076 const char *
4077 avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
4078 {
4079 rtx dest = op[0];
4080 rtx src = op[1];
4081
4082 if (avr_mem_flash_p (src)
4083 || avr_mem_flash_p (dest))
4084 {
4085 return avr_out_lpm (insn, op, plen);
4086 }
4087
4088 if (register_operand (dest, VOIDmode))
4089 {
4090 if (register_operand (src, VOIDmode)) /* mov r,r */
4091 {
4092 if (true_regnum (dest) > true_regnum (src))
4093 {
4094 avr_asm_len ("mov %C0,%C1", op, plen, -1);
4095
4096 if (AVR_HAVE_MOVW)
4097 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4098 else
4099 return avr_asm_len ("mov %B0,%B1" CR_TAB
4100 "mov %A0,%A1", op, plen, 2);
4101 }
4102 else
4103 {
4104 if (AVR_HAVE_MOVW)
4105 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4106 else
4107 avr_asm_len ("mov %A0,%A1" CR_TAB
4108 "mov %B0,%B1", op, plen, -2);
4109
4110 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4111 }
4112 }
4113 else if (CONSTANT_P (src))
4114 {
4115 return avr_out_reload_inpsi (op, NULL_RTX, plen);
4116 }
4117 else if (MEM_P (src))
4118 return avr_out_load_psi (insn, op, plen); /* mov r,m */
4119 }
4120 else if (MEM_P (dest))
4121 {
4122 rtx xop[2];
4123
4124 xop[0] = dest;
4125 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4126
4127 return avr_out_store_psi (insn, xop, plen);
4128 }
4129
4130 fatal_insn ("invalid insn:", insn);
4131 return "";
4132 }
4133
4134
4135 static const char*
4136 out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
4137 {
4138 rtx dest = op[0];
4139 rtx src = op[1];
4140 rtx x = XEXP (dest, 0);
4141
4142 if (CONSTANT_ADDRESS_P (x))
4143 {
4144 return optimize > 0 && io_address_operand (x, QImode)
4145 ? avr_asm_len ("out %i0,%1", op, plen, -1)
4146 : avr_asm_len ("sts %m0,%1", op, plen, -2);
4147 }
4148 else if (GET_CODE (x) == PLUS
4149 && REG_P (XEXP (x, 0))
4150 && CONST_INT_P (XEXP (x, 1)))
4151 {
4152 /* memory access by reg+disp */
4153
4154 int disp = INTVAL (XEXP (x, 1));
4155
4156 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
4157 {
4158 if (REGNO (XEXP (x, 0)) != REG_Y)
4159 fatal_insn ("incorrect insn:",insn);
4160
4161 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4162 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4163 "std Y+63,%1" CR_TAB
4164 "sbiw r28,%o0-63", op, plen, -3);
4165
4166 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4167 "sbci r29,hi8(-%o0)" CR_TAB
4168 "st Y,%1" CR_TAB
4169 "subi r28,lo8(%o0)" CR_TAB
4170 "sbci r29,hi8(%o0)", op, plen, -5);
4171 }
4172 else if (REGNO (XEXP (x,0)) == REG_X)
4173 {
4174 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4175 {
4176 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4177 "adiw r26,%o0" CR_TAB
4178 "st X,__tmp_reg__", op, plen, -3);
4179 }
4180 else
4181 {
4182 avr_asm_len ("adiw r26,%o0" CR_TAB
4183 "st X,%1", op, plen, -2);
4184 }
4185
4186 if (!reg_unused_after (insn, XEXP (x,0)))
4187 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
4188
4189 return "";
4190 }
4191
4192 return avr_asm_len ("std %0,%1", op, plen, -1);
4193 }
4194
4195 return avr_asm_len ("st %0,%1", op, plen, -1);
4196 }
4197
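/* Illustrative sketch, not part of the original source: the OUT-vs-STS
   choice in out_movqi_mr_r above.  On classic AVRs the 64 IN/OUT-addressable
   I/O registers are also visible in the data address space at a fixed offset
   (0x20 on most devices -- an assumption here; the real test is
   io_address_operand).  For such addresses the 1-word OUT beats the
   2-word STS.  */

#include <stdbool.h>
#include <stdint.h>

#define IO_BASE 0x20  /* assumed data-space offset of the I/O file */
#define IO_SIZE 0x40  /* 64 registers reachable by IN/OUT */

static bool
fits_out_insn (uint16_t data_address)
{
  return data_address >= IO_BASE && data_address < IO_BASE + IO_SIZE;
}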
4198
4199 /* Helper for the next function, used for XMEGA. It does the same
4200 but writes the low byte first. */
4201
4202 static const char*
4203 avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
4204 {
4205 rtx dest = op[0];
4206 rtx src = op[1];
4207 rtx base = XEXP (dest, 0);
4208 int reg_base = true_regnum (base);
4209 int reg_src = true_regnum (src);
4210
4211 /* "volatile" forces writing low byte first, even if less efficient,
4212 for correct operation with 16-bit I/O registers like SP. */
4213 int mem_volatile_p = MEM_VOLATILE_P (dest);
4214
4215 if (CONSTANT_ADDRESS_P (base))
4216 return optimize > 0 && io_address_operand (base, HImode)
4217 ? avr_asm_len ("out %i0,%A1" CR_TAB
4218 "out %i0+1,%B1", op, plen, -2)
4219
4220 : avr_asm_len ("sts %m0,%A1" CR_TAB
4221 "sts %m0+1,%B1", op, plen, -4);
4222
4223 if (reg_base > 0)
4224 {
4225 if (reg_base != REG_X)
4226 return avr_asm_len ("st %0,%A1" CR_TAB
4227 "std %0+1,%B1", op, plen, -2);
4228
4229 if (reg_src == REG_X)
4230 /* "st X+,r26" and "st -X,r26" are undefined. */
4231 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4232 "st X,r26" CR_TAB
4233 "adiw r26,1" CR_TAB
4234 "st X,__tmp_reg__", op, plen, -4);
4235 else
4236 avr_asm_len ("st X+,%A1" CR_TAB
4237 "st X,%B1", op, plen, -2);
4238
4239 return reg_unused_after (insn, base)
4240 ? ""
4241 : avr_asm_len ("sbiw r26,1", op, plen, 1);
4242 }
4243 else if (GET_CODE (base) == PLUS)
4244 {
4245 int disp = INTVAL (XEXP (base, 1));
4246 reg_base = REGNO (XEXP (base, 0));
4247 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4248 {
4249 if (reg_base != REG_Y)
4250 fatal_insn ("incorrect insn:",insn);
4251
4252 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4253 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4254 "std Y+62,%A1" CR_TAB
4255 "std Y+63,%B1" CR_TAB
4256 "sbiw r28,%o0-62", op, plen, -4)
4257
4258 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4259 "sbci r29,hi8(-%o0)" CR_TAB
4260 "st Y,%A1" CR_TAB
4261 "std Y+1,%B1" CR_TAB
4262 "subi r28,lo8(%o0)" CR_TAB
4263 "sbci r29,hi8(%o0)", op, plen, -6);
4264 }
4265
4266 if (reg_base != REG_X)
4267 return avr_asm_len ("std %A0,%A1" CR_TAB
4268 "std %B0,%B1", op, plen, -2);
4269 /* (X + d) = R */
4270 return reg_src == REG_X
4271 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4272 "mov __zero_reg__,r27" CR_TAB
4273 "adiw r26,%o0" CR_TAB
4274 "st X+,__tmp_reg__" CR_TAB
4275 "st X,__zero_reg__" CR_TAB
4276 "clr __zero_reg__" CR_TAB
4277 "sbiw r26,%o0+1", op, plen, -7)
4278
4279 : avr_asm_len ("adiw r26,%o0" CR_TAB
4280 "st X+,%A1" CR_TAB
4281 "st X,%B1" CR_TAB
4282 "sbiw r26,%o0+1", op, plen, -4);
4283 }
4284 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4285 {
4286 if (!mem_volatile_p)
4287 return avr_asm_len ("st %0,%B1" CR_TAB
4288 "st %0,%A1", op, plen, -2);
4289
4290 return REGNO (XEXP (base, 0)) == REG_X
4291 ? avr_asm_len ("sbiw r26,2" CR_TAB
4292 "st X+,%A1" CR_TAB
4293 "st X,%B1" CR_TAB
4294 "sbiw r26,1", op, plen, -4)
4295
4296 : avr_asm_len ("sbiw %r0,2" CR_TAB
4297 "st %p0,%A1" CR_TAB
4298 "std %p0+1,%B1", op, plen, -3);
4299 }
4300 else if (GET_CODE (base) == POST_INC) /* (R++) */
4301 {
4302 return avr_asm_len ("st %0,%A1" CR_TAB
4303 "st %0,%B1", op, plen, -2);
4304
4305 }
4306 fatal_insn ("unknown move insn:",insn);
4307 return "";
4308 }
4309
4310
4311 static const char*
4312 out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
4313 {
4314 rtx dest = op[0];
4315 rtx src = op[1];
4316 rtx base = XEXP (dest, 0);
4317 int reg_base = true_regnum (base);
4318 int reg_src = true_regnum (src);
4319 int mem_volatile_p;
4320
4321 /* "volatile" forces writing the high byte first (non-xmega) resp.
4322 the low byte first (xmega), even if less efficient, for correct
4323 operation with 16-bit I/O registers like SP. */
4324
4325 if (AVR_XMEGA)
4326 return avr_out_movhi_mr_r_xmega (insn, op, plen);
4327
4328 mem_volatile_p = MEM_VOLATILE_P (dest);
4329
4330 if (CONSTANT_ADDRESS_P (base))
4331 return optimize > 0 && io_address_operand (base, HImode)
4332 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
4333 "out %i0,%A1", op, plen, -2)
4334
4335 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
4336 "sts %m0,%A1", op, plen, -4);
4337
4338 if (reg_base > 0)
4339 {
4340 if (reg_base != REG_X)
4341 return avr_asm_len ("std %0+1,%B1" CR_TAB
4342 "st %0,%A1", op, plen, -2);
4343
4344 if (reg_src == REG_X)
4345 /* "st X+,r26" and "st -X,r26" are undefined. */
4346 return !mem_volatile_p && reg_unused_after (insn, src)
4347 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4348 "st X,r26" CR_TAB
4349 "adiw r26,1" CR_TAB
4350 "st X,__tmp_reg__", op, plen, -4)
4351
4352 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4353 "adiw r26,1" CR_TAB
4354 "st X,__tmp_reg__" CR_TAB
4355 "sbiw r26,1" CR_TAB
4356 "st X,r26", op, plen, -5);
4357
4358 return !mem_volatile_p && reg_unused_after (insn, base)
4359 ? avr_asm_len ("st X+,%A1" CR_TAB
4360 "st X,%B1", op, plen, -2)
4361 : avr_asm_len ("adiw r26,1" CR_TAB
4362 "st X,%B1" CR_TAB
4363 "st -X,%A1", op, plen, -3);
4364 }
4365 else if (GET_CODE (base) == PLUS)
4366 {
4367 int disp = INTVAL (XEXP (base, 1));
4368 reg_base = REGNO (XEXP (base, 0));
4369 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4370 {
4371 if (reg_base != REG_Y)
4372 fatal_insn ("incorrect insn:",insn);
4373
4374 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4375 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4376 "std Y+63,%B1" CR_TAB
4377 "std Y+62,%A1" CR_TAB
4378 "sbiw r28,%o0-62", op, plen, -4)
4379
4380 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4381 "sbci r29,hi8(-%o0)" CR_TAB
4382 "std Y+1,%B1" CR_TAB
4383 "st Y,%A1" CR_TAB
4384 "subi r28,lo8(%o0)" CR_TAB
4385 "sbci r29,hi8(%o0)", op, plen, -6);
4386 }
4387
4388 if (reg_base != REG_X)
4389 return avr_asm_len ("std %B0,%B1" CR_TAB
4390 "std %A0,%A1", op, plen, -2);
4391 /* (X + d) = R */
4392 return reg_src == REG_X
4393 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4394 "mov __zero_reg__,r27" CR_TAB
4395 "adiw r26,%o0+1" CR_TAB
4396 "st X,__zero_reg__" CR_TAB
4397 "st -X,__tmp_reg__" CR_TAB
4398 "clr __zero_reg__" CR_TAB
4399 "sbiw r26,%o0", op, plen, -7)
4400
4401 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4402 "st X,%B1" CR_TAB
4403 "st -X,%A1" CR_TAB
4404 "sbiw r26,%o0", op, plen, -4);
4405 }
4406 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4407 {
4408 return avr_asm_len ("st %0,%B1" CR_TAB
4409 "st %0,%A1", op, plen, -2);
4410 }
4411 else if (GET_CODE (base) == POST_INC) /* (R++) */
4412 {
4413 if (!mem_volatile_p)
4414 return avr_asm_len ("st %0,%A1" CR_TAB
4415 "st %0,%B1", op, plen, -2);
4416
4417 return REGNO (XEXP (base, 0)) == REG_X
4418 ? avr_asm_len ("adiw r26,1" CR_TAB
4419 "st X,%B1" CR_TAB
4420 "st -X,%A1" CR_TAB
4421 "adiw r26,2", op, plen, -4)
4422
4423 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4424 "st %p0,%A1" CR_TAB
4425 "adiw %r0,2", op, plen, -3);
4426 }
4427 fatal_insn ("unknown move insn:",insn);
4428 return "";
4429 }
4430
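/* Illustrative sketch, not part of the original source: why volatile HImode
   stores are ordered as above.  Classic AVR 16-bit I/O registers latch the
   high byte in a shared TEMP register and commit both bytes when the low
   byte is written, so the high byte must go first; XMEGA commits on the
   high-byte access instead, hence the low byte goes first there.  The
   register name and addresses below are hypothetical.  */

#include <stdint.h>

#define TCNT1H (*(volatile uint8_t *) 0x85)  /* assumed address */
#define TCNT1L (*(volatile uint8_t *) 0x84)  /* assumed address */

static void
write_tcnt1 (uint16_t v)
{
  TCNT1H = (uint8_t) (v >> 8);  /* high byte first: latched in TEMP */
  TCNT1L = (uint8_t) v;         /* low-byte write commits both bytes */
}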
4431 /* Return true if the current function requires a frame pointer. */
4432
4433 static bool
4434 avr_frame_pointer_required_p (void)
4435 {
4436 return (cfun->calls_alloca
4437 || cfun->calls_setjmp
4438 || cfun->has_nonlocal_label
4439 || crtl->args.info.nregs == 0
4440 || get_frame_size () > 0);
4441 }
4442
4443 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4444
4445 static RTX_CODE
4446 compare_condition (rtx_insn *insn)
4447 {
4448 rtx_insn *next = next_real_insn (insn);
4449
4450 if (next && JUMP_P (next))
4451 {
4452 rtx pat = PATTERN (next);
4453 rtx src = SET_SRC (pat);
4454
4455 if (IF_THEN_ELSE == GET_CODE (src))
4456 return GET_CODE (XEXP (src, 0));
4457 }
4458
4459 return UNKNOWN;
4460 }
4461
4462
4463 /* Returns true iff INSN is a tst insn that only tests the sign. */
4464
4465 static bool
4466 compare_sign_p (rtx_insn *insn)
4467 {
4468 RTX_CODE cond = compare_condition (insn);
4469 return (cond == GE || cond == LT);
4470 }
4471
4472
4473 /* Returns true iff the next insn is a JUMP_INSN with a condition
4474 that needs to be swapped (GT, GTU, LE, LEU). */
4475
4476 static bool
4477 compare_diff_p (rtx_insn *insn)
4478 {
4479 RTX_CODE cond = compare_condition (insn);
4480 return (cond == GT || cond == GTU || cond == LE || cond == LEU);
4481 }
4482
4483 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4484
4485 static bool
4486 compare_eq_p (rtx_insn *insn)
4487 {
4488 RTX_CODE cond = compare_condition (insn);
4489 return (cond == EQ || cond == NE);
4490 }
4491
4492
4493 /* Output compare instruction
4494
4495 compare (XOP[0], XOP[1])
4496
4497 for a register XOP[0] and a compile-time constant XOP[1]. Return "".
4498 XOP[2] is an 8-bit scratch register as needed.
4499
4500 PLEN == NULL: Output instructions.
4501 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4502 Don't output anything. */
4503
4504 const char*
4505 avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
4506 {
4507 /* Register to compare and value to compare against. */
4508 rtx xreg = xop[0];
4509 rtx xval = xop[1];
4510
4511 /* MODE of the comparison. */
4512 enum machine_mode mode;
4513
4514 /* Number of bytes to operate on. */
4515 int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
4516
4517 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4518 int clobber_val = -1;
4519
4520 /* Map fixed mode operands to integer operands with the same binary
4521 representation. They are easier to handle in the remainder. */
4522
4523 if (CONST_FIXED_P (xval))
4524 {
4525 xreg = avr_to_int_mode (xop[0]);
4526 xval = avr_to_int_mode (xop[1]);
4527 }
4528
4529 mode = GET_MODE (xreg);
4530
4531 gcc_assert (REG_P (xreg));
4532 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4533 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4534
4535 if (plen)
4536 *plen = 0;
4537
4538 /* Comparisons == +/-1 and != +/-1 can be done similarly to comparing
4539 against 0 by ORing the bytes. This is one instruction shorter.
4540 Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4541 and therefore don't use this. */
4542
4543 if (!test_hard_reg_class (LD_REGS, xreg)
4544 && compare_eq_p (insn)
4545 && reg_unused_after (insn, xreg))
4546 {
4547 if (xval == const1_rtx)
4548 {
4549 avr_asm_len ("dec %A0" CR_TAB
4550 "or %A0,%B0", xop, plen, 2);
4551
4552 if (n_bytes >= 3)
4553 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4554
4555 if (n_bytes >= 4)
4556 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4557
4558 return "";
4559 }
4560 else if (xval == constm1_rtx)
4561 {
4562 if (n_bytes >= 4)
4563 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4564
4565 if (n_bytes >= 3)
4566 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4567
4568 return avr_asm_len ("and %A0,%B0" CR_TAB
4569 "com %A0", xop, plen, 2);
4570 }
4571 }
4572
4573 for (i = 0; i < n_bytes; i++)
4574 {
4575 /* We compare byte-wise. */
4576 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4577 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4578
4579 /* 8-bit value to compare with this byte. */
4580 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4581
4582 /* Registers R16..R31 can operate with immediate. */
4583 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4584
4585 xop[0] = reg8;
4586 xop[1] = gen_int_mode (val8, QImode);
4587
4588 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4589
4590 if (i == 0
4591 && test_hard_reg_class (ADDW_REGS, reg8))
4592 {
4593 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4594
4595 if (IN_RANGE (val16, 0, 63)
4596 && (val8 == 0
4597 || reg_unused_after (insn, xreg)))
4598 {
4599 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4600 i++;
4601 continue;
4602 }
4603
4604 if (n_bytes == 2
4605 && IN_RANGE (val16, -63, -1)
4606 && compare_eq_p (insn)
4607 && reg_unused_after (insn, xreg))
4608 {
4609 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4610 }
4611 }
4612
4613 /* Comparing against 0 is easy. */
4614
4615 if (val8 == 0)
4616 {
4617 avr_asm_len (i == 0
4618 ? "cp %0,__zero_reg__"
4619 : "cpc %0,__zero_reg__", xop, plen, 1);
4620 continue;
4621 }
4622
4623 /* Upper registers can compare and subtract-with-carry immediates.
4624 Notice that compare instructions do the same as respective subtract
4625 instruction; the only difference is that comparisons don't write
4626 the result back to the target register. */
4627
4628 if (ld_reg_p)
4629 {
4630 if (i == 0)
4631 {
4632 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4633 continue;
4634 }
4635 else if (reg_unused_after (insn, xreg))
4636 {
4637 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4638 continue;
4639 }
4640 }
4641
4642 /* Must load the value into the scratch register. */
4643
4644 gcc_assert (REG_P (xop[2]));
4645
4646 if (clobber_val != (int) val8)
4647 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4648 clobber_val = (int) val8;
4649
4650 avr_asm_len (i == 0
4651 ? "cp %0,%2"
4652 : "cpc %0,%2", xop, plen, 1);
4653 }
4654
4655 return "";
4656 }
4657
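/* Illustrative sketch, not part of the original source: the +/-1 comparison
   trick from avr_out_compare above (used when the operand is not in an
   upper "ld" register and may be clobbered).  x == 1 holds iff all bytes
   of x after a DEC of the low byte OR to zero; x == -1 holds iff ANDing
   all bytes and complementing yields zero.  A 16-bit C model:  */

#include <stdbool.h>
#include <stdint.h>

static bool
eq_plus_one (uint16_t x)                       /* x == 1 ?  */
{
  uint8_t lo = (uint8_t) ((uint8_t) x - 1);    /* dec %A0 */
  return (lo | (uint8_t) (x >> 8)) == 0;       /* or %A0,%B0; test Z */
}

static bool
eq_minus_one (uint16_t x)                      /* x == 0xffff ?  */
{
  uint8_t acc = (uint8_t) x & (uint8_t) (x >> 8);  /* and %A0,%B0 */
  return (uint8_t) ~acc == 0;                      /* com %A0; test Z */
}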
4658
4659 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4660
4661 const char*
4662 avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
4663 {
4664 rtx xop[3];
4665
4666 xop[0] = gen_rtx_REG (DImode, 18);
4667 xop[1] = op[0];
4668 xop[2] = op[1];
4669
4670 return avr_out_compare (insn, xop, plen);
4671 }
4672
4673 /* Output test instruction for HImode. */
4674
4675 const char*
4676 avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
4677 {
4678 if (compare_sign_p (insn))
4679 {
4680 avr_asm_len ("tst %B0", op, plen, -1);
4681 }
4682 else if (reg_unused_after (insn, op[0])
4683 && compare_eq_p (insn))
4684 {
4685 /* Faster than sbiw if we can clobber the operand. */
4686 avr_asm_len ("or %A0,%B0", op, plen, -1);
4687 }
4688 else
4689 {
4690 avr_out_compare (insn, op, plen);
4691 }
4692
4693 return "";
4694 }
4695
4696
4697 /* Output test instruction for PSImode. */
4698
4699 const char*
4700 avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
4701 {
4702 if (compare_sign_p (insn))
4703 {
4704 avr_asm_len ("tst %C0", op, plen, -1);
4705 }
4706 else if (reg_unused_after (insn, op[0])
4707 && compare_eq_p (insn))
4708 {
4709 /* Faster than sbiw if we can clobber the operand. */
4710 avr_asm_len ("or %A0,%B0" CR_TAB
4711 "or %A0,%C0", op, plen, -2);
4712 }
4713 else
4714 {
4715 avr_out_compare (insn, op, plen);
4716 }
4717
4718 return "";
4719 }
4720
4721
4722 /* Output test instruction for SImode. */
4723
4724 const char*
4725 avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
4726 {
4727 if (compare_sign_p (insn))
4728 {
4729 avr_asm_len ("tst %D0", op, plen, -1);
4730 }
4731 else if (reg_unused_after (insn, op[0])
4732 && compare_eq_p (insn))
4733 {
4734 /* Faster than sbiw if we can clobber the operand. */
4735 avr_asm_len ("or %A0,%B0" CR_TAB
4736 "or %A0,%C0" CR_TAB
4737 "or %A0,%D0", op, plen, -3);
4738 }
4739 else
4740 {
4741 avr_out_compare (insn, op, plen);
4742 }
4743
4744 return "";
4745 }
4746
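/* Illustrative sketch, not part of the original source: the OR trick used
   by avr_out_tsthi/tstpsi/tstsi above.  A multi-byte value is zero exactly
   when the OR of its bytes is zero, so when the operand may be clobbered a
   chain of OR instructions replaces the full compare.  SImode model:  */

#include <stdbool.h>
#include <stdint.h>

static bool
is_zero_by_or (uint32_t x)
{
  uint8_t acc = (uint8_t) x;
  acc |= (uint8_t) (x >> 8);   /* or %A0,%B0 */
  acc |= (uint8_t) (x >> 16);  /* or %A0,%C0 */
  acc |= (uint8_t) (x >> 24);  /* or %A0,%D0 */
  return acc == 0;             /* Z flag after the last OR */
}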
4747
4748 /* Generate asm equivalent for various shifts. This only handles cases
4749 that are not already carefully hand-optimized in ?sh??i3_out.
4750
4751 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4752 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4753 OPERANDS[3] is a QImode scratch register from LD regs if one is
4754 available, and SCRATCH otherwise (no scratch available).
4755
4756 TEMPL is an assembler template that shifts by one position.
4757 T_LEN is the length of this template. */
4758
4759 void
4760 out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
4761 int *plen, int t_len)
4762 {
4763 bool second_label = true;
4764 bool saved_in_tmp = false;
4765 bool use_zero_reg = false;
4766 rtx op[5];
4767
4768 op[0] = operands[0];
4769 op[1] = operands[1];
4770 op[2] = operands[2];
4771 op[3] = operands[3];
4772
4773 if (plen)
4774 *plen = 0;
4775
4776 if (CONST_INT_P (operands[2]))
4777 {
4778 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4779 && REG_P (operands[3]));
4780 int count = INTVAL (operands[2]);
4781 int max_len = 10; /* If larger than this, always use a loop. */
4782
4783 if (count <= 0)
4784 return;
4785
4786 if (count < 8 && !scratch)
4787 use_zero_reg = true;
4788
4789 if (optimize_size)
4790 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4791
4792 if (t_len * count <= max_len)
4793 {
4794 /* Output shifts inline with no loop - faster. */
4795
4796 while (count-- > 0)
4797 avr_asm_len (templ, op, plen, t_len);
4798
4799 return;
4800 }
4801
4802 if (scratch)
4803 {
4804 avr_asm_len ("ldi %3,%2", op, plen, 1);
4805 }
4806 else if (use_zero_reg)
4807 {
4808 /* Hack to save one word: use __zero_reg__ as loop counter.
4809 Set one bit, then shift in a loop until it is 0 again. */
4810
4811 op[3] = zero_reg_rtx;
4812
4813 avr_asm_len ("set" CR_TAB
4814 "bld %3,%2-1", op, plen, 2);
4815 }
4816 else
4817 {
4818 /* No scratch register available, use one from LD_REGS (saved in
4819 __tmp_reg__) that doesn't overlap with registers to shift. */
4820
4821 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4822 op[4] = tmp_reg_rtx;
4823 saved_in_tmp = true;
4824
4825 avr_asm_len ("mov %4,%3" CR_TAB
4826 "ldi %3,%2", op, plen, 2);
4827 }
4828
4829 second_label = false;
4830 }
4831 else if (MEM_P (op[2]))
4832 {
4833 rtx op_mov[2];
4834
4835 op_mov[0] = op[3] = tmp_reg_rtx;
4836 op_mov[1] = op[2];
4837
4838 out_movqi_r_mr (insn, op_mov, plen);
4839 }
4840 else if (register_operand (op[2], QImode))
4841 {
4842 op[3] = op[2];
4843
4844 if (!reg_unused_after (insn, op[2])
4845 || reg_overlap_mentioned_p (op[0], op[2]))
4846 {
4847 op[3] = tmp_reg_rtx;
4848 avr_asm_len ("mov %3,%2", op, plen, 1);
4849 }
4850 }
4851 else
4852 fatal_insn ("bad shift insn:", insn);
4853
4854 if (second_label)
4855 avr_asm_len ("rjmp 2f", op, plen, 1);
4856
4857 avr_asm_len ("1:", op, plen, 0);
4858 avr_asm_len (templ, op, plen, t_len);
4859
4860 if (second_label)
4861 avr_asm_len ("2:", op, plen, 0);
4862
4863 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4864 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4865
4866 if (saved_in_tmp)
4867 avr_asm_len ("mov %3,%4", op, plen, 1);
4868 }
4869
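/* Illustrative sketch, not part of the original source: the loop-counter
   hack in out_shift_with_cnt above.  Instead of loading the shift count
   into a register (which needs LDI and thus an upper register), a single
   bit is set at position count-1 in __zero_reg__; each trip through the
   loop shifts it right once, and the loop exits when the register reaches
   zero -- after exactly "count" iterations, leaving __zero_reg__ zero
   again.  */

#include <stdint.h>

static uint8_t
shift_left_loop (uint8_t x, int count)          /* 1 <= count <= 8 */
{
  uint8_t ctr = (uint8_t) (1u << (count - 1));  /* set; bld %3,%2-1 */
  do
    {
      x <<= 1;    /* the shift template, e.g. "lsl %0" */
      ctr >>= 1;  /* lsr %3 */
    }
  while (ctr);    /* brne 1b */
  return x;
}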
4870
4871 /* 8-bit shift left ((char)x << i) */
4872
4873 const char *
4874 ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
4875 {
4876 if (GET_CODE (operands[2]) == CONST_INT)
4877 {
4878 int k;
4879
4880 if (!len)
4881 len = &k;
4882
4883 switch (INTVAL (operands[2]))
4884 {
4885 default:
4886 if (INTVAL (operands[2]) < 8)
4887 break;
4888
4889 *len = 1;
4890 return "clr %0";
4891
4892 case 1:
4893 *len = 1;
4894 return "lsl %0";
4895
4896 case 2:
4897 *len = 2;
4898 return ("lsl %0" CR_TAB
4899 "lsl %0");
4900
4901 case 3:
4902 *len = 3;
4903 return ("lsl %0" CR_TAB
4904 "lsl %0" CR_TAB
4905 "lsl %0");
4906
4907 case 4:
4908 if (test_hard_reg_class (LD_REGS, operands[0]))
4909 {
4910 *len = 2;
4911 return ("swap %0" CR_TAB
4912 "andi %0,0xf0");
4913 }
4914 *len = 4;
4915 return ("lsl %0" CR_TAB
4916 "lsl %0" CR_TAB
4917 "lsl %0" CR_TAB
4918 "lsl %0");
4919
4920 case 5:
4921 if (test_hard_reg_class (LD_REGS, operands[0]))
4922 {
4923 *len = 3;
4924 return ("swap %0" CR_TAB
4925 "lsl %0" CR_TAB
4926 "andi %0,0xe0");
4927 }
4928 *len = 5;
4929 return ("lsl %0" CR_TAB
4930 "lsl %0" CR_TAB
4931 "lsl %0" CR_TAB
4932 "lsl %0" CR_TAB
4933 "lsl %0");
4934
4935 case 6:
4936 if (test_hard_reg_class (LD_REGS, operands[0]))
4937 {
4938 *len = 4;
4939 return ("swap %0" CR_TAB
4940 "lsl %0" CR_TAB
4941 "lsl %0" CR_TAB
4942 "andi %0,0xc0");
4943 }
4944 *len = 6;
4945 return ("lsl %0" CR_TAB
4946 "lsl %0" CR_TAB
4947 "lsl %0" CR_TAB
4948 "lsl %0" CR_TAB
4949 "lsl %0" CR_TAB
4950 "lsl %0");
4951
4952 case 7:
4953 *len = 3;
4954 return ("ror %0" CR_TAB
4955 "clr %0" CR_TAB
4956 "ror %0");
4957 }
4958 }
4959 else if (CONSTANT_P (operands[2]))
4960 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4961
4962 out_shift_with_cnt ("lsl %0",
4963 insn, operands, len, 1);
4964 return "";
4965 }
4966
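/* Illustrative sketch, not part of the original source: the SWAP trick
   used above for QImode shifts by 4..6.  SWAP exchanges the two nibbles,
   so x << 4 equals swap(x) with the low nibble masked off -- two
   instructions instead of four LSLs.  The mask needs ANDI and therefore
   an upper register, hence the test_hard_reg_class (LD_REGS, ...) guard. */

#include <stdint.h>

static uint8_t
swap_nibbles (uint8_t x)                         /* swap %0 */
{
  return (uint8_t) ((x << 4) | (x >> 4));
}

static uint8_t
shl4 (uint8_t x)                                 /* x << 4 in two insns */
{
  return (uint8_t) (swap_nibbles (x) & 0xf0);    /* andi %0,0xf0 */
}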
4967
4968 /* 16-bit shift left ((short)x << i) */
4969
4970 const char *
4971 ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
4972 {
4973 if (GET_CODE (operands[2]) == CONST_INT)
4974 {
4975 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4976 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4977 int k;
4978 int *t = len;
4979
4980 if (!len)
4981 len = &k;
4982
4983 switch (INTVAL (operands[2]))
4984 {
4985 default:
4986 if (INTVAL (operands[2]) < 16)
4987 break;
4988
4989 *len = 2;
4990 return ("clr %B0" CR_TAB
4991 "clr %A0");
4992
4993 case 4:
4994 if (optimize_size && scratch)
4995 break; /* 5 */
4996 if (ldi_ok)
4997 {
4998 *len = 6;
4999 return ("swap %A0" CR_TAB
5000 "swap %B0" CR_TAB
5001 "andi %B0,0xf0" CR_TAB
5002 "eor %B0,%A0" CR_TAB
5003 "andi %A0,0xf0" CR_TAB
5004 "eor %B0,%A0");
5005 }
5006 if (scratch)
5007 {
5008 *len = 7;
5009 return ("swap %A0" CR_TAB
5010 "swap %B0" CR_TAB
5011 "ldi %3,0xf0" CR_TAB
5012 "and %B0,%3" CR_TAB
5013 "eor %B0,%A0" CR_TAB
5014 "and %A0,%3" CR_TAB
5015 "eor %B0,%A0");
5016 }
5017 break; /* optimize_size ? 6 : 8 */
5018
5019 case 5:
5020 if (optimize_size)
5021 break; /* scratch ? 5 : 6 */
5022 if (ldi_ok)
5023 {
5024 *len = 8;
5025 return ("lsl %A0" CR_TAB
5026 "rol %B0" CR_TAB
5027 "swap %A0" CR_TAB
5028 "swap %B0" CR_TAB
5029 "andi %B0,0xf0" CR_TAB
5030 "eor %B0,%A0" CR_TAB
5031 "andi %A0,0xf0" CR_TAB
5032 "eor %B0,%A0");
5033 }
5034 if (scratch)
5035 {
5036 *len = 9;
5037 return ("lsl %A0" CR_TAB
5038 "rol %B0" CR_TAB
5039 "swap %A0" CR_TAB
5040 "swap %B0" CR_TAB
5041 "ldi %3,0xf0" CR_TAB
5042 "and %B0,%3" CR_TAB
5043 "eor %B0,%A0" CR_TAB
5044 "and %A0,%3" CR_TAB
5045 "eor %B0,%A0");
5046 }
5047 break; /* 10 */
5048
5049 case 6:
5050 if (optimize_size)
5051 break; /* scratch ? 5 : 6 */
5052 *len = 9;
5053 return ("clr __tmp_reg__" CR_TAB
5054 "lsr %B0" CR_TAB
5055 "ror %A0" CR_TAB
5056 "ror __tmp_reg__" CR_TAB
5057 "lsr %B0" CR_TAB
5058 "ror %A0" CR_TAB
5059 "ror __tmp_reg__" CR_TAB
5060 "mov %B0,%A0" CR_TAB
5061 "mov %A0,__tmp_reg__");
5062
5063 case 7:
5064 *len = 5;
5065 return ("lsr %B0" CR_TAB
5066 "mov %B0,%A0" CR_TAB
5067 "clr %A0" CR_TAB
5068 "ror %B0" CR_TAB
5069 "ror %A0");
5070
5071 case 8:
5072 return *len = 2, ("mov %B0,%A1" CR_TAB
5073 "clr %A0");
5074
5075 case 9:
5076 *len = 3;
5077 return ("mov %B0,%A0" CR_TAB
5078 "clr %A0" CR_TAB
5079 "lsl %B0");
5080
5081 case 10:
5082 *len = 4;
5083 return ("mov %B0,%A0" CR_TAB
5084 "clr %A0" CR_TAB
5085 "lsl %B0" CR_TAB
5086 "lsl %B0");
5087
5088 case 11:
5089 *len = 5;
5090 return ("mov %B0,%A0" CR_TAB
5091 "clr %A0" CR_TAB
5092 "lsl %B0" CR_TAB
5093 "lsl %B0" CR_TAB
5094 "lsl %B0");
5095
5096 case 12:
5097 if (ldi_ok)
5098 {
5099 *len = 4;
5100 return ("mov %B0,%A0" CR_TAB
5101 "clr %A0" CR_TAB
5102 "swap %B0" CR_TAB
5103 "andi %B0,0xf0");
5104 }
5105 if (scratch)
5106 {
5107 *len = 5;
5108 return ("mov %B0,%A0" CR_TAB
5109 "clr %A0" CR_TAB
5110 "swap %B0" CR_TAB
5111 "ldi %3,0xf0" CR_TAB
5112 "and %B0,%3");
5113 }
5114 *len = 6;
5115 return ("mov %B0,%A0" CR_TAB
5116 "clr %A0" CR_TAB
5117 "lsl %B0" CR_TAB
5118 "lsl %B0" CR_TAB
5119 "lsl %B0" CR_TAB
5120 "lsl %B0");
5121
5122 case 13:
5123 if (ldi_ok)
5124 {
5125 *len = 5;
5126 return ("mov %B0,%A0" CR_TAB
5127 "clr %A0" CR_TAB
5128 "swap %B0" CR_TAB
5129 "lsl %B0" CR_TAB
5130 "andi %B0,0xe0");
5131 }
5132 if (AVR_HAVE_MUL && scratch)
5133 {
5134 *len = 5;
5135 return ("ldi %3,0x20" CR_TAB
5136 "mul %A0,%3" CR_TAB
5137 "mov %B0,r0" CR_TAB
5138 "clr %A0" CR_TAB
5139 "clr __zero_reg__");
5140 }
5141 if (optimize_size && scratch)
5142 break; /* 5 */
5143 if (scratch)
5144 {
5145 *len = 6;
5146 return ("mov %B0,%A0" CR_TAB
5147 "clr %A0" CR_TAB
5148 "swap %B0" CR_TAB
5149 "lsl %B0" CR_TAB
5150 "ldi %3,0xe0" CR_TAB
5151 "and %B0,%3");
5152 }
5153 if (AVR_HAVE_MUL)
5154 {
5155 *len = 6;
5156 return ("set" CR_TAB
5157 "bld r1,5" CR_TAB
5158 "mul %A0,r1" CR_TAB
5159 "mov %B0,r0" CR_TAB
5160 "clr %A0" CR_TAB
5161 "clr __zero_reg__");
5162 }
5163 *len = 7;
5164 return ("mov %B0,%A0" CR_TAB
5165 "clr %A0" CR_TAB
5166 "lsl %B0" CR_TAB
5167 "lsl %B0" CR_TAB
5168 "lsl %B0" CR_TAB
5169 "lsl %B0" CR_TAB
5170 "lsl %B0");
5171
5172 case 14:
5173 if (AVR_HAVE_MUL && ldi_ok)
5174 {
5175 *len = 5;
5176 return ("ldi %B0,0x40" CR_TAB
5177 "mul %A0,%B0" CR_TAB
5178 "mov %B0,r0" CR_TAB
5179 "clr %A0" CR_TAB
5180 "clr __zero_reg__");
5181 }
5182 if (AVR_HAVE_MUL && scratch)
5183 {
5184 *len = 5;
5185 return ("ldi %3,0x40" CR_TAB
5186 "mul %A0,%3" CR_TAB
5187 "mov %B0,r0" CR_TAB
5188 "clr %A0" CR_TAB
5189 "clr __zero_reg__");
5190 }
5191 if (optimize_size && ldi_ok)
5192 {
5193 *len = 5;
5194 return ("mov %B0,%A0" CR_TAB
5195 "ldi %A0,6" "\n1:\t"
5196 "lsl %B0" CR_TAB
5197 "dec %A0" CR_TAB
5198 "brne 1b");
5199 }
5200 if (optimize_size && scratch)
5201 break; /* 5 */
5202 *len = 6;
5203 return ("clr %B0" CR_TAB
5204 "lsr %A0" CR_TAB
5205 "ror %B0" CR_TAB
5206 "lsr %A0" CR_TAB
5207 "ror %B0" CR_TAB
5208 "clr %A0");
5209
5210 case 15:
5211 *len = 4;
5212 return ("clr %B0" CR_TAB
5213 "lsr %A0" CR_TAB
5214 "ror %B0" CR_TAB
5215 "clr %A0");
5216 }
5217 len = t;
5218 }
5219 out_shift_with_cnt ("lsl %A0" CR_TAB
5220 "rol %B0", insn, operands, len, 2);
5221 return "";
5222 }
5223
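/* Illustrative sketch, not part of the original source: the MUL-based
   HImode shifts above (cases 13 and 14, on AVR_HAVE_MUL devices).
   Multiplying the low byte by 2^k gives a 16-bit product whose low byte
   is exactly the high byte of x << (8 + k), so one MUL plus a register
   move replaces a long chain of LSLs.  C model of the shift by 13:  */

#include <stdint.h>

static uint16_t
shl13 (uint16_t x)
{
  uint16_t prod = (uint16_t) ((uint8_t) x * 0x20u);  /* mul %A0,%3 (0x20) */
  uint8_t hi = (uint8_t) prod;                       /* mov %B0,r0 */
  return (uint16_t) (hi << 8);                       /* clr %A0 */
}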
5224
5225 /* 24-bit shift left */
5226
5227 const char*
5228 avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
5229 {
5230 if (plen)
5231 *plen = 0;
5232
5233 if (CONST_INT_P (op[2]))
5234 {
5235 switch (INTVAL (op[2]))
5236 {
5237 default:
5238 if (INTVAL (op[2]) < 24)
5239 break;
5240
5241 return avr_asm_len ("clr %A0" CR_TAB
5242 "clr %B0" CR_TAB
5243 "clr %C0", op, plen, 3);
5244
5245 case 8:
5246 {
5247 int reg0 = REGNO (op[0]);
5248 int reg1 = REGNO (op[1]);
5249
5250 if (reg0 >= reg1)
5251 return avr_asm_len ("mov %C0,%B1" CR_TAB
5252 "mov %B0,%A1" CR_TAB
5253 "clr %A0", op, plen, 3);
5254 else
5255 return avr_asm_len ("clr %A0" CR_TAB
5256 "mov %B0,%A1" CR_TAB
5257 "mov %C0,%B1", op, plen, 3);
5258 }
5259
5260 case 16:
5261 {
5262 int reg0 = REGNO (op[0]);
5263 int reg1 = REGNO (op[1]);
5264
5265 if (reg0 + 2 != reg1)
5266 avr_asm_len ("mov %C0,%A0", op, plen, 1);
5267
5268 return avr_asm_len ("clr %B0" CR_TAB
5269 "clr %A0", op, plen, 2);
5270 }
5271
5272 case 23:
5273 return avr_asm_len ("clr %C0" CR_TAB
5274 "lsr %A0" CR_TAB
5275 "ror %C0" CR_TAB
5276 "clr %B0" CR_TAB
5277 "clr %A0", op, plen, 5);
5278 }
5279 }
5280
5281 out_shift_with_cnt ("lsl %A0" CR_TAB
5282 "rol %B0" CR_TAB
5283 "rol %C0", insn, op, plen, 3);
5284 return "";
5285 }
5286
5287
5288 /* 32-bit shift left ((long)x << i) */
5289
5290 const char *
5291 ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
5292 {
5293 if (GET_CODE (operands[2]) == CONST_INT)
5294 {
5295 int k;
5296 int *t = len;
5297
5298 if (!len)
5299 len = &k;
5300
5301 switch (INTVAL (operands[2]))
5302 {
5303 default:
5304 if (INTVAL (operands[2]) < 32)
5305 break;
5306
5307 if (AVR_HAVE_MOVW)
5308 return *len = 3, ("clr %D0" CR_TAB
5309 "clr %C0" CR_TAB
5310 "movw %A0,%C0");
5311 *len = 4;
5312 return ("clr %D0" CR_TAB
5313 "clr %C0" CR_TAB
5314 "clr %B0" CR_TAB
5315 "clr %A0");
5316
5317 case 8:
5318 {
5319 int reg0 = true_regnum (operands[0]);
5320 int reg1 = true_regnum (operands[1]);
5321 *len = 4;
5322 if (reg0 >= reg1)
5323 return ("mov %D0,%C1" CR_TAB
5324 "mov %C0,%B1" CR_TAB
5325 "mov %B0,%A1" CR_TAB
5326 "clr %A0");
5327 else
5328 return ("clr %A0" CR_TAB
5329 "mov %B0,%A1" CR_TAB
5330 "mov %C0,%B1" CR_TAB
5331 "mov %D0,%C1");
5332 }
5333
5334 case 16:
5335 {
5336 int reg0 = true_regnum (operands[0]);
5337 int reg1 = true_regnum (operands[1]);
5338 if (reg0 + 2 == reg1)
5339 return *len = 2, ("clr %B0" CR_TAB
5340 "clr %A0");
5341 if (AVR_HAVE_MOVW)
5342 return *len = 3, ("movw %C0,%A1" CR_TAB
5343 "clr %B0" CR_TAB
5344 "clr %A0");
5345 else
5346 return *len = 4, ("mov %C0,%A1" CR_TAB
5347 "mov %D0,%B1" CR_TAB
5348 "clr %B0" CR_TAB
5349 "clr %A0");
5350 }
5351
5352 case 24:
5353 *len = 4;
5354 return ("mov %D0,%A1" CR_TAB
5355 "clr %C0" CR_TAB
5356 "clr %B0" CR_TAB
5357 "clr %A0");
5358
5359 case 31:
5360 *len = 6;
5361 return ("clr %D0" CR_TAB
5362 "lsr %A0" CR_TAB
5363 "ror %D0" CR_TAB
5364 "clr %C0" CR_TAB
5365 "clr %B0" CR_TAB
5366 "clr %A0");
5367 }
5368 len = t;
5369 }
5370 out_shift_with_cnt ("lsl %A0" CR_TAB
5371 "rol %B0" CR_TAB
5372 "rol %C0" CR_TAB
5373 "rol %D0", insn, operands, len, 4);
5374 return "";
5375 }
5376
5377 /* 8-bit arithmetic shift right ((signed char)x >> i) */
5378
5379 const char *
5380 ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
5381 {
5382 if (GET_CODE (operands[2]) == CONST_INT)
5383 {
5384 int k;
5385
5386 if (!len)
5387 len = &k;
5388
5389 switch (INTVAL (operands[2]))
5390 {
5391 case 1:
5392 *len = 1;
5393 return "asr %0";
5394
5395 case 2:
5396 *len = 2;
5397 return ("asr %0" CR_TAB
5398 "asr %0");
5399
5400 case 3:
5401 *len = 3;
5402 return ("asr %0" CR_TAB
5403 "asr %0" CR_TAB
5404 "asr %0");
5405
5406 case 4:
5407 *len = 4;
5408 return ("asr %0" CR_TAB
5409 "asr %0" CR_TAB
5410 "asr %0" CR_TAB
5411 "asr %0");
5412
5413 case 5:
5414 *len = 5;
5415 return ("asr %0" CR_TAB
5416 "asr %0" CR_TAB
5417 "asr %0" CR_TAB
5418 "asr %0" CR_TAB
5419 "asr %0");
5420
5421 case 6:
5422 *len = 4;
5423 return ("bst %0,6" CR_TAB
5424 "lsl %0" CR_TAB
5425 "sbc %0,%0" CR_TAB
5426 "bld %0,0");
5427
5428 default:
5429 if (INTVAL (operands[2]) < 8)
5430 break;
5431
5432 /* fall through */
5433
5434 case 7:
5435 *len = 2;
5436 return ("lsl %0" CR_TAB
5437 "sbc %0,%0");
5438 }
5439 }
5440 else if (CONSTANT_P (operands[2]))
5441 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5442
5443 out_shift_with_cnt ("asr %0",
5444 insn, operands, len, 1);
5445 return "";
5446 }
5447
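/* Illustrative sketch, not part of the original source: the sign-fill
   idiom "lsl %0; sbc %0,%0" used above for arithmetic right shifts by 7.
   LSL moves the sign bit into the carry flag, and subtract-with-carry of
   a register from itself then yields 0x00 (carry clear) or 0xff (carry
   set) -- the sign extended over the whole byte.  */

#include <stdint.h>

static uint8_t
asr7 (uint8_t x)
{
  uint8_t carry = (uint8_t) (x >> 7);  /* lsl %0: sign bit -> C */
  return (uint8_t) (0 - carry);        /* sbc %0,%0: 0x00 or 0xff */
}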
5448
5449 /* 16-bit arithmetic shift right ((signed short)x >> i) */
5450
5451 const char *
5452 ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
5453 {
5454 if (GET_CODE (operands[2]) == CONST_INT)
5455 {
5456 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5457 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5458 int k;
5459 int *t = len;
5460
5461 if (!len)
5462 len = &k;
5463
5464 switch (INTVAL (operands[2]))
5465 {
5466 case 4:
5467 case 5:
5468 /* XXX try to optimize this too? */
5469 break;
5470
5471 case 6:
5472 if (optimize_size)
5473 break; /* scratch ? 5 : 6 */
5474 *len = 8;
5475 return ("mov __tmp_reg__,%A0" CR_TAB
5476 "mov %A0,%B0" CR_TAB
5477 "lsl __tmp_reg__" CR_TAB
5478 "rol %A0" CR_TAB
5479 "sbc %B0,%B0" CR_TAB
5480 "lsl __tmp_reg__" CR_TAB
5481 "rol %A0" CR_TAB
5482 "rol %B0");
5483
5484 case 7:
5485 *len = 4;
5486 return ("lsl %A0" CR_TAB
5487 "mov %A0,%B0" CR_TAB
5488 "rol %A0" CR_TAB
5489 "sbc %B0,%B0");
5490
5491 case 8:
5492 {
5493 int reg0 = true_regnum (operands[0]);
5494 int reg1 = true_regnum (operands[1]);
5495
5496 if (reg0 == reg1)
5497 return *len = 3, ("mov %A0,%B0" CR_TAB
5498 "lsl %B0" CR_TAB
5499 "sbc %B0,%B0");
5500 else
5501 return *len = 4, ("mov %A0,%B1" CR_TAB
5502 "clr %B0" CR_TAB
5503 "sbrc %A0,7" CR_TAB
5504 "dec %B0");
5505 }
5506
5507 case 9:
5508 *len = 4;
5509 return ("mov %A0,%B0" CR_TAB
5510 "lsl %B0" CR_TAB
5511 "sbc %B0,%B0" CR_TAB
5512 "asr %A0");
5513
5514 case 10:
5515 *len = 5;
5516 return ("mov %A0,%B0" CR_TAB
5517 "lsl %B0" CR_TAB
5518 "sbc %B0,%B0" CR_TAB
5519 "asr %A0" CR_TAB
5520 "asr %A0");
5521
5522 case 11:
5523 if (AVR_HAVE_MUL && ldi_ok)
5524 {
5525 *len = 5;
5526 return ("ldi %A0,0x20" CR_TAB
5527 "muls %B0,%A0" CR_TAB
5528 "mov %A0,r1" CR_TAB
5529 "sbc %B0,%B0" CR_TAB
5530 "clr __zero_reg__");
5531 }
5532 if (optimize_size && scratch)
5533 break; /* 5 */
5534 *len = 6;
5535 return ("mov %A0,%B0" CR_TAB
5536 "lsl %B0" CR_TAB
5537 "sbc %B0,%B0" CR_TAB
5538 "asr %A0" CR_TAB
5539 "asr %A0" CR_TAB
5540 "asr %A0");
5541
5542 case 12:
5543 if (AVR_HAVE_MUL && ldi_ok)
5544 {
5545 *len = 5;
5546 return ("ldi %A0,0x10" CR_TAB
5547 "muls %B0,%A0" CR_TAB
5548 "mov %A0,r1" CR_TAB
5549 "sbc %B0,%B0" CR_TAB
5550 "clr __zero_reg__");
5551 }
5552 if (optimize_size && scratch)
5553 break; /* 5 */
5554 *len = 7;
5555 return ("mov %A0,%B0" CR_TAB
5556 "lsl %B0" CR_TAB
5557 "sbc %B0,%B0" CR_TAB
5558 "asr %A0" CR_TAB
5559 "asr %A0" CR_TAB
5560 "asr %A0" CR_TAB
5561 "asr %A0");
5562
5563 case 13:
5564 if (AVR_HAVE_MUL && ldi_ok)
5565 {
5566 *len = 5;
5567 return ("ldi %A0,0x08" CR_TAB
5568 "muls %B0,%A0" CR_TAB
5569 "mov %A0,r1" CR_TAB
5570 "sbc %B0,%B0" CR_TAB
5571 "clr __zero_reg__");
5572 }
5573 if (optimize_size)
5574 break; /* scratch ? 5 : 7 */
5575 *len = 8;
5576 return ("mov %A0,%B0" CR_TAB
5577 "lsl %B0" CR_TAB
5578 "sbc %B0,%B0" CR_TAB
5579 "asr %A0" CR_TAB
5580 "asr %A0" CR_TAB
5581 "asr %A0" CR_TAB
5582 "asr %A0" CR_TAB
5583 "asr %A0");
5584
5585 case 14:
5586 *len = 5;
5587 return ("lsl %B0" CR_TAB
5588 "sbc %A0,%A0" CR_TAB
5589 "lsl %B0" CR_TAB
5590 "mov %B0,%A0" CR_TAB
5591 "rol %A0");
5592
5593 default:
5594 if (INTVAL (operands[2]) < 16)
5595 break;
5596
5597 /* fall through */
5598
5599 case 15:
5600 return *len = 3, ("lsl %B0" CR_TAB
5601 "sbc %A0,%A0" CR_TAB
5602 "mov %B0,%A0");
5603 }
5604 len = t;
5605 }
5606 out_shift_with_cnt ("asr %B0" CR_TAB
5607 "ror %A0", insn, operands, len, 2);
5608 return "";
5609 }
5610
5611
5612 /* 24-bit arithmetic shift right */
5613
5614 const char*
5615 avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
5616 {
5617 int dest = REGNO (op[0]);
5618 int src = REGNO (op[1]);
5619
5620 if (CONST_INT_P (op[2]))
5621 {
5622 if (plen)
5623 *plen = 0;
5624
5625 switch (INTVAL (op[2]))
5626 {
5627 case 8:
5628 if (dest <= src)
5629 return avr_asm_len ("mov %A0,%B1" CR_TAB
5630 "mov %B0,%C1" CR_TAB
5631 "clr %C0" CR_TAB
5632 "sbrc %B0,7" CR_TAB
5633 "dec %C0", op, plen, 5);
5634 else
5635 return avr_asm_len ("clr %C0" CR_TAB
5636 "sbrc %C1,7" CR_TAB
5637 "dec %C0" CR_TAB
5638 "mov %B0,%C1" CR_TAB
5639 "mov %A0,%B1", op, plen, 5);
5640
5641 case 16:
5642 if (dest != src + 2)
5643 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5644
5645 return avr_asm_len ("clr %B0" CR_TAB
5646 "sbrc %A0,7" CR_TAB
5647 "com %B0" CR_TAB
5648 "mov %C0,%B0", op, plen, 4);
5649
5650 default:
5651 if (INTVAL (op[2]) < 24)
5652 break;
5653
5654 /* fall through */
5655
5656 case 23:
5657 return avr_asm_len ("lsl %C0" CR_TAB
5658 "sbc %A0,%A0" CR_TAB
5659 "mov %B0,%A0" CR_TAB
5660 "mov %C0,%A0", op, plen, 4);
5661 } /* switch */
5662 }
5663
5664 out_shift_with_cnt ("asr %C0" CR_TAB
5665 "ror %B0" CR_TAB
5666 "ror %A0", insn, op, plen, 3);
5667 return "";
5668 }
5669
5670
5671 /* 32-bit arithmetic shift right ((signed long)x >> i) */
5672
5673 const char *
5674 ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
5675 {
5676 if (GET_CODE (operands[2]) == CONST_INT)
5677 {
5678 int k;
5679 int *t = len;
5680
5681 if (!len)
5682 len = &k;
5683
5684 switch (INTVAL (operands[2]))
5685 {
5686 case 8:
5687 {
5688 int reg0 = true_regnum (operands[0]);
5689 int reg1 = true_regnum (operands[1]);
5690 *len = 6;
5691 if (reg0 <= reg1)
5692 return ("mov %A0,%B1" CR_TAB
5693 "mov %B0,%C1" CR_TAB
5694 "mov %C0,%D1" CR_TAB
5695 "clr %D0" CR_TAB
5696 "sbrc %C0,7" CR_TAB
5697 "dec %D0");
5698 else
5699 return ("clr %D0" CR_TAB
5700 "sbrc %D1,7" CR_TAB
5701 "dec %D0" CR_TAB
5702 "mov %C0,%D1" CR_TAB
5703 "mov %B0,%C1" CR_TAB
5704 "mov %A0,%B1");
5705 }
5706
5707 case 16:
5708 {
5709 int reg0 = true_regnum (operands[0]);
5710 int reg1 = true_regnum (operands[1]);
5711
5712 if (reg0 == reg1 + 2)
5713 return *len = 4, ("clr %D0" CR_TAB
5714 "sbrc %B0,7" CR_TAB
5715 "com %D0" CR_TAB
5716 "mov %C0,%D0");
5717 if (AVR_HAVE_MOVW)
5718 return *len = 5, ("movw %A0,%C1" CR_TAB
5719 "clr %D0" CR_TAB
5720 "sbrc %B0,7" CR_TAB
5721 "com %D0" CR_TAB
5722 "mov %C0,%D0");
5723 else
5724 return *len = 6, ("mov %B0,%D1" CR_TAB
5725 "mov %A0,%C1" CR_TAB
5726 "clr %D0" CR_TAB
5727 "sbrc %B0,7" CR_TAB
5728 "com %D0" CR_TAB
5729 "mov %C0,%D0");
5730 }
5731
5732 case 24:
5733 return *len = 6, ("mov %A0,%D1" CR_TAB
5734 "clr %D0" CR_TAB
5735 "sbrc %A0,7" CR_TAB
5736 "com %D0" CR_TAB
5737 "mov %B0,%D0" CR_TAB
5738 "mov %C0,%D0");
5739
5740 default:
5741 if (INTVAL (operands[2]) < 32)
5742 break;
5743
5744 /* fall through */
5745
5746 case 31:
5747 if (AVR_HAVE_MOVW)
5748 return *len = 4, ("lsl %D0" CR_TAB
5749 "sbc %A0,%A0" CR_TAB
5750 "mov %B0,%A0" CR_TAB
5751 "movw %C0,%A0");
5752 else
5753 return *len = 5, ("lsl %D0" CR_TAB
5754 "sbc %A0,%A0" CR_TAB
5755 "mov %B0,%A0" CR_TAB
5756 "mov %C0,%A0" CR_TAB
5757 "mov %D0,%A0");
5758 }
5759 len = t;
5760 }
5761 out_shift_with_cnt ("asr %D0" CR_TAB
5762 "ror %C0" CR_TAB
5763 "ror %B0" CR_TAB
5764 "ror %A0", insn, operands, len, 4);
5765 return "";
5766 }
5767
5768 /* 8-bit logic shift right ((unsigned char)x >> i) */
5769
5770 const char *
5771 lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
5772 {
5773 if (GET_CODE (operands[2]) == CONST_INT)
5774 {
5775 int k;
5776
5777 if (!len)
5778 len = &k;
5779
5780 switch (INTVAL (operands[2]))
5781 {
5782 default:
5783 if (INTVAL (operands[2]) < 8)
5784 break;
5785
5786 *len = 1;
5787 return "clr %0";
5788
5789 case 1:
5790 *len = 1;
5791 return "lsr %0";
5792
5793 case 2:
5794 *len = 2;
5795 return ("lsr %0" CR_TAB
5796 "lsr %0");
5797 case 3:
5798 *len = 3;
5799 return ("lsr %0" CR_TAB
5800 "lsr %0" CR_TAB
5801 "lsr %0");
5802
5803 case 4:
5804 if (test_hard_reg_class (LD_REGS, operands[0]))
5805 {
5806 *len = 2;
5807 return ("swap %0" CR_TAB
5808 "andi %0,0x0f");
5809 }
5810 *len = 4;
5811 return ("lsr %0" CR_TAB
5812 "lsr %0" CR_TAB
5813 "lsr %0" CR_TAB
5814 "lsr %0");
5815
5816 case 5:
5817 if (test_hard_reg_class (LD_REGS, operands[0]))
5818 {
5819 *len = 3;
5820 return ("swap %0" CR_TAB
5821 "lsr %0" CR_TAB
5822 "andi %0,0x7");
5823 }
5824 *len = 5;
5825 return ("lsr %0" CR_TAB
5826 "lsr %0" CR_TAB
5827 "lsr %0" CR_TAB
5828 "lsr %0" CR_TAB
5829 "lsr %0");
5830
5831 case 6:
5832 if (test_hard_reg_class (LD_REGS, operands[0]))
5833 {
5834 *len = 4;
5835 return ("swap %0" CR_TAB
5836 "lsr %0" CR_TAB
5837 "lsr %0" CR_TAB
5838 "andi %0,0x3");
5839 }
5840 *len = 6;
5841 return ("lsr %0" CR_TAB
5842 "lsr %0" CR_TAB
5843 "lsr %0" CR_TAB
5844 "lsr %0" CR_TAB
5845 "lsr %0" CR_TAB
5846 "lsr %0");
5847
5848 case 7:
5849 *len = 3;
5850 return ("rol %0" CR_TAB
5851 "clr %0" CR_TAB
5852 "rol %0");
5853 }
5854 }
5855 else if (CONSTANT_P (operands[2]))
5856 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5857
5858 out_shift_with_cnt ("lsr %0",
5859 insn, operands, len, 1);
5860 return "";
5861 }
5862
5863 /* 16-bit logic shift right ((unsigned short)x >> i) */
5864
5865 const char *
5866 lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
5867 {
5868 if (GET_CODE (operands[2]) == CONST_INT)
5869 {
5870 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5871 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5872 int k;
5873 int *t = len;
5874
5875 if (!len)
5876 len = &k;
5877
5878 switch (INTVAL (operands[2]))
5879 {
5880 default:
5881 if (INTVAL (operands[2]) < 16)
5882 break;
5883
5884 *len = 2;
5885 return ("clr %B0" CR_TAB
5886 "clr %A0");
5887
5888 case 4:
5889 if (optimize_size && scratch)
5890 break; /* 5 */
5891 if (ldi_ok)
5892 {
5893 *len = 6;
5894 return ("swap %B0" CR_TAB
5895 "swap %A0" CR_TAB
5896 "andi %A0,0x0f" CR_TAB
5897 "eor %A0,%B0" CR_TAB
5898 "andi %B0,0x0f" CR_TAB
5899 "eor %A0,%B0");
5900 }
5901 if (scratch)
5902 {
5903 *len = 7;
5904 return ("swap %B0" CR_TAB
5905 "swap %A0" CR_TAB
5906 "ldi %3,0x0f" CR_TAB
5907 "and %A0,%3" CR_TAB
5908 "eor %A0,%B0" CR_TAB
5909 "and %B0,%3" CR_TAB
5910 "eor %A0,%B0");
5911 }
5912 break; /* optimize_size ? 6 : 8 */
5913
5914 case 5:
5915 if (optimize_size)
5916 break; /* scratch ? 5 : 6 */
5917 if (ldi_ok)
5918 {
5919 *len = 8;
5920 return ("lsr %B0" CR_TAB
5921 "ror %A0" CR_TAB
5922 "swap %B0" CR_TAB
5923 "swap %A0" CR_TAB
5924 "andi %A0,0x0f" CR_TAB
5925 "eor %A0,%B0" CR_TAB
5926 "andi %B0,0x0f" CR_TAB
5927 "eor %A0,%B0");
5928 }
5929 if (scratch)
5930 {
5931 *len = 9;
5932 return ("lsr %B0" CR_TAB
5933 "ror %A0" CR_TAB
5934 "swap %B0" CR_TAB
5935 "swap %A0" CR_TAB
5936 "ldi %3,0x0f" CR_TAB
5937 "and %A0,%3" CR_TAB
5938 "eor %A0,%B0" CR_TAB
5939 "and %B0,%3" CR_TAB
5940 "eor %A0,%B0");
5941 }
5942 break; /* 10 */
5943
5944 case 6:
5945 if (optimize_size)
5946 break; /* scratch ? 5 : 6 */
5947 *len = 9;
5948 return ("clr __tmp_reg__" CR_TAB
5949 "lsl %A0" CR_TAB
5950 "rol %B0" CR_TAB
5951 "rol __tmp_reg__" CR_TAB
5952 "lsl %A0" CR_TAB
5953 "rol %B0" CR_TAB
5954 "rol __tmp_reg__" CR_TAB
5955 "mov %A0,%B0" CR_TAB
5956 "mov %B0,__tmp_reg__");
5957
5958 case 7:
5959 *len = 5;
5960 return ("lsl %A0" CR_TAB
5961 "mov %A0,%B0" CR_TAB
5962 "rol %A0" CR_TAB
5963 "sbc %B0,%B0" CR_TAB
5964 "neg %B0");
5965
5966 case 8:
5967 return *len = 2, ("mov %A0,%B1" CR_TAB
5968 "clr %B0");
5969
5970 case 9:
5971 *len = 3;
5972 return ("mov %A0,%B0" CR_TAB
5973 "clr %B0" CR_TAB
5974 "lsr %A0");
5975
5976 case 10:
5977 *len = 4;
5978 return ("mov %A0,%B0" CR_TAB
5979 "clr %B0" CR_TAB
5980 "lsr %A0" CR_TAB
5981 "lsr %A0");
5982
5983 case 11:
5984 *len = 5;
5985 return ("mov %A0,%B0" CR_TAB
5986 "clr %B0" CR_TAB
5987 "lsr %A0" CR_TAB
5988 "lsr %A0" CR_TAB
5989 "lsr %A0");
5990
5991 case 12:
5992 if (ldi_ok)
5993 {
5994 *len = 4;
5995 return ("mov %A0,%B0" CR_TAB
5996 "clr %B0" CR_TAB
5997 "swap %A0" CR_TAB
5998 "andi %A0,0x0f");
5999 }
6000 if (scratch)
6001 {
6002 *len = 5;
6003 return ("mov %A0,%B0" CR_TAB
6004 "clr %B0" CR_TAB
6005 "swap %A0" CR_TAB
6006 "ldi %3,0x0f" CR_TAB
6007 "and %A0,%3");
6008 }
6009 *len = 6;
6010 return ("mov %A0,%B0" CR_TAB
6011 "clr %B0" CR_TAB
6012 "lsr %A0" CR_TAB
6013 "lsr %A0" CR_TAB
6014 "lsr %A0" CR_TAB
6015 "lsr %A0");
6016
6017 case 13:
6018 if (ldi_ok)
6019 {
6020 *len = 5;
6021 return ("mov %A0,%B0" CR_TAB
6022 "clr %B0" CR_TAB
6023 "swap %A0" CR_TAB
6024 "lsr %A0" CR_TAB
6025 "andi %A0,0x07");
6026 }
6027 if (AVR_HAVE_MUL && scratch)
6028 {
6029 *len = 5;
6030 return ("ldi %3,0x08" CR_TAB
6031 "mul %B0,%3" CR_TAB
6032 "mov %A0,r1" CR_TAB
6033 "clr %B0" CR_TAB
6034 "clr __zero_reg__");
6035 }
6036 if (optimize_size && scratch)
6037 break; /* 5 */
6038 if (scratch)
6039 {
6040 *len = 6;
6041 return ("mov %A0,%B0" CR_TAB
6042 "clr %B0" CR_TAB
6043 "swap %A0" CR_TAB
6044 "lsr %A0" CR_TAB
6045 "ldi %3,0x07" CR_TAB
6046 "and %A0,%3");
6047 }
6048 if (AVR_HAVE_MUL)
6049 {
6050 *len = 6;
6051 return ("set" CR_TAB
6052 "bld r1,3" CR_TAB
6053 "mul %B0,r1" CR_TAB
6054 "mov %A0,r1" CR_TAB
6055 "clr %B0" CR_TAB
6056 "clr __zero_reg__");
6057 }
6058 *len = 7;
6059 return ("mov %A0,%B0" CR_TAB
6060 "clr %B0" CR_TAB
6061 "lsr %A0" CR_TAB
6062 "lsr %A0" CR_TAB
6063 "lsr %A0" CR_TAB
6064 "lsr %A0" CR_TAB
6065 "lsr %A0");
6066
6067 case 14:
6068 if (AVR_HAVE_MUL && ldi_ok)
6069 {
6070 *len = 5;
6071 return ("ldi %A0,0x04" CR_TAB
6072 "mul %B0,%A0" CR_TAB
6073 "mov %A0,r1" CR_TAB
6074 "clr %B0" CR_TAB
6075 "clr __zero_reg__");
6076 }
6077 if (AVR_HAVE_MUL && scratch)
6078 {
6079 *len = 5;
6080 return ("ldi %3,0x04" CR_TAB
6081 "mul %B0,%3" CR_TAB
6082 "mov %A0,r1" CR_TAB
6083 "clr %B0" CR_TAB
6084 "clr __zero_reg__");
6085 }
6086 if (optimize_size && ldi_ok)
6087 {
6088 *len = 5;
6089 return ("mov %A0,%B0" CR_TAB
6090 "ldi %B0,6" "\n1:\t"
6091 "lsr %A0" CR_TAB
6092 "dec %B0" CR_TAB
6093 "brne 1b");
6094 }
6095 if (optimize_size && scratch)
6096 break; /* 5 */
6097 *len = 6;
6098 return ("clr %A0" CR_TAB
6099 "lsl %B0" CR_TAB
6100 "rol %A0" CR_TAB
6101 "lsl %B0" CR_TAB
6102 "rol %A0" CR_TAB
6103 "clr %B0");
6104
6105 case 15:
6106 *len = 4;
6107 return ("clr %A0" CR_TAB
6108 "lsl %B0" CR_TAB
6109 "rol %A0" CR_TAB
6110 "clr %B0");
6111 }
6112 len = t;
6113 }
6114 out_shift_with_cnt ("lsr %B0" CR_TAB
6115 "ror %A0", insn, operands, len, 2);
6116 return "";
6117 }
6118
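/* Illustrative sketch, not part of the original source: the carry-rotate
   idiom for x >> 15 (case 15 above).  LSL pushes the MSB into the carry
   flag and ROL rotates it into bit 0 of a cleared register, so the whole
   16-bit logical shift collapses to four single-word instructions.  */

#include <stdint.h>

static uint16_t
lshr15 (uint16_t x)
{
  uint8_t a = 0;                        /* clr %A0 */
  uint8_t carry = (uint8_t) (x >> 15);  /* lsl %B0: MSB -> C */
  a = (uint8_t) ((a << 1) | carry);     /* rol %A0: C -> bit 0 */
  return a;                             /* clr %B0: high byte 0 */
}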
6119
6120 /* 24-bit logic shift right */
6121
6122 const char*
6123 avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6124 {
6125 int dest = REGNO (op[0]);
6126 int src = REGNO (op[1]);
6127
6128 if (CONST_INT_P (op[2]))
6129 {
6130 if (plen)
6131 *plen = 0;
6132
6133 switch (INTVAL (op[2]))
6134 {
6135 case 8:
6136 if (dest <= src)
6137 return avr_asm_len ("mov %A0,%B1" CR_TAB
6138 "mov %B0,%C1" CR_TAB
6139 "clr %C0", op, plen, 3);
6140 else
6141 return avr_asm_len ("clr %C0" CR_TAB
6142 "mov %B0,%C1" CR_TAB
6143 "mov %A0,%B1", op, plen, 3);
6144
6145 case 16:
6146 if (dest != src + 2)
6147 avr_asm_len ("mov %A0,%C1", op, plen, 1);
6148
6149 return avr_asm_len ("clr %B0" CR_TAB
6150 "clr %C0", op, plen, 2);
6151
6152 default:
6153 if (INTVAL (op[2]) < 24)
6154 break;
6155
6156 /* fall through */
6157
6158 case 23:
6159 return avr_asm_len ("clr %A0" CR_TAB
6160 "sbrc %C0,7" CR_TAB
6161 "inc %A0" CR_TAB
6162 "clr %B0" CR_TAB
6163 "clr %C0", op, plen, 5);
6164 } /* switch */
6165 }
6166
6167 out_shift_with_cnt ("lsr %C0" CR_TAB
6168 "ror %B0" CR_TAB
6169 "ror %A0", insn, op, plen, 3);
6170 return "";
6171 }
6172
6173
6174 /* 32-bit logic shift right ((unsigned long)x >> i) */
6175
6176 const char *
6177 lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
6178 {
6179 if (GET_CODE (operands[2]) == CONST_INT)
6180 {
6181 int k;
6182 int *t = len;
6183
6184 if (!len)
6185 len = &k;
6186
6187 switch (INTVAL (operands[2]))
6188 {
6189 default:
6190 if (INTVAL (operands[2]) < 32)
6191 break;
6192
6193 if (AVR_HAVE_MOVW)
6194 return *len = 3, ("clr %D0" CR_TAB
6195 "clr %C0" CR_TAB
6196 "movw %A0,%C0");
6197 *len = 4;
6198 return ("clr %D0" CR_TAB
6199 "clr %C0" CR_TAB
6200 "clr %B0" CR_TAB
6201 "clr %A0");
6202
6203 case 8:
6204 {
6205 int reg0 = true_regnum (operands[0]);
6206 int reg1 = true_regnum (operands[1]);
6207 *len = 4;
6208 if (reg0 <= reg1)
6209 return ("mov %A0,%B1" CR_TAB
6210 "mov %B0,%C1" CR_TAB
6211 "mov %C0,%D1" CR_TAB
6212 "clr %D0");
6213 else
6214 return ("clr %D0" CR_TAB
6215 "mov %C0,%D1" CR_TAB
6216 "mov %B0,%C1" CR_TAB
6217 "mov %A0,%B1");
6218 }
6219
6220 case 16:
6221 {
6222 int reg0 = true_regnum (operands[0]);
6223 int reg1 = true_regnum (operands[1]);
6224
6225 if (reg0 == reg1 + 2)
6226 return *len = 2, ("clr %C0" CR_TAB
6227 "clr %D0");
6228 if (AVR_HAVE_MOVW)
6229 return *len = 3, ("movw %A0,%C1" CR_TAB
6230 "clr %C0" CR_TAB
6231 "clr %D0");
6232 else
6233 return *len = 4, ("mov %B0,%D1" CR_TAB
6234 "mov %A0,%C1" CR_TAB
6235 "clr %C0" CR_TAB
6236 "clr %D0");
6237 }
6238
6239 case 24:
6240 return *len = 4, ("mov %A0,%D1" CR_TAB
6241 "clr %B0" CR_TAB
6242 "clr %C0" CR_TAB
6243 "clr %D0");
6244
6245 case 31:
6246 *len = 6;
6247 return ("clr %A0" CR_TAB
6248 "sbrc %D0,7" CR_TAB
6249 "inc %A0" CR_TAB
6250 "clr %B0" CR_TAB
6251 "clr %C0" CR_TAB
6252 "clr %D0");
6253 }
6254 len = t;
6255 }
6256 out_shift_with_cnt ("lsr %D0" CR_TAB
6257 "ror %C0" CR_TAB
6258 "ror %B0" CR_TAB
6259 "ror %A0", insn, operands, len, 4);
6260 return "";
6261 }
6262
6263
6264 /* Output addition of register XOP[0] and compile time constant XOP[2].
6265 CODE == PLUS: perform addition by using ADD instructions or
6266 CODE == MINUS: perform addition by using SUB instructions:
6267
6268 XOP[0] = XOP[0] + XOP[2]
6269
6270 Or perform addition/subtraction with register XOP[2] depending on CODE:
6271
6272 XOP[0] = XOP[0] +/- XOP[2]
6273
6274 If PLEN == NULL, print assembler instructions to perform the operation;
6275 otherwise, set *PLEN to the length of the instruction sequence (in words)
6276 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6277 Set *PCC to the effect on cc0 according to the respective CC_* insn attribute.
6278
6279 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6280 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6281 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6282 the subtrahend in the original insn, provided it is a compile time constant.
6283 In all other cases, SIGN is 0.
6284
6285 If OUT_LABEL is true, print the final 0: label which is needed for
6286 saturated addition / subtraction. The only case where OUT_LABEL = false
6287 is useful is for saturated addition / subtraction performed during
6288 fixed-point rounding, cf. `avr_out_round'. */
6289
6290 static void
6291 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
6292 enum rtx_code code_sat, int sign, bool out_label)
6293 {
6294 /* MODE of the operation. */
6295 enum machine_mode mode = GET_MODE (xop[0]);
6296
6297 /* INT_MODE of the same size. */
6298 enum machine_mode imode = int_mode_for_mode (mode);
6299
6300 /* Number of bytes to operate on. */
6301 int i, n_bytes = GET_MODE_SIZE (mode);
6302
6303 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6304 int clobber_val = -1;
6305
6306 /* op[0]: 8-bit destination register
6307 op[1]: 8-bit const int
6308 op[2]: 8-bit scratch register */
6309 rtx op[3];
6310
6311 /* Started the operation? Before starting the operation we may skip
6312 adding 0. This is no longer true once the operation has started because
6313 the carry must be taken into account.
6314 bool started = false;
6315
6316 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
6317 rtx xval = xop[2];
6318
6319 /* Output a BRVC instruction. Only needed with saturation. */
6320 bool out_brvc = true;
6321
6322 if (plen)
6323 *plen = 0;
6324
6325 if (REG_P (xop[2]))
6326 {
6327 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;
6328
6329 for (i = 0; i < n_bytes; i++)
6330 {
6331 /* We operate byte-wise on the destination. */
6332 op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
6333 op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
6334
6335 if (i == 0)
6336 avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
6337 op, plen, 1);
6338 else
6339 avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
6340 op, plen, 1);
6341 }
6342
6343 if (reg_overlap_mentioned_p (xop[0], xop[2]))
6344 {
6345 gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
6346
6347 if (MINUS == code)
6348 return;
6349 }
6350
6351 goto saturate;
6352 }
6353
6354 /* Except in the case of ADIW with 16-bit register (see below)
6355 addition does not set cc0 in a usable way. */
6356
6357 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
6358
6359 if (CONST_FIXED_P (xval))
6360 xval = avr_to_int_mode (xval);
6361
6362 /* Adding/Subtracting zero is a no-op. */
6363
6364 if (xval == const0_rtx)
6365 {
6366 *pcc = CC_NONE;
6367 return;
6368 }
6369
6370 if (MINUS == code)
6371 xval = simplify_unary_operation (NEG, imode, xval, imode);
6372
6373 op[2] = xop[3];
6374
6375 if (SS_PLUS == code_sat && MINUS == code
6376 && sign < 0
6377 && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
6378 & GET_MODE_MASK (QImode)))
6379 {
6380 /* We compute x + 0x80 by means of SUB instructions. We negated the
6381 constant subtrahend above and are left with x - (-128) so that we
6382 need something like SUBI r,128 which does not exist because SUBI sets
6383 V according to the sign of the subtrahend. Notice the only case
6384 where this must be done is when NEG overflowed in case [2s] because
6385 the V computation needs the right sign of the subtrahend. */
6386
6387 rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
6388
6389 avr_asm_len ("subi %0,128" CR_TAB
6390 "brmi 0f", &msb, plen, 2);
6391 out_brvc = false;
6392
6393 goto saturate;
6394 }
6395
6396 for (i = 0; i < n_bytes; i++)
6397 {
6398 /* We operate byte-wise on the destination. */
6399 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6400 rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
6401
6402 /* 8-bit value to operate with this byte. */
6403 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6404
6405 /* Registers R16..R31 can operate with immediate. */
6406 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6407
6408 op[0] = reg8;
6409 op[1] = gen_int_mode (val8, QImode);
6410
6411 /* To get usable cc0 no low-bytes must have been skipped. */
6412
6413 if (i && !started)
6414 *pcc = CC_CLOBBER;
6415
6416 if (!started
6417 && i % 2 == 0
6418 && i + 2 <= n_bytes
6419 && test_hard_reg_class (ADDW_REGS, reg8))
6420 {
6421 rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
6422 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
6423
6424 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
6425 i.e. operate word-wise. */
6426
6427 if (val16 < 64)
6428 {
6429 if (val16 != 0)
6430 {
6431 started = true;
6432 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
6433 op, plen, 1);
6434
6435 if (n_bytes == 2 && PLUS == code)
6436 *pcc = CC_SET_CZN;
6437 }
6438
6439 i++;
6440 continue;
6441 }
6442 }
6443
6444 if (val8 == 0)
6445 {
6446 if (started)
6447 avr_asm_len (code == PLUS
6448 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6449 op, plen, 1);
6450 continue;
6451 }
6452 else if ((val8 == 1 || val8 == 0xff)
6453 && UNKNOWN == code_sat
6454 && !started
6455 && i == n_bytes - 1)
6456 {
6457 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
6458 op, plen, 1);
6459 *pcc = CC_CLOBBER;
6460 break;
6461 }
6462
6463 switch (code)
6464 {
6465 case PLUS:
6466
6467 gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
6468
6469 if (plen != NULL && UNKNOWN != code_sat)
6470 {
6471 /* This belongs to the x + 0x80 corner case. The code with
6472 ADD instruction is not smaller, thus make this case
6473 expensive so that the caller won't pick it. */
6474
6475 *plen += 10;
6476 break;
6477 }
6478
6479 if (clobber_val != (int) val8)
6480 avr_asm_len ("ldi %2,%1", op, plen, 1);
6481 clobber_val = (int) val8;
6482
6483 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
6484
6485 break; /* PLUS */
6486
6487 case MINUS:
6488
6489 if (ld_reg_p)
6490 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
6491 else
6492 {
6493 gcc_assert (plen != NULL || REG_P (op[2]));
6494
6495 if (clobber_val != (int) val8)
6496 avr_asm_len ("ldi %2,%1", op, plen, 1);
6497 clobber_val = (int) val8;
6498
6499 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6500 }
6501
6502 break; /* MINUS */
6503
6504 default:
6505 /* Unknown code */
6506 gcc_unreachable();
6507 }
6508
6509 started = true;
6510
6511 } /* for all sub-bytes */
6512
6513 saturate:
6514
6515 if (UNKNOWN == code_sat)
6516 return;
6517
6518 *pcc = (int) CC_CLOBBER;
6519
6520 /* Vanilla addition/subtraction is done. We are left with saturation.
6521
6522 We have to compute A = A <op> B where A is a register and
6523 B is a register or a non-zero compile time constant CONST.
6524 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
6525 B stands for the original operand $2 in INSN. In the case of B = CONST,
6526 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
6527
6528 CODE is the instruction flavor we use in the asm sequence to perform <op>.
6529
6530
6531 unsigned
6532 operation | code | sat if | b is | sat value | case
6533 -----------------+-------+----------+--------------+-----------+-------
6534 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
6535 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
6536 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
6537 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
6538
6539
6540 signed
6541 operation | code | sat if | b is | sat value | case
6542 -----------------+-------+----------+--------------+-----------+-------
6543 + as a + b | add | V == 1 | const, reg | s+ | [1s]
6544 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
6545 - as a - b | sub | V == 1 | const, reg | s- | [3s]
6546 - as a + (-b) | add | V == 1 | const | s- | [4s]
6547
6548 s+ = b < 0 ? -0x80 : 0x7f
6549 s- = b < 0 ? 0x7f : -0x80
6550
6551 The cases a - b actually perform a - (-(-b)) if B is CONST.
6552 */
6553
6554 op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
6555 op[1] = n_bytes > 1
6556 ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
6557 : NULL_RTX;
6558
6559 bool need_copy = true;
6560 int len_call = 1 + AVR_HAVE_JMP_CALL;
6561
6562 switch (code_sat)
6563 {
6564 default:
6565 gcc_unreachable();
6566
6567 case SS_PLUS:
6568 case SS_MINUS:
6569
6570 if (out_brvc)
6571 avr_asm_len ("brvc 0f", op, plen, 1);
6572
6573 if (reg_overlap_mentioned_p (xop[0], xop[2]))
6574 {
6575 /* [1s,reg] */
6576
6577 if (n_bytes == 1)
6578 avr_asm_len ("ldi %0,0x7f" CR_TAB
6579 "adc %0,__zero_reg__", op, plen, 2);
6580 else
6581 avr_asm_len ("ldi %0,0x7f" CR_TAB
6582 "ldi %1,0xff" CR_TAB
6583 "adc %1,__zero_reg__" CR_TAB
6584 "adc %0,__zero_reg__", op, plen, 4);
6585 }
6586 else if (sign == 0 && PLUS == code)
6587 {
6588 /* [1s,reg] */
6589
6590 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
6591
6592 if (n_bytes == 1)
6593 avr_asm_len ("ldi %0,0x80" CR_TAB
6594 "sbrs %2,7" CR_TAB
6595 "dec %0", op, plen, 3);
6596 else
6597 avr_asm_len ("ldi %0,0x80" CR_TAB
6598 "cp %2,%0" CR_TAB
6599 "sbc %1,%1" CR_TAB
6600 "sbci %0,0", op, plen, 4);
6601 }
6602 else if (sign == 0 && MINUS == code)
6603 {
6604 /* [3s,reg] */
6605
6606 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
6607
6608 if (n_bytes == 1)
6609 avr_asm_len ("ldi %0,0x7f" CR_TAB
6610 "sbrs %2,7" CR_TAB
6611 "inc %0", op, plen, 3);
6612 else
6613 avr_asm_len ("ldi %0,0x7f" CR_TAB
6614 "cp %0,%2" CR_TAB
6615 "sbc %1,%1" CR_TAB
6616 "sbci %0,-1", op, plen, 4);
6617 }
6618 else if ((sign < 0) ^ (SS_MINUS == code_sat))
6619 {
6620 /* [1s,const,B < 0] [2s,B < 0] */
6621 /* [3s,const,B > 0] [4s,B > 0] */
6622
6623 if (n_bytes == 8)
6624 {
6625 avr_asm_len ("%~call __clr_8", op, plen, len_call);
6626 need_copy = false;
6627 }
6628
6629 avr_asm_len ("ldi %0,0x80", op, plen, 1);
6630 if (n_bytes > 1 && need_copy)
6631 avr_asm_len ("clr %1", op, plen, 1);
6632 }
6633 else if ((sign > 0) ^ (SS_MINUS == code_sat))
6634 {
6635 /* [1s,const,B > 0] [2s,B > 0] */
6636 /* [3s,const,B < 0] [4s,B < 0] */
6637
6638 if (n_bytes == 8)
6639 {
6640 avr_asm_len ("sec" CR_TAB
6641 "%~call __sbc_8", op, plen, 1 + len_call);
6642 need_copy = false;
6643 }
6644
6645 avr_asm_len ("ldi %0,0x7f", op, plen, 1);
6646 if (n_bytes > 1 && need_copy)
6647 avr_asm_len ("ldi %1,0xff", op, plen, 1);
6648 }
6649 else
6650 gcc_unreachable();
6651
6652 break;
6653
6654 case US_PLUS:
6655 /* [1u] : [2u] */
6656
6657 avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
6658
6659 if (n_bytes == 8)
6660 {
6661 if (MINUS == code)
6662 avr_asm_len ("sec", op, plen, 1);
6663 avr_asm_len ("%~call __sbc_8", op, plen, len_call);
6664
6665 need_copy = false;
6666 }
6667 else
6668 {
6669 if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
6670 avr_asm_len ("sec" CR_TAB "sbc %0,%0", op, plen, 2);
6671 else
6672 avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
6673 op, plen, 1);
6674 }
6675 break; /* US_PLUS */
6676
6677 case US_MINUS:
6678 /* [4u] : [3u] */
6679
6680 avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
6681
6682 if (n_bytes == 8)
6683 {
6684 avr_asm_len ("%~call __clr_8", op, plen, len_call);
6685 need_copy = false;
6686 }
6687 else
6688 avr_asm_len ("clr %0", op, plen, 1);
6689
6690 break;
6691 }
6692
6693 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
6694 Now copy the right value to the LSBs. */
6695
6696 if (need_copy && n_bytes > 1)
6697 {
6698 if (US_MINUS == code_sat || US_PLUS == code_sat)
6699 {
6700 avr_asm_len ("mov %1,%0", op, plen, 1);
6701
6702 if (n_bytes > 2)
6703 {
6704 op[0] = xop[0];
6705 if (AVR_HAVE_MOVW)
6706 avr_asm_len ("movw %0,%1", op, plen, 1);
6707 else
6708 avr_asm_len ("mov %A0,%1" CR_TAB
6709 "mov %B0,%1", op, plen, 2);
6710 }
6711 }
6712 else if (n_bytes > 2)
6713 {
6714 op[0] = xop[0];
6715 avr_asm_len ("mov %A0,%1" CR_TAB
6716 "mov %B0,%1", op, plen, 2);
6717 }
6718 }
6719
6720 if (need_copy && n_bytes == 8)
6721 {
6722 if (AVR_HAVE_MOVW)
6723 avr_asm_len ("movw %r0+2,%0" CR_TAB
6724 "movw %r0+4,%0", xop, plen, 2);
6725 else
6726 avr_asm_len ("mov %r0+2,%0" CR_TAB
6727 "mov %r0+3,%0" CR_TAB
6728 "mov %r0+4,%0" CR_TAB
6729 "mov %r0+5,%0", xop, plen, 4);
6730 }
6731
6732 if (out_label)
6733 avr_asm_len ("0:", op, plen, 0);
6734 }
6735
6736
6737 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
6738 is not a compile-time constant:
6739
6740 XOP[0] = XOP[0] +/- XOP[2]
6741
6742 This is a helper for the function below. The only insns that need this
6743 are additions/subtractions for pointer modes, i.e. HImode and PSImode. */
6744
6745 static const char*
6746 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
6747 {
6748 enum machine_mode mode = GET_MODE (xop[0]);
6749
6750 /* Only pointer modes want to add symbols. */
6751
6752 gcc_assert (mode == HImode || mode == PSImode);
6753
6754 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6755
6756 avr_asm_len (PLUS == code
6757 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
6758 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
6759 xop, plen, -2);
6760
6761 if (PSImode == mode)
6762 avr_asm_len (PLUS == code
6763 ? "sbci %C0,hlo8(-(%2))"
6764 : "sbci %C0,hlo8(%2)", xop, plen, 1);
6765 return "";
6766 }
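/* A sketch of the output (illustrative): for HImode and code == PLUS
   with a symbolic XOP[2] "sym", the sequence above reads

       subi %A0,lo8(-(sym))
       sbci %B0,hi8(-(sym))

   i.e. a + sym is rendered as a - (-sym), because AVR provides SUBI/SBCI
   with an immediate operand but no matching ADDI/ADCI. */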
6767
6768
6769 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6770
6771 INSN is a single_set insn or an insn pattern with a binary operation as
6772 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6773
6774 XOP are the operands of INSN. In the case of 64-bit operations with
6775 a constant, XOP[] has just one element: the summand/subtrahend in XOP[0].
6776 The non-saturating insns up to 32 bits may or may not supply a "d" class
6777 scratch as XOP[3].
6778
6779 If PLEN == NULL output the instructions.
6780 If PLEN != NULL set *PLEN to the length of the sequence in words.
6781
6782 PCC is a pointer to store the instructions' effect on cc0.
6783 PCC may be NULL.
6784
6785 PLEN and PCC default to NULL.
6786
6787 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
6788
6789 Return "" */
6790
6791 const char*
6792 avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
6793 {
6794 int cc_plus, cc_minus, cc_dummy;
6795 int len_plus, len_minus;
6796 rtx op[4];
6797 rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
6798 rtx xdest = SET_DEST (xpattern);
6799 enum machine_mode mode = GET_MODE (xdest);
6800 enum machine_mode imode = int_mode_for_mode (mode);
6801 int n_bytes = GET_MODE_SIZE (mode);
6802 enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
6803 enum rtx_code code
6804 = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
6805 ? PLUS : MINUS);
6806
6807 if (!pcc)
6808 pcc = &cc_dummy;
6809
6810 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
6811
6812 if (PLUS == code_sat || MINUS == code_sat)
6813 code_sat = UNKNOWN;
6814
6815 if (n_bytes <= 4 && REG_P (xop[2]))
6816 {
6817 avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
6818 return "";
6819 }
6820
6821 if (8 == n_bytes)
6822 {
6823 op[0] = gen_rtx_REG (DImode, ACC_A);
6824 op[1] = gen_rtx_REG (DImode, ACC_A);
6825 op[2] = avr_to_int_mode (xop[0]);
6826 }
6827 else
6828 {
6829 if (!REG_P (xop[2])
6830 && !CONST_INT_P (xop[2])
6831 && !CONST_FIXED_P (xop[2]))
6832 {
6833 return avr_out_plus_symbol (xop, code, plen, pcc);
6834 }
6835
6836 op[0] = avr_to_int_mode (xop[0]);
6837 op[1] = avr_to_int_mode (xop[1]);
6838 op[2] = avr_to_int_mode (xop[2]);
6839 }
6840
6841 /* Saturations and 64-bit operations don't have a clobber operand.
6842 For the other cases, the caller will provide a proper XOP[3]. */
6843
6844 xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
6845 op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;
6846
6847 /* Saturation will need the sign of the original operand. */
6848
6849 rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
6850 int sign = INTVAL (xmsb) < 0 ? -1 : 1;
6851
6852 /* If we subtract and the subtrahend is a constant, then negate it
6853 so that avr_out_plus_1 can be used. */
6854
6855 if (MINUS == code)
6856 op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
6857
6858 /* Work out the shortest sequence. */
6859
6860 avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
6861 avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);
6862
6863 if (plen)
6864 {
6865 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6866 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6867 }
6868 else if (len_minus <= len_plus)
6869 avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
6870 else
6871 avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);
6872
6873 return "";
6874 }
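/* Illustrative example (the exact choice depends on avr_out_plus_1
   earlier in this file): for HImode r24 += 0xffff, the PLUS rendering
   costs two words

       subi r24,lo8(-0xffff)
       sbci r25,hi8(-0xffff)

   while the MINUS rendering of the negated constant is the single word

       sbiw r24,1

   so the length comparison above selects the MINUS variant. */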
6875
6876
6877 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6878 time constant XOP[2]:
6879
6880 XOP[0] = XOP[0] <op> XOP[2]
6881
6882 and return "". If PLEN == NULL, print assembler instructions to perform the
6883 operation; otherwise, set *PLEN to the length of the instruction sequence
6884 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6885 register or SCRATCH if no clobber register is needed for the operation.
6886 INSN is an INSN_P or a pattern of an insn. */
6887
6888 const char*
6889 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6890 {
6891 /* CODE and MODE of the operation. */
6892 rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
6893 enum rtx_code code = GET_CODE (SET_SRC (xpattern));
6894 enum machine_mode mode = GET_MODE (xop[0]);
6895
6896 /* Number of bytes to operate on. */
6897 int i, n_bytes = GET_MODE_SIZE (mode);
6898
6899 /* Value of T-flag (0 or 1) or -1 if unknown. */
6900 int set_t = -1;
6901
6902 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6903 int clobber_val = -1;
6904
6905 /* op[0]: 8-bit destination register
6906 op[1]: 8-bit const int
6907 op[2]: 8-bit clobber register or SCRATCH
6908 op[3]: 8-bit register containing 0xff or NULL_RTX */
6909 rtx op[4];
6910
6911 op[2] = xop[3];
6912 op[3] = NULL_RTX;
6913
6914 if (plen)
6915 *plen = 0;
6916
6917 for (i = 0; i < n_bytes; i++)
6918 {
6919 /* We operate byte-wise on the destination. */
6920 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6921 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6922
6923 /* 8-bit value to operate with this byte. */
6924 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6925
6926 /* Number of bits set in the current byte of the constant. */
6927 int pop8 = avr_popcount (val8);
6928
6929 /* Registers R16..R31 can operate with immediate. */
6930 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6931
6932 op[0] = reg8;
6933 op[1] = GEN_INT (val8);
6934
6935 switch (code)
6936 {
6937 case IOR:
6938
6939 if (0 == pop8)
6940 continue;
6941 else if (ld_reg_p)
6942 avr_asm_len ("ori %0,%1", op, plen, 1);
6943 else if (1 == pop8)
6944 {
6945 if (set_t != 1)
6946 avr_asm_len ("set", op, plen, 1);
6947 set_t = 1;
6948
6949 op[1] = GEN_INT (exact_log2 (val8));
6950 avr_asm_len ("bld %0,%1", op, plen, 1);
6951 }
6952 else if (8 == pop8)
6953 {
6954 if (op[3] != NULL_RTX)
6955 avr_asm_len ("mov %0,%3", op, plen, 1);
6956 else
6957 avr_asm_len ("clr %0" CR_TAB
6958 "dec %0", op, plen, 2);
6959
6960 op[3] = op[0];
6961 }
6962 else
6963 {
6964 if (clobber_val != (int) val8)
6965 avr_asm_len ("ldi %2,%1", op, plen, 1);
6966 clobber_val = (int) val8;
6967
6968 avr_asm_len ("or %0,%2", op, plen, 1);
6969 }
6970
6971 continue; /* IOR */
6972
6973 case AND:
6974
6975 if (8 == pop8)
6976 continue;
6977 else if (0 == pop8)
6978 avr_asm_len ("clr %0", op, plen, 1);
6979 else if (ld_reg_p)
6980 avr_asm_len ("andi %0,%1", op, plen, 1);
6981 else if (7 == pop8)
6982 {
6983 if (set_t != 0)
6984 avr_asm_len ("clt", op, plen, 1);
6985 set_t = 0;
6986
6987 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6988 avr_asm_len ("bld %0,%1", op, plen, 1);
6989 }
6990 else
6991 {
6992 if (clobber_val != (int) val8)
6993 avr_asm_len ("ldi %2,%1", op, plen, 1);
6994 clobber_val = (int) val8;
6995
6996 avr_asm_len ("and %0,%2", op, plen, 1);
6997 }
6998
6999 continue; /* AND */
7000
7001 case XOR:
7002
7003 if (0 == pop8)
7004 continue;
7005 else if (8 == pop8)
7006 avr_asm_len ("com %0", op, plen, 1);
7007 else if (ld_reg_p && val8 == (1 << 7))
7008 avr_asm_len ("subi %0,%1", op, plen, 1);
7009 else
7010 {
7011 if (clobber_val != (int) val8)
7012 avr_asm_len ("ldi %2,%1", op, plen, 1);
7013 clobber_val = (int) val8;
7014
7015 avr_asm_len ("eor %0,%2", op, plen, 1);
7016 }
7017
7018 continue; /* XOR */
7019
7020 default:
7021 /* Unknown rtx_code */
7022 gcc_unreachable();
7023 }
7024 } /* for all sub-bytes */
7025
7026 return "";
7027 }
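/* Illustrative example (hypothetical register assignment): an IOR of an
   SImode value held in r2..r5 (not LD_REGS) with the constant 0x00ff0001
   comes out as

       set              ; pop8 == 1: route the single bit through T
       bld r2,0
       clr r4           ; pop8 == 8: build 0xff in place
       dec r4

   and the two all-zero constant bytes are skipped entirely. */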
7028
7029
7030 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7031 PLEN != NULL: Set *PLEN to the length of that sequence.
7032 Return "". */
7033
7034 const char*
7035 avr_out_addto_sp (rtx *op, int *plen)
7036 {
7037 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
7038 int addend = INTVAL (op[0]);
7039
7040 if (plen)
7041 *plen = 0;
7042
7043 if (addend < 0)
7044 {
7045 if (flag_verbose_asm || flag_print_asm_name)
7046 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7047
7048 while (addend <= -pc_len)
7049 {
7050 addend += pc_len;
7051 avr_asm_len ("rcall .", op, plen, 1);
7052 }
7053
7054 while (addend++ < 0)
7055 avr_asm_len ("push __zero_reg__", op, plen, 1);
7056 }
7057 else if (addend > 0)
7058 {
7059 if (flag_verbose_asm || flag_print_asm_name)
7060 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7061
7062 while (addend-- > 0)
7063 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7064 }
7065
7066 return "";
7067 }
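/* Illustrative example: on a device with a 2-byte PC, SP -= 5 comes out as

       rcall .              ; push a 2-byte return address: SP -= 2
       rcall .              ; SP -= 2
       push __zero_reg__    ; SP -= 1

   whereas SP += 3 simply pops three bytes into __tmp_reg__. */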
7068
7069
7070 /* Outputs instructions needed for fixed point type conversion.
7071 This includes converting between any fixed point type, as well
7072 as converting to any integer type. Conversion between integer
7073 types is not supported.
7074
7075 Converting signed fractional types requires a bit shift if converting
7076 to or from any unsigned fractional type because the decimal place is
7077 shifted by 1 bit. When the destination is a signed fractional, the sign
7078 is stored in either the carry or T bit. */
7079
7080 const char*
7081 avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
7082 {
7083 size_t i;
7084 rtx xop[6];
7085 RTX_CODE shift = UNKNOWN;
7086 bool sign_in_carry = false;
7087 bool msb_in_carry = false;
7088 bool lsb_in_tmp_reg = false;
7089 bool lsb_in_carry = false;
7090 bool frac_rounded = false;
7091 const char *code_ashift = "lsl %0";
7092
7093
7094 #define MAY_CLOBBER(RR) \
7095 /* Shorthand used below. */ \
7096 ((sign_bytes \
7097 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
7098 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
7099 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7100 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7101
7102 struct
7103 {
7104 /* bytes : Length of operand in bytes.
7105 ibyte : Length of integral part in bytes.
7106 fbyte, fbit : Length of fractional part in bytes, bits. */
7107
7108 bool sbit;
7109 unsigned fbit, bytes, ibyte, fbyte;
7110 unsigned regno, regno_msb;
7111 } dest, src, *val[2] = { &dest, &src };
7112
7113 if (plen)
7114 *plen = 0;
7115
7116 /* Step 0: Determine information on source and destination operand we
7117 ====== will need in the remainder. */
7118
7119 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7120 {
7121 enum machine_mode mode;
7122
7123 xop[i] = operands[i];
7124
7125 mode = GET_MODE (xop[i]);
7126
7127 val[i]->bytes = GET_MODE_SIZE (mode);
7128 val[i]->regno = REGNO (xop[i]);
7129 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7130
7131 if (SCALAR_INT_MODE_P (mode))
7132 {
7133 val[i]->sbit = intsigned;
7134 val[i]->fbit = 0;
7135 }
7136 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7137 {
7138 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7139 val[i]->fbit = GET_MODE_FBIT (mode);
7140 }
7141 else
7142 fatal_insn ("unsupported fixed-point conversion", insn);
7143
7144 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7145 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7146 }
7147
7148 // Byte offset of the decimal point taking into account different place
7149 // of the decimal point in input and output and different register numbers
7150 // of input and output.
7151 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7152
7153 // Number of destination bytes that will come from sign / zero extension.
7154 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7155
7156 // Number of bytes at the low end to be filled with zeros.
7157 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7158
7159 // Do we have a 16-bit register that is cleared?
7160 rtx clrw = NULL_RTX;
7161
7162 bool sign_extend = src.sbit && sign_bytes;
7163
7164 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
7165 shift = ASHIFT;
7166 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
7167 shift = ASHIFTRT;
7168 else if (dest.fbit % 8 == src.fbit % 8)
7169 shift = UNKNOWN;
7170 else
7171 gcc_unreachable();
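/* Example (illustrative): converting signed QQmode (s.7, fbit == 7) to
   unsigned UQQmode (.8, fbit == 8) takes the ASHIFT case and shifts the
   payload left by one bit; the reverse conversion takes the ASHIFTRT
   case. Conversions that preserve fbit % 8 need no shift at all. */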
7172
7173 /* If we need to round the fraction part, we might need to save/round it
7174 before clobbering any of it in Step 1. Also, we might want to do
7175 the rounding now to make use of LD_REGS. */
7176 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7177 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7178 && !TARGET_FRACT_CONV_TRUNC)
7179 {
7180 bool overlap
7181 = (src.regno <=
7182 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
7183 && dest.regno - offset -1 >= dest.regno);
7184 unsigned s0 = dest.regno - offset -1;
7185 bool use_src = true;
7186 unsigned sn;
7187 unsigned copied_msb = src.regno_msb;
7188 bool have_carry = false;
7189
7190 if (src.ibyte > dest.ibyte)
7191 copied_msb -= src.ibyte - dest.ibyte;
7192
7193 for (sn = s0; sn <= copied_msb; sn++)
7194 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
7195 && !reg_unused_after (insn, all_regs_rtx[sn]))
7196 use_src = false;
7197 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
7198 {
7199 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7200 &all_regs_rtx[src.regno_msb], plen, 2);
7201 sn = src.regno;
7202 if (sn < s0)
7203 {
7204 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
7205 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
7206 else
7207 avr_asm_len ("sec" CR_TAB "cpc %0,__zero_reg__",
7208 &all_regs_rtx[sn], plen, 2);
7209 have_carry = true;
7210 }
7211 while (++sn < s0)
7212 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7213 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
7214 &all_regs_rtx[s0], plen, 1);
7215 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7216 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
7217 avr_asm_len ("\n0:", NULL, plen, 0);
7218 frac_rounded = true;
7219 }
7220 else if (use_src && overlap)
7221 {
7222 avr_asm_len ("clr __tmp_reg__" CR_TAB
7223 "sbrc %1,0" CR_TAB "dec __tmp_reg__", xop, plen, 1);
7224 sn = src.regno;
7225 if (sn < s0)
7226 {
7227 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7228 have_carry = true;
7229 }
7230 while (++sn < s0)
7231 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
7232 if (have_carry)
7233 avr_asm_len ("clt" CR_TAB "bld __tmp_reg__,7" CR_TAB
7234 "adc %0,__tmp_reg__",
7235 &all_regs_rtx[s0], plen, 3);
7236 else
7237 avr_asm_len ("lsr __tmp_reg__" CR_TAB "add %0,__tmp_reg__",
7238 &all_regs_rtx[s0], plen, 2);
7239 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
7240 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7241 frac_rounded = true;
7242 }
7243 else if (overlap)
7244 {
7245 bool use_src
7246 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
7247 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
7248 || reg_unused_after (insn, all_regs_rtx[s0])));
7249 xop[2] = all_regs_rtx[s0];
7250 unsigned sn = src.regno;
7251 if (!use_src || sn == s0)
7252 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7253 /* We need to consider to-be-discarded bits
7254 if the value is negative. */
7255 if (sn < s0)
7256 {
7257 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7258 &all_regs_rtx[src.regno_msb], plen, 2);
7259 /* Test to-be-discarded bytes for any nonzero bits.
7260 ??? Could use OR or SBIW to test two registers at once. */
7261 if (sn < s0)
7262 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7263 while (++sn < s0)
7264 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
7265 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
7266 if (use_src)
7267 avr_asm_len ("breq 0f" CR_TAB
7268 "ori %2,1" "\n0:\t" "mov __tmp_reg__,%2",
7269 xop, plen, 3);
7270 else
7271 avr_asm_len ("breq 0f" CR_TAB
7272 "set" CR_TAB "bld __tmp_reg__,0\n0:",
7273 xop, plen, 3);
7274 }
7275 lsb_in_tmp_reg = true;
7276 }
7277 }
7278
7279 /* Step 1: Clear bytes at the low end and copy payload bits from source
7280 ====== to destination. */
7281
7282 int step = offset < 0 ? 1 : -1;
7283 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
7284
7285 // We cleared at least that number of registers.
7286 int clr_n = 0;
7287
7288 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
7289 {
7290 // Next regno of destination is needed for MOVW
7291 unsigned d1 = d0 + step;
7292
7293 // Current and next regno of source
7294 signed s0 = d0 - offset;
7295 signed s1 = s0 + step;
7296
7297 // Must current resp. next regno be CLRed? This applies to the low
7298 // bytes of the destination that have no associated source bytes.
7299 bool clr0 = s0 < (signed) src.regno;
7300 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
7301
7302 // First gather what code to emit (if any) and additional step to
7303 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
7304 // is the source rtx for the current loop iteration.
7305 const char *code = NULL;
7306 int stepw = 0;
7307
7308 if (clr0)
7309 {
7310 if (AVR_HAVE_MOVW && clr1 && clrw)
7311 {
7312 xop[2] = all_regs_rtx[d0 & ~1];
7313 xop[3] = clrw;
7314 code = "movw %2,%3";
7315 stepw = step;
7316 }
7317 else
7318 {
7319 xop[2] = all_regs_rtx[d0];
7320 code = "clr %2";
7321
7322 if (++clr_n >= 2
7323 && !clrw
7324 && d0 % 2 == (step > 0))
7325 {
7326 clrw = all_regs_rtx[d0 & ~1];
7327 }
7328 }
7329 }
7330 else if (offset && s0 <= (signed) src.regno_msb)
7331 {
7332 int movw = AVR_HAVE_MOVW && offset % 2 == 0
7333 && d0 % 2 == (offset > 0)
7334 && d1 <= dest.regno_msb && d1 >= dest.regno
7335 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
7336
7337 xop[2] = all_regs_rtx[d0 & ~movw];
7338 xop[3] = all_regs_rtx[s0 & ~movw];
7339 code = movw ? "movw %2,%3" : "mov %2,%3";
7340 stepw = step * movw;
7341 }
7342
7343 if (code)
7344 {
7345 if (sign_extend && shift != ASHIFT && !sign_in_carry
7346 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
7347 {
7348 /* We are going to override the sign bit. If we sign-extend,
7349 store the sign in the Carry flag. This is not needed if
7350 the destination will be ASHIFTed in the remainder because
7351 the ASHIFT will set Carry without an extra instruction. */
7352
7353 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
7354 sign_in_carry = true;
7355 }
7356
7357 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
7358
7359 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7360 && src.ibyte > dest.ibyte
7361 && (d0 == src_msb || d0 + stepw == src_msb))
7362 {
7363 /* We are going to override the MSB. If we shift right,
7364 store the MSB in the Carry flag. This is only needed if
7365 we don't sign-extend because with sign-extension the MSB
7366 (the sign) will be produced by the sign extension. */
7367
7368 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
7369 msb_in_carry = true;
7370 }
7371
7372 unsigned src_lsb = dest.regno - offset -1;
7373
7374 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
7375 && !lsb_in_tmp_reg
7376 && (d0 == src_lsb || d0 + stepw == src_lsb))
7377 {
7378 /* We are going to override the new LSB; store it into carry. */
7379
7380 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
7381 code_ashift = "rol %0";
7382 lsb_in_carry = true;
7383 }
7384
7385 avr_asm_len (code, xop, plen, 1);
7386 d0 += stepw;
7387 }
7388 }
7389
7390 /* Step 2: Shift destination left by 1 bit position. This might be needed
7391 ====== for signed input and unsigned output. */
7392
7393 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
7394 {
7395 unsigned s0 = dest.regno - offset -1;
7396
7397 /* n1169 4.1.4 says:
7398 "Conversions from a fixed-point to an integer type round toward zero."
7399 Hence, converting a fract type to integer only gives a non-zero result
7400 for -1. */
7401 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7402 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
7403 && !TARGET_FRACT_CONV_TRUNC)
7404 {
7405 gcc_assert (s0 == src.regno_msb);
7406 /* Check if the input is -1. We do that by checking if negating
7407 the input causes an integer overflow. */
7408 unsigned sn = src.regno;
7409 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7410 while (sn <= s0)
7411 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
7412
7413 /* Overflow goes with set carry. Clear carry otherwise. */
7414 avr_asm_len ("brvs 0f" CR_TAB "clc\n0:", NULL, plen, 2);
7415 }
7416 /* Likewise, when converting from accumulator types to integer, we
7417 need to round up negative values. */
7418 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
7419 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
7420 && !TARGET_FRACT_CONV_TRUNC
7421 && !frac_rounded)
7422 {
7423 bool have_carry = false;
7424
7425 xop[2] = all_regs_rtx[s0];
7426 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
7427 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7428 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
7429 &all_regs_rtx[src.regno_msb], plen, 2);
7430 if (!lsb_in_tmp_reg)
7431 {
7432 unsigned sn = src.regno;
7433 if (sn < s0)
7434 {
7435 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
7436 plen, 1);
7437 have_carry = true;
7438 }
7439 while (++sn < s0)
7440 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
7441 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
7442 }
7443 /* Add in C and the rounding value 127. */
7444 /* If the destination msb is a sign byte, and in LD_REGS,
7445 grab it as a temporary. */
7446 if (sign_bytes
7447 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
7448 dest.regno_msb))
7449 {
7450 xop[3] = all_regs_rtx[dest.regno_msb];
7451 avr_asm_len ("ldi %3,127", xop, plen, 1);
7452 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
7453 : have_carry ? "adc %2,%3"
7454 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
7455 : "add %2,%3"),
7456 xop, plen, 1);
7457 }
7458 else
7459 {
7460 /* Fall back to use __zero_reg__ as a temporary. */
7461 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
7462 if (have_carry)
7463 avr_asm_len ("clt" CR_TAB "bld __zero_reg__,7", NULL, plen, 2);
7464 else
7465 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
7466 avr_asm_len ((have_carry && lsb_in_tmp_reg
7467 ? "adc __tmp_reg__,__zero_reg__"
7468 : have_carry ? "adc %2,__zero_reg__"
7469 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
7470 : "add %2,__zero_reg__"),
7471 xop, plen, 1);
7472 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
7473 }
7474 for (d0 = dest.regno + zero_bytes;
7475 d0 <= dest.regno_msb - sign_bytes; d0++)
7476 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
7477 avr_asm_len (lsb_in_tmp_reg
7478 ? "\n0:\t" "lsl __tmp_reg__" : "\n0:\t" "lsl %2",
7479 xop, plen, 1);
7480 }
7481 else if (MAY_CLOBBER (s0))
7482 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7483 else
7484 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7485 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7486
7487 code_ashift = "rol %0";
7488 lsb_in_carry = true;
7489 }
7490
7491 if (shift == ASHIFT)
7492 {
7493 for (d0 = dest.regno + zero_bytes;
7494 d0 <= dest.regno_msb - sign_bytes; d0++)
7495 {
7496 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
7497 code_ashift = "rol %0";
7498 }
7499
7500 lsb_in_carry = false;
7501 sign_in_carry = true;
7502 }
7503
7504 /* Step 4a: Store MSB in carry if we don't already have it or will produce
7505 ======= it in sign-extension below. */
7506
7507 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
7508 && src.ibyte > dest.ibyte)
7509 {
7510 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
7511
7512 if (MAY_CLOBBER (s0))
7513 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
7514 else
7515 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7516 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7517
7518 msb_in_carry = true;
7519 }
7520
7521 /* Step 3: Sign-extend or zero-extend the destination as needed.
7522 ====== */
7523
7524 if (sign_extend && !sign_in_carry)
7525 {
7526 unsigned s0 = src.regno_msb;
7527
7528 if (MAY_CLOBBER (s0))
7529 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
7530 else
7531 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
7532 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
7533
7534 sign_in_carry = true;
7535 }
7536
7537 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
7538
7539 unsigned copies = 0;
7540 rtx movw = sign_extend ? NULL_RTX : clrw;
7541
7542 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
7543 {
7544 if (AVR_HAVE_MOVW && movw
7545 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
7546 {
7547 xop[2] = all_regs_rtx[d0];
7548 xop[3] = movw;
7549 avr_asm_len ("movw %2,%3", xop, plen, 1);
7550 d0++;
7551 }
7552 else
7553 {
7554 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
7555 &all_regs_rtx[d0], plen, 1);
7556
7557 if (++copies >= 2 && !movw && d0 % 2 == 1)
7558 movw = all_regs_rtx[d0-1];
7559 }
7560 } /* for */
7561
7562
7563 /* Step 4: Right shift the destination. This might be needed for
7564 ====== conversions from unsigned to signed. */
7565
7566 if (shift == ASHIFTRT)
7567 {
7568 const char *code_ashiftrt = "lsr %0";
7569
7570 if (sign_extend || msb_in_carry)
7571 code_ashiftrt = "ror %0";
7572
7573 if (src.sbit && src.ibyte == dest.ibyte)
7574 code_ashiftrt = "asr %0";
7575
7576 for (d0 = dest.regno_msb - sign_bytes;
7577 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
7578 {
7579 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
7580 code_ashiftrt = "ror %0";
7581 }
7582 }
7583
7584 #undef MAY_CLOBBER
7585
7586 return "";
7587 }
7588
7589
7590 /* Output fixed-point rounding. XOP[0] = XOP[1] is the operand to round.
7591 XOP[2] is the rounding point, a CONST_INT. The function prints the
7592 instruction sequence if PLEN = NULL and computes the length in words
7593 of the sequence if PLEN != NULL. Most of this function deals with
7594 preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
7595
7596 const char*
7597 avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
7598 {
7599 enum machine_mode mode = GET_MODE (xop[0]);
7600 enum machine_mode imode = int_mode_for_mode (mode);
7601 // The smallest fractional bit not cleared by the rounding is 2^(-RP).
7602 int fbit = (int) GET_MODE_FBIT (mode);
7603 double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
7604 wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
7605 GET_MODE_PRECISION (imode));
7606 // Lengths of PLUS and AND parts.
7607 int len_add = 0, *plen_add = plen ? &len_add : NULL;
7608 int len_and = 0, *plen_and = plen ? &len_and : NULL;
7609
7610 // Add-Saturate 1/2 * 2^(-RP). Don't print the label "0:" when printing
7611 // the saturated addition so that we can emit the "rjmp 1f" before the
7612 // "0:" below.
7613
7614 rtx xadd = const_fixed_from_double_int (i_add, mode);
7615 rtx xpattern, xsrc, op[4];
7616
7617 xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
7618 ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
7619 : gen_rtx_US_PLUS (mode, xop[1], xadd);
7620 xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);
7621
7622 op[0] = xop[0];
7623 op[1] = xop[1];
7624 op[2] = xadd;
7625 avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);
7626
7627 avr_asm_len ("rjmp 1f" CR_TAB
7628 "0:", NULL, plen_add, 1);
7629
7630 // Keep all bits from RP and higher: ... 2^(-RP)
7631 // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
7632 // Rounding point ^^^^^^^
7633 // Added above ^^^^^^^^^
7634 rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
7635 rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);
7636
7637 xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));
7638
7639 op[0] = xreg;
7640 op[1] = xreg;
7641 op[2] = xmask;
7642 op[3] = gen_rtx_SCRATCH (QImode);
7643 avr_out_bitop (xpattern, op, plen_and);
7644 avr_asm_len ("1:", NULL, plen, 0);
7645
7646 if (plen)
7647 *plen = len_add + len_and;
7648
7649 return "";
7650 }
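/* Worked example (illustrative): for HQmode (s.15, fbit == 15) and
   rounding point RP == 7, the add value is 2^(-8), i.e. bit
   fbit-1-RP == 7 (0x0080), and the AND mask is -2 * 0x0080 == 0xff00,
   which keeps 2^(-7) and all higher-valued bits. */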
7651
7652
7653 /* Create RTL split patterns for byte sized rotate expressions. This
7654 produces a series of move instructions and considers overlap situations.
7655 Overlapping non-HImode operands need a scratch register. */
7656
7657 bool
7658 avr_rotate_bytes (rtx operands[])
7659 {
7660 int i, j;
7661 enum machine_mode mode = GET_MODE (operands[0]);
7662 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
7663 bool same_reg = rtx_equal_p (operands[0], operands[1]);
7664 int num = INTVAL (operands[2]);
7665 rtx scratch = operands[3];
7666 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
7667 Word move if no scratch is needed, otherwise use size of scratch. */
7668 enum machine_mode move_mode = QImode;
7669 int move_size, offset, size;
7670
7671 if (num & 0xf)
7672 move_mode = QImode;
7673 else if ((mode == SImode && !same_reg) || !overlapped)
7674 move_mode = HImode;
7675 else
7676 move_mode = GET_MODE (scratch);
7677
7678 /* Force DI rotate to use QI moves since other DI moves are currently split
7679 into QI moves so forward propagation works better. */
7680 if (mode == DImode)
7681 move_mode = QImode;
7682 /* Make scratch smaller if needed. */
7683 if (SCRATCH != GET_CODE (scratch)
7684 && HImode == GET_MODE (scratch)
7685 && QImode == move_mode)
7686 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
7687
7688 move_size = GET_MODE_SIZE (move_mode);
7689 /* Number of bytes/words to rotate. */
7690 offset = (num >> 3) / move_size;
7691 /* Number of moves needed. */
7692 size = GET_MODE_SIZE (mode) / move_size;
7693 /* HImode byte swap is a special case to avoid a scratch register. */
7694 if (mode == HImode && same_reg)
7695 {
7696 /* HImode byte swap, using xor. This is as quick as using scratch. */
7697 rtx src, dst;
7698 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
7699 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
7700 if (!rtx_equal_p (dst, src))
7701 {
7702 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7703 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
7704 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
7705 }
7706 }
7707 else
7708 {
7709 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
7710 /* Create linked list of moves to determine move order. */
7711 struct {
7712 rtx src, dst;
7713 int links;
7714 } move[MAX_SIZE + 8];
7715 int blocked, moves;
7716
7717 gcc_assert (size <= MAX_SIZE);
7718 /* Generate list of subreg moves. */
7719 for (i = 0; i < size; i++)
7720 {
7721 int from = i;
7722 int to = (from + offset) % size;
7723 move[i].src = simplify_gen_subreg (move_mode, operands[1],
7724 mode, from * move_size);
7725 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
7726 mode, to * move_size);
7727 move[i].links = -1;
7728 }
7729 /* Mark dependence where a dst of one move is the src of another move.
7730 The first move is a conflict as it must wait until the second is
7731 performed. We ignore moves to self - we catch this later. */
7732 if (overlapped)
7733 for (i = 0; i < size; i++)
7734 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
7735 for (j = 0; j < size; j++)
7736 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
7737 {
7738 /* The dst of move i is the src of move j. */
7739 move[i].links = j;
7740 break;
7741 }
7742
7743 blocked = -1;
7744 moves = 0;
7745 /* Go through move list and perform non-conflicting moves. As each
7746 non-overlapping move is made, it may remove other conflicts
7747 so the process is repeated until no conflicts remain. */
7748 do
7749 {
7750 blocked = -1;
7751 moves = 0;
7752 /* Emit move where dst is not also a src or we have used that
7753 src already. */
7754 for (i = 0; i < size; i++)
7755 if (move[i].src != NULL_RTX)
7756 {
7757 if (move[i].links == -1
7758 || move[move[i].links].src == NULL_RTX)
7759 {
7760 moves++;
7761 /* Ignore NOP moves to self. */
7762 if (!rtx_equal_p (move[i].dst, move[i].src))
7763 emit_move_insn (move[i].dst, move[i].src);
7764
7765 /* Remove conflict from list. */
7766 move[i].src = NULL_RTX;
7767 }
7768 else
7769 blocked = i;
7770 }
7771
7772 /* Check for deadlock. This is when no moves occurred and we have
7773 at least one blocked move. */
7774 if (moves == 0 && blocked != -1)
7775 {
7776 /* Need to use the scratch register to break the deadlock.
7777 Add a move to put the dst of the blocked move into scratch.
7778 When this move occurs, it will break the chain deadlock.
7779 The scratch register is substituted for the real move. */
7780
7781 gcc_assert (SCRATCH != GET_CODE (scratch));
7782
7783 move[size].src = move[blocked].dst;
7784 move[size].dst = scratch;
7785 /* Scratch move is never blocked. */
7786 move[size].links = -1;
7787 /* Make sure we have valid link. */
7788 gcc_assert (move[blocked].links != -1);
7789 /* Replace src of blocking move with scratch reg. */
7790 move[move[blocked].links].src = scratch;
7791 /* Make dependent on scratch move occurring. */
7792 move[blocked].links = size;
7793 size++;
7794 }
7795 }
7796 while (blocked != -1);
7797 }
7798 return true;
7799 }
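/* Illustrative example: rotating a non-overlapping SImode value by
   num == 16 uses HImode moves with offset == 1 and size == 2, so the two
   16-bit halves are simply stored crosswise -- two MOVW instructions on
   devices that have MOVW. */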
7800
7801
7802 /* Worker function for `ADJUST_INSN_LENGTH'. */
7803 /* Modify the length assigned to instruction INSN.
7804 LEN is the initially computed length of the insn. */
7805
7806 int
7807 avr_adjust_insn_length (rtx_insn *insn, int len)
7808 {
7809 rtx *op = recog_data.operand;
7810 enum attr_adjust_len adjust_len;
7811
7812 /* Some complex insns don't need length adjustment and therefore
7813 the length need not/must not be adjusted for these insns.
7814 It is easier to state this in an insn attribute "adjust_len" than
7815 to clutter up code here... */
7816
7817 if (JUMP_TABLE_DATA_P (insn) || recog_memoized (insn) == -1)
7818 {
7819 return len;
7820 }
7821
7822 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
7823
7824 adjust_len = get_attr_adjust_len (insn);
7825
7826 if (adjust_len == ADJUST_LEN_NO)
7827 {
7828 /* Nothing to adjust: The length from attribute "length" is fine.
7829 This is the default. */
7830
7831 return len;
7832 }
7833
7834 /* Extract insn's operands. */
7835
7836 extract_constrain_insn_cached (insn);
7837
7838 /* Dispatch to right function. */
7839
7840 switch (adjust_len)
7841 {
7842 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
7843 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
7844 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
7845
7846 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
7847
7848 case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
7849 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
7850
7851 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
7852 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
7853 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
7854 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
7855 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
7856 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
7857 case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
7858
7859 case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
7860 case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
7861 case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;
7862
7863 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
7864 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
7865 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
7866 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
7867 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
7868
7869 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
7870 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
7871 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
7872
7873 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
7874 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
7875 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
7876
7877 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
7878 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
7879 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
7880
7881 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
7882 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
7883 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
7884
7885 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
7886
7887 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
7888
7889 default:
7890 gcc_unreachable();
7891 }
7892
7893 return len;
7894 }
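/* Usage sketch (illustrative): the output functions dispatched to above
   are side-effect free when called with PLEN != NULL. For an insn with
   adjust_len == ADJUST_LEN_ADDTO_SP, for example, avr_out_addto_sp
   (op, &len) merely counts words here, and the very same function prints
   the sequence at output time when it is called with PLEN == NULL. */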
7895
7896 /* Return nonzero if register REG is dead after INSN. */
7897
7898 int
7899 reg_unused_after (rtx_insn *insn, rtx reg)
7900 {
7901 return (dead_or_set_p (insn, reg)
7902 || (REG_P(reg) && _reg_unused_after (insn, reg)));
7903 }
7904
7905 /* Return nonzero if REG is not used after INSN.
7906 We assume REG is a reload reg, and therefore does
7907 not live past labels. It may live past calls or jumps though. */
7908
7909 int
7910 _reg_unused_after (rtx_insn *insn, rtx reg)
7911 {
7912 enum rtx_code code;
7913 rtx set;
7914
7915 /* If the reg is set by this instruction, then it is safe for our
7916 case. Disregard the case where this is a store to memory, since
7917 we are checking a register used in the store address. */
7918 set = single_set (insn);
7919 if (set && GET_CODE (SET_DEST (set)) != MEM
7920 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7921 return 1;
7922
7923 while ((insn = NEXT_INSN (insn)))
7924 {
7925 rtx set;
7926 code = GET_CODE (insn);
7927
7928 #if 0
7929 /* If this is a label that existed before reload, then the register
7930 is dead here. However, if this is a label added by reorg, then
7931 the register may still be live here. We can't tell the difference,
7932 so we just ignore labels completely. */
7933 if (code == CODE_LABEL)
7934 return 1;
7935 /* else */
7936 #endif
7937
7938 if (!INSN_P (insn))
7939 continue;
7940
7941 if (code == JUMP_INSN)
7942 return 0;
7943
7944 /* If this is a sequence, we must handle them all at once.
7945 We could have for instance a call that sets the target register,
7946 and an insn in a delay slot that uses the register. In this case,
7947 we must return 0. */
7948 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
7949 {
7950 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
7951 int i;
7952 int retval = 0;
7953
7954 for (i = 0; i < seq->len (); i++)
7955 {
7956 rtx_insn *this_insn = seq->insn (i);
7957 rtx set = single_set (this_insn);
7958
7959 if (CALL_P (this_insn))
7960 code = CALL_INSN;
7961 else if (JUMP_P (this_insn))
7962 {
7963 if (INSN_ANNULLED_BRANCH_P (this_insn))
7964 return 0;
7965 code = JUMP_INSN;
7966 }
7967
7968 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7969 return 0;
7970 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7971 {
7972 if (GET_CODE (SET_DEST (set)) != MEM)
7973 retval = 1;
7974 else
7975 return 0;
7976 }
7977 if (set == 0
7978 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
7979 return 0;
7980 }
7981 if (retval == 1)
7982 return 1;
7983 else if (code == JUMP_INSN)
7984 return 0;
7985 }
7986
7987 if (code == CALL_INSN)
7988 {
7989 rtx tem;
7990 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
7991 if (GET_CODE (XEXP (tem, 0)) == USE
7992 && REG_P (XEXP (XEXP (tem, 0), 0))
7993 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
7994 return 0;
7995 if (call_used_regs[REGNO (reg)])
7996 return 1;
7997 }
7998
7999 set = single_set (insn);
8000
8001 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
8002 return 0;
8003 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
8004 return GET_CODE (SET_DEST (set)) != MEM;
8005 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
8006 return 0;
8007 }
8008 return 1;
8009 }
8010
8011
8012 /* Implement `TARGET_ASM_INTEGER'. */
8013 /* Target hook for assembling integer objects. The AVR version needs
8014 special handling for references to certain labels. */
8015
8016 static bool
8017 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
8018 {
8019 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
8020 && text_segment_operand (x, VOIDmode))
8021 {
8022 fputs ("\t.word\tgs(", asm_out_file);
8023 output_addr_const (asm_out_file, x);
8024 fputs (")\n", asm_out_file);
8025
8026 return true;
8027 }
8028 else if (GET_MODE (x) == PSImode)
8029 {
8030 /* This needs binutils 2.23+, see PR binutils/13503 */
8031
8032 fputs ("\t.byte\tlo8(", asm_out_file);
8033 output_addr_const (asm_out_file, x);
8034 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8035
8036 fputs ("\t.byte\thi8(", asm_out_file);
8037 output_addr_const (asm_out_file, x);
8038 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8039
8040 fputs ("\t.byte\thh8(", asm_out_file);
8041 output_addr_const (asm_out_file, x);
8042 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8043
8044 return true;
8045 }
8046 else if (CONST_FIXED_P (x))
8047 {
8048 unsigned n;
8049
8050 /* varasm fails to handle big fixed modes that don't fit in a HOST_WIDE_INT. */
8051
8052 for (n = 0; n < size; n++)
8053 {
8054 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8055 default_assemble_integer (xn, 1, aligned_p);
8056 }
8057
8058 return true;
8059 }
8060
8061 return default_assemble_integer (x, size, aligned_p);
8062 }
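/* Example (illustrative): for

       void isr (void);
       void (* const handler) (void) = isr;

   the initializer is a text-segment operand and is emitted as

       .word gs(isr)

   where the gs() modifier lets the linker insert a stub or use a word
   address as appropriate for the device. */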
8063
8064
8065 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
8066 /* Return value is nonzero if pseudos that have been
8067 assigned to registers of class CLASS would likely be spilled
8068 because registers of CLASS are needed for spill registers. */
8069
8070 static bool
8071 avr_class_likely_spilled_p (reg_class_t c)
8072 {
8073 return (c != ALL_REGS && c != ADDW_REGS);
8074 }
8075
8076
8077 /* Valid attributes:
8078 progmem - Put data into program memory.
8079 signal - Make a function a hardware interrupt handler.
8080 After the function prologue, interrupts remain disabled.
8081 interrupt - Make a function a hardware interrupt handler. Before the
8082 function prologue, interrupts are enabled by means of SEI.
8083 naked - Don't generate a function prologue/epilogue and RET
8084 instruction. */
8085
8086 /* Handle a "progmem" attribute; arguments as in
8087 struct attribute_spec.handler. */
8088
8089 static tree
8090 avr_handle_progmem_attribute (tree *node, tree name,
8091 tree args ATTRIBUTE_UNUSED,
8092 int flags ATTRIBUTE_UNUSED,
8093 bool *no_add_attrs)
8094 {
8095 if (DECL_P (*node))
8096 {
8097 if (TREE_CODE (*node) == TYPE_DECL)
8098 {
8099 /* This is really a decl attribute, not a type attribute,
8100 but try to handle it for GCC 3.0 backwards compatibility. */
8101
8102 tree type = TREE_TYPE (*node);
8103 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
8104 tree newtype = build_type_attribute_variant (type, attr);
8105
8106 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
8107 TREE_TYPE (*node) = newtype;
8108 *no_add_attrs = true;
8109 }
8110 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
8111 {
8112 *no_add_attrs = false;
8113 }
8114 else
8115 {
8116 warning (OPT_Wattributes, "%qE attribute ignored",
8117 name);
8118 *no_add_attrs = true;
8119 }
8120 }
8121
8122 return NULL_TREE;
8123 }
8124
8125 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8126 struct attribute_spec.handler. */
8127
8128 static tree
8129 avr_handle_fndecl_attribute (tree *node, tree name,
8130 tree args ATTRIBUTE_UNUSED,
8131 int flags ATTRIBUTE_UNUSED,
8132 bool *no_add_attrs)
8133 {
8134 if (TREE_CODE (*node) != FUNCTION_DECL)
8135 {
8136 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8137 name);
8138 *no_add_attrs = true;
8139 }
8140
8141 return NULL_TREE;
8142 }
8143
8144 static tree
8145 avr_handle_fntype_attribute (tree *node, tree name,
8146 tree args ATTRIBUTE_UNUSED,
8147 int flags ATTRIBUTE_UNUSED,
8148 bool *no_add_attrs)
8149 {
8150 if (TREE_CODE (*node) != FUNCTION_TYPE)
8151 {
8152 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8153 name);
8154 *no_add_attrs = true;
8155 }
8156
8157 return NULL_TREE;
8158 }
8159
8160 static tree
8161 avr_handle_addr_attribute (tree *node, tree name, tree args,
8162 int flags ATTRIBUTE_UNUSED, bool *no_add)
8163 {
8164 bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
8165 location_t loc = DECL_SOURCE_LOCATION (*node);
8166
8167 if (TREE_CODE (*node) != VAR_DECL)
8168 {
8169 warning_at (loc, 0, "%qE attribute only applies to variables", name);
8170 *no_add = true;
8171 }
8172
8173 if (args != NULL_TREE)
8174 {
8175 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
8176 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
8177 tree arg = TREE_VALUE (args);
8178 if (TREE_CODE (arg) != INTEGER_CST)
8179 {
8180 warning (0, "%qE attribute allows only an integer constant argument",
8181 name);
8182 *no_add = true;
8183 }
8184 else if (io_p
8185 && (!tree_fits_shwi_p (arg)
8186 || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
8187 ? low_io_address_operand : io_address_operand)
8188 (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
8189 {
8190 warning_at (loc, 0, "%qE attribute address out of range", name);
8191 *no_add = true;
8192 }
8193 else
8194 {
8195 tree attribs = DECL_ATTRIBUTES (*node);
8196 const char *names[] = { "io", "io_low", "address", NULL };
8197 for (const char **p = names; *p; p++)
8198 {
8199 tree other = lookup_attribute (*p, attribs);
8200 if (other && TREE_VALUE (other))
8201 {
8202 warning_at (loc, 0,
8203 "both %s and %qE attribute provide address",
8204 *p, name);
8205 *no_add = true;
8206 break;
8207 }
8208 }
8209 }
8210 }
8211
8212 if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
8213 warning_at (loc, 0, "%qE attribute on non-volatile variable", name);
8214
8215 return NULL_TREE;
8216 }
8217
8218 rtx
8219 avr_eval_addr_attrib (rtx x)
8220 {
8221 if (GET_CODE (x) == SYMBOL_REF
8222 && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
8223 {
8224 tree decl = SYMBOL_REF_DECL (x);
8225 tree attr = NULL_TREE;
8226
8227 if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
8228 {
8229 attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
8230 gcc_assert (attr);
8231 }
8232 if (!attr || !TREE_VALUE (attr))
8233 attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
8234 gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
8235 return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
8236 }
8237 return x;
8238 }
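/* Usage example (illustrative address): given

       volatile char sreg __attribute__((io(0x3f)));

   the address is range-checked by io_address_operand in the handler
   above, and avr_eval_addr_attrib folds the SYMBOL_REF for "sreg" into
   (const_int 0x3f) so that IN/OUT instructions can be used. */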
8239
8240
8241 /* AVR attributes. */
8242 static const struct attribute_spec
8243 avr_attribute_table[] =
8244 {
8245 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
8246 affects_type_identity } */
8247 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
8248 false },
8249 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8250 false },
8251 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
8252 false },
8253 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
8254 false },
8255 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
8256 false },
8257 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
8258 false },
8259 { "io", 0, 1, false, false, false, avr_handle_addr_attribute,
8260 false },
8261 { "io_low", 0, 1, false, false, false, avr_handle_addr_attribute,
8262 false },
8263 { "address", 1, 1, false, false, false, avr_handle_addr_attribute,
8264 false },
8265 { NULL, 0, 0, false, false, false, NULL, false }
8266 };
8267
8268
8269 /* Check whether DECL shall be placed in program memory space by
8270 means of attribute `progmem' or some address-space qualifier.
8271 Return non-zero if DECL is data that must end up in Flash and
8272 zero if the data lives in RAM (.bss, .data, .rodata, ...).
8273
8274 Return 2 if DECL is located in 24-bit flash address-space
8275 Return 1 if DECL is located in 16-bit flash address-space
8276 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
8277 Return 0 otherwise */
8278
8279 int
8280 avr_progmem_p (tree decl, tree attributes)
8281 {
8282 tree a;
8283
8284 if (TREE_CODE (decl) != VAR_DECL)
8285 return 0;
8286
8287 if (avr_decl_memx_p (decl))
8288 return 2;
8289
8290 if (avr_decl_flash_p (decl))
8291 return 1;
8292
8293 if (NULL_TREE
8294 != lookup_attribute ("progmem", attributes))
8295 return -1;
8296
8297 a = decl;
8298
8299 do
8300 a = TREE_TYPE(a);
8301 while (TREE_CODE (a) == ARRAY_TYPE);
8302
8303 if (a == error_mark_node)
8304 return 0;
8305
8306 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
8307 return -1;
8308
8309 return 0;
8310 }
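/* Examples (illustrative):

       const __memx char a = 1;                    // returns 2
       const __flash char b = 1;                   // returns 1
       const char c __attribute__((progmem)) = 1;  // returns -1
       char d;                                     // returns 0 (RAM) */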
8311
8312
8313 /* Scan type TYP for pointer references to address space ASn.
8314 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
8315 the AS are also declared to be CONST.
8316 Otherwise, return the respective address space, i.e. a value != 0. */
8317
8318 static addr_space_t
8319 avr_nonconst_pointer_addrspace (tree typ)
8320 {
8321 while (ARRAY_TYPE == TREE_CODE (typ))
8322 typ = TREE_TYPE (typ);
8323
8324 if (POINTER_TYPE_P (typ))
8325 {
8326 addr_space_t as;
8327 tree target = TREE_TYPE (typ);
8328
8329 /* Pointer to function: Test the function's return type. */
8330
8331 if (FUNCTION_TYPE == TREE_CODE (target))
8332 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
8333
8334 /* "Ordinary" pointers... */
8335
8336 while (TREE_CODE (target) == ARRAY_TYPE)
8337 target = TREE_TYPE (target);
8338
8339 /* Pointers to non-generic address space must be const.
8340 Refuse address spaces outside the device's flash. */
8341
8342 as = TYPE_ADDR_SPACE (target);
8343
8344 if (!ADDR_SPACE_GENERIC_P (as)
8345 && (!TYPE_READONLY (target)
8346 || avr_addrspace[as].segment >= avr_n_flash))
8347 {
8348 return as;
8349 }
8350
8351 /* Scan pointer's target type. */
8352
8353 return avr_nonconst_pointer_addrspace (target);
8354 }
8355
8356 return ADDR_SPACE_GENERIC;
8357 }
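/* Examples (illustrative):

       const __flash char *p;  // ok: target is const, returns generic (0)
       __flash char *q;        // missing const: returns ADDR_SPACE_FLASH */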
8358
8359
8360 /* Sanity check NODE so that all pointers targeting non-generic address spaces
8361 go along with CONST qualifier. Writing to these address spaces should
8362 be detected and complained about as early as possible. */
8363
8364 static bool
8365 avr_pgm_check_var_decl (tree node)
8366 {
8367 const char *reason = NULL;
8368
8369 addr_space_t as = ADDR_SPACE_GENERIC;
8370
8371 gcc_assert (as == 0);
8372
8373 if (avr_log.progmem)
8374 avr_edump ("%?: %t\n", node);
8375
8376 switch (TREE_CODE (node))
8377 {
8378 default:
8379 break;
8380
8381 case VAR_DECL:
8382 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8383 reason = "variable";
8384 break;
8385
8386 case PARM_DECL:
8387 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8388 reason = "function parameter";
8389 break;
8390
8391 case FIELD_DECL:
8392 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
8393 reason = "structure field";
8394 break;
8395
8396 case FUNCTION_DECL:
8397 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
8398 as)
8399 reason = "return type of function";
8400 break;
8401
8402 case POINTER_TYPE:
8403 if (as = avr_nonconst_pointer_addrspace (node), as)
8404 reason = "pointer";
8405 break;
8406 }
8407
8408 if (reason)
8409 {
8410 if (avr_addrspace[as].segment >= avr_n_flash)
8411 {
8412 if (TYPE_P (node))
8413 error ("%qT uses address space %qs beyond flash of %qs",
8414 node, avr_addrspace[as].name, avr_current_device->name);
8415 else
8416 error ("%s %q+D uses address space %qs beyond flash of %qs",
8417 reason, node, avr_addrspace[as].name,
8418 avr_current_device->name);
8419 }
8420 else
8421 {
8422 if (TYPE_P (node))
8423 error ("pointer targeting address space %qs must be const in %qT",
8424 avr_addrspace[as].name, node);
8425 else
8426 error ("pointer targeting address space %qs must be const"
8427 " in %s %q+D",
8428 avr_addrspace[as].name, reason, node);
8429 }
8430 }
8431
8432 return reason == NULL;
8433 }
8434
8435
8436 /* Add the section attribute if the variable is in progmem. */
8437
8438 static void
8439 avr_insert_attributes (tree node, tree *attributes)
8440 {
8441 avr_pgm_check_var_decl (node);
8442
8443 if (TREE_CODE (node) == VAR_DECL
8444 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
8445 && avr_progmem_p (node, *attributes))
8446 {
8447 addr_space_t as;
8448 tree node0 = node;
8449
8450 /* For C++, we have to peel arrays in order to get correct
8451 determination of readonlyness. */
8452
8453 do
8454 node0 = TREE_TYPE (node0);
8455 while (TREE_CODE (node0) == ARRAY_TYPE);
8456
8457 if (error_mark_node == node0)
8458 return;
8459
8460 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
8461
8462 if (avr_addrspace[as].segment >= avr_n_flash)
8463 {
8464 error ("variable %q+D located in address space %qs"
8465 " beyond flash of %qs",
8466 node, avr_addrspace[as].name, avr_current_device->name);
8467 }
8468
8469 if (!TYPE_READONLY (node0)
8470 && !TREE_READONLY (node))
8471 {
8472 const char *reason = "__attribute__((progmem))";
8473
8474 if (!ADDR_SPACE_GENERIC_P (as))
8475 reason = avr_addrspace[as].name;
8476
8477 if (avr_log.progmem)
8478 avr_edump ("\n%?: %t\n%t\n", node, node0);
8479
8480 error ("variable %q+D must be const in order to be put into"
8481 " read-only section by means of %qs", node, reason);
8482 }
8483 }
8484 }
8485
8486
8487 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8488 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8489 /* Track need of __do_clear_bss. */
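/* A sketch of the effect (assumed usage, made-up identifier):

volatile char port __attribute__((io (0x22)));

reserves no common storage; instead an absolute symbol assignment like
"port = 34" is printed, so IN/OUT instructions can address it directly. */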
8490
8491 void
8492 avr_asm_output_aligned_decl_common (FILE * stream,
8493 tree decl,
8494 const char *name,
8495 unsigned HOST_WIDE_INT size,
8496 unsigned int align, bool local_p)
8497 {
8498 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
8499 rtx symbol;
8500
8501 if (mem != NULL_RTX && MEM_P (mem)
8502 && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
8503 && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
8504 {
8505
8506 if (!local_p)
8507 {
8508 fprintf (stream, "\t.globl\t");
8509 assemble_name (stream, name);
8510 fprintf (stream, "\n");
8511 }
8512 if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
8513 {
8514 assemble_name (stream, name);
8515 fprintf (stream, " = %ld\n",
8516 (long) INTVAL (avr_eval_addr_attrib (symbol)));
8517 }
8518 else if (local_p)
8519 error_at (DECL_SOURCE_LOCATION (decl),
8520 "static IO declaration for %q+D needs an address", decl);
8521 return;
8522 }
8523
8524 /* __gnu_lto_v1 etc. are just markers injected by toplev.c for the
8525 linker. There is no need to trigger __do_clear_bss code for them. */
8526
8527 if (!STR_PREFIX_P (name, "__gnu_lto"))
8528 avr_need_clear_bss_p = true;
8529
8530 if (local_p)
8531 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
8532 else
8533 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
8534 }
8535
8536 void
8537 avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
8538 unsigned HOST_WIDE_INT size, int align,
8539 void (*default_func)
8540 (FILE *, tree, const char *,
8541 unsigned HOST_WIDE_INT, int))
8542 {
8543 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
8544 rtx symbol;
8545
8546 if (mem != NULL_RTX && MEM_P (mem)
8547 && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
8548 && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
8549 {
8550 if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
8551 error_at (DECL_SOURCE_LOCATION (decl),
8552 "IO definition for %q+D needs an address", decl);
8553 avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
8554 }
8555 else
8556 default_func (file, decl, name, size, align);
8557 }
8558
8559
8560 /* Unnamed section callback for data_section
8561 to track need of __do_copy_data. */
8562
8563 static void
8564 avr_output_data_section_asm_op (const void *data)
8565 {
8566 avr_need_copy_data_p = true;
8567
8568 /* Dispatch to default. */
8569 output_section_asm_op (data);
8570 }
8571
8572
8573 /* Unnamed section callback for bss_section
8574 to track need of __do_clear_bss. */
8575
8576 static void
8577 avr_output_bss_section_asm_op (const void *data)
8578 {
8579 avr_need_clear_bss_p = true;
8580
8581 /* Dispatch to default. */
8582 output_section_asm_op (data);
8583 }
8584
8585
8586 /* Unnamed section callback for progmem*.data sections. */
8587
8588 static void
8589 avr_output_progmem_section_asm_op (const void *data)
8590 {
8591 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
8592 (const char*) data);
8593 }
8594
8595
8596 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
8597
8598 static void
8599 avr_asm_init_sections (void)
8600 {
8601 /* Set up a section for jump tables. Alignment is handled by
8602 ASM_OUTPUT_BEFORE_CASE_LABEL. */
8603
8604 if (AVR_HAVE_JMP_CALL)
8605 {
8606 progmem_swtable_section
8607 = get_unnamed_section (0, output_section_asm_op,
8608 "\t.section\t.progmem.gcc_sw_table"
8609 ",\"a\",@progbits");
8610 }
8611 else
8612 {
8613 progmem_swtable_section
8614 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
8615 "\t.section\t.progmem.gcc_sw_table"
8616 ",\"ax\",@progbits");
8617 }
8618
8619 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
8620 resp. `avr_need_copy_data_p'. */
8621
8622 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
8623 data_section->unnamed.callback = avr_output_data_section_asm_op;
8624 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
8625 }
8626
8627
8628 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
8629
8630 static section*
8631 avr_asm_function_rodata_section (tree decl)
8632 {
8633 /* If a function is unused and optimized out by -ffunction-sections
8634 and --gc-sections, ensure that the same will happen for its jump
8635 tables by putting them into individual sections. */
8636
8637 unsigned int flags;
8638 section * frodata;
8639
8640 /* Get the frodata section from the default function in varasm.c,
8641 but treat function-associated data such as jump tables as code
8642 rather than as user-defined data. AVR has no constant pools. */
8643 {
8644 int fdata = flag_data_sections;
8645
8646 flag_data_sections = flag_function_sections;
8647 frodata = default_function_rodata_section (decl);
8648 flag_data_sections = fdata;
8649 flags = frodata->common.flags;
8650 }
8651
8652 if (frodata != readonly_data_section
8653 && flags & SECTION_NAMED)
8654 {
8655 /* Adjust section flags and replace section name prefix. */
8656
8657 unsigned int i;
8658
8659 static const char* const prefix[] =
8660 {
8661 ".rodata", ".progmem.gcc_sw_table",
8662 ".gnu.linkonce.r.", ".gnu.linkonce.t."
8663 };
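/* Illustration of the old/new pairs above: a jump table placed in
".rodata.foo" by varasm moves to ".progmem.gcc_sw_table.foo", and the
".gnu.linkonce.r." prefix maps to ".gnu.linkonce.t." likewise. */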
8664
8665 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
8666 {
8667 const char * old_prefix = prefix[i];
8668 const char * new_prefix = prefix[i+1];
8669 const char * name = frodata->named.name;
8670
8671 if (STR_PREFIX_P (name, old_prefix))
8672 {
8673 const char *rname = ACONCAT ((new_prefix,
8674 name + strlen (old_prefix), NULL));
8675 flags &= ~SECTION_CODE;
8676 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
8677
8678 return get_section (rname, flags, frodata->named.decl);
8679 }
8680 }
8681 }
8682
8683 return progmem_swtable_section;
8684 }
8685
8686
8687 /* Implement `TARGET_ASM_NAMED_SECTION'. */
8688 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
8689
8690 static void
8691 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
8692 {
8693 if (flags & AVR_SECTION_PROGMEM)
8694 {
8695 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
8696 const char *old_prefix = ".rodata";
8697 const char *new_prefix = avr_addrspace[as].section_name;
8698
8699 if (STR_PREFIX_P (name, old_prefix))
8700 {
8701 const char *sname = ACONCAT ((new_prefix,
8702 name + strlen (old_prefix), NULL));
8703 default_elf_asm_named_section (sname, flags, decl);
8704 return;
8705 }
8706
8707 default_elf_asm_named_section (new_prefix, flags, decl);
8708 return;
8709 }
8710
8711 if (!avr_need_copy_data_p)
8712 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
8713 || STR_PREFIX_P (name, ".rodata")
8714 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
8715
8716 if (!avr_need_clear_bss_p)
8717 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
8718
8719 default_elf_asm_named_section (name, flags, decl);
8720 }
8721
8722
8723 /* Implement `TARGET_SECTION_TYPE_FLAGS'. */
8724
8725 static unsigned int
8726 avr_section_type_flags (tree decl, const char *name, int reloc)
8727 {
8728 unsigned int flags = default_section_type_flags (decl, name, reloc);
8729
8730 if (STR_PREFIX_P (name, ".noinit"))
8731 {
8732 if (decl && TREE_CODE (decl) == VAR_DECL
8733 && DECL_INITIAL (decl) == NULL_TREE)
8734 flags |= SECTION_BSS; /* @nobits */
8735 else
8736 warning (0, "only uninitialized variables can be placed in the "
8737 ".noinit section");
8738 }
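/* Illustrative use (assumed, made-up identifier):

int boot_count __attribute__((section (".noinit")));

keeps its value across a reset because the startup code neither copies
nor clears .noinit; an initializer would trigger the warning above. */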
8739
8740 if (decl && DECL_P (decl)
8741 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8742 {
8743 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8744
8745 /* Attribute progmem puts data in generic address space.
8746 Set section flags as if it were in __flash to get the right
8747 section prefix in the remainder. */
8748
8749 if (ADDR_SPACE_GENERIC_P (as))
8750 as = ADDR_SPACE_FLASH;
8751
8752 flags |= as * SECTION_MACH_DEP;
8753 flags &= ~SECTION_WRITE;
8754 flags &= ~SECTION_BSS;
8755 }
8756
8757 return flags;
8758 }
8759
8760
8761 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
8762
8763 static void
8764 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
8765 {
8766 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
8767 readily available, see PR34734. So we postpone the warning
8768 about uninitialized data in program memory section until here. */
8769
8770 if (new_decl_p
8771 && decl && DECL_P (decl)
8772 && NULL_TREE == DECL_INITIAL (decl)
8773 && !DECL_EXTERNAL (decl)
8774 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8775 {
8776 warning (OPT_Wuninitialized,
8777 "uninitialized variable %q+D put into "
8778 "program memory area", decl);
8779 }
8780
8781 default_encode_section_info (decl, rtl, new_decl_p);
8782
8783 if (decl && DECL_P (decl)
8784 && TREE_CODE (decl) != FUNCTION_DECL
8785 && MEM_P (rtl)
8786 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
8787 {
8788 rtx sym = XEXP (rtl, 0);
8789 tree type = TREE_TYPE (decl);
8790 tree attr = DECL_ATTRIBUTES (decl);
8791 if (type == error_mark_node)
8792 return;
8793
8794 addr_space_t as = TYPE_ADDR_SPACE (type);
8795
8796 /* PSTR strings are in generic space but located in flash:
8797 patch address space. */
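/* Illustration: AVR-LibC's PSTR("...") produces a string that lives in
flash although its type targets the generic address space;
avr_progmem_p signals this case by returning -1, and the address
space is patched here. */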
8798
8799 if (-1 == avr_progmem_p (decl, attr))
8800 as = ADDR_SPACE_FLASH;
8801
8802 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
8803
8804 tree io_low_attr = lookup_attribute ("io_low", attr);
8805 tree io_attr = lookup_attribute ("io", attr);
8806 tree addr_attr;
8807 if (io_low_attr
8808 && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
8809 addr_attr = io_low_attr;
8810 else if (io_attr
8811 && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
8812 addr_attr = io_attr;
8813 else
8814 addr_attr = lookup_attribute ("address", attr);
8815 if (io_low_attr
8816 || (io_attr && addr_attr &&
8817 low_io_address_operand (GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
8818 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
8819 if (io_attr || io_low_attr)
8820 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
8821 /* If we have an (io) address attribute specification, but the variable
8822 is external, treat the address as only a tentative definition
8823 to be used to determine if an io port is in the lower range, but
8824 don't use the exact value for constant propagation. */
8825 if (addr_attr && !DECL_EXTERNAL (decl))
8826 SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
8827 }
8828 }
8829
8830
8831 /* Implement `TARGET_ASM_SELECT_SECTION' */
8832
8833 static section *
8834 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
8835 {
8836 section * sect = default_elf_select_section (decl, reloc, align);
8837
8838 if (decl && DECL_P (decl)
8839 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
8840 {
8841 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
8842
8843 /* __progmem__ goes in generic space but shall be allocated to
8844 .progmem.data */
8845
8846 if (ADDR_SPACE_GENERIC_P (as))
8847 as = ADDR_SPACE_FLASH;
8848
8849 if (sect->common.flags & SECTION_NAMED)
8850 {
8851 const char * name = sect->named.name;
8852 const char * old_prefix = ".rodata";
8853 const char * new_prefix = avr_addrspace[as].section_name;
8854
8855 if (STR_PREFIX_P (name, old_prefix))
8856 {
8857 const char *sname = ACONCAT ((new_prefix,
8858 name + strlen (old_prefix), NULL));
8859 return get_section (sname, sect->common.flags, sect->named.decl);
8860 }
8861 }
8862
8863 if (!progmem_section[as])
8864 {
8865 progmem_section[as]
8866 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
8867 avr_addrspace[as].section_name);
8868 }
8869
8870 return progmem_section[as];
8871 }
8872
8873 return sect;
8874 }
8875
8876 /* Implement `TARGET_ASM_FILE_START'. */
8877 /* Outputs some text at the start of each assembler file. */
8878
8879 static void
8880 avr_file_start (void)
8881 {
8882 int sfr_offset = avr_current_arch->sfr_offset;
8883
8884 if (avr_current_arch->asm_only)
8885 error ("MCU %qs supported for assembler only", avr_current_device->name);
8886
8887 default_file_start ();
8888
8889 /* Print I/O addresses of some SFRs used with IN and OUT. */
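/* Illustration (assumed classic ATmega values, where SPL/SPH/SREG sit
at I/O addresses 0x3d/0x3e/0x3f), the emitted header looks like:

__SP_H__ = 0x3e
__SP_L__ = 0x3d
__SREG__ = 0x3f */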
8890
8891 if (AVR_HAVE_SPH)
8892 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
8893
8894 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
8895 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
8896 if (AVR_HAVE_RAMPZ)
8897 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
8898 if (AVR_HAVE_RAMPY)
8899 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
8900 if (AVR_HAVE_RAMPX)
8901 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
8902 if (AVR_HAVE_RAMPD)
8903 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
8904 if (AVR_XMEGA)
8905 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
8906 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
8907 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
8908 }
8909
8910
8911 /* Implement `TARGET_ASM_FILE_END'. */
8912 /* Outputs to the stdio stream FILE some
8913 appropriate text to go at the end of an assembler file. */
8914
8915 static void
8916 avr_file_end (void)
8917 {
8918 /* Output these only if there is anything in the
8919 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
8920 input section(s) - some code size can be saved by not
8921 linking in the initialization code from libgcc if resp.
8922 sections are empty, see PR18145. */
8923
8924 if (avr_need_copy_data_p)
8925 fputs (".global __do_copy_data\n", asm_out_file);
8926
8927 if (avr_need_clear_bss_p)
8928 fputs (".global __do_clear_bss\n", asm_out_file);
8929 }
8930
8931
8932 /* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
8933 /* Choose the order in which to allocate hard registers for
8934 pseudo-registers local to a basic block.
8935
8936 Store the desired register order in the array `reg_alloc_order'.
8937 Element 0 should be the register to allocate first; element 1, the
8938 next register; and so on. */
8939
8940 void
8941 avr_adjust_reg_alloc_order (void)
8942 {
8943 unsigned int i;
8944 static const int order_0[] =
8945 {
8946 24, 25,
8947 18, 19, 20, 21, 22, 23,
8948 30, 31,
8949 26, 27, 28, 29,
8950 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8951 0, 1,
8952 32, 33, 34, 35
8953 };
8954 static const int order_1[] =
8955 {
8956 18, 19, 20, 21, 22, 23, 24, 25,
8957 30, 31,
8958 26, 27, 28, 29,
8959 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8960 0, 1,
8961 32, 33, 34, 35
8962 };
8963 static const int order_2[] =
8964 {
8965 25, 24, 23, 22, 21, 20, 19, 18,
8966 30, 31,
8967 26, 27, 28, 29,
8968 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8969 1, 0,
8970 32, 33, 34, 35
8971 };
8972
8973 const int *order = (TARGET_ORDER_1 ? order_1 :
8974 TARGET_ORDER_2 ? order_2 :
8975 order_0);
8976 for (i = 0; i < ARRAY_SIZE (order_0); ++i)
8977 reg_alloc_order[i] = order[i];
8978 }
8979
8980
8981 /* Implement `TARGET_REGISTER_MOVE_COST' */
8982
8983 static int
8984 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8985 reg_class_t from, reg_class_t to)
8986 {
8987 return (from == STACK_REG ? 6
8988 : to == STACK_REG ? 12
8989 : 2);
8990 }
8991
8992
8993 /* Implement `TARGET_MEMORY_MOVE_COST' */
8994
8995 static int
8996 avr_memory_move_cost (enum machine_mode mode,
8997 reg_class_t rclass ATTRIBUTE_UNUSED,
8998 bool in ATTRIBUTE_UNUSED)
8999 {
9000 return (mode == QImode ? 2
9001 : mode == HImode ? 4
9002 : mode == SImode ? 8
9003 : mode == SFmode ? 8
9004 : 16);
9005 }
9006
9007
9008 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
9009 cost of an RTX operand given its context. X is the rtx of the
9010 operand, MODE is its mode, and OUTER is the rtx_code of this
9011 operand's parent operator. */
9012
9013 static int
9014 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
9015 int opno, bool speed)
9016 {
9017 enum rtx_code code = GET_CODE (x);
9018 int total;
9019
9020 switch (code)
9021 {
9022 case REG:
9023 case SUBREG:
9024 return 0;
9025
9026 case CONST_INT:
9027 case CONST_FIXED:
9028 case CONST_DOUBLE:
9029 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
9030
9031 default:
9032 break;
9033 }
9034
9035 total = 0;
9036 avr_rtx_costs (x, code, outer, opno, &total, speed);
9037 return total;
9038 }
9039
9040 /* Worker function for AVR backend's rtx_cost function.
9041 X is rtx expression whose cost is to be calculated.
9042 Return true if the complete cost has been computed.
9043 Return false if subexpressions should be scanned.
9044 In either case, *TOTAL contains the cost result. */
9045
9046 static bool
9047 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
9048 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
9049 {
9050 enum rtx_code code = (enum rtx_code) codearg;
9051 enum machine_mode mode = GET_MODE (x);
9052 HOST_WIDE_INT val;
9053
9054 switch (code)
9055 {
9056 case CONST_INT:
9057 case CONST_FIXED:
9058 case CONST_DOUBLE:
9059 case SYMBOL_REF:
9060 case CONST:
9061 case LABEL_REF:
9062 /* Immediate constants are as cheap as registers. */
9063 *total = 0;
9064 return true;
9065
9066 case MEM:
9067 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9068 return true;
9069
9070 case NEG:
9071 switch (mode)
9072 {
9073 case QImode:
9074 case SFmode:
9075 *total = COSTS_N_INSNS (1);
9076 break;
9077
9078 case HImode:
9079 case PSImode:
9080 case SImode:
9081 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
9082 break;
9083
9084 default:
9085 return false;
9086 }
9087 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9088 return true;
9089
9090 case ABS:
9091 switch (mode)
9092 {
9093 case QImode:
9094 case SFmode:
9095 *total = COSTS_N_INSNS (1);
9096 break;
9097
9098 default:
9099 return false;
9100 }
9101 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9102 return true;
9103
9104 case NOT:
9105 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9106 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9107 return true;
9108
9109 case ZERO_EXTEND:
9110 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
9111 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
9112 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9113 return true;
9114
9115 case SIGN_EXTEND:
9116 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
9117 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
9118 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9119 return true;
9120
9121 case PLUS:
9122 switch (mode)
9123 {
9124 case QImode:
9125 if (AVR_HAVE_MUL
9126 && MULT == GET_CODE (XEXP (x, 0))
9127 && register_operand (XEXP (x, 1), QImode))
9128 {
9129 /* multiply-add */
9130 *total = COSTS_N_INSNS (speed ? 4 : 3);
9131 /* multiply-add with constant: will be split, loading the constant separately. */
9132 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9133 *total = COSTS_N_INSNS (1) + *total;
9134 return true;
9135 }
9136 *total = COSTS_N_INSNS (1);
9137 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9138 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9139 break;
9140
9141 case HImode:
9142 if (AVR_HAVE_MUL
9143 && (MULT == GET_CODE (XEXP (x, 0))
9144 || ASHIFT == GET_CODE (XEXP (x, 0)))
9145 && register_operand (XEXP (x, 1), HImode)
9146 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
9147 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
9148 {
9149 /* multiply-add */
9150 *total = COSTS_N_INSNS (speed ? 5 : 4);
9151 /* multiply-add with constant: will be split, loading the constant separately. */
9152 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9153 *total = COSTS_N_INSNS (1) + *total;
9154 return true;
9155 }
9156 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9157 {
9158 *total = COSTS_N_INSNS (2);
9159 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9160 speed);
9161 }
9162 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
9163 *total = COSTS_N_INSNS (1);
9164 else
9165 *total = COSTS_N_INSNS (2);
9166 break;
9167
9168 case PSImode:
9169 if (!CONST_INT_P (XEXP (x, 1)))
9170 {
9171 *total = COSTS_N_INSNS (3);
9172 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9173 speed);
9174 }
9175 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
9176 *total = COSTS_N_INSNS (2);
9177 else
9178 *total = COSTS_N_INSNS (3);
9179 break;
9180
9181 case SImode:
9182 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9183 {
9184 *total = COSTS_N_INSNS (4);
9185 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9186 speed);
9187 }
9188 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
9189 *total = COSTS_N_INSNS (1);
9190 else
9191 *total = COSTS_N_INSNS (4);
9192 break;
9193
9194 default:
9195 return false;
9196 }
9197 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9198 return true;
9199
9200 case MINUS:
9201 if (AVR_HAVE_MUL
9202 && QImode == mode
9203 && register_operand (XEXP (x, 0), QImode)
9204 && MULT == GET_CODE (XEXP (x, 1)))
9205 {
9206 /* multiply-sub */
9207 *total = COSTS_N_INSNS (speed ? 4 : 3);
9208 /* multiply-sub with constant: will be split, loading the constant separately. */
9209 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9210 *total = COSTS_N_INSNS (1) + *total;
9211 return true;
9212 }
9213 if (AVR_HAVE_MUL
9214 && HImode == mode
9215 && register_operand (XEXP (x, 0), HImode)
9216 && (MULT == GET_CODE (XEXP (x, 1))
9217 || ASHIFT == GET_CODE (XEXP (x, 1)))
9218 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
9219 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
9220 {
9221 /* multiply-sub */
9222 *total = COSTS_N_INSNS (speed ? 5 : 4);
9223 /* multiply-sub with constant: will be split, loading the constant separately. */
9224 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
9225 *total = COSTS_N_INSNS (1) + *total;
9226 return true;
9227 }
9228 /* FALLTHRU */
9229 case AND:
9230 case IOR:
9231 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9232 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9233 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9234 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9235 return true;
9236
9237 case XOR:
9238 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
9239 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9240 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9241 return true;
9242
9243 case MULT:
9244 switch (mode)
9245 {
9246 case QImode:
9247 if (AVR_HAVE_MUL)
9248 *total = COSTS_N_INSNS (!speed ? 3 : 4);
9249 else if (!speed)
9250 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9251 else
9252 return false;
9253 break;
9254
9255 case HImode:
9256 if (AVR_HAVE_MUL)
9257 {
9258 rtx op0 = XEXP (x, 0);
9259 rtx op1 = XEXP (x, 1);
9260 enum rtx_code code0 = GET_CODE (op0);
9261 enum rtx_code code1 = GET_CODE (op1);
9262 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
9263 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
9264
9265 if (ex0
9266 && (u8_operand (op1, HImode)
9267 || s8_operand (op1, HImode)))
9268 {
9269 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9270 return true;
9271 }
9272 if (ex0
9273 && register_operand (op1, HImode))
9274 {
9275 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9276 return true;
9277 }
9278 else if (ex0 || ex1)
9279 {
9280 *total = COSTS_N_INSNS (!speed ? 3 : 5);
9281 return true;
9282 }
9283 else if (register_operand (op0, HImode)
9284 && (u8_operand (op1, HImode)
9285 || s8_operand (op1, HImode)))
9286 {
9287 *total = COSTS_N_INSNS (!speed ? 6 : 9);
9288 return true;
9289 }
9290 else
9291 *total = COSTS_N_INSNS (!speed ? 7 : 10);
9292 }
9293 else if (!speed)
9294 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9295 else
9296 return false;
9297 break;
9298
9299 case PSImode:
9300 if (!speed)
9301 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9302 else
9303 *total = 10;
9304 break;
9305
9306 case SImode:
9307 if (AVR_HAVE_MUL)
9308 {
9309 if (!speed)
9310 {
9311 /* Add some additional costs besides the CALL, like moves etc. */
9312
9313 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9314 }
9315 else
9316 {
9317 /* Just a rough estimate. Even with -O2 we don't want bulky
9318 code expanded inline. */
9319
9320 *total = COSTS_N_INSNS (25);
9321 }
9322 }
9323 else
9324 {
9325 if (speed)
9326 *total = COSTS_N_INSNS (300);
9327 else
9328 /* Add some additional costs besides the CALL, like moves etc. */
9329 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
9330 }
9331
9332 return true;
9333
9334 default:
9335 return false;
9336 }
9337 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9338 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9339 return true;
9340
9341 case DIV:
9342 case MOD:
9343 case UDIV:
9344 case UMOD:
9345 if (!speed)
9346 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
9347 else
9348 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
9349 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9350 /* For div/mod with const-int divisor we have at least the cost of
9351 loading the divisor. */
9352 if (CONST_INT_P (XEXP (x, 1)))
9353 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
9354 /* Add some overall penalty for clobbering and moving registers around. */
9355 *total += COSTS_N_INSNS (2);
9356 return true;
9357
9358 case ROTATE:
9359 switch (mode)
9360 {
9361 case QImode:
9362 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
9363 *total = COSTS_N_INSNS (1);
9364
9365 break;
9366
9367 case HImode:
9368 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
9369 *total = COSTS_N_INSNS (3);
9370
9371 break;
9372
9373 case SImode:
9374 if (CONST_INT_P (XEXP (x, 1)))
9375 switch (INTVAL (XEXP (x, 1)))
9376 {
9377 case 8:
9378 case 24:
9379 *total = COSTS_N_INSNS (5);
9380 break;
9381 case 16:
9382 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
9383 break;
9384 }
9385 break;
9386
9387 default:
9388 return false;
9389 }
9390 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9391 return true;
9392
9393 case ASHIFT:
9394 switch (mode)
9395 {
9396 case QImode:
9397 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9398 {
9399 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9400 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9401 speed);
9402 }
9403 else
9404 {
9405 val = INTVAL (XEXP (x, 1));
9406 if (val == 7)
9407 *total = COSTS_N_INSNS (3);
9408 else if (val >= 0 && val <= 7)
9409 *total = COSTS_N_INSNS (val);
9410 else
9411 *total = COSTS_N_INSNS (1);
9412 }
9413 break;
9414
9415 case HImode:
9416 if (AVR_HAVE_MUL)
9417 {
9418 if (const_2_to_7_operand (XEXP (x, 1), HImode)
9419 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
9420 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
9421 {
9422 *total = COSTS_N_INSNS (!speed ? 4 : 6);
9423 return true;
9424 }
9425 }
9426
9427 if (const1_rtx == (XEXP (x, 1))
9428 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
9429 {
9430 *total = COSTS_N_INSNS (2);
9431 return true;
9432 }
9433
9434 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9435 {
9436 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9437 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9438 speed);
9439 }
9440 else
9441 switch (INTVAL (XEXP (x, 1)))
9442 {
9443 case 0:
9444 *total = 0;
9445 break;
9446 case 1:
9447 case 8:
9448 *total = COSTS_N_INSNS (2);
9449 break;
9450 case 9:
9451 *total = COSTS_N_INSNS (3);
9452 break;
9453 case 2:
9454 case 3:
9455 case 10:
9456 case 15:
9457 *total = COSTS_N_INSNS (4);
9458 break;
9459 case 7:
9460 case 11:
9461 case 12:
9462 *total = COSTS_N_INSNS (5);
9463 break;
9464 case 4:
9465 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9466 break;
9467 case 6:
9468 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9469 break;
9470 case 5:
9471 *total = COSTS_N_INSNS (!speed ? 5 : 10);
9472 break;
9473 default:
9474 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9475 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9476 speed);
9477 }
9478 break;
9479
9480 case PSImode:
9481 if (!CONST_INT_P (XEXP (x, 1)))
9482 {
9483 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9484 }
9485 else
9486 switch (INTVAL (XEXP (x, 1)))
9487 {
9488 case 0:
9489 *total = 0;
9490 break;
9491 case 1:
9492 case 8:
9493 case 16:
9494 *total = COSTS_N_INSNS (3);
9495 break;
9496 case 23:
9497 *total = COSTS_N_INSNS (5);
9498 break;
9499 default:
9500 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9501 break;
9502 }
9503 break;
9504
9505 case SImode:
9506 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9507 {
9508 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9509 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9510 speed);
9511 }
9512 else
9513 switch (INTVAL (XEXP (x, 1)))
9514 {
9515 case 0:
9516 *total = 0;
9517 break;
9518 case 24:
9519 *total = COSTS_N_INSNS (3);
9520 break;
9521 case 1:
9522 case 8:
9523 case 16:
9524 *total = COSTS_N_INSNS (4);
9525 break;
9526 case 31:
9527 *total = COSTS_N_INSNS (6);
9528 break;
9529 case 2:
9530 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9531 break;
9532 default:
9533 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9534 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9535 speed);
9536 }
9537 break;
9538
9539 default:
9540 return false;
9541 }
9542 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9543 return true;
9544
9545 case ASHIFTRT:
9546 switch (mode)
9547 {
9548 case QImode:
9549 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9550 {
9551 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9552 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9553 speed);
9554 }
9555 else
9556 {
9557 val = INTVAL (XEXP (x, 1));
9558 if (val == 6)
9559 *total = COSTS_N_INSNS (4);
9560 else if (val == 7)
9561 *total = COSTS_N_INSNS (2);
9562 else if (val >= 0 && val <= 7)
9563 *total = COSTS_N_INSNS (val);
9564 else
9565 *total = COSTS_N_INSNS (1);
9566 }
9567 break;
9568
9569 case HImode:
9570 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9571 {
9572 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9573 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9574 speed);
9575 }
9576 else
9577 switch (INTVAL (XEXP (x, 1)))
9578 {
9579 case 0:
9580 *total = 0;
9581 break;
9582 case 1:
9583 *total = COSTS_N_INSNS (2);
9584 break;
9585 case 15:
9586 *total = COSTS_N_INSNS (3);
9587 break;
9588 case 2:
9589 case 7:
9590 case 8:
9591 case 9:
9592 *total = COSTS_N_INSNS (4);
9593 break;
9594 case 10:
9595 case 14:
9596 *total = COSTS_N_INSNS (5);
9597 break;
9598 case 11:
9599 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9600 break;
9601 case 12:
9602 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9603 break;
9604 case 6:
9605 case 13:
9606 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9607 break;
9608 default:
9609 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9610 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9611 speed);
9612 }
9613 break;
9614
9615 case PSImode:
9616 if (!CONST_INT_P (XEXP (x, 1)))
9617 {
9618 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9619 }
9620 else
9621 switch (INTVAL (XEXP (x, 1)))
9622 {
9623 case 0:
9624 *total = 0;
9625 break;
9626 case 1:
9627 *total = COSTS_N_INSNS (3);
9628 break;
9629 case 16:
9630 case 8:
9631 *total = COSTS_N_INSNS (5);
9632 break;
9633 case 23:
9634 *total = COSTS_N_INSNS (4);
9635 break;
9636 default:
9637 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9638 break;
9639 }
9640 break;
9641
9642 case SImode:
9643 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9644 {
9645 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9646 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9647 speed);
9648 }
9649 else
9650 switch (INTVAL (XEXP (x, 1)))
9651 {
9652 case 0:
9653 *total = 0;
9654 break;
9655 case 1:
9656 *total = COSTS_N_INSNS (4);
9657 break;
9658 case 8:
9659 case 16:
9660 case 24:
9661 *total = COSTS_N_INSNS (6);
9662 break;
9663 case 2:
9664 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9665 break;
9666 case 31:
9667 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
9668 break;
9669 default:
9670 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9671 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9672 speed);
9673 }
9674 break;
9675
9676 default:
9677 return false;
9678 }
9679 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9680 return true;
9681
9682 case LSHIFTRT:
9683 switch (mode)
9684 {
9685 case QImode:
9686 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9687 {
9688 *total = COSTS_N_INSNS (!speed ? 4 : 17);
9689 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9690 speed);
9691 }
9692 else
9693 {
9694 val = INTVAL (XEXP (x, 1));
9695 if (val == 7)
9696 *total = COSTS_N_INSNS (3);
9697 else if (val >= 0 && val <= 7)
9698 *total = COSTS_N_INSNS (val);
9699 else
9700 *total = COSTS_N_INSNS (1);
9701 }
9702 break;
9703
9704 case HImode:
9705 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9706 {
9707 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9708 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9709 speed);
9710 }
9711 else
9712 switch (INTVAL (XEXP (x, 1)))
9713 {
9714 case 0:
9715 *total = 0;
9716 break;
9717 case 1:
9718 case 8:
9719 *total = COSTS_N_INSNS (2);
9720 break;
9721 case 9:
9722 *total = COSTS_N_INSNS (3);
9723 break;
9724 case 2:
9725 case 10:
9726 case 15:
9727 *total = COSTS_N_INSNS (4);
9728 break;
9729 case 7:
9730 case 11:
9731 *total = COSTS_N_INSNS (5);
9732 break;
9733 case 3:
9734 case 12:
9735 case 13:
9736 case 14:
9737 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9738 break;
9739 case 4:
9740 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9741 break;
9742 case 5:
9743 case 6:
9744 *total = COSTS_N_INSNS (!speed ? 5 : 9);
9745 break;
9746 default:
9747 *total = COSTS_N_INSNS (!speed ? 5 : 41);
9748 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9749 speed);
9750 }
9751 break;
9752
9753 case PSImode:
9754 if (!CONST_INT_P (XEXP (x, 1)))
9755 {
9756 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9757 }
9758 else
9759 switch (INTVAL (XEXP (x, 1)))
9760 {
9761 case 0:
9762 *total = 0;
9763 break;
9764 case 1:
9765 case 8:
9766 case 16:
9767 *total = COSTS_N_INSNS (3);
9768 break;
9769 case 23:
9770 *total = COSTS_N_INSNS (5);
9771 break;
9772 default:
9773 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9774 break;
9775 }
9776 break;
9777
9778 case SImode:
9779 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9780 {
9781 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9782 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9783 speed);
9784 }
9785 else
9786 switch (INTVAL (XEXP (x, 1)))
9787 {
9788 case 0:
9789 *total = 0;
9790 break;
9791 case 1:
9792 *total = COSTS_N_INSNS (4);
9793 break;
9794 case 2:
9795 *total = COSTS_N_INSNS (!speed ? 7 : 8);
9796 break;
9797 case 8:
9798 case 16:
9799 case 24:
9800 *total = COSTS_N_INSNS (4);
9801 break;
9802 case 31:
9803 *total = COSTS_N_INSNS (6);
9804 break;
9805 default:
9806 *total = COSTS_N_INSNS (!speed ? 7 : 113);
9807 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9808 speed);
9809 }
9810 break;
9811
9812 default:
9813 return false;
9814 }
9815 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9816 return true;
9817
9818 case COMPARE:
9819 switch (GET_MODE (XEXP (x, 0)))
9820 {
9821 case QImode:
9822 *total = COSTS_N_INSNS (1);
9823 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9824 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9825 break;
9826
9827 case HImode:
9828 *total = COSTS_N_INSNS (2);
9829 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9830 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9831 else if (INTVAL (XEXP (x, 1)) != 0)
9832 *total += COSTS_N_INSNS (1);
9833 break;
9834
9835 case PSImode:
9836 *total = COSTS_N_INSNS (3);
9837 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
9838 *total += COSTS_N_INSNS (2);
9839 break;
9840
9841 case SImode:
9842 *total = COSTS_N_INSNS (4);
9843 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9844 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9845 else if (INTVAL (XEXP (x, 1)) != 0)
9846 *total += COSTS_N_INSNS (3);
9847 break;
9848
9849 default:
9850 return false;
9851 }
9852 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9853 return true;
9854
9855 case TRUNCATE:
9856 if (AVR_HAVE_MUL
9857 && LSHIFTRT == GET_CODE (XEXP (x, 0))
9858 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
9859 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9860 {
9861 if (QImode == mode || HImode == mode)
9862 {
9863 *total = COSTS_N_INSNS (2);
9864 return true;
9865 }
9866 }
9867 break;
9868
9869 default:
9870 break;
9871 }
9872 return false;
9873 }
9874
9875
9876 /* Implement `TARGET_RTX_COSTS'. */
9877
9878 static bool
9879 avr_rtx_costs (rtx x, int codearg, int outer_code,
9880 int opno, int *total, bool speed)
9881 {
9882 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
9883 opno, total, speed);
9884
9885 if (avr_log.rtx_costs)
9886 {
9887 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9888 done, speed ? "speed" : "size", *total, outer_code, x);
9889 }
9890
9891 return done;
9892 }
9893
9894
9895 /* Implement `TARGET_ADDRESS_COST'. */
9896
9897 static int
9898 avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
9899 addr_space_t as ATTRIBUTE_UNUSED,
9900 bool speed ATTRIBUTE_UNUSED)
9901 {
9902 int cost = 4;
9903
9904 if (GET_CODE (x) == PLUS
9905 && CONST_INT_P (XEXP (x, 1))
9906 && (REG_P (XEXP (x, 0))
9907 || GET_CODE (XEXP (x, 0)) == SUBREG))
9908 {
9909 if (INTVAL (XEXP (x, 1)) >= 61)
9910 cost = 18;
9911 }
9912 else if (CONSTANT_ADDRESS_P (x))
9913 {
9914 if (optimize > 0
9915 && io_address_operand (x, QImode))
9916 cost = 2;
9917 }
9918
9919 if (avr_log.address_cost)
9920 avr_edump ("\n%?: %d = %r\n", cost, x);
9921
9922 return cost;
9923 }
9924
9925 /* Test for extra memory constraint 'Q'.
9926 It's a memory address based on Y or Z pointer with valid displacement. */
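/* Illustration (assumed RTL): an HImode access like

(mem:HI (plus:HI (reg:HI REG_Y) (const_int 62)))

matches 'Q' because 62 <= MAX_LD_OFFSET (HImode) = 62, so both bytes
are still reachable with LDD/STD displacements. */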
9927
9928 int
9929 extra_constraint_Q (rtx x)
9930 {
9931 int ok = 0;
9932
9933 if (GET_CODE (XEXP (x,0)) == PLUS
9934 && REG_P (XEXP (XEXP (x,0), 0))
9935 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
9936 && (INTVAL (XEXP (XEXP (x,0), 1))
9937 <= MAX_LD_OFFSET (GET_MODE (x))))
9938 {
9939 rtx xx = XEXP (XEXP (x,0), 0);
9940 int regno = REGNO (xx);
9941
9942 ok = (/* allocate pseudos */
9943 regno >= FIRST_PSEUDO_REGISTER
9944 /* strictly check */
9945 || regno == REG_Z || regno == REG_Y
9946 /* XXX frame & arg pointer checks */
9947 || xx == frame_pointer_rtx
9948 || xx == arg_pointer_rtx);
9949
9950 if (avr_log.constraints)
9951 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9952 ok, reload_completed, reload_in_progress, x);
9953 }
9954
9955 return ok;
9956 }
9957
9958 /* Convert condition code CONDITION to the valid AVR condition code. */
9959
9960 RTX_CODE
9961 avr_normalize_condition (RTX_CODE condition)
9962 {
9963 switch (condition)
9964 {
9965 case GT:
9966 return GE;
9967 case GTU:
9968 return GEU;
9969 case LE:
9970 return LT;
9971 case LEU:
9972 return LTU;
9973 default:
9974 gcc_unreachable ();
9975 }
9976 }
9977
9978 /* Helper function for `avr_reorg'. */
9979
9980 static rtx
9981 avr_compare_pattern (rtx_insn *insn)
9982 {
9983 rtx pattern = single_set (insn);
9984
9985 if (pattern
9986 && NONJUMP_INSN_P (insn)
9987 && SET_DEST (pattern) == cc0_rtx
9988 && GET_CODE (SET_SRC (pattern)) == COMPARE)
9989 {
9990 enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
9991 enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
9992
9993 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9994 They must not be swapped, thus skip them. */
9995
9996 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
9997 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
9998 return pattern;
9999 }
10000
10001 return NULL_RTX;
10002 }
10003
10004 /* Helper function for `avr_reorg'. */
10005
10006 /* Expansion of switch/case decision trees leads to code like
10007
10008 cc0 = compare (Reg, Num)
10009 if (cc0 == 0)
10010 goto L1
10011
10012 cc0 = compare (Reg, Num)
10013 if (cc0 > 0)
10014 goto L2
10015
10016 The second comparison is superfluous and can be deleted.
10017 The second jump condition can be transformed from a
10018 "difficult" one to a "simple" one because "cc0 > 0" and
10019 "cc0 >= 0" will have the same effect here.
10020
10021 This function relies on the way switch/case is expanded
10022 as a binary decision tree. For example code see PR 49903.
10023
10024 Return TRUE if optimization performed.
10025 Return FALSE if nothing changed.
10026
10027 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
10028
10029 We don't want to do this in text peephole because it is
10030 tedious to work out jump offsets there and the second comparison
10031 might have been transformed by `avr_reorg'.
10032
10033 RTL peephole won't do because peephole2 does not scan across
10034 basic blocks. */
10035
10036 static bool
10037 avr_reorg_remove_redundant_compare (rtx_insn *insn1)
10038 {
10039 rtx comp1, ifelse1, xcond1;
10040 rtx_insn *branch1;
10041 rtx comp2, ifelse2, xcond2;
10042 rtx_insn *branch2, *insn2;
10043 enum rtx_code code;
10044 rtx_insn *jump;
10045 rtx target, cond;
10046
10047 /* Look out for: compare1 - branch1 - compare2 - branch2 */
10048
10049 branch1 = next_nonnote_nondebug_insn (insn1);
10050 if (!branch1 || !JUMP_P (branch1))
10051 return false;
10052
10053 insn2 = next_nonnote_nondebug_insn (branch1);
10054 if (!insn2 || !avr_compare_pattern (insn2))
10055 return false;
10056
10057 branch2 = next_nonnote_nondebug_insn (insn2);
10058 if (!branch2 || !JUMP_P (branch2))
10059 return false;
10060
10061 comp1 = avr_compare_pattern (insn1);
10062 comp2 = avr_compare_pattern (insn2);
10063 xcond1 = single_set (branch1);
10064 xcond2 = single_set (branch2);
10065
10066 if (!comp1 || !comp2
10067 || !rtx_equal_p (comp1, comp2)
10068 || !xcond1 || SET_DEST (xcond1) != pc_rtx
10069 || !xcond2 || SET_DEST (xcond2) != pc_rtx
10070 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
10071 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
10072 {
10073 return false;
10074 }
10075
10076 comp1 = SET_SRC (comp1);
10077 ifelse1 = SET_SRC (xcond1);
10078 ifelse2 = SET_SRC (xcond2);
10079
10080 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
10081
10082 if (EQ != GET_CODE (XEXP (ifelse1, 0))
10083 || !REG_P (XEXP (comp1, 0))
10084 || !CONST_INT_P (XEXP (comp1, 1))
10085 || XEXP (ifelse1, 2) != pc_rtx
10086 || XEXP (ifelse2, 2) != pc_rtx
10087 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
10088 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
10089 || !COMPARISON_P (XEXP (ifelse2, 0))
10090 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
10091 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
10092 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
10093 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
10094 {
10095 return false;
10096 }
10097
10098 /* We filtered the insn sequence to look like
10099
10100 (set (cc0)
10101 (compare (reg:M N)
10102 (const_int VAL)))
10103 (set (pc)
10104 (if_then_else (eq (cc0)
10105 (const_int 0))
10106 (label_ref L1)
10107 (pc)))
10108
10109 (set (cc0)
10110 (compare (reg:M N)
10111 (const_int VAL)))
10112 (set (pc)
10113 (if_then_else (CODE (cc0)
10114 (const_int 0))
10115 (label_ref L2)
10116 (pc)))
10117 */
10118
10119 code = GET_CODE (XEXP (ifelse2, 0));
10120
10121 /* Map GT/GTU to GE/GEU which is easier for AVR.
10122 The first two instructions compare/branch on EQ
10123 so we may replace the difficult
10124
10125 if (x == VAL) goto L1;
10126 if (x > VAL) goto L2;
10127
10128 with easy
10129
10130 if (x == VAL) goto L1;
10131 if (x >= VAL) goto L2;
10132
10133 Similarly, replace LE/LEU by LT/LTU. */
10134
10135 switch (code)
10136 {
10137 case EQ:
10138 case LT: case LTU:
10139 case GE: case GEU:
10140 break;
10141
10142 case LE: case LEU:
10143 case GT: case GTU:
10144 code = avr_normalize_condition (code);
10145 break;
10146
10147 default:
10148 return false;
10149 }
10150
10151 /* Wrap the branches into UNSPECs so they won't be changed or
10152 optimized in the remainder. */
10153
10154 target = XEXP (XEXP (ifelse1, 1), 0);
10155 cond = XEXP (ifelse1, 0);
10156 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
10157
10158 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
10159
10160 target = XEXP (XEXP (ifelse2, 1), 0);
10161 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10162 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
10163
10164 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
10165
10166 /* The comparisons in insn1 and insn2 are exactly the same;
10167 insn2 is superfluous so delete it. */
10168
10169 delete_insn (insn2);
10170 delete_insn (branch1);
10171 delete_insn (branch2);
10172
10173 return true;
10174 }
10175
10176
10177 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
10178 /* Optimize conditional jumps. */
10179
10180 static void
10181 avr_reorg (void)
10182 {
10183 rtx_insn *insn = get_insns();
10184
10185 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
10186 {
10187 rtx pattern = avr_compare_pattern (insn);
10188
10189 if (!pattern)
10190 continue;
10191
10192 if (optimize
10193 && avr_reorg_remove_redundant_compare (insn))
10194 {
10195 continue;
10196 }
10197
10198 if (compare_diff_p (insn))
10199 {
10200 /* We now work on a compare insn followed by a difficult branch. */
10201
10202 rtx next = next_real_insn (insn);
10203 rtx pat = PATTERN (next);
10204
10205 pattern = SET_SRC (pattern);
10206
10207 if (true_regnum (XEXP (pattern, 0)) >= 0
10208 && true_regnum (XEXP (pattern, 1)) >= 0)
10209 {
10210 rtx x = XEXP (pattern, 0);
10211 rtx src = SET_SRC (pat);
10212 rtx t = XEXP (src,0);
10213 PUT_CODE (t, swap_condition (GET_CODE (t)));
10214 XEXP (pattern, 0) = XEXP (pattern, 1);
10215 XEXP (pattern, 1) = x;
10216 INSN_CODE (next) = -1;
10217 }
10218 else if (true_regnum (XEXP (pattern, 0)) >= 0
10219 && XEXP (pattern, 1) == const0_rtx)
10220 {
10221 /* This is a tst insn, we can reverse it. */
10222 rtx src = SET_SRC (pat);
10223 rtx t = XEXP (src,0);
10224
10225 PUT_CODE (t, swap_condition (GET_CODE (t)));
10226 XEXP (pattern, 1) = XEXP (pattern, 0);
10227 XEXP (pattern, 0) = const0_rtx;
10228 INSN_CODE (next) = -1;
10229 INSN_CODE (insn) = -1;
10230 }
10231 else if (true_regnum (XEXP (pattern, 0)) >= 0
10232 && CONST_INT_P (XEXP (pattern, 1)))
10233 {
10234 rtx x = XEXP (pattern, 1);
10235 rtx src = SET_SRC (pat);
10236 rtx t = XEXP (src,0);
10237 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
10238
10239 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
10240 {
10241 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
10242 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
10243 INSN_CODE (next) = -1;
10244 INSN_CODE (insn) = -1;
10245 }
10246 }
10247 }
10248 }
10249 }
10250
10251 /* Return the register number used for the function return value. */
10252
10253 static inline unsigned int
10254 avr_ret_register (void)
10255 {
10256 return 24;
10257 }
10258
10259
10260 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'. */
10261
10262 static bool
10263 avr_function_value_regno_p (const unsigned int regno)
10264 {
10265 return (regno == avr_ret_register ());
10266 }
10267
10268
10269 /* Implement `TARGET_LIBCALL_VALUE'. */
10270 /* Create an RTX representing the place where a
10271 library function returns a value of mode MODE. */
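/* Illustration of the layout computed below: a QImode or HImode result
is returned in r24 (the size is rounded up to 2), an SImode or SFmode
result in r22...r25, and a DImode result in r18...r25. */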
10272
10273 static rtx
10274 avr_libcall_value (enum machine_mode mode,
10275 const_rtx func ATTRIBUTE_UNUSED)
10276 {
10277 int offs = GET_MODE_SIZE (mode);
10278
10279 if (offs <= 4)
10280 offs = (offs + 1) & ~1;
10281
10282 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
10283 }
10284
10285
10286 /* Implement `TARGET_FUNCTION_VALUE'. */
10287 /* Create an RTX representing the place where a
10288 function returns a value of data type VALTYPE. */
10289
10290 static rtx
10291 avr_function_value (const_tree type,
10292 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
10293 bool outgoing ATTRIBUTE_UNUSED)
10294 {
10295 unsigned int offs;
10296
10297 if (TYPE_MODE (type) != BLKmode)
10298 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
10299
10300 offs = int_size_in_bytes (type);
10301 if (offs < 2)
10302 offs = 2;
10303 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
10304 offs = GET_MODE_SIZE (SImode);
10305 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
10306 offs = GET_MODE_SIZE (DImode);
10307
10308 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
10309 }
10310
10311 int
10312 test_hard_reg_class (enum reg_class rclass, rtx x)
10313 {
10314 int regno = true_regnum (x);
10315 if (regno < 0)
10316 return 0;
10317
10318 if (TEST_HARD_REG_CLASS (rclass, regno))
10319 return 1;
10320
10321 return 0;
10322 }
10323
10324
10325 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
10326 and thus is suitable to be skipped by CPSE, SBRC, etc. */
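/* Illustration (assumed encodings): "cpse r24,r25" may skip the next
instruction, which occupies 2 words when it is LDS/STS with a 16-bit
address or CALL/JMP. On cores flagged with TARGET_SKIP_BUG, where
skipping a 2-word instruction is unreliable, we always answer false. */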
10327
10328 static bool
10329 avr_2word_insn_p (rtx_insn *insn)
10330 {
10331 if (TARGET_SKIP_BUG
10332 || !insn
10333 || 2 != get_attr_length (insn))
10334 {
10335 return false;
10336 }
10337
10338 switch (INSN_CODE (insn))
10339 {
10340 default:
10341 return false;
10342
10343 case CODE_FOR_movqi_insn:
10344 case CODE_FOR_movuqq_insn:
10345 case CODE_FOR_movqq_insn:
10346 {
10347 rtx set = single_set (insn);
10348 rtx src = SET_SRC (set);
10349 rtx dest = SET_DEST (set);
10350
10351 /* Factor out LDS and STS from movqi_insn. */
10352
10353 if (MEM_P (dest)
10354 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
10355 {
10356 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
10357 }
10358 else if (REG_P (dest)
10359 && MEM_P (src))
10360 {
10361 return CONSTANT_ADDRESS_P (XEXP (src, 0));
10362 }
10363
10364 return false;
10365 }
10366
10367 case CODE_FOR_call_insn:
10368 case CODE_FOR_call_value_insn:
10369 return true;
10370 }
10371 }
10372
10373
10374 int
10375 jump_over_one_insn_p (rtx_insn *insn, rtx dest)
10376 {
10377 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
10378 ? XEXP (dest, 0)
10379 : dest);
10380 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
10381 int dest_addr = INSN_ADDRESSES (uid);
10382 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
10383
10384 return (jump_offset == 1
10385 || (jump_offset == 2
10386 && avr_2word_insn_p (next_active_insn (insn))));
10387 }
10388
10389
10390 /* Worker function for `HARD_REGNO_MODE_OK'. */
10391 /* Returns 1 if a value of mode MODE can be stored starting with hard
10392 register number REGNO. On the enhanced core, anything larger than
10393 1 byte must start in an even-numbered register for "movw" to work
10394 (this way we don't have to check for odd registers everywhere). */
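/* Illustration: an HImode value may occupy r24/r25, so "movw r24,r22"
can copy it in one go, but it must not start in an odd register such
as r25. */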
10395
10396 int
10397 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
10398 {
10399 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
10400 Disallowing QI et al. in these regs might lead to code like
10401 (set (subreg:QI (reg:HI 28) n) ...)
10402 which will result in wrong code because reload does not
10403 handle SUBREGs of hard registers like this.
10404 This could be fixed in reload. However, it appears
10405 that fixing reload is not wanted by reload people. */
10406
10407 /* Any GENERAL_REGS register can hold 8-bit values. */
10408
10409 if (GET_MODE_SIZE (mode) == 1)
10410 return 1;
10411
10412 /* FIXME: Ideally, the following test is not needed.
10413 However, it turned out that it can reduce the number
10414 of spill failures. The AVR with its poor endowment of
10415 address registers is an extreme stress test for reload. */
10416
10417 if (GET_MODE_SIZE (mode) >= 4
10418 && regno >= REG_X)
10419 return 0;
10420
10421 /* All modes larger than 8 bits should start in an even register. */
10422
10423 return !(regno & 1);
10424 }
10425
10426
10427 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
10428
10429 int
10430 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
10431 {
10432 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10433 represent valid hard registers, e.g. HI:29. Returning TRUE
10434 for such registers can lead to performance degradation as mentioned
10435 in PR53595. Thus, report invalid hard registers as FALSE. */
10436
10437 if (!avr_hard_regno_mode_ok (regno, mode))
10438 return 0;
10439
10440 /* Return true if any of the following boundaries is crossed:
10441 17/18, 27/28 and 29/30. */
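/* Illustration: an SImode value starting in r16 occupies r16...r19 and
crosses the 17/18 boundary between call-saved and call-clobbered
registers, so it is only partly clobbered by a call. */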
10442
10443 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
10444 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
10445 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
10446 }
10447
10448
10449 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
10450
10451 enum reg_class
10452 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
10453 addr_space_t as, RTX_CODE outer_code,
10454 RTX_CODE index_code ATTRIBUTE_UNUSED)
10455 {
10456 if (!ADDR_SPACE_GENERIC_P (as))
10457 {
10458 return POINTER_Z_REGS;
10459 }
10460
10461 if (!avr_strict_X)
10462 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
10463
10464 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
10465 }
10466
10467
10468 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
10469
10470 bool
10471 avr_regno_mode_code_ok_for_base_p (int regno,
10472 enum machine_mode mode ATTRIBUTE_UNUSED,
10473 addr_space_t as ATTRIBUTE_UNUSED,
10474 RTX_CODE outer_code,
10475 RTX_CODE index_code ATTRIBUTE_UNUSED)
10476 {
10477 bool ok = false;
10478
10479 if (!ADDR_SPACE_GENERIC_P (as))
10480 {
10481 if (regno < FIRST_PSEUDO_REGISTER
10482 && regno == REG_Z)
10483 {
10484 return true;
10485 }
10486
10487 if (reg_renumber)
10488 {
10489 regno = reg_renumber[regno];
10490
10491 if (regno == REG_Z)
10492 {
10493 return true;
10494 }
10495 }
10496
10497 return false;
10498 }
10499
10500 if (regno < FIRST_PSEUDO_REGISTER
10501 && (regno == REG_X
10502 || regno == REG_Y
10503 || regno == REG_Z
10504 || regno == ARG_POINTER_REGNUM))
10505 {
10506 ok = true;
10507 }
10508 else if (reg_renumber)
10509 {
10510 regno = reg_renumber[regno];
10511
10512 if (regno == REG_X
10513 || regno == REG_Y
10514 || regno == REG_Z
10515 || regno == ARG_POINTER_REGNUM)
10516 {
10517 ok = true;
10518 }
10519 }
10520
10521 if (avr_strict_X
10522 && PLUS == outer_code
10523 && regno == REG_X)
10524 {
10525 ok = false;
10526 }
10527
10528 return ok;
10529 }
10530
10531
10532 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
10533 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10534 CLOBBER_REG is a QI clobber register or NULL_RTX.
10535 LEN == NULL: output instructions.
10536 LEN != NULL: set *LEN to the length of the instruction sequence
10537 (in words) printed with LEN = NULL.
10538 If CLEAR_P is true, OP[0] has already been cleared to zero.
10539 If CLEAR_P is false, nothing is known about OP[0].
10540
10541 The effect on cc0 is as follows:
10542
10543 Load 0 to any register except ZERO_REG : NONE
10544 Load ld register with any value : NONE
10545 Anything else : CLOBBER */
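/* A sketch of typical output (assumed: destination r8...r11, i.e. no
upper registers, AVR_HAVE_MOVW, no clobber register available) for
loading the value 0x00400040:

set
clr r8
bld r8,6
mov r9,__zero_reg__
movw r10,r8

The single one-bit is planted via the T flag, and the equal low and
high words are reused by MOVW, as implemented below. */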
10546
10547 static void
10548 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
10549 {
10550 rtx src = op[1];
10551 rtx dest = op[0];
10552 rtx xval, xdest[4];
10553 int ival[4];
10554 int clobber_val = 1234;
10555 bool cooked_clobber_p = false;
10556 bool set_p = false;
10557 enum machine_mode mode = GET_MODE (dest);
10558 int n, n_bytes = GET_MODE_SIZE (mode);
10559
10560 gcc_assert (REG_P (dest)
10561 && CONSTANT_P (src));
10562
10563 if (len)
10564 *len = 0;
10565
10566 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
10567 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
10568
10569 if (REGNO (dest) < 16
10570 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
10571 {
10572 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
10573 }
10574
10575 /* We might need a clobber reg but don't have one. Look at the value to
10576 be loaded more closely. A clobber is only needed if it is a symbol
10577 or contains a byte that is neither 0, -1 or a power of 2. */
10578
10579 if (NULL_RTX == clobber_reg
10580 && !test_hard_reg_class (LD_REGS, dest)
10581 && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
10582 || !avr_popcount_each_byte (src, n_bytes,
10583 (1 << 0) | (1 << 1) | (1 << 8))))
10584 {
10585 /* We have no clobber register but need one. Cook one up.
10586 That's cheaper than loading from constant pool. */
10587
10588 cooked_clobber_p = true;
10589 clobber_reg = all_regs_rtx[REG_Z + 1];
10590 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
10591 }
10592
10593 /* Now start filling DEST from LSB to MSB. */
10594
10595 for (n = 0; n < n_bytes; n++)
10596 {
10597 int ldreg_p;
10598 bool done_byte = false;
10599 int j;
10600 rtx xop[3];
10601
10602 /* Crop the n-th destination byte. */
10603
10604 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
10605 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
10606
10607 if (!CONST_INT_P (src)
10608 && !CONST_FIXED_P (src)
10609 && !CONST_DOUBLE_P (src))
10610 {
10611 static const char* const asm_code[][2] =
10612 {
10613 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
10614 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
10615 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
10616 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
10617 };
10618
10619 xop[0] = xdest[n];
10620 xop[1] = src;
10621 xop[2] = clobber_reg;
10622
10623 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
10624
10625 continue;
10626 }
10627
10628 /* Crop the n-th source byte. */
10629
10630 xval = simplify_gen_subreg (QImode, src, mode, n);
10631 ival[n] = INTVAL (xval);
10632
10633 /* See if we can reuse the low word by means of MOVW. */
10634
10635 if (n == 2
10636 && n_bytes >= 4
10637 && AVR_HAVE_MOVW)
10638 {
10639 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
10640 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
10641
10642 if (INTVAL (lo16) == INTVAL (hi16))
10643 {
10644 if (0 != INTVAL (lo16)
10645 || !clear_p)
10646 {
10647 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
10648 }
10649
10650 break;
10651 }
10652 }
10653
10654 /* Don't use CLR so that cc0 is set as expected. */
10655
10656 if (ival[n] == 0)
10657 {
10658 if (!clear_p)
10659 avr_asm_len (ldreg_p ? "ldi %0,0"
10660 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
10661 : "mov %0,__zero_reg__",
10662 &xdest[n], len, 1);
10663 continue;
10664 }
10665
10666 if (clobber_val == ival[n]
10667 && REGNO (clobber_reg) == REGNO (xdest[n]))
10668 {
10669 continue;
10670 }
10671
10672 /* LD_REGS can use LDI to move a constant value */
10673
10674 if (ldreg_p)
10675 {
10676 xop[0] = xdest[n];
10677 xop[1] = xval;
10678 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
10679 continue;
10680 }
10681
10682 /* Try to reuse value already loaded in some lower byte. */
10683
10684 for (j = 0; j < n; j++)
10685 if (ival[j] == ival[n])
10686 {
10687 xop[0] = xdest[n];
10688 xop[1] = xdest[j];
10689
10690 avr_asm_len ("mov %0,%1", xop, len, 1);
10691 done_byte = true;
10692 break;
10693 }
10694
10695 if (done_byte)
10696 continue;
10697
10698 /* Need no clobber reg for -1: Use CLR/DEC */
10699
10700 if (-1 == ival[n])
10701 {
10702 if (!clear_p)
10703 avr_asm_len ("clr %0", &xdest[n], len, 1);
10704
10705 avr_asm_len ("dec %0", &xdest[n], len, 1);
10706 continue;
10707 }
10708 else if (1 == ival[n])
10709 {
10710 if (!clear_p)
10711 avr_asm_len ("clr %0", &xdest[n], len, 1);
10712
10713 avr_asm_len ("inc %0", &xdest[n], len, 1);
10714 continue;
10715 }
10716
10717 /* Use T flag or INC to manage powers of 2 if we have
10718 no clobber reg. */
10719
10720 if (NULL_RTX == clobber_reg
10721 && single_one_operand (xval, QImode))
10722 {
10723 xop[0] = xdest[n];
10724 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
10725
10726 gcc_assert (constm1_rtx != xop[1]);
10727
10728 if (!set_p)
10729 {
10730 set_p = true;
10731 avr_asm_len ("set", xop, len, 1);
10732 }
10733
10734 if (!clear_p)
10735 avr_asm_len ("clr %0", xop, len, 1);
10736
10737 avr_asm_len ("bld %0,%1", xop, len, 1);
10738 continue;
10739 }
10740
10741 /* We actually need the LD_REGS clobber reg. */
10742
10743 gcc_assert (NULL_RTX != clobber_reg);
10744
10745 xop[0] = xdest[n];
10746 xop[1] = xval;
10747 xop[2] = clobber_reg;
10748 clobber_val = ival[n];
10749
10750 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
10751 "mov %0,%2", xop, len, 2);
10752 }
10753
10754 /* If we cooked up a clobber reg above, restore it. */
10755
10756 if (cooked_clobber_p)
10757 {
10758 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
10759 }
10760 }
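
/* Illustration only (not part of the original file): for an insn like

       (set (reg:SI 22) (const_int 0x10001))

   on a device with MOVW, the routine above would emit

       ldi  r22,1
       ldi  r23,0
       movw r24,r22

   The low and high words of the constant are equal, so the MOVW
   shortcut replaces two further LDIs.  A destination below r16 cannot
   use LDI and would additionally need the LD_REGS clobber register
   (or one of the CLR/INC/DEC/BLD shortcuts).  */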
10761
10762
10763 /* Reload the constant OP[1] into the HI register OP[0].
10764 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
10765 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10766 need a clobber reg or have to cook one up.
10767
10768 PLEN == NULL: Output instructions.
10769 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10770 by the insns printed.
10771
10772 Return "". */
10773
10774 const char*
10775 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
10776 {
10777 output_reload_in_const (op, clobber_reg, plen, false);
10778 return "";
10779 }
10780
10781
10782 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10783 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
10784 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10785 need a clobber reg or have to cook one up.
10786
10787 LEN == NULL: Output instructions.
10788
10789 LEN != NULL: Output nothing. Set *LEN to number of words occupied
10790 by the insns printed.
10791
10792 Return "". */
10793
10794 const char *
10795 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
10796 {
10797 if (AVR_HAVE_MOVW
10798 && !test_hard_reg_class (LD_REGS, op[0])
10799 && (CONST_INT_P (op[1])
10800 || CONST_FIXED_P (op[1])
10801 || CONST_DOUBLE_P (op[1])))
10802 {
10803 int len_clr, len_noclr;
10804
10805 /* In some cases it is better to clear the destination beforehand, e.g.
10806
10807 CLR R2 CLR R3 MOVW R4,R2 INC R2
10808
10809 is shorter than
10810
10811 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10812
10813 We find it too tedious to work that out in the print function.
10814 Instead, we call the print function twice to get the lengths of
10815 both methods and use the shortest one. */
10816
10817 output_reload_in_const (op, clobber_reg, &len_clr, true);
10818 output_reload_in_const (op, clobber_reg, &len_noclr, false);
10819
10820 if (len_noclr - len_clr == 4)
10821 {
10822 /* Default needs 4 CLR instructions: clear register beforehand. */
10823
10824 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10825 "mov %B0,__zero_reg__" CR_TAB
10826 "movw %C0,%A0", &op[0], len, 3);
10827
10828 output_reload_in_const (op, clobber_reg, len, true);
10829
10830 if (len)
10831 *len += 3;
10832
10833 return "";
10834 }
10835 }
10836
10837 /* Default: destination not pre-cleared. */
10838
10839 output_reload_in_const (op, clobber_reg, len, false);
10840 return "";
10841 }
10842
10843 const char*
10844 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
10845 {
10846 output_reload_in_const (op, clobber_reg, len, false);
10847 return "";
10848 }
10849
10850
10851 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'. */
10852
10853 void
10854 avr_output_addr_vec_elt (FILE *stream, int value)
10855 {
10856 if (AVR_HAVE_JMP_CALL)
10857 fprintf (stream, "\t.word gs(.L%d)\n", value);
10858 else
10859 fprintf (stream, "\trjmp .L%d\n", value);
10860 }
10861
10862
10863 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'. */
10864 /* Return true if register REGNO is safe to be allocated as a scratch
10865 register (for a define_peephole2) in the current function. */
10866
10867 static bool
10868 avr_hard_regno_scratch_ok (unsigned int regno)
10869 {
10870 /* Interrupt functions can only use registers that have already been saved
10871 by the prologue, even if they would normally be call-clobbered. */
10872
10873 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10874 && !df_regs_ever_live_p (regno))
10875 return false;
10876
10877 /* Don't allow hard registers that might be part of the frame pointer.
10878 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10879 and don't care for a frame pointer that spans more than one register. */
10880
10881 if ((!reload_completed || frame_pointer_needed)
10882 && (regno == REG_Y || regno == REG_Y + 1))
10883 {
10884 return false;
10885 }
10886
10887 return true;
10888 }
10889
10890
10891 /* Worker function for `HARD_REGNO_RENAME_OK'. */
10892 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10893
10894 int
10895 avr_hard_regno_rename_ok (unsigned int old_reg,
10896 unsigned int new_reg)
10897 {
10898 /* Interrupt functions can only use registers that have already been
10899 saved by the prologue, even if they would normally be
10900 call-clobbered. */
10901
10902 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10903 && !df_regs_ever_live_p (new_reg))
10904 return 0;
10905
10906 /* Don't allow hard registers that might be part of the frame pointer.
10907 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10908 and don't care for a frame pointer that spans more than one register. */
10909
10910 if ((!reload_completed || frame_pointer_needed)
10911 && (old_reg == REG_Y || old_reg == REG_Y + 1
10912 || new_reg == REG_Y || new_reg == REG_Y + 1))
10913 {
10914 return 0;
10915 }
10916
10917 return 1;
10918 }
10919
10920 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
10921 or memory location in the I/O space (QImode only).
10922
10923 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
10924 Operand 1: register operand to test, or CONST_INT memory address.
10925 Operand 2: bit number.
10926 Operand 3: label to jump to if the test is true. */
10927
10928 const char*
10929 avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
10930 {
10931 enum rtx_code comp = GET_CODE (operands[0]);
10932 bool long_jump = get_attr_length (insn) >= 4;
10933 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
10934
10935 if (comp == GE)
10936 comp = EQ;
10937 else if (comp == LT)
10938 comp = NE;
10939
10940 if (reverse)
10941 comp = reverse_condition (comp);
10942
10943 switch (GET_CODE (operands[1]))
10944 {
10945 default:
10946 gcc_unreachable();
10947
10948 case CONST_INT:
10949 case CONST:
10950 case SYMBOL_REF:
10951
10952 if (low_io_address_operand (operands[1], QImode))
10953 {
10954 if (comp == EQ)
10955 output_asm_insn ("sbis %i1,%2", operands);
10956 else
10957 output_asm_insn ("sbic %i1,%2", operands);
10958 }
10959 else
10960 {
10961 gcc_assert (io_address_operand (operands[1], QImode));
10962 output_asm_insn ("in __tmp_reg__,%i1", operands);
10963 if (comp == EQ)
10964 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
10965 else
10966 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
10967 }
10968
10969 break; /* CONST_INT */
10970
10971 case REG:
10972
10973 if (comp == EQ)
10974 output_asm_insn ("sbrs %T1%T2", operands);
10975 else
10976 output_asm_insn ("sbrc %T1%T2", operands);
10977
10978 break; /* REG */
10979 } /* switch */
10980
10981 if (long_jump)
10982 return ("rjmp .+4" CR_TAB
10983 "jmp %x3");
10984
10985 if (!reverse)
10986 return "rjmp %x3";
10987
10988 return "";
10989 }
10990
10991 /* Worker function for `TARGET_ASM_CONSTRUCTOR'. */
10992
10993 static void
10994 avr_asm_out_ctor (rtx symbol, int priority)
10995 {
10996 fputs ("\t.global __do_global_ctors\n", asm_out_file);
10997 default_ctor_section_asm_out_constructor (symbol, priority);
10998 }
10999
11000
11001 /* Worker function for `TARGET_ASM_DESTRUCTOR'. */
11002
11003 static void
11004 avr_asm_out_dtor (rtx symbol, int priority)
11005 {
11006 fputs ("\t.global __do_global_dtors\n", asm_out_file);
11007 default_dtor_section_asm_out_destructor (symbol, priority);
11008 }
11009
11010
11011 /* Worker function for `TARGET_RETURN_IN_MEMORY'. */
11012
11013 static bool
11014 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
11015 {
11016 if (TYPE_MODE (type) == BLKmode)
11017 {
11018 HOST_WIDE_INT size = int_size_in_bytes (type);
11019 return (size == -1 || size > 8);
11020 }
11021 else
11022 return false;
11023 }
11024
11025
11026 /* Implement `CASE_VALUES_THRESHOLD'. */
11027 /* Supply the default for --param case-values-threshold=0 */
11028
11029 static unsigned int
11030 avr_case_values_threshold (void)
11031 {
11032 /* The exact break-even point between a jump table and an if-else tree
11033 depends on several factors that are not available here, e.g. whether
11034 8-bit comparisons can be used in the if-else tree, the range of the
11035 case values, whether the case value can be reused, the register
11036 allocation, etc. '7' appears to be a good choice. */
11037
11038 return 7;
11039 }
11040
11041
11042 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
11043
11044 static enum machine_mode
11045 avr_addr_space_address_mode (addr_space_t as)
11046 {
11047 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
11048 }
11049
11050
11051 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
11052
11053 static enum machine_mode
11054 avr_addr_space_pointer_mode (addr_space_t as)
11055 {
11056 return avr_addr_space_address_mode (as);
11057 }
11058
11059
11060 /* Helper for the following function. */
11061
11062 static bool
11063 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
11064 {
11065 gcc_assert (REG_P (reg));
11066
11067 if (strict)
11068 {
11069 return REGNO (reg) == REG_Z;
11070 }
11071
11072 /* Keep combine from propagating hard regs. */
11073
11074 if (can_create_pseudo_p()
11075 && REGNO (reg) < REG_Z)
11076 {
11077 return false;
11078 }
11079
11080 return true;
11081 }
11082
11083
11084 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
11085
11086 static bool
11087 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
11088 bool strict, addr_space_t as)
11089 {
11090 bool ok = false;
11091
11092 switch (as)
11093 {
11094 default:
11095 gcc_unreachable();
11096
11097 case ADDR_SPACE_GENERIC:
11098 return avr_legitimate_address_p (mode, x, strict);
11099
11100 case ADDR_SPACE_FLASH:
11101 case ADDR_SPACE_FLASH1:
11102 case ADDR_SPACE_FLASH2:
11103 case ADDR_SPACE_FLASH3:
11104 case ADDR_SPACE_FLASH4:
11105 case ADDR_SPACE_FLASH5:
11106
11107 switch (GET_CODE (x))
11108 {
11109 case REG:
11110 ok = avr_reg_ok_for_pgm_addr (x, strict);
11111 break;
11112
11113 case POST_INC:
11114 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
11115 break;
11116
11117 default:
11118 break;
11119 }
11120
11121 break; /* FLASH */
11122
11123 case ADDR_SPACE_MEMX:
11124 if (REG_P (x))
11125 ok = (!strict
11126 && can_create_pseudo_p());
11127
11128 if (LO_SUM == GET_CODE (x))
11129 {
11130 rtx hi = XEXP (x, 0);
11131 rtx lo = XEXP (x, 1);
11132
11133 ok = (REG_P (hi)
11134 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
11135 && REG_P (lo)
11136 && REGNO (lo) == REG_Z);
11137 }
11138
11139 break; /* MEMX */
11140 }
11141
11142 if (avr_log.legitimate_address_p)
11143 {
11144 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
11145 "reload_completed=%d reload_in_progress=%d %s:",
11146 ok, mode, strict, reload_completed, reload_in_progress,
11147 reg_renumber ? "(reg_renumber)" : "");
11148
11149 if (GET_CODE (x) == PLUS
11150 && REG_P (XEXP (x, 0))
11151 && CONST_INT_P (XEXP (x, 1))
11152 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
11153 && reg_renumber)
11154 {
11155 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
11156 true_regnum (XEXP (x, 0)));
11157 }
11158
11159 avr_edump ("\n%r\n", x);
11160 }
11161
11162 return ok;
11163 }
11164
11165
11166 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
11167
11168 static rtx
11169 avr_addr_space_legitimize_address (rtx x, rtx old_x,
11170 enum machine_mode mode, addr_space_t as)
11171 {
11172 if (ADDR_SPACE_GENERIC_P (as))
11173 return avr_legitimize_address (x, old_x, mode);
11174
11175 if (avr_log.legitimize_address)
11176 {
11177 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
11178 }
11179
11180 return old_x;
11181 }
11182
11183
11184 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
11185
11186 static rtx
11187 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
11188 {
11189 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
11190 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
11191
11192 if (avr_log.progmem)
11193 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
11194 src, type_from, type_to);
11195
11196 /* Up-casting from 16-bit to 24-bit pointer. */
11197
11198 if (as_from != ADDR_SPACE_MEMX
11199 && as_to == ADDR_SPACE_MEMX)
11200 {
11201 int msb;
11202 rtx sym = src;
11203 rtx reg = gen_reg_rtx (PSImode);
11204
11205 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
11206 sym = XEXP (sym, 0);
11207
11208 /* Look at the symbol flags: avr_encode_section_info sets the flags
11209 also when attribute progmem was seen, so that we get the right
11210 promotion for, e.g., PSTR-like strings that reside in generic space
11211 but are located in flash. In that case we patch the incoming
11212 address space. */
11213
11214 if (SYMBOL_REF == GET_CODE (sym)
11215 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
11216 {
11217 as_from = ADDR_SPACE_FLASH;
11218 }
11219
11220 /* Linearize memory: RAM has bit 23 set. */
11221
11222 msb = ADDR_SPACE_GENERIC_P (as_from)
11223 ? 0x80
11224 : avr_addrspace[as_from].segment;
11225
11226 src = force_reg (Pmode, src);
11227
11228 emit_insn (msb == 0
11229 ? gen_zero_extendhipsi2 (reg, src)
11230 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
11231
11232 return reg;
11233 }
11234
11235 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
11236
11237 if (as_from == ADDR_SPACE_MEMX
11238 && as_to != ADDR_SPACE_MEMX)
11239 {
11240 rtx new_src = gen_reg_rtx (Pmode);
11241
11242 src = force_reg (PSImode, src);
11243
11244 emit_move_insn (new_src,
11245 simplify_gen_subreg (Pmode, src, PSImode, 0));
11246 return new_src;
11247 }
11248
11249 return src;
11250 }
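
/* User-level sketch of the conversions handled above (illustration
   only; assumes avr-gcc with the named address spaces of ISO/IEC
   TR 18037 enabled, i.e. C, not C++):

       const __flash char table[] = "flash";

       const __memx char* up_cast (void)
       {
         // A 16-bit __flash pointer widens to a linearized 24-bit
         // __memx pointer; a RAM pointer would get bit 23 set.
         return table;
       }

       const char* down_cast (const __memx char *p)
       {
         // The down-cast simply drops the high byte.
         return (const char*) p;
       }
*/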
11251
11252
11253 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
11254
11255 static bool
11256 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
11257 addr_space_t superset ATTRIBUTE_UNUSED)
11258 {
11259 /* Allow any kind of pointer mess. */
11260
11261 return true;
11262 }
11263
11264
11265 /* Implement `TARGET_CONVERT_TO_TYPE'. */
11266
11267 static tree
11268 avr_convert_to_type (tree type, tree expr)
11269 {
11270 /* Print a diagnostic for a pointer conversion that changes the address
11271 space of the pointer target to a non-enclosing address space,
11272 provided -Waddr-space-convert is on.
11273
11274 FIXME: Filter out cases where the target object is known to
11275 be located in the right memory, like in
11276
11277 (const __flash*) PSTR ("text")
11278
11279 Also try to distinguish between explicit casts requested by
11280 the user and implicit casts like
11281
11282 void f (const __flash char*);
11283
11284 void g (const char *p)
11285 {
11286 f ((const __flash*) p);
11287 }
11288
11289 under the assumption that an explicit cast means that the user
11290 knows what they are doing, e.g. interfacing with PSTR or old style
11291 code with progmem and pgm_read_xxx.
11292 */
11293
11294 if (avr_warn_addr_space_convert
11295 && expr != error_mark_node
11296 && POINTER_TYPE_P (type)
11297 && POINTER_TYPE_P (TREE_TYPE (expr)))
11298 {
11299 addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
11300 addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));
11301
11302 if (avr_log.progmem)
11303 avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);
11304
11305 if (as_new != ADDR_SPACE_MEMX
11306 && as_new != as_old)
11307 {
11308 location_t loc = EXPR_LOCATION (expr);
11309 const char *name_old = avr_addrspace[as_old].name;
11310 const char *name_new = avr_addrspace[as_new].name;
11311
11312 warning (OPT_Waddr_space_convert,
11313 "conversion from address space %qs to address space %qs",
11314 ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
11315 ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);
11316
11317 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
11318 }
11319 }
11320
11321 return NULL_TREE;
11322 }
11323
11324
11325 /* Worker function for movmemhi expander.
11326 XOP[0] Destination as MEM:BLK
11327 XOP[1] Source " "
11328 XOP[2] # Bytes to copy
11329
11330 Return TRUE if the expansion is accomplished.
11331 Return FALSE if the operand combination is not supported. */
11332
11333 bool
11334 avr_emit_movmemhi (rtx *xop)
11335 {
11336 HOST_WIDE_INT count;
11337 enum machine_mode loop_mode;
11338 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
11339 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
11340 rtx a_hi8 = NULL_RTX;
11341
11342 if (avr_mem_flash_p (xop[0]))
11343 return false;
11344
11345 if (!CONST_INT_P (xop[2]))
11346 return false;
11347
11348 count = INTVAL (xop[2]);
11349 if (count <= 0)
11350 return false;
11351
11352 a_src = XEXP (xop[1], 0);
11353 a_dest = XEXP (xop[0], 0);
11354
11355 if (PSImode == GET_MODE (a_src))
11356 {
11357 gcc_assert (as == ADDR_SPACE_MEMX);
11358
11359 loop_mode = (count < 0x100) ? QImode : HImode;
11360 loop_reg = gen_rtx_REG (loop_mode, 24);
11361 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
11362
11363 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
11364 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
11365 }
11366 else
11367 {
11368 int segment = avr_addrspace[as].segment;
11369
11370 if (segment
11371 && avr_n_flash > 1)
11372 {
11373 a_hi8 = GEN_INT (segment);
11374 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
11375 }
11376 else if (!ADDR_SPACE_GENERIC_P (as))
11377 {
11378 as = ADDR_SPACE_FLASH;
11379 }
11380
11381 addr1 = a_src;
11382
11383 loop_mode = (count <= 0x100) ? QImode : HImode;
11384 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
11385 }
11386
11387 xas = GEN_INT (as);
11388
11389 /* FIXME: The register allocator might come up with spill failures if it
11390 is left on its own. Thus, we allocate the pointer registers by hand:
11391 Z = source address
11392 X = destination address */
11393
11394 emit_move_insn (lpm_addr_reg_rtx, addr1);
11395 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
11396
11397 /* FIXME: The register allocator does a bad job and might spill address
11398 register(s) inside the loop, leading to additional move instructions
11399 to/from the stack which could clobber tmp_reg. Thus, do *not* emit
11400 load and store as separate insns. Instead, we perform the copy
11401 by means of one monolithic insn. */
11402
11403 gcc_assert (TMP_REGNO == LPM_REGNO);
11404
11405 if (as != ADDR_SPACE_MEMX)
11406 {
11407 /* Load instruction ([E]LPM or LD) is known at compile time:
11408 Do the copy-loop inline. */
11409
11410 rtx (*fun) (rtx, rtx, rtx)
11411 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
11412
11413 insn = fun (xas, loop_reg, loop_reg);
11414 }
11415 else
11416 {
11417 rtx (*fun) (rtx, rtx)
11418 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
11419
11420 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
11421
11422 insn = fun (xas, GEN_INT (avr_addr.rampz));
11423 }
11424
11425 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
11426 emit_insn (insn);
11427
11428 return true;
11429 }
11430
11431
11432 /* Print assembler for movmem_qi, movmem_hi insns...
11433 $0 : Address Space
11434 $1, $2 : Loop register
11435 Z : Source address
11436 X : Destination address
11437 */
11438
11439 const char*
11440 avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
11441 {
11442 addr_space_t as = (addr_space_t) INTVAL (op[0]);
11443 enum machine_mode loop_mode = GET_MODE (op[1]);
11444 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
11445 rtx xop[3];
11446
11447 if (plen)
11448 *plen = 0;
11449
11450 xop[0] = op[0];
11451 xop[1] = op[1];
11452 xop[2] = tmp_reg_rtx;
11453
11454 /* Loop label */
11455
11456 avr_asm_len ("0:", xop, plen, 0);
11457
11458 /* Load with post-increment */
11459
11460 switch (as)
11461 {
11462 default:
11463 gcc_unreachable();
11464
11465 case ADDR_SPACE_GENERIC:
11466
11467 avr_asm_len ("ld %2,Z+", xop, plen, 1);
11468 break;
11469
11470 case ADDR_SPACE_FLASH:
11471
11472 if (AVR_HAVE_LPMX)
11473 avr_asm_len ("lpm %2,Z+", xop, plen, 1);
11474 else
11475 avr_asm_len ("lpm" CR_TAB
11476 "adiw r30,1", xop, plen, 2);
11477 break;
11478
11479 case ADDR_SPACE_FLASH1:
11480 case ADDR_SPACE_FLASH2:
11481 case ADDR_SPACE_FLASH3:
11482 case ADDR_SPACE_FLASH4:
11483 case ADDR_SPACE_FLASH5:
11484
11485 if (AVR_HAVE_ELPMX)
11486 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
11487 else
11488 avr_asm_len ("elpm" CR_TAB
11489 "adiw r30,1", xop, plen, 2);
11490 break;
11491 }
11492
11493 /* Store with post-increment */
11494
11495 avr_asm_len ("st X+,%2", xop, plen, 1);
11496
11497 /* Decrement loop-counter and set Z-flag */
11498
11499 if (QImode == loop_mode)
11500 {
11501 avr_asm_len ("dec %1", xop, plen, 1);
11502 }
11503 else if (sbiw_p)
11504 {
11505 avr_asm_len ("sbiw %1,1", xop, plen, 1);
11506 }
11507 else
11508 {
11509 avr_asm_len ("subi %A1,1" CR_TAB
11510 "sbci %B1,0", xop, plen, 2);
11511 }
11512
11513 /* Loop until zero */
11514
11515 return avr_asm_len ("brne 0b", xop, plen, 1);
11516 }
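
/* Illustration only: with a QImode loop counter that landed in r24 and
   a copy from generic RAM, the worker above prints a loop like

       0:  ld   r0,Z+
           st   X+,r0
           dec  r24
           brne 0b

   For __flash the load becomes LPM (or LPM/ADIW without LPMX), and for
   the upper flash segments ELPM.  */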
11517
11518
11519 \f
11520 /* Helper for __builtin_avr_delay_cycles */
11521
11522 static rtx
11523 avr_mem_clobber (void)
11524 {
11525 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
11526 MEM_VOLATILE_P (mem) = 1;
11527 return mem;
11528 }
11529
11530 static void
11531 avr_expand_delay_cycles (rtx operands0)
11532 {
11533 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
11534 unsigned HOST_WIDE_INT cycles_used;
11535 unsigned HOST_WIDE_INT loop_count;
11536
11537 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
11538 {
11539 loop_count = ((cycles - 9) / 6) + 1;
11540 cycles_used = ((loop_count - 1) * 6) + 9;
11541 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
11542 avr_mem_clobber()));
11543 cycles -= cycles_used;
11544 }
11545
11546 if (IN_RANGE (cycles, 262145, 83886081))
11547 {
11548 loop_count = ((cycles - 7) / 5) + 1;
11549 if (loop_count > 0xFFFFFF)
11550 loop_count = 0xFFFFFF;
11551 cycles_used = ((loop_count - 1) * 5) + 7;
11552 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
11553 avr_mem_clobber()));
11554 cycles -= cycles_used;
11555 }
11556
11557 if (IN_RANGE (cycles, 768, 262144))
11558 {
11559 loop_count = ((cycles - 5) / 4) + 1;
11560 if (loop_count > 0xFFFF)
11561 loop_count = 0xFFFF;
11562 cycles_used = ((loop_count - 1) * 4) + 5;
11563 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
11564 avr_mem_clobber()));
11565 cycles -= cycles_used;
11566 }
11567
11568 if (IN_RANGE (cycles, 6, 767))
11569 {
11570 loop_count = cycles / 3;
11571 if (loop_count > 255)
11572 loop_count = 255;
11573 cycles_used = loop_count * 3;
11574 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
11575 avr_mem_clobber()));
11576 cycles -= cycles_used;
11577 }
11578
11579 while (cycles >= 2)
11580 {
11581 emit_insn (gen_nopv (GEN_INT(2)));
11582 cycles -= 2;
11583 }
11584
11585 if (cycles == 1)
11586 {
11587 emit_insn (gen_nopv (GEN_INT(1)));
11588 cycles--;
11589 }
11590 }
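
/* Host-side sketch (illustration only, not part of this file) of the
   decomposition performed by avr_expand_delay_cycles above: each tier
   models one delay_cycles_N loop with its per-iteration cost, setup
   overhead and counter width.  The ">=" tests are equivalent to the
   IN_RANGE tests above because each tier leaves a residue below the
   next tier's upper bound.  */

#if 0
#include <stdio.h>

static void
decompose (unsigned long cycles)
{
  unsigned long n;

  if (cycles >= 83886082UL)        /* 32-bit counter: 6 cycles/lap + 9 */
    {
      n = (cycles - 9) / 6 + 1;
      cycles -= (n - 1) * 6 + 9;
      printf ("delay_cycles_4: %lu laps\n", n);
    }
  if (cycles >= 262145UL)          /* 24-bit counter: 5 cycles/lap + 7 */
    {
      n = (cycles - 7) / 5 + 1;
      if (n > 0xFFFFFFUL) n = 0xFFFFFFUL;
      cycles -= (n - 1) * 5 + 7;
      printf ("delay_cycles_3: %lu laps\n", n);
    }
  if (cycles >= 768UL)             /* 16-bit counter: 4 cycles/lap + 5 */
    {
      n = (cycles - 5) / 4 + 1;
      if (n > 0xFFFFUL) n = 0xFFFFUL;
      cycles -= (n - 1) * 4 + 5;
      printf ("delay_cycles_2: %lu laps\n", n);
    }
  if (cycles >= 6)                 /* 8-bit counter: 3 cycles/lap */
    {
      n = cycles / 3;
      if (n > 255) n = 255;
      cycles -= n * 3;
      printf ("delay_cycles_1: %lu laps\n", n);
    }
  printf ("%lu residual cycles burnt as NOPs\n", cycles);
}

int main (void)
{
  decompose (1000000UL);           /* e.g. 62.5 ms at 16 MHz */
  return 0;
}
#endif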
11591
11592
11593 /* Compute the image of x under f, i.e. perform x --> f(x) */
11594
11595 static int
11596 avr_map (unsigned int f, int x)
11597 {
11598 return x < 8 ? (f >> (4 * x)) & 0xf : 0;
11599 }
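
/* Example: with F = 0x3210ffff (cf. the insert_bits discussion below),
   avr_map (F, 1) == 0xf and avr_map (F, 4) == 0, i.e. nibble 1 of F
   is 0xf and nibble 4 is 0.  */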
11600
11601
11602 /* Return some metrics of map A. */
11603
11604 enum
11605 {
11606 /* Number of fixed points in { 0 ... 7 } */
11607 MAP_FIXED_0_7,
11608
11609 /* Size of preimage of non-fixed points in { 0 ... 7 } */
11610 MAP_NONFIXED_0_7,
11611
11612 /* Mask representing the fixed points in { 0 ... 7 } */
11613 MAP_MASK_FIXED_0_7,
11614
11615 /* Size of the preimage of { 0 ... 7 } */
11616 MAP_PREIMAGE_0_7,
11617
11618 /* Mask that represents the preimage of { f } */
11619 MAP_MASK_PREIMAGE_F
11620 };
11621
11622 static unsigned
11623 avr_map_metric (unsigned int a, int mode)
11624 {
11625 unsigned i, metric = 0;
11626
11627 for (i = 0; i < 8; i++)
11628 {
11629 unsigned ai = avr_map (a, i);
11630
11631 if (mode == MAP_FIXED_0_7)
11632 metric += ai == i;
11633 else if (mode == MAP_NONFIXED_0_7)
11634 metric += ai < 8 && ai != i;
11635 else if (mode == MAP_MASK_FIXED_0_7)
11636 metric |= ((unsigned) (ai == i)) << i;
11637 else if (mode == MAP_PREIMAGE_0_7)
11638 metric += ai < 8;
11639 else if (mode == MAP_MASK_PREIMAGE_F)
11640 metric |= ((unsigned) (ai == 0xf)) << i;
11641 else
11642 gcc_unreachable();
11643 }
11644
11645 return metric;
11646 }
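
/* Worked example for A = 0x3210ffff: nibbles 0..3 hold 0xf and
   nibbles 4..7 hold 0..3, hence

       MAP_FIXED_0_7       = 0     (no nibble N holds the value N)
       MAP_NONFIXED_0_7    = 4     (nibbles 4..7 map into 0..7, none fixed)
       MAP_MASK_FIXED_0_7  = 0
       MAP_PREIMAGE_0_7    = 4
       MAP_MASK_PREIMAGE_F = 0x0f  (nibbles 0..3 are 0xf).  */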
11647
11648
11649 /* Return true if IVAL has a 0xf in its hexadecimal representation
11650 and false otherwise. Only nibbles 0..7 are taken into account.
11651 Used as constraint helper for C0f and Cxf. */
11652
11653 bool
11654 avr_has_nibble_0xf (rtx ival)
11655 {
11656 unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
11657 return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
11658 }
11659
11660
11661 /* We have a set of bits that are mapped by a function F.
11662 Try to decompose F by means of a second function G so that
11663
11664 F = F o G^-1 o G
11665
11666 and
11667
11668 cost (F o G^-1) + cost (G) < cost (F)
11669
11670 Example: Suppose builtin insert_bits supplies us with the map
11671 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
11672 nibble of the result, we can just as well rotate the bits before inserting
11673 them and use the map 0x7654ffff which is cheaper than the original map.
11674 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
11675
11676 typedef struct
11677 {
11678 /* tree code of binary function G */
11679 enum tree_code code;
11680
11681 /* The constant second argument of G */
11682 int arg;
11683
11684 /* G^-1, the inverse of G (*, arg) */
11685 unsigned ginv;
11686
11687 /* The cost of applying G (*, arg) */
11688 int cost;
11689
11690 /* The composition F o G^-1 (*, arg) for some function F */
11691 unsigned int map;
11692
11693 /* For debug purpose only */
11694 const char *str;
11695 } avr_map_op_t;
11696
11697 static const avr_map_op_t avr_map_op[] =
11698 {
11699 { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
11700 { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
11701 { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
11702 { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
11703 { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
11704 { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
11705 { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
11706 { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
11707 { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
11708 { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
11709 { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
11710 { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
11711 { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
11712 { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
11713 { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
11714 };
11715
11716
11717 /* Try to decompose F as F = (F o G^-1) o G as described above.
11718 The result is a struct representing F o G^-1 and G.
11719 If result.cost < 0 then such a decomposition does not exist. */
11720
11721 static avr_map_op_t
11722 avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
11723 {
11724 int i;
11725 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
11726 avr_map_op_t f_ginv = *g;
11727 unsigned int ginv = g->ginv;
11728
11729 f_ginv.cost = -1;
11730
11731 /* Step 1: Compute F o G^-1 */
11732
11733 for (i = 7; i >= 0; i--)
11734 {
11735 int x = avr_map (f, i);
11736
11737 if (x <= 7)
11738 {
11739 x = avr_map (ginv, x);
11740
11741 /* The bit is not an element of the image of G: no avail (cost = -1) */
11742
11743 if (x > 7)
11744 return f_ginv;
11745 }
11746
11747 f_ginv.map = (f_ginv.map << 4) + x;
11748 }
11749
11750 /* Step 2: Compute the cost of the operations.
11751 The overall cost of doing an operation prior to the insertion is
11752 the cost of the insertion plus the cost of the operation. */
11753
11754 /* Step 2a: Compute cost of F o G^-1 */
11755
11756 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
11757 {
11758 /* The mapping consists only of fixed points and can be folded
11759 to AND/OR logic in the remainder. Reasonable cost is 3. */
11760
11761 f_ginv.cost = 2 + (val_used_p && !val_const_p);
11762 }
11763 else
11764 {
11765 rtx xop[4];
11766
11767 /* Get the cost of the insn by calling the output worker with some
11768 fake values. Mimic effect of reloading xop[3]: Unused operands
11769 are mapped to 0 and used operands are reloaded to xop[0]. */
11770
11771 xop[0] = all_regs_rtx[24];
11772 xop[1] = gen_int_mode (f_ginv.map, SImode);
11773 xop[2] = all_regs_rtx[25];
11774 xop[3] = val_used_p ? xop[0] : const0_rtx;
11775
11776 avr_out_insert_bits (xop, &f_ginv.cost);
11777
11778 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
11779 }
11780
11781 /* Step 2b: Add cost of G */
11782
11783 f_ginv.cost += g->cost;
11784
11785 if (avr_log.builtin)
11786 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
11787
11788 return f_ginv;
11789 }
11790
11791
11792 /* Insert bits from XOP[1] into XOP[0] according to MAP.
11793 XOP[0] and XOP[1] don't overlap.
11794 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11795 If FIXP_P = false: Just move the bit if its position in the destination
11796 is different from its source position. */
11797
11798 static void
11799 avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
11800 {
11801 int bit_dest, b;
11802
11803 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11804 int t_bit_src = -1;
11805
11806 /* We order the operations according to the requested source bit b. */
11807
11808 for (b = 0; b < 8; b++)
11809 for (bit_dest = 0; bit_dest < 8; bit_dest++)
11810 {
11811 int bit_src = avr_map (map, bit_dest);
11812
11813 if (b != bit_src
11814 || bit_src >= 8
11815 /* Same position: No need to copy as requested by FIXP_P. */
11816 || (bit_dest == bit_src && !fixp_p))
11817 continue;
11818
11819 if (t_bit_src != bit_src)
11820 {
11821 /* Source bit is not yet in T: Store it to T. */
11822
11823 t_bit_src = bit_src;
11824
11825 xop[3] = GEN_INT (bit_src);
11826 avr_asm_len ("bst %T1%T3", xop, plen, 1);
11827 }
11828
11829 /* Load destination bit with T. */
11830
11831 xop[3] = GEN_INT (bit_dest);
11832 avr_asm_len ("bld %T0%T3", xop, plen, 1);
11833 }
11834 }
11835
11836
11837 /* PLEN == 0: Print assembler code for `insert_bits'.
11838 PLEN != 0: Compute code length in bytes.
11839
11840 OP[0]: Result
11841 OP[1]: The mapping composed of nibbles. If nibble no. N is
11842 0: Bit N of result is copied from bit OP[2].0
11843 ... ...
11844 7: Bit N of result is copied from bit OP[2].7
11845 0xf: Bit N of result is copied from bit OP[3].N
11846 OP[2]: Bits to be inserted
11847 OP[3]: Target value */
11848
11849 const char*
11850 avr_out_insert_bits (rtx *op, int *plen)
11851 {
11852 unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
11853 unsigned mask_fixed;
11854 bool fixp_p = true;
11855 rtx xop[4];
11856
11857 xop[0] = op[0];
11858 xop[1] = op[2];
11859 xop[2] = op[3];
11860
11861 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
11862
11863 if (plen)
11864 *plen = 0;
11865 else if (flag_print_asm_name)
11866 fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);
11867
11868 /* If MAP has fixed points it might be better to initialize the result
11869 with the bits to be inserted instead of moving all bits by hand. */
11870
11871 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
11872
11873 if (REGNO (xop[0]) == REGNO (xop[1]))
11874 {
11875 /* Avoid early-clobber conflicts */
11876
11877 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
11878 xop[1] = tmp_reg_rtx;
11879 fixp_p = false;
11880 }
11881
11882 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
11883 {
11884 /* XOP[2] is used and reloaded to XOP[0] already */
11885
11886 int n_fix = 0, n_nofix = 0;
11887
11888 gcc_assert (REG_P (xop[2]));
11889
11890 /* Get the code size of the bit insertions; once with all bits
11891 moved and once with fixed points omitted. */
11892
11893 avr_move_bits (xop, map, true, &n_fix);
11894 avr_move_bits (xop, map, false, &n_nofix);
11895
11896 if (fixp_p && n_fix - n_nofix > 3)
11897 {
11898 xop[3] = gen_int_mode (~mask_fixed, QImode);
11899
11900 avr_asm_len ("eor %0,%1" CR_TAB
11901 "andi %0,%3" CR_TAB
11902 "eor %0,%1", xop, plen, 3);
11903 fixp_p = false;
11904 }
11905 }
11906 else
11907 {
11908 /* XOP[2] is unused */
11909
11910 if (fixp_p && mask_fixed)
11911 {
11912 avr_asm_len ("mov %0,%1", xop, plen, 1);
11913 fixp_p = false;
11914 }
11915 }
11916
11917 /* Move/insert remaining bits. */
11918
11919 avr_move_bits (xop, map, fixp_p, plen);
11920
11921 return "";
11922 }
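
/* Some user-level applications of the built-in handled above,
   illustration only; bits and val stand for arbitrary unsigned char
   values, and the maps follow the OP[1] comment:

       // identity: result is bits, val is unused
       __builtin_avr_insert_bits (0x76543210, bits, 0);

       // reverse the bit order of bits
       __builtin_avr_insert_bits (0x01234567, bits, 0);

       // route bits.0 and bits.1 into val.0 and val.1, keep val.2..7
       __builtin_avr_insert_bits (0xffffff10, bits, val);
*/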
11923
11924
11925 /* IDs for all the AVR builtins. */
11926
11927 enum avr_builtin_id
11928 {
11929 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
11930 AVR_BUILTIN_ ## NAME,
11931 #include "builtins.def"
11932 #undef DEF_BUILTIN
11933
11934 AVR_BUILTIN_COUNT
11935 };
11936
11937 struct GTY(()) avr_builtin_description
11938 {
11939 enum insn_code icode;
11940 int n_args;
11941 tree fndecl;
11942 };
11943
11944
11945 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11946 that a built-in's ID can be used to access the built-in by means of
11947 avr_bdesc[ID] */
11948
11949 static GTY(()) struct avr_builtin_description
11950 avr_bdesc[AVR_BUILTIN_COUNT] =
11951 {
11952 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME) \
11953 { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11954 #include "builtins.def"
11955 #undef DEF_BUILTIN
11956 };
11957
11958
11959 /* Implement `TARGET_BUILTIN_DECL'. */
11960
11961 static tree
11962 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
11963 {
11964 if (id < AVR_BUILTIN_COUNT)
11965 return avr_bdesc[id].fndecl;
11966
11967 return error_mark_node;
11968 }
11969
11970
11971 static void
11972 avr_init_builtin_int24 (void)
11973 {
11974 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
11975 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
11976
11977 lang_hooks.types.register_builtin_type (int24_type, "__int24");
11978 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
11979 }
11980
11981
11982 /* Implement `TARGET_INIT_BUILTINS' */
11983 /* Set up all builtin functions for this target. */
11984
11985 static void
11986 avr_init_builtins (void)
11987 {
11988 tree void_ftype_void
11989 = build_function_type_list (void_type_node, NULL_TREE);
11990 tree uchar_ftype_uchar
11991 = build_function_type_list (unsigned_char_type_node,
11992 unsigned_char_type_node,
11993 NULL_TREE);
11994 tree uint_ftype_uchar_uchar
11995 = build_function_type_list (unsigned_type_node,
11996 unsigned_char_type_node,
11997 unsigned_char_type_node,
11998 NULL_TREE);
11999 tree int_ftype_char_char
12000 = build_function_type_list (integer_type_node,
12001 char_type_node,
12002 char_type_node,
12003 NULL_TREE);
12004 tree int_ftype_char_uchar
12005 = build_function_type_list (integer_type_node,
12006 char_type_node,
12007 unsigned_char_type_node,
12008 NULL_TREE);
12009 tree void_ftype_ulong
12010 = build_function_type_list (void_type_node,
12011 long_unsigned_type_node,
12012 NULL_TREE);
12013
12014 tree uchar_ftype_ulong_uchar_uchar
12015 = build_function_type_list (unsigned_char_type_node,
12016 long_unsigned_type_node,
12017 unsigned_char_type_node,
12018 unsigned_char_type_node,
12019 NULL_TREE);
12020
12021 tree const_memx_void_node
12022 = build_qualified_type (void_type_node,
12023 TYPE_QUAL_CONST
12024 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
12025
12026 tree const_memx_ptr_type_node
12027 = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
12028
12029 tree char_ftype_const_memx_ptr
12030 = build_function_type_list (char_type_node,
12031 const_memx_ptr_type_node,
12032 NULL);
12033
12034 #define ITYP(T) \
12035 lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))
12036
12037 #define FX_FTYPE_FX(fx) \
12038 tree fx##r_ftype_##fx##r \
12039 = build_function_type_list (node_##fx##r, node_##fx##r, NULL); \
12040 tree fx##k_ftype_##fx##k \
12041 = build_function_type_list (node_##fx##k, node_##fx##k, NULL)
12042
12043 #define FX_FTYPE_FX_INT(fx) \
12044 tree fx##r_ftype_##fx##r_int \
12045 = build_function_type_list (node_##fx##r, node_##fx##r, \
12046 integer_type_node, NULL); \
12047 tree fx##k_ftype_##fx##k_int \
12048 = build_function_type_list (node_##fx##k, node_##fx##k, \
12049 integer_type_node, NULL)
12050
12051 #define INT_FTYPE_FX(fx) \
12052 tree int_ftype_##fx##r \
12053 = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
12054 tree int_ftype_##fx##k \
12055 = build_function_type_list (integer_type_node, node_##fx##k, NULL)
12056
12057 #define INTX_FTYPE_FX(fx) \
12058 tree int##fx##r_ftype_##fx##r \
12059 = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
12060 tree int##fx##k_ftype_##fx##k \
12061 = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)
12062
12063 #define FX_FTYPE_INTX(fx) \
12064 tree fx##r_ftype_int##fx##r \
12065 = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
12066 tree fx##k_ftype_int##fx##k \
12067 = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)
12068
12069 tree node_hr = short_fract_type_node;
12070 tree node_nr = fract_type_node;
12071 tree node_lr = long_fract_type_node;
12072 tree node_llr = long_long_fract_type_node;
12073
12074 tree node_uhr = unsigned_short_fract_type_node;
12075 tree node_unr = unsigned_fract_type_node;
12076 tree node_ulr = unsigned_long_fract_type_node;
12077 tree node_ullr = unsigned_long_long_fract_type_node;
12078
12079 tree node_hk = short_accum_type_node;
12080 tree node_nk = accum_type_node;
12081 tree node_lk = long_accum_type_node;
12082 tree node_llk = long_long_accum_type_node;
12083
12084 tree node_uhk = unsigned_short_accum_type_node;
12085 tree node_unk = unsigned_accum_type_node;
12086 tree node_ulk = unsigned_long_accum_type_node;
12087 tree node_ullk = unsigned_long_long_accum_type_node;
12088
12089
12090 /* For absfx builtins. */
12091
12092 FX_FTYPE_FX (h);
12093 FX_FTYPE_FX (n);
12094 FX_FTYPE_FX (l);
12095 FX_FTYPE_FX (ll);
12096
12097 /* For roundfx builtins. */
12098
12099 FX_FTYPE_FX_INT (h);
12100 FX_FTYPE_FX_INT (n);
12101 FX_FTYPE_FX_INT (l);
12102 FX_FTYPE_FX_INT (ll);
12103
12104 FX_FTYPE_FX_INT (uh);
12105 FX_FTYPE_FX_INT (un);
12106 FX_FTYPE_FX_INT (ul);
12107 FX_FTYPE_FX_INT (ull);
12108
12109 /* For countlsfx builtins. */
12110
12111 INT_FTYPE_FX (h);
12112 INT_FTYPE_FX (n);
12113 INT_FTYPE_FX (l);
12114 INT_FTYPE_FX (ll);
12115
12116 INT_FTYPE_FX (uh);
12117 INT_FTYPE_FX (un);
12118 INT_FTYPE_FX (ul);
12119 INT_FTYPE_FX (ull);
12120
12121 /* For bitsfx builtins. */
12122
12123 INTX_FTYPE_FX (h);
12124 INTX_FTYPE_FX (n);
12125 INTX_FTYPE_FX (l);
12126 INTX_FTYPE_FX (ll);
12127
12128 INTX_FTYPE_FX (uh);
12129 INTX_FTYPE_FX (un);
12130 INTX_FTYPE_FX (ul);
12131 INTX_FTYPE_FX (ull);
12132
12133 /* For fxbits builtins. */
12134
12135 FX_FTYPE_INTX (h);
12136 FX_FTYPE_INTX (n);
12137 FX_FTYPE_INTX (l);
12138 FX_FTYPE_INTX (ll);
12139
12140 FX_FTYPE_INTX (uh);
12141 FX_FTYPE_INTX (un);
12142 FX_FTYPE_INTX (ul);
12143 FX_FTYPE_INTX (ull);
12144
12145
12146 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME) \
12147 { \
12148 int id = AVR_BUILTIN_ ## NAME; \
12149 const char *Name = "__builtin_avr_" #NAME; \
12150 char *name = (char*) alloca (1 + strlen (Name)); \
12151 \
12152 gcc_assert (id < AVR_BUILTIN_COUNT); \
12153 avr_bdesc[id].fndecl \
12154 = add_builtin_function (avr_tolower (name, Name), TYPE, id, \
12155 BUILT_IN_MD, LIBNAME, NULL_TREE); \
12156 }
12157 #include "builtins.def"
12158 #undef DEF_BUILTIN
12159
12160 avr_init_builtin_int24 ();
12161 }
12162
12163
12164 /* Subroutine of avr_expand_builtin to expand vanilla builtins
12165 with non-void result and 1 ... 3 arguments. */
12166
12167 static rtx
12168 avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
12169 {
12170 rtx pat, xop[3];
12171 int n, n_args = call_expr_nargs (exp);
12172 enum machine_mode tmode = insn_data[icode].operand[0].mode;
12173
12174 gcc_assert (n_args >= 1 && n_args <= 3);
12175
12176 if (target == NULL_RTX
12177 || GET_MODE (target) != tmode
12178 || !insn_data[icode].operand[0].predicate (target, tmode))
12179 {
12180 target = gen_reg_rtx (tmode);
12181 }
12182
12183 for (n = 0; n < n_args; n++)
12184 {
12185 tree arg = CALL_EXPR_ARG (exp, n);
12186 rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
12187 enum machine_mode opmode = GET_MODE (op);
12188 enum machine_mode mode = insn_data[icode].operand[n+1].mode;
12189
12190 if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
12191 {
12192 opmode = HImode;
12193 op = gen_lowpart (HImode, op);
12194 }
12195
12196 /* In case the insn wants input operands in modes different from
12197 the result, abort. */
12198
12199 gcc_assert (opmode == mode || opmode == VOIDmode);
12200
12201 if (!insn_data[icode].operand[n+1].predicate (op, mode))
12202 op = copy_to_mode_reg (mode, op);
12203
12204 xop[n] = op;
12205 }
12206
12207 switch (n_args)
12208 {
12209 case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
12210 case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
12211 case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
12212
12213 default:
12214 gcc_unreachable();
12215 }
12216
12217 if (pat == NULL_RTX)
12218 return NULL_RTX;
12219
12220 emit_insn (pat);
12221
12222 return target;
12223 }
12224
12225
12226 /* Implement `TARGET_EXPAND_BUILTIN'. */
12227 /* Expand an expression EXP that calls a built-in function,
12228 with result going to TARGET if that's convenient
12229 (and in mode MODE if that's convenient).
12230 SUBTARGET may be used as the target for computing one of EXP's operands.
12231 IGNORE is nonzero if the value is to be ignored. */
12232
12233 static rtx
12234 avr_expand_builtin (tree exp, rtx target,
12235 rtx subtarget ATTRIBUTE_UNUSED,
12236 enum machine_mode mode ATTRIBUTE_UNUSED,
12237 int ignore)
12238 {
12239 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
12240 const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
12241 unsigned int id = DECL_FUNCTION_CODE (fndecl);
12242 const struct avr_builtin_description *d = &avr_bdesc[id];
12243 tree arg0;
12244 rtx op0;
12245
12246 gcc_assert (id < AVR_BUILTIN_COUNT);
12247
12248 switch (id)
12249 {
12250 case AVR_BUILTIN_NOP:
12251 emit_insn (gen_nopv (GEN_INT(1)));
12252 return 0;
12253
12254 case AVR_BUILTIN_DELAY_CYCLES:
12255 {
12256 arg0 = CALL_EXPR_ARG (exp, 0);
12257 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
12258
12259 if (!CONST_INT_P (op0))
12260 error ("%s expects a compile time integer constant", bname);
12261 else
12262 avr_expand_delay_cycles (op0);
12263
12264 return NULL_RTX;
12265 }
12266
12267 case AVR_BUILTIN_INSERT_BITS:
12268 {
12269 arg0 = CALL_EXPR_ARG (exp, 0);
12270 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
12271
12272 if (!CONST_INT_P (op0))
12273 {
12274 error ("%s expects a compile time long integer constant"
12275 " as first argument", bname);
12276 return target;
12277 }
12278
12279 break;
12280 }
12281
12282 case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
12283 case AVR_BUILTIN_ROUNDR: case AVR_BUILTIN_ROUNDUR:
12284 case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
12285 case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:
12286
12287 case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
12288 case AVR_BUILTIN_ROUNDK: case AVR_BUILTIN_ROUNDUK:
12289 case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
12290 case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:
12291
12292 /* Warn about odd rounding. Rounding points >= FBIT will have
12293 no effect. */
12294
12295 if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
12296 break;
12297
12298 int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));
12299
12300 if (rbit >= (int) GET_MODE_FBIT (mode))
12301 {
12302 warning (OPT_Wextra, "rounding to %d bits has no effect for "
12303 "fixed-point value with %d fractional bits",
12304 rbit, GET_MODE_FBIT (mode));
12305
12306 return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
12307 EXPAND_NORMAL);
12308 }
12309 else if (rbit <= - (int) GET_MODE_IBIT (mode))
12310 {
12311 warning (0, "rounding result will always be 0");
12312 return CONST0_RTX (mode);
12313 }
12314
12315 /* The rounding point RP now satisfies -IBIT < RP < FBIT.
12316
12317 TR 18037 only specifies results for RP > 0. However, the
12318 remaining cases of -IBIT < RP <= 0 can easily be supported
12319 without any additional overhead. */
12320
12321 break; /* round */
12322 }
12323
12324 /* No fold found and no insn: Call support function from libgcc. */
12325
12326 if (d->icode == CODE_FOR_nothing
12327 && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
12328 {
12329 return expand_call (exp, target, ignore);
12330 }
12331
12332 /* No special treatment needed: vanilla expand. */
12333
12334 gcc_assert (d->icode != CODE_FOR_nothing);
12335 gcc_assert (d->n_args == call_expr_nargs (exp));
12336
12337 if (d->n_args == 0)
12338 {
12339 emit_insn ((GEN_FCN (d->icode)) (target));
12340 return NULL_RTX;
12341 }
12342
12343 return avr_default_expand_builtin (d->icode, exp, target);
12344 }
12345
12346
12347 /* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
12348
12349 static tree
12350 avr_fold_absfx (tree tval)
12351 {
12352 if (FIXED_CST != TREE_CODE (tval))
12353 return NULL_TREE;
12354
12355 /* Our fixed-points have no padding: Use double_int payload directly. */
12356
12357 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
12358 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
12359 double_int ival = fval.data.sext (bits);
12360
12361 if (!ival.is_negative())
12362 return tval;
12363
12364 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
12365
12366 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
12367 ? double_int::max_value (bits, false)
12368 : -ival;
12369
12370 return build_fixed (TREE_TYPE (tval), fval);
12371 }
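
/* Example (illustration only): in HRmode (s.7, i.e. short _Fract) the
   most negative value -1.0hr has payload 0x80.  Its negation is not
   representable, so the fold above saturates to payload 0x7f, i.e.
   __builtin_avr_abshr (-1.0hr) folds to 0.9921875hr (127/128) at
   compile time.  */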
12372
12373
12374 /* Implement `TARGET_FOLD_BUILTIN'. */
12375
12376 static tree
12377 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
12378 bool ignore ATTRIBUTE_UNUSED)
12379 {
12380 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
12381 tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
12382
12383 if (!optimize)
12384 return NULL_TREE;
12385
12386 switch (fcode)
12387 {
12388 default:
12389 break;
12390
12391 case AVR_BUILTIN_SWAP:
12392 {
12393 return fold_build2 (LROTATE_EXPR, val_type, arg[0],
12394 build_int_cst (val_type, 4));
12395 }
12396
12397 case AVR_BUILTIN_ABSHR:
12398 case AVR_BUILTIN_ABSR:
12399 case AVR_BUILTIN_ABSLR:
12400 case AVR_BUILTIN_ABSLLR:
12401
12402 case AVR_BUILTIN_ABSHK:
12403 case AVR_BUILTIN_ABSK:
12404 case AVR_BUILTIN_ABSLK:
12405 case AVR_BUILTIN_ABSLLK:
12406 /* GCC is not good at folding ABS for fixed-point. Do it by hand. */
12407
12408 return avr_fold_absfx (arg[0]);
12409
12410 case AVR_BUILTIN_BITSHR: case AVR_BUILTIN_HRBITS:
12411 case AVR_BUILTIN_BITSHK: case AVR_BUILTIN_HKBITS:
12412 case AVR_BUILTIN_BITSUHR: case AVR_BUILTIN_UHRBITS:
12413 case AVR_BUILTIN_BITSUHK: case AVR_BUILTIN_UHKBITS:
12414
12415 case AVR_BUILTIN_BITSR: case AVR_BUILTIN_RBITS:
12416 case AVR_BUILTIN_BITSK: case AVR_BUILTIN_KBITS:
12417 case AVR_BUILTIN_BITSUR: case AVR_BUILTIN_URBITS:
12418 case AVR_BUILTIN_BITSUK: case AVR_BUILTIN_UKBITS:
12419
12420 case AVR_BUILTIN_BITSLR: case AVR_BUILTIN_LRBITS:
12421 case AVR_BUILTIN_BITSLK: case AVR_BUILTIN_LKBITS:
12422 case AVR_BUILTIN_BITSULR: case AVR_BUILTIN_ULRBITS:
12423 case AVR_BUILTIN_BITSULK: case AVR_BUILTIN_ULKBITS:
12424
12425 case AVR_BUILTIN_BITSLLR: case AVR_BUILTIN_LLRBITS:
12426 case AVR_BUILTIN_BITSLLK: case AVR_BUILTIN_LLKBITS:
12427 case AVR_BUILTIN_BITSULLR: case AVR_BUILTIN_ULLRBITS:
12428 case AVR_BUILTIN_BITSULLK: case AVR_BUILTIN_ULLKBITS:
12429
12430 gcc_assert (TYPE_PRECISION (val_type)
12431 == TYPE_PRECISION (TREE_TYPE (arg[0])));
12432
12433 return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);
12434
12435 case AVR_BUILTIN_INSERT_BITS:
12436 {
12437 tree tbits = arg[1];
12438 tree tval = arg[2];
12439 tree tmap;
12440 tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
12441 unsigned int map;
12442 bool changed = false;
12443 unsigned i;
12444 avr_map_op_t best_g;
12445
12446 if (TREE_CODE (arg[0]) != INTEGER_CST)
12447 {
12448 /* No constant as first argument: Don't fold this; let
12449 avr_expand_builtin report the error. */
12450
12451 break;
12452 }
12453
12454 tmap = wide_int_to_tree (map_type, arg[0]);
12455 map = TREE_INT_CST_LOW (tmap);
12456
12457 if (TREE_CODE (tval) != INTEGER_CST
12458 && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
12459 {
12460 /* There is no F in the map, i.e. the 3rd operand is unused.
12461 Replace that argument with some constant to render
12462 respective input unused. */
12463
12464 tval = build_int_cst (val_type, 0);
12465 changed = true;
12466 }
12467
12468 if (TREE_CODE (tbits) != INTEGER_CST
12469 && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
12470 {
12471 /* Similar for the bits to be inserted. If they are unused,
12472 we can just as well pass 0. */
12473
12474 tbits = build_int_cst (val_type, 0);
12475 }
12476
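
/* Worked example of the constant fold below (illustration only):
   map = 0xffffff10 and tbits = 2 give mask_ior = 0x02 (bits.1 is set
   and routed to result bit 1) and mask_and = 0xfe (bits.0 is clear,
   so result bit 0 is cleared); the call folds to (val | 0x02) & 0xfe. */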
12477 if (TREE_CODE (tbits) == INTEGER_CST)
12478 {
12479 /* Inserting bits known at compile time is easy and can be
12480 performed by AND and OR with appropriate masks. */
12481
12482 int bits = TREE_INT_CST_LOW (tbits);
12483 int mask_ior = 0, mask_and = 0xff;
12484
12485 for (i = 0; i < 8; i++)
12486 {
12487 int mi = avr_map (map, i);
12488
12489 if (mi < 8)
12490 {
12491 if (bits & (1 << mi)) mask_ior |= (1 << i);
12492 else mask_and &= ~(1 << i);
12493 }
12494 }
12495
12496 tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
12497 build_int_cst (val_type, mask_ior));
12498 return fold_build2 (BIT_AND_EXPR, val_type, tval,
12499 build_int_cst (val_type, mask_and));
12500 }
12501
12502 if (changed)
12503 return build_call_expr (fndecl, 3, tmap, tbits, tval);
12504
12505 /* If bits don't change their position we can use vanilla logic
12506 to merge the two arguments. */
12507
12508 if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
12509 {
12510 int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
12511 tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
12512
12513 tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
12514 tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
12515 return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
12516 }
12517
12518 /* Try decomposing the map to reduce the overall cost. */
12519
12520 if (avr_log.builtin)
12521 avr_edump ("\n%?: %x\n%?: ROL cost: ", map);
12522
12523 best_g = avr_map_op[0];
12524 best_g.cost = 1000;
12525
12526 for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
12527 {
12528 avr_map_op_t g
12529 = avr_map_decompose (map, avr_map_op + i,
12530 TREE_CODE (tval) == INTEGER_CST);
12531
12532 if (g.cost >= 0 && g.cost < best_g.cost)
12533 best_g = g;
12534 }
12535
12536 if (avr_log.builtin)
12537 avr_edump ("\n");
12538
12539 if (best_g.arg == 0)
12540 /* No optimization found */
12541 break;
12542
12543 /* Apply operation G to the 2nd argument. */
12544
12545 if (avr_log.builtin)
12546 avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
12547 best_g.str, best_g.arg, best_g.map, best_g.cost);
12548
12549 /* Do right-shifts arithmetically: They copy the MSB instead of
12550 shifting in a non-usable value (0) as with a logical right-shift. */
12551
12552 tbits = fold_convert (signed_char_type_node, tbits);
12553 tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
12554 build_int_cst (val_type, best_g.arg));
12555 tbits = fold_convert (val_type, tbits);
12556
12557 /* Use map o G^-1 instead of original map to undo the effect of G. */
12558
12559 tmap = wide_int_to_tree (map_type, best_g.map);
12560
12561 return build_call_expr (fndecl, 3, tmap, tbits, tval);
12562 } /* AVR_BUILTIN_INSERT_BITS */
12563 }
12564
12565 return NULL_TREE;
12566 }
12567
12568 \f
12569
12570 /* Initialize the GCC target structure. */
12571
12572 #undef TARGET_ASM_ALIGNED_HI_OP
12573 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
12574 #undef TARGET_ASM_ALIGNED_SI_OP
12575 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
12576 #undef TARGET_ASM_UNALIGNED_HI_OP
12577 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
12578 #undef TARGET_ASM_UNALIGNED_SI_OP
12579 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
12580 #undef TARGET_ASM_INTEGER
12581 #define TARGET_ASM_INTEGER avr_assemble_integer
12582 #undef TARGET_ASM_FILE_START
12583 #define TARGET_ASM_FILE_START avr_file_start
12584 #undef TARGET_ASM_FILE_END
12585 #define TARGET_ASM_FILE_END avr_file_end
12586
12587 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
12588 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
12589 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
12590 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
12591
12592 #undef TARGET_FUNCTION_VALUE
12593 #define TARGET_FUNCTION_VALUE avr_function_value
12594 #undef TARGET_LIBCALL_VALUE
12595 #define TARGET_LIBCALL_VALUE avr_libcall_value
12596 #undef TARGET_FUNCTION_VALUE_REGNO_P
12597 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
12598
12599 #undef TARGET_ATTRIBUTE_TABLE
12600 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
12601 #undef TARGET_INSERT_ATTRIBUTES
12602 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
12603 #undef TARGET_SECTION_TYPE_FLAGS
12604 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
12605
12606 #undef TARGET_ASM_NAMED_SECTION
12607 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
12608 #undef TARGET_ASM_INIT_SECTIONS
12609 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
12610 #undef TARGET_ENCODE_SECTION_INFO
12611 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
12612 #undef TARGET_ASM_SELECT_SECTION
12613 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
12614
12615 #undef TARGET_REGISTER_MOVE_COST
12616 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
12617 #undef TARGET_MEMORY_MOVE_COST
12618 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
12619 #undef TARGET_RTX_COSTS
12620 #define TARGET_RTX_COSTS avr_rtx_costs
12621 #undef TARGET_ADDRESS_COST
12622 #define TARGET_ADDRESS_COST avr_address_cost
12623 #undef TARGET_MACHINE_DEPENDENT_REORG
12624 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
12625 #undef TARGET_FUNCTION_ARG
12626 #define TARGET_FUNCTION_ARG avr_function_arg
12627 #undef TARGET_FUNCTION_ARG_ADVANCE
12628 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
12629
12630 #undef TARGET_SET_CURRENT_FUNCTION
12631 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
12632
12633 #undef TARGET_RETURN_IN_MEMORY
12634 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
12635
12636 #undef TARGET_STRICT_ARGUMENT_NAMING
12637 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
12638
12639 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
12640 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
12641
12642 #undef TARGET_HARD_REGNO_SCRATCH_OK
12643 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
12644 #undef TARGET_CASE_VALUES_THRESHOLD
12645 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
12646
12647 #undef TARGET_FRAME_POINTER_REQUIRED
12648 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
12649 #undef TARGET_CAN_ELIMINATE
12650 #define TARGET_CAN_ELIMINATE avr_can_eliminate
12651
12652 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
12653 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
12654
12655 #undef TARGET_WARN_FUNC_RETURN
12656 #define TARGET_WARN_FUNC_RETURN avr_warn_func_return
12657
12658 #undef TARGET_CLASS_LIKELY_SPILLED_P
12659 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
12660
12661 #undef TARGET_OPTION_OVERRIDE
12662 #define TARGET_OPTION_OVERRIDE avr_option_override
12663
12664 #undef TARGET_CANNOT_MODIFY_JUMPS_P
12665 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
12666
12667 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
12668 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
12669
12670 #undef TARGET_INIT_BUILTINS
12671 #define TARGET_INIT_BUILTINS avr_init_builtins
12672
12673 #undef TARGET_BUILTIN_DECL
12674 #define TARGET_BUILTIN_DECL avr_builtin_decl
12675
12676 #undef TARGET_EXPAND_BUILTIN
12677 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
12678
12679 #undef TARGET_FOLD_BUILTIN
12680 #define TARGET_FOLD_BUILTIN avr_fold_builtin
12681
12682 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
12683 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
12684
12685 #undef TARGET_SCALAR_MODE_SUPPORTED_P
12686 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
12687
12688 #undef TARGET_BUILD_BUILTIN_VA_LIST
12689 #define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list
12690
12691 #undef TARGET_FIXED_POINT_SUPPORTED_P
12692 #define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true
12693
12694 #undef TARGET_CONVERT_TO_TYPE
12695 #define TARGET_CONVERT_TO_TYPE avr_convert_to_type
12696
12697 #undef TARGET_ADDR_SPACE_SUBSET_P
12698 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
12699
12700 #undef TARGET_ADDR_SPACE_CONVERT
12701 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
12702
12703 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
12704 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
12705
12706 #undef TARGET_ADDR_SPACE_POINTER_MODE
12707 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
12708
12709 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
12710 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
12711 avr_addr_space_legitimate_address_p
12712
12713 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
12714 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
12715
12716 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
12717 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
12718
12719 #undef TARGET_SECONDARY_RELOAD
12720 #define TARGET_SECONDARY_RELOAD avr_secondary_reload
12721
12722 #undef TARGET_PRINT_OPERAND
12723 #define TARGET_PRINT_OPERAND avr_print_operand
12724 #undef TARGET_PRINT_OPERAND_ADDRESS
12725 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
12726 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
12727 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
12728
12729 struct gcc_target targetm = TARGET_INITIALIZER;
12730
12731 \f
12732 #include "gt-avr.h"