]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/avr/avr.c
libgcc/
[thirdparty/gcc.git] / gcc / config / avr / avr.c
CommitLineData
a28e4651 1/* Subroutines for insn-output.c for ATMEL AVR micro controllers
a64bd5e4 2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
73475e84 3 2009, 2010, 2011 Free Software Foundation, Inc.
947dd720 4 Contributed by Denis Chertykov (chertykov@gmail.com)
a28e4651 5
187b36cf 6 This file is part of GCC.
a28e4651 7
187b36cf 8 GCC is free software; you can redistribute it and/or modify
a28e4651 9 it under the terms of the GNU General Public License as published by
038d1e19 10 the Free Software Foundation; either version 3, or (at your option)
a28e4651 11 any later version.
12
187b36cf 13 GCC is distributed in the hope that it will be useful,
a28e4651 14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
038d1e19 19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
a28e4651 21
22#include "config.h"
3337ec92 23#include "system.h"
805e22b2 24#include "coretypes.h"
25#include "tm.h"
a28e4651 26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
a28e4651 29#include "insn-config.h"
30#include "conditions.h"
a28e4651 31#include "insn-attr.h"
c5be380e 32#include "insn-codes.h"
a28e4651 33#include "flags.h"
34#include "reload.h"
35#include "tree.h"
9bfdb494 36#include "output.h"
a28e4651 37#include "expr.h"
4202ef11 38#include "c-family/c-common.h"
0b205f4c 39#include "diagnostic-core.h"
a28e4651 40#include "obstack.h"
41#include "function.h"
42#include "recog.h"
c5be380e 43#include "optabs.h"
c84f2269 44#include "ggc.h"
c5be380e 45#include "langhooks.h"
a28e4651 46#include "tm_p.h"
a767736d 47#include "target.h"
48#include "target-def.h"
9c12cc94 49#include "params.h"
a4c6e6a2 50#include "df.h"
a28e4651 51
1cb39658 52/* Maximal allowed offset for an address in the LD command */
53#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
a28e4651 54
53026b2c 55/* Return true if STR starts with PREFIX and false, otherwise. */
56#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57
9d734fa8 58/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
5bd39e93 63#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
bf412f98 64
ed2541ea 65/* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
67
68/* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
71 do { \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
74 } while (0)
75
76/* Read address-space from SYMBOL_REF_FLAGS of SYM */
77#define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
80
9d734fa8 81/* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83const avr_addrspace_t avr_addrspace[] =
84{
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
590da9f2 86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
9d734fa8 93 { 0 , 0, 0, NULL, 0 }
94};
95
96/* Map 64-k Flash segment to section prefix. */
97static const char* const progmem_section_prefix[6] =
98 {
99 ".progmem.data",
100 ".progmem1.data",
101 ".progmem2.data",
102 ".progmem3.data",
103 ".progmem4.data",
104 ".progmem5.data"
105 };
106
72851b68 107/* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
109
110typedef struct
111{
112 /* SREG: The pocessor status */
113 int sreg;
114
0b6cf66f 115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
116 int ccp;
117 int rampd;
118 int rampx;
119 int rampy;
120
72851b68 121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
122 int rampz;
123
124 /* SP: The stack pointer and its low and high byte */
125 int sp_l;
126 int sp_h;
127} avr_addr_t;
128
129static avr_addr_t avr_addr;
130
a45076aa 131
132/* Prototypes for local helper functions. */
133
644ac9c5 134static const char* out_movqi_r_mr (rtx, rtx[], int*);
135static const char* out_movhi_r_mr (rtx, rtx[], int*);
136static const char* out_movsi_r_mr (rtx, rtx[], int*);
137static const char* out_movqi_mr_r (rtx, rtx[], int*);
138static const char* out_movhi_mr_r (rtx, rtx[], int*);
139static const char* out_movsi_mr_r (rtx, rtx[], int*);
140
206a5129 141static int avr_naked_function_p (tree);
142static int interrupt_function_p (tree);
143static int signal_function_p (tree);
ba8273a8 144static int avr_OS_task_function_p (tree);
a6e595be 145static int avr_OS_main_function_p (tree);
206a5129 146static int avr_regs_to_save (HARD_REG_SET *);
58f62c92 147static int get_sequence_length (rtx insns);
206a5129 148static int sequent_regs_live (void);
149static const char *ptrreg_to_str (int);
150static const char *cond_string (enum rtx_code);
36f949a2 151static int avr_num_arg_regs (enum machine_mode, const_tree);
20d892d1 152static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
a49907f9 153 int, bool);
154static void output_reload_in_const (rtx*, rtx, int*, bool);
df3d6232 155static struct machine_function * avr_init_machine_status (void);
a45076aa 156
157
158/* Prototypes for hook implementors if needed before their implementation. */
159
160static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
161
b1665fa2 162
20c71901 163/* Allocate registers from r25 to r8 for parameters for function calls. */
a28e4651 164#define FIRST_CUM_REG 26
165
4202ef11 166/* Implicit target register of LPM instruction (R0) */
2d86450c 167extern GTY(()) rtx lpm_reg_rtx;
168rtx lpm_reg_rtx;
4202ef11 169
170/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
2d86450c 171extern GTY(()) rtx lpm_addr_reg_rtx;
172rtx lpm_addr_reg_rtx;
4202ef11 173
2d86450c 174/* Temporary register RTX (reg:QI TMP_REGNO) */
175extern GTY(()) rtx tmp_reg_rtx;
176rtx tmp_reg_rtx;
a28e4651 177
2d86450c 178/* Zeroed register RTX (reg:QI ZERO_REGNO) */
179extern GTY(()) rtx zero_reg_rtx;
180rtx zero_reg_rtx;
181
182/* RTXs for all general purpose registers as QImode */
183extern GTY(()) rtx all_regs_rtx[32];
184rtx all_regs_rtx[32];
e511e253 185
0b6cf66f 186/* SREG, the processor status */
187extern GTY(()) rtx sreg_rtx;
188rtx sreg_rtx;
189
190/* RAMP* special function registers */
191extern GTY(()) rtx rampd_rtx;
192extern GTY(()) rtx rampx_rtx;
193extern GTY(()) rtx rampy_rtx;
2d86450c 194extern GTY(()) rtx rampz_rtx;
0b6cf66f 195rtx rampd_rtx;
196rtx rampx_rtx;
197rtx rampy_rtx;
2d86450c 198rtx rampz_rtx;
5bd39e93 199
200/* RTX containing the strings "" and "e", respectively */
201static GTY(()) rtx xstring_empty;
202static GTY(()) rtx xstring_e;
203
c284a148 204/* Preprocessor macros to define depending on MCU type. */
33169f97 205const char *avr_extra_arch_macro;
c284a148 206
b1eb5c83 207/* Current architecture. */
208const struct base_arch_s *avr_current_arch;
209
795cff42 210/* Current device. */
211const struct mcu_type_s *avr_current_device;
e511e253 212
c3f18f18 213/* Section to put switch tables in. */
214static GTY(()) section *progmem_swtable_section;
a28e4651 215
9d734fa8 216/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
590da9f2 217 or to address space __flash*. */
5bd39e93 218static GTY(()) section *progmem_section[6];
219
83921eda 220/* Condition for insns/expanders from avr-dimode.md. */
221bool avr_have_dimode = true;
222
7c2339f8 223/* To track if code will use .bss and/or .data. */
224bool avr_need_clear_bss_p = false;
225bool avr_need_copy_data_p = false;
226
a767736d 227\f
bf412f98 228
/* Count the number of 1-bits in VAL (population count).  */

static inline int
avr_popcount (unsigned int val)
{
  int n_bits;

  /* Kernighan's trick: each iteration clears the lowest set bit,
     so the loop runs once per set bit.  */

  for (n_bits = 0; val != 0; n_bits++)
    val &= val - 1;

  return n_bits;
}
244
245
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  enum machine_mode mode = GET_MODE (xval);

  /* CONST_INT carries VOIDmode; treat it as a 32-bit value.  */
  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      /* Extract byte I of XVAL as a QImode constant.  */
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Reject as soon as one byte's popcount is not in POP_MASK.  */
      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
272
/* Implement `TARGET_OPTION_OVERRIDE'.  Adjust global flags, select the
   current device/architecture and compute the RAM addresses of the SFRs
   the compiler uses.  */

static void
avr_option_override (void)
{
  /* Address 0 is valid RAM on AVR, so null-pointer deletion is unsafe.  */
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries so save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  /* RAM addresses of some SFRs common to all Devices in respective Arch. */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
325
/* Function to set up the backend function structure.  Returns a
   zero-initialized, garbage-collected machine_function.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
333
5bd39e93 334
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton: build the register and SFR RTXes
   used throughout the backend exactly once.  */

void
avr_init_expanders (void)
{
  int regno;

  /* One QImode REG rtx for each of r0...r31.  */
  for (regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[ZERO_REGNO];

  /* Z (r31:r30) is the implicit address register of LPM.  */
  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* MEM rtxes for SFRs at the RAM addresses computed in option override.  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
}
361
362
/* Return register class for register R.  R may also address SPL/SPH
   (regno 32/33); anything beyond that yields ALL_REGS.  */

enum reg_class
avr_regno_reg_class (int r)
{
  /* Lookup table indexed by hard register number 0...33.  */
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, 27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
396
02d9a2c3 397
398static bool
399avr_scalar_mode_supported_p (enum machine_mode mode)
400{
401 if (PSImode == mode)
402 return true;
403
404 return default_scalar_mode_supported_p (mode);
405}
406
407
4202ef11 408/* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
409
410static bool
590da9f2 411avr_decl_flash_p (tree decl)
4202ef11 412{
5bd39e93 413 if (TREE_CODE (decl) != VAR_DECL
414 || TREE_TYPE (decl) == error_mark_node)
415 {
416 return false;
417 }
4202ef11 418
419 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
420}
421
422
5bd39e93 423/* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
424 address space and FALSE, otherwise. */
425
426static bool
590da9f2 427avr_decl_memx_p (tree decl)
5bd39e93 428{
429 if (TREE_CODE (decl) != VAR_DECL
430 || TREE_TYPE (decl) == error_mark_node)
431 {
432 return false;
433 }
434
590da9f2 435 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
5bd39e93 436}
437
438
4202ef11 439/* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
440
441bool
590da9f2 442avr_mem_flash_p (rtx x)
4202ef11 443{
444 return (MEM_P (x)
445 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
446}
447
448
5bd39e93 449/* Return TRUE if X is a MEM rtx located in the 24-bit Flash
450 address space and FALSE, otherwise. */
451
452bool
590da9f2 453avr_mem_memx_p (rtx x)
5bd39e93 454{
455 return (MEM_P (x)
590da9f2 456 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
5bd39e93 457}
458
459
32969c63 460/* A helper for the subsequent function attribute used to dig for
461 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
462
463static inline int
464avr_lookup_function_attribute1 (const_tree func, const char *name)
465{
466 if (FUNCTION_DECL == TREE_CODE (func))
467 {
468 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
469 {
470 return true;
471 }
472
473 func = TREE_TYPE (func);
474 }
475
476 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
477 || TREE_CODE (func) == METHOD_TYPE);
478
479 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
480}
481
/* Return nonzero if FUNC is a naked function, i.e. carries the
   "naked" attribute and hence gets no prologue/epilogue.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}
489
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}
498
/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}
507
/* Return nonzero if FUNC is an OS_task function,
   i.e. carries the "OS_task" attribute.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}
515
/* Return nonzero if FUNC is an OS_main function,
   i.e. carries the "OS_main" attribute.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
523
a12b9b80 524
/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */

int
avr_accumulate_outgoing_args (void)
{
  /* No function context yet: fall back to the raw target switch.  */
  if (!cfun)
    return TARGET_ACCUMULATE_OUTGOING_ARGS;

  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
     what offset is correct.  In some cases it is relative to
     virtual_outgoing_args_rtx and in others it is relative to
     virtual_stack_vars_rtx.  For example code see
     gcc.c-torture/execute/built-in-setjmp.c
     gcc.c-torture/execute/builtins/sprintf-chk.c  */

  return (TARGET_ACCUMULATE_OUTGOING_ARGS
          && !(cfun->calls_setjmp
               || cfun->has_nonlocal_label));
}
544
545
/* Report contribution of accumulated outgoing arguments to stack size.
   Zero unless `ACCUMULATE_OUTGOING_ARGS' is in effect.  */

static inline int
avr_outgoing_args_size (void)
{
  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
}
553
554
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  The extra 1 accounts for
   the post-decrement stack pointer convention.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
564
565
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  /* Interrupt/signal handlers must preserve even call-clobbered regs.  */
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */
      if (fixed_regs[reg])
        continue;

      /* An ISR that makes calls must save all call-used regs; otherwise
         only registers that are live across the function and that the
         callee is responsible for.  */
      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
609
ebdd0478 610/* Return true if register FROM can be eliminated via register TO. */
611
a45076aa 612static bool
9f42c829 613avr_can_eliminate (const int from, const int to)
ebdd0478 614{
9f42c829 615 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
616 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
617 || ((from == FRAME_POINTER_REGNUM
618 || from == FRAME_POINTER_REGNUM + 1)
619 && !frame_pointer_needed));
ebdd0478 620}
621
20c71901 622/* Compute offset between arg_pointer and frame_pointer. */
a28e4651 623
624int
9f42c829 625avr_initial_elimination_offset (int from, int to)
a28e4651 626{
9f42c829 627 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
628 return 0;
629 else
1cb39658 630 {
9f42c829 631 int offset = frame_pointer_needed ? 2 : 0;
632 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
633
f0973368 634 offset += avr_regs_to_save (NULL);
9f42c829 635 return (get_frame_size () + avr_outgoing_args_size()
636 + avr_pc_size + 1 + offset);
a28e4651 637 }
a28e4651 638}
639
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}
651
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
   This is return address of function.  COUNT must be 0; TEM is the base
   address (frame pointer) to which the stack-usage offset is added.  */
rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Only the low 2 of the 3 PC bytes can be delivered.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* Byte-swap: the address is pushed high byte first on the stack.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
676
bf522d86 677/* Return 1 if the function epilogue is just a single "ret". */
678
679int
206a5129 680avr_simple_epilogue (void)
bf522d86 681{
682 return (! frame_pointer_needed
a12b9b80 683 && get_frame_size () == 0
684 && avr_outgoing_args_size() == 0
685 && avr_regs_to_save (NULL) == 0
686 && ! interrupt_function_p (current_function_decl)
687 && ! signal_function_p (current_function_decl)
688 && ! avr_naked_function_p (current_function_decl)
689 && ! TREE_THIS_VOLATILE (current_function_decl));
bf522d86 690}
691
/* This function checks sequence of live registers.  Returns the length of
   a contiguous run of live callee-saved registers suitable for the
   call_prologue_saves mechanism, or 0 if the live registers do not form
   one single sequence.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq=0;
  int cur_seq=0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            /* A gap: any register live after this breaks the sequence.  */
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      /* Y (r28/r29) participates in the sequence like a normal
         callee-saved register pair.  */
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* Y is always saved when a frame pointer is needed.  */
      cur_seq += 2;
      live_seq += 2;
    }

  /* Only accept when all live registers form one trailing run.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
751
58f62c92 752/* Obtain the length sequence of insns. */
753
754int
755get_sequence_length (rtx insns)
756{
757 rtx insn;
758 int length;
759
760 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
761 length += get_attr_length (insn);
762
763 return length;
764}
765
/* Implement INCOMING_RETURN_ADDR_RTX.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
}
775
/* Helper for expand_prologue.  Emit a push of a byte register REGNO.
   If FRAME_RELATED_P, mark the insn for dwarf2 CFI generation.
   Also accounts for the push in cfun->machine->stack_usage.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  /* PUSH uses post-decrement addressing through SP.  */
  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}
793
0b6cf66f 794
/* Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
   SFR is a MEM representing the memory location of the SFR.
   If CLR_P then clear the SFR after the push using zero_reg.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
822
/* Helper for expand_prologue.  Save the registers in SET and set up a
   frame of SIZE bytes, either via the compact call_prologue_saves runtime
   routine (-mcall-prologues) or via explicit pushes plus stack-pointer
   adjustment.  Emits the dwarf2 CFA notes describing both variants.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  /* The compact prologue is only usable for plain functions whose live
     callee-saved registers form one contiguous sequence.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* Pass the frame size to the runtime routine in X.  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      /* Push each callee-saved register individually.  */
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer. These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             the optimum method depends on function type, stack and
             frame size.  To avoid a complex logic, both methods are
             tested and shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed.
             We use the X register as scratch. This is safe because in X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */

          int irq_state = -1;
          rtx fp_plus_insns, fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !current_function_is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (fp, -size)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (stack_pointer_rtx, -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************  Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1055
e484266f 1056
/* Output function prologue.  Expand the RTL prologue for the current
   function: initialize the per-function machine flags, bail out for
   naked functions, emit the interrupt/signal entry sequence (register
   saves and RAMP/SREG handling), then set up the stack frame.  */

void
expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  /* Total frame = local frame plus space reserved for outgoing args.  */
  size = get_frame_size() + avr_outgoing_args_size();

  /* Init cfun->machine.  These flags are consulted later by the
     epilogue expander and the asm-output hooks, so they must be set
     before anything is emitted.  */
  cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
  cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
  cfun->machine->is_signal = signal_function_p (current_function_decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  User code supplies the whole entry sequence.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  Only for "interrupt" (sei on entry),
         not for "signal" handlers.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  The handler may have interrupted code that
         had clobbered it, so re-establish the invariant here.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      /* RAMPY is also saved whenever the frame pointer (Y) is in use.  */
      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame-related */, true /* clr */);
        }
    }  /* is_interrupt is_signal */

  /* Save call-saved regs and allocate the frame proper.  Updates
     cfun->machine->stack_usage as a side effect.  */
  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
1139
df3d6232 1140/* Output summary at end of function prologue. */
a28e4651 1141
17d9b0c3 1142static void
df3d6232 1143avr_asm_function_end_prologue (FILE *file)
a28e4651 1144{
df3d6232 1145 if (cfun->machine->is_naked)
a28e4651 1146 {
c3bcd5a9 1147 fputs ("/* prologue: naked */\n", file);
a28e4651 1148 }
df3d6232 1149 else
a28e4651 1150 {
df3d6232 1151 if (cfun->machine->is_interrupt)
1152 {
1153 fputs ("/* prologue: Interrupt */\n", file);
1154 }
1155 else if (cfun->machine->is_signal)
1156 {
1157 fputs ("/* prologue: Signal */\n", file);
1158 }
a28e4651 1159 else
df3d6232 1160 fputs ("/* prologue: function */\n", file);
a28e4651 1161 }
a12b9b80 1162
1163 if (ACCUMULATE_OUTGOING_ARGS)
1164 fprintf (file, "/* outgoing args size = %d */\n",
1165 avr_outgoing_args_size());
1166
df3d6232 1167 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1168 get_frame_size());
34413b10 1169 fprintf (file, "/* stack size = %d */\n",
1170 cfun->machine->stack_usage);
1171 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1172 usage for offset so that SP + .L__stack_offset = return address. */
1173 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
df3d6232 1174}
f0973368 1175
e067eab2 1176
df3d6232 1177/* Implement EPILOGUE_USES. */
c3bcd5a9 1178
df3d6232 1179int
1180avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1181{
1182 if (reload_completed
1183 && cfun->machine
1184 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1185 return 1;
1186 return 0;
a28e4651 1187}
1188
e484266f 1189/* Helper for expand_epilogue. Emit a pop of a byte register. */
1190
1191static void
1192emit_pop_byte (unsigned regno)
1193{
1194 rtx mem, reg;
1195
1196 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1197 mem = gen_frame_mem (QImode, mem);
1198 reg = gen_rtx_REG (QImode, regno);
1199
1200 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1201}
1202
/* Output RTL epilogue.  SIBCALL_P is true when expanding the epilogue
   before a sibling call, in which case no return insn is emitted.
   Mirrors expand_prologue: deallocate the frame (choosing the shorter
   of two methods), pop saved registers, and for ISRs restore
   RAMPD/X/Y/Z, SREG, tmp and zero regs in reverse push order.  */

void
expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* Use the out-of-line restore helper only when the saved registers
     form one contiguous sequence and no ISR/OS_task/OS_main special
     handling is required (same condition as in the prologue).  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx fp_plus_insns;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !current_function_is_leaf);

      /* Without a frame pointer, X is free here and serves as scratch.  */
      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (my_fp, size));

      /* Copy to stack pointer.  For meaning of irq_state see
         movhi_sp_r insn; -1 means "decide at output time".  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      /* Only applicable when the offset fits an ADIW/SBIW-style
         immediate adjustment of SP.  */
      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers, highest regno first (reverse push order).  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
1396
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
a28e4651 1404
1f959902 1405
1406/* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1407
1408static bool
1409avr_cannot_modify_jumps_p (void)
1410{
1411
1412 /* Naked Functions must not have any instructions after
1413 their epilogue, see PR42240 */
1414
1415 if (reload_completed
1416 && cfun->machine
1417 && cfun->machine->is_naked)
1418 {
1419 return true;
1420 }
1421
1422 return false;
1423}
1424
1425
8b0ecac5 1426/* Helper function for `avr_legitimate_address_p'. */
1427
1428static inline bool
f8a8fc7b 1429avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
f9efb148 1430 RTX_CODE outer_code, bool strict)
8b0ecac5 1431{
1432 return (REG_P (reg)
f8a8fc7b 1433 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1434 as, outer_code, UNKNOWN)
8b0ecac5 1435 || (!strict
1436 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1437}
1438
1439
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  Implements
   TARGET_LEGITIMATE_ADDRESS_P.  STRICT means hard regs only.  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  /* Any constant address is legitimate to start with; the switch
     below handles the register-based forms.  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* X (r26/27) cannot address a DImode operand: too many bytes
         to reach with post-increment alone.  */
      if (strict
          && DImode == mode
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        /* Only non-negative constant displacements are considered.  */
        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* LDD/STD can reach offsets up to MAX_LD_OFFSET (mode).  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                /* Frame/arg pointer bases are always fine; reload
                   eliminates them later.  */
                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Allow out-of-range frame-pointer offsets; they are
                   fixed up by LEGITIMIZE_RELOAD_ADDRESS.  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
1526
4202ef11 1527
1528/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1529 now only a helper for avr_addr_space_legitimize_address. */
a28e4651 1530/* Attempts to replace X with a valid
1531 memory address for an operand of mode MODE */
1532
8b0ecac5 1533static rtx
9f42c829 1534avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
a28e4651 1535{
9f42c829 1536 bool big_offset_p = false;
1537
1538 x = oldx;
1539
1540 if (GET_CODE (oldx) == PLUS
1541 && REG_P (XEXP (oldx, 0)))
1542 {
1543 if (REG_P (XEXP (oldx, 1)))
1544 x = force_reg (GET_MODE (oldx), oldx);
1545 else if (CONST_INT_P (XEXP (oldx, 1)))
1546 {
1547 int offs = INTVAL (XEXP (oldx, 1));
1548 if (frame_pointer_rtx != XEXP (oldx, 0)
1549 && offs > MAX_LD_OFFSET (mode))
1550 {
1551 big_offset_p = true;
1552 x = force_reg (GET_MODE (oldx), oldx);
1553 }
1554 }
1555 }
1556
1557 if (avr_log.legitimize_address)
1558 {
1559 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1560
1561 if (x != oldx)
1562 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1563 }
1564
a28e4651 1565 return x;
1566}
1567
1568
/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
/* This will allow register R26/27 to be used where it is no worse than normal
   base pointers R28/29 or R30/31.  For example, if base offset is greater
   than 63 bytes or for R++ or --R addressing.

   PX points at the address to fix up; MODE is the mode of the memory
   access.  OPNUM/TYPE/ADDR_TYPE are the usual reload parameters and
   MK_MEMLOC builds a stack slot for a register's equivalent address.
   Return the (possibly unchanged) address when a reload was pushed,
   or NULL_RTX to let the generic code handle it.  */

rtx
avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  rtx x = *px;

  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  /* Auto-inc/dec addresses: reload the inner register into a
     pointer register.  */
  if (1 && (GET_CODE (x) == POST_INC
            || GET_CODE (x) == PRE_DEC))
    {
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  /* (reg + const) with a positive displacement and no known constant
     equivalent for the base register.  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              /* Base register lives in memory: reload its location's
                 address into a pointer reg, then the base itself into
                 a base-pointer reg so the displacement stays usable.  */
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           1, addr_type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
        }
      else if (! (frame_pointer_needed
                  && XEXP (x, 0) == frame_pointer_rtx))
        {
          /* Displacement too large: reload the whole sum into a
             pointer register (this is where X becomes usable).  */
          push_reload (x, NULL_RTX, px, NULL,
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                       opnum, type);

          if (avr_log.legitimize_reload_address)
            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
                       POINTER_REGS, x, NULL_RTX);

          return x;
        }
    }

  return NULL_RTX;
}
1650
1651
9ce2d202 1652/* Helper function to print assembler resp. track instruction
02d9a2c3 1653 sequence lengths. Always return "".
9ce2d202 1654
1655 If PLEN == NULL:
1656 Output assembler code from template TPL with operands supplied
1657 by OPERANDS. This is just forwarding to output_asm_insn.
1658
1659 If PLEN != NULL:
dfd52f2b 1660 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1661 If N_WORDS < 0 Set *PLEN to -N_WORDS.
9ce2d202 1662 Don't output anything.
1663*/
1664
02d9a2c3 1665static const char*
9ce2d202 1666avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1667{
1668 if (NULL == plen)
1669 {
1670 output_asm_insn (tpl, operands);
1671 }
1672 else
1673 {
dfd52f2b 1674 if (n_words < 0)
1675 *plen = -n_words;
1676 else
1677 *plen += n_words;
9ce2d202 1678 }
02d9a2c3 1679
1680 return "";
9ce2d202 1681}
1682
1683
20c71901 1684/* Return a pointer register name as a string. */
a28e4651 1685
37ac04dc 1686static const char *
206a5129 1687ptrreg_to_str (int regno)
a28e4651 1688{
1689 switch (regno)
1690 {
1691 case REG_X: return "X";
1692 case REG_Y: return "Y";
1693 case REG_Z: return "Z";
1694 default:
a45076aa 1695 output_operand_lossage ("address operand requires constraint for"
1696 " X, Y, or Z register");
a28e4651 1697 }
1698 return NULL;
1699}
1700
1701/* Return the condition name as a string.
1702 Used in conditional jump constructing */
1703
37ac04dc 1704static const char *
206a5129 1705cond_string (enum rtx_code code)
a28e4651 1706{
1707 switch (code)
1708 {
1709 case NE:
1710 return "ne";
1711 case EQ:
1712 return "eq";
1713 case GE:
1714 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1715 return "pl";
1716 else
1717 return "ge";
a28e4651 1718 case LT:
1719 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1720 return "mi";
1721 else
1722 return "lt";
1723 case GEU:
1724 return "sh";
a28e4651 1725 case LTU:
1726 return "lo";
1727 default:
8ef66241 1728 gcc_unreachable ();
a28e4651 1729 }
02d9a2c3 1730
1731 return "";
a28e4651 1732}
1733
62a6a7ee 1734
/* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
/* Output ADDR to FILE as address: pointer-register forms (X, -X, X+),
   program-memory constants wrapped in gs(), or a plain constant.  */

static void
avr_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and lower devices this is ok.
                 For large devices it will create a Trampoline to offset
                 from symbol which may not be what the user really wanted.  */
              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x,0));
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
                       2 * INTVAL (XEXP (x, 1)));
              /* On 3-byte-PC devices the trampoline issue above is
                 real, so alert the user (and dump the address).  */
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol maybe incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf(stderr,"\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
1792
1793
62a6a7ee 1794/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1795
1796static bool
1797avr_print_operand_punct_valid_p (unsigned char code)
1798{
1799 return code == '~' || code == '!';
1800}
1801
1802
/* Implement `TARGET_PRINT_OPERAND'.  */
/* Output X as assembler operand to file FILE.
   For a description of supported %-codes, see top of avr.md.  */

static void
avr_print_operand (FILE *file, rtx x, int code)
{
  /* Codes 'A'..'D' select byte 0..3 of a multi-byte operand.  */
  int abcd = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';

  if (code == '~')
    {
      /* Devices without JMP/CALL use the relative forms (rjmp/rcall).  */
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      /* Extended indirect jump/call prefix on >128K devices.  */
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      /* %T stores a (reg, bit) pair across calls: a 'T' with a REG
         records the register, a following 't'/'T' with a CONST_INT
         emits "reg" or "reg,bit".  State is kept in statics.  */
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else
        fprintf (file, reg_names[true_regnum (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          /* %i: print a RAM address as an I/O address, using the
             well-known SFR names where applicable.  */
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if (AVR_XMEGA && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              /* Translate memory-mapped address to I/O address.  */
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_current_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section.  */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          /* %o: print only the displacement of a (reg+disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
          else
            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, XEXP (addr,0));
          /* X has no displacement-addressing form (no LDD/STD via X).  */
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error.  Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr,1), code);
        }
      else
        avr_print_operand_address (file, addr);
    }
  else if (code == 'i')
    {
      fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call.  */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed.  */
      output_addr_const (file, x);
    }
  else if (GET_CODE (x) == CONST_DOUBLE)
    {
      long val;
      REAL_VALUE_TYPE rv;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error.  Unknown mode:", x);
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, x);
}
1975
/* Update the condition code in the INSN.  Called via NOTICE_UPDATE_CC
   after every insn is output; maps the insn's "cc" attribute to the
   cc0 tracking state in cc_status.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  switch (cc)
    {
    default:
      break;

    /* These attribute values cannot be resolved statically: the real
       effect on cc0 depends on the operands, so re-run the output
       functions in length-only mode to obtain the effective CC.  */
    case CC_OUT_PLUS:
    case CC_OUT_PLUS_NOCLOBBER:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_OUT_PLUS:
            avr_out_plus (op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_OUT_PLUS_NOCLOBBER:
            avr_out_plus_noclobber (op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLI and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;

            break;
          }  /* inner switch */

        break;
      }
    }  /* outer switch */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2083
a28e4651 2084/* Choose mode for jump insn:
2085 1 - relative jump in range -63 <= x <= 62 ;
2086 2 - relative jump in range -2046 <= x <= 2045 ;
2087 3 - absolute jump (only for ATmega[16]03). */
2088
2089int
206a5129 2090avr_jump_mode (rtx x, rtx insn)
a28e4651 2091{
ef51d1e3 2092 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
47fc0706 2093 ? XEXP (x, 0) : x));
2094 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
a28e4651 2095 int jump_distance = cur_addr - dest_addr;
2096
2097 if (-63 <= jump_distance && jump_distance <= 62)
2098 return 1;
2099 else if (-2046 <= jump_distance && jump_distance <= 2045)
2100 return 2;
4f0e2214 2101 else if (AVR_HAVE_JMP_CALL)
a28e4651 2102 return 3;
2103
2104 return 2;
2105}
2106
/* Return an AVR condition jump command sequence.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function: 1 = short
   branch, 2 = branch + rjmp, 3 = branch + jmp.
   If REVERSE nonzero then condition code in X must be reversed.

   GT/GTU/LE/LEU have no single AVR branch instruction, so they are
   synthesized from breq plus a signed/unsigned branch; the flavor
   depends on whether the V flag from the last compare is usable.  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* Directly supported conditions: use %j1 (condition as-is) or
         %k1 (reversed condition) print codes, skipping over a jump
         for the longer modes.  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
2208
/* Implement `FINAL_PRESCAN_INSN'.
   When RTX-cost debugging is enabled (avr_log.rtx_costs), emit the
   computed cost of INSN as an assembler comment just before the insn
   itself is output.  OPERAND and NUM_OPERANDS are unused.  */

void
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
                    int num_operands ATTRIBUTE_UNUSED)
{
  if (avr_log.rtx_costs)
    {
      rtx set = single_set (insn);

      if (set)
        /* Single SET: report the cost of the source expression.  */
        fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
                 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
      else
        /* Parallel / other patterns: report the cost of the whole pattern.  */
        fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
                 rtx_cost (PATTERN (insn), INSN, 0,
                           optimize_insn_for_speed_p()));
    }
}
2228
/* Return 0 if undefined, 1 if always true or always false.
   MODE is the mode of the comparison, OP its code and X the constant
   operand being compared against.  Used to detect comparisons whose
   outcome is already decided by the value range of MODE.  */

int
avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
{
  /* Largest unsigned value representable in MODE; 0 for unknown modes
     disables the simplification.  */
  unsigned int max = (mode == QImode ? 0xff :
                      mode == HImode ? 0xffff :
                      mode == PSImode ? 0xffffff :
                      mode == SImode ? 0xffffffff : 0);
  if (max && op && GET_CODE (x) == CONST_INT)
    {
      /* For signed comparisons only half the range is reachable.  */
      if (unsigned_condition (op) != op)
        max >>= 1;

      /* NOTE(review): the 0xff exclusion looks like a historical special
         case for QImode-promoted constants — confirm before relying on it.  */
      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 1;
    }
  return 0;
}
2249
2250
2251/* Returns nonzero if REGNO is the number of a hard
2252 register in which function arguments are sometimes passed. */
2253
2254int
206a5129 2255function_arg_regno_p(int r)
a28e4651 2256{
2257 return (r >= 8 && r <= 25);
2258}
2259
/* Initialize the variable CUM for the state at the beginning of the
   argument list of a function with type FNTYPE.  LIBNAME is non-NULL
   for libgcc support routines; FNDECL is unused.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                      tree fndecl ATTRIBUTE_UNUSED)
{
  /* Up to 18 bytes of arguments are passed in registers (r25 down).  */
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  /* Variadic functions pass everything on the stack (but libgcc calls
     never are variadic).  */
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail-called until argument scanning
     proves otherwise (see avr_function_arg_advance).  */

  cfun->machine->sibcall_fails = 0;
}
2276
0af74aa0 2277/* Returns the number of registers to allocate for a function argument. */
2278
2279static int
36f949a2 2280avr_num_arg_regs (enum machine_mode mode, const_tree type)
0af74aa0 2281{
2282 int size;
2283
2284 if (mode == BLKmode)
2285 size = int_size_in_bytes (type);
2286 else
2287 size = GET_MODE_SIZE (mode);
2288
b681d971 2289 /* Align all function arguments to start in even-numbered registers.
2290 Odd-sized arguments leave holes above them. */
0af74aa0 2291
b681d971 2292 return (size + 1) & ~1;
0af74aa0 2293}
2294
/* Implement `TARGET_FUNCTION_ARG'.
   Control whether a function argument of mode MODE and type TYPE is
   passed in a register, and which register.  Return the register RTX
   or NULL_RTX for a stack-passed argument.  NAMED is unused.  */

static rtx
avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                  const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Arguments grow downwards from cum->regno; the whole argument must
     fit into the remaining register space.  */
  if (cum->nregs && bytes <= cum->nregs)
    return gen_rtx_REG (mode, cum->regno - bytes);

  return NULL_RTX;
}
2310
/* Implement `TARGET_FUNCTION_ARG_ADVANCE'.
   Update the summarizer variable CUM to advance past an argument of
   mode MODE and type TYPE in the argument list.  NAMED is unused.  */

static void
avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  /* Out of register space: all further arguments go on the stack.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
2363
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL'.
   Decide whether we can make a sibling call to a function.  DECL_CALLEE
   is the declaration of the function being targeted by the call and
   EXP_CALLEE is the CALL_EXPR representing the call.  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* No declaration available: dig the function type out of the
         call expression.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues.  */

  if (interrupt_function_p (current_function_decl)
      || signal_function_p (current_function_decl)
      || avr_naked_function_p (decl_callee)
      || avr_naked_function_p (current_function_decl)
      /* FIXME: For OS_task and OS_main, we are over-conservative.
         This is due to missing documentation of these attributes
         and what they actually should do and should not do.  */
      || (avr_OS_task_function_p (decl_callee)
          != avr_OS_task_function_p (current_function_decl))
      || (avr_OS_main_function_p (decl_callee)
          != avr_OS_main_function_p (current_function_decl)))
    {
      return false;
    }

  return true;
}
2422
/***********************************************************************
 Functions for outputting various mov's for a various modes
************************************************************************/

/* Return true if a value accessed by OP is read from flash by a
   __load_* function from libgcc, i.e. the value is wider than 2 bytes
   and the device lacks the LPMX instruction.  */

bool
avr_load_libgcc_p (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  int n_bytes = GET_MODE_SIZE (mode);

  return (n_bytes > 2
          && !AVR_HAVE_LPMX
          && avr_mem_flash_p (op));
}
2440
/* Return true if a value of mode MODE is read by a __xload_* libgcc
   function, i.e. whenever the value is wider than one byte or the
   device has more than one flash segment.  */

bool
avr_xload_libgcc_p (enum machine_mode mode)
{
  int n_bytes = GET_MODE_SIZE (mode);

  return (n_bytes > 1
          || avr_current_device->n_flash > 1);
}
2451
2452
/* Find an unused d-register to be used as scratch in INSN.
   EXCLUDE is either NULL_RTX or some register.  In the case where EXCLUDE
   is a register, skip all possible return values that overlap EXCLUDE.
   The policy for the returned register is similar to that of
   `reg_unused_after', i.e. the returned register may overlap the SET_DEST
   of INSN.

   Return a QImode d-register or NULL_RTX if nothing found.  */

static rtx
avr_find_unused_d_reg (rtx insn, rtx exclude)
{
  int regno;
  /* In an ISR, call-used registers are not implicitly free.  */
  bool isr_p = (interrupt_function_p (current_function_decl)
                || signal_function_p (current_function_decl));

  /* d-registers are r16 .. r31 (can take immediate operands).  */
  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register.  */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
2503
4202ef11 2504
2505/* Helper function for the next function in the case where only restricted
2506 version of LPM instruction is available. */
2507
2508static const char*
2509avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2510{
2511 rtx dest = xop[0];
2512 rtx addr = xop[1];
2513 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2514 int regno_dest;
2515
2516 regno_dest = REGNO (dest);
2517
2518 /* The implicit target register of LPM. */
2519 xop[3] = lpm_reg_rtx;
2520
2521 switch (GET_CODE (addr))
2522 {
2523 default:
2524 gcc_unreachable();
2525
2526 case REG:
2527
2528 gcc_assert (REG_Z == REGNO (addr));
2529
2530 switch (n_bytes)
2531 {
2532 default:
2533 gcc_unreachable();
2534
2535 case 1:
5bd39e93 2536 avr_asm_len ("%4lpm", xop, plen, 1);
2537
2538 if (regno_dest != LPM_REGNO)
2539 avr_asm_len ("mov %0,%3", xop, plen, 1);
2540
2541 return "";
4202ef11 2542
2543 case 2:
2544 if (REGNO (dest) == REG_Z)
5bd39e93 2545 return avr_asm_len ("%4lpm" CR_TAB
4202ef11 2546 "push %3" CR_TAB
2547 "adiw %2,1" CR_TAB
5bd39e93 2548 "%4lpm" CR_TAB
4202ef11 2549 "mov %B0,%3" CR_TAB
2550 "pop %A0", xop, plen, 6);
5bd39e93 2551
2552 avr_asm_len ("%4lpm" CR_TAB
2553 "mov %A0,%3" CR_TAB
2554 "adiw %2,1" CR_TAB
2555 "%4lpm" CR_TAB
2556 "mov %B0,%3", xop, plen, 5);
4202ef11 2557
5bd39e93 2558 if (!reg_unused_after (insn, addr))
2559 avr_asm_len ("sbiw %2,1", xop, plen, 1);
4202ef11 2560
2561 break; /* 2 */
2562 }
2563
2564 break; /* REG */
2565
2566 case POST_INC:
2567
2568 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
5bd39e93 2569 && n_bytes <= 4);
4202ef11 2570
5bd39e93 2571 if (regno_dest == LPM_REGNO)
2572 avr_asm_len ("%4lpm" CR_TAB
2573 "adiw %2,1", xop, plen, 2);
2574 else
2575 avr_asm_len ("%4lpm" CR_TAB
2576 "mov %A0,%3" CR_TAB
2577 "adiw %2,1", xop, plen, 3);
4202ef11 2578
2579 if (n_bytes >= 2)
5bd39e93 2580 avr_asm_len ("%4lpm" CR_TAB
4202ef11 2581 "mov %B0,%3" CR_TAB
2582 "adiw %2,1", xop, plen, 3);
2583
5bd39e93 2584 if (n_bytes >= 3)
2585 avr_asm_len ("%4lpm" CR_TAB
2586 "mov %C0,%3" CR_TAB
2587 "adiw %2,1", xop, plen, 3);
2588
2589 if (n_bytes >= 4)
2590 avr_asm_len ("%4lpm" CR_TAB
2591 "mov %D0,%3" CR_TAB
2592 "adiw %2,1", xop, plen, 3);
2593
4202ef11 2594 break; /* POST_INC */
2595
2596 } /* switch CODE (addr) */
2597
2598 return "";
2599}
2600
2601
/* If PLEN == NULL: Output instructions to load a value from a memory
   location OP[1] (in a flash address space) to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction
   sequence.  Return "".

   Operand layout used below:
     %0 destination register, %1 source address, %2 Z register,
     %3 scratch d-register (when found), %4 "e" prefix / segment,
     %5 tmp register.  */

static const char*
avr_out_lpm (rtx insn, rtx *op, int *plen)
{
  rtx xop[6];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Stores to flash address spaces are not supported.  */
  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;

  regno_dest = REGNO (dest);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed (64K flash segment for ELPM).  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);

      /* Prefer loading the segment number via an unused d-register.  */
      if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
          xop[3])
        {
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out __RAMPZ__,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* Segment 1 can be built in tmp_reg without a d-register.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out __RAMPZ__,%5", xop, plen, 3);
        }
      else
        {
          /* Last resort: borrow ZL, saving/restoring it via tmp_reg.  */
          avr_asm_len ("mov %5,%2" CR_TAB
                       "ldi %2,%4" CR_TAB
                       "out __RAMPZ__,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* From here on, %4 prints "e" so that lpm becomes elpm.  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* Loading into Z: buffer low byte in tmp_reg (%5).  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* Destination overlaps Z (r30/r31): buffer byte C in %5.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      avr_asm_len ("out __RAMPZ__,__zero_reg__", xop, plen, 1);
    }

  return "";
}
2775
2776
/* Worker function for the xload_8 insn: read one byte from a 24-bit
   generic address, choosing at run time between RAM (bit 7 of the
   segment byte %1 set) and flash.  OP[0] is the destination, OP[1]
   the segment byte; the 16-bit address is already in Z.  PLEN works
   as in avr_asm_len.  Return "".  */

const char*
avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  /* Without LPMX, LPM implicitly targets r0 (lpm_reg_rtx).  */
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  if (plen)
    *plen = 0;

  /* Skip chain: LD executes for RAM addresses, LPM for flash.  */
  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2" CR_TAB
               "sbrs %1,7", xop, plen, 3);

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);

  /* Copy out of r0 if that is not already the destination.  */
  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
2803
2804
/* Output the assembler for a QImode move INSN with OPERANDS[0] as
   destination and OPERANDS[1] as source.  If L is non-NULL, store the
   length of the sequence in words into *L.  Return the template
   string (possibly "").  */

const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Reads from flash go through the LPM helpers.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
        {
          /* SPL is an I/O register, not a GPR.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return "out %0,%1";
          else if (test_hard_reg_class (STACK_REG, src))
            return "in %0,%1";

          return "mov %0,%1";
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, real_l, false);
          return "";
        }
      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      rtx xop[2];

      /* Store 0 straight from the zero register.  */
      xop[0] = dest;
      xop[1] = src == const0_rtx ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, real_l);
    }
  return "";
}
2854
2855
/* Output the assembler for an HImode move INSN with XOP[0] as
   destination and XOP[1] as source.  If PLEN is non-NULL, store the
   length of the sequence in words into *PLEN.  Return the template
   string (possibly "").  */

const char *
output_movhi (rtx insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  /* Reads from flash go through the LPM helpers.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              /* XMEGA writes SPL first; hardware blocks interrupts.  */
              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are used.  */

              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)

                /* Otherwise disable interrupts around the SP update so
                   the two-byte write cannot be torn by an ISR.  */
                : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                               "cli" CR_TAB
                               "out __SP_H__,%B1" CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              return AVR_HAVE_8BIT_SP
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      /* Store 0 straight from the zero register (shadows parameter
         XOP intentionally).  */
      rtx xop[2];

      xop[0] = dest;
      xop[1] = src == const0_rtx ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
2934
/* Output the assembler for a QImode load INSN from memory OP[1] into
   register OP[0].  PLEN works as in avr_asm_len.  Return "" or the
   template string.  */

static const char*
out_movqi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Use IN for addresses in I/O space when optimizing, LDS otherwise.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      /* Displacement out of LDD range (0..63): only Y allows adjusting.  */
      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          /* Restore X unless it dies here or overlaps the destination.  */
          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  /* Plain register-indirect load.  */
  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
2994
/* Output the assembler for an HImode load INSN from memory OP[1] into
   register OP[0].  PLEN works as in avr_asm_len.  Return "" or the
   template string.  */

static const char*
out_movhi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (reg_dest == reg_base) /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1" CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD: use post-increment and undo it if X lives on.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      /* Displacement beyond LDD range: only Y can be adjusted.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y" CR_TAB
                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "ld %B0,X" CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+" CR_TAB
                         "ld %B0,X" CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile: must read low byte first, so pre-adjust the pointer.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ld %A0,%p1" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* Use IN for I/O-space addresses when optimizing, LDS otherwise.  */
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3114
/* Output the assembler for an SImode load INSN from memory OP[1] into
   register OP[0].  If L is non-NULL, store the length of the sequence
   in words into *L.  Return "" or the template string.  */

static const char*
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld r29,X" CR_TAB
                          "ld r28,-X" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1" CR_TAB
                          "ld r26,X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Destination r24..r27 overlaps X in its high half: buffer
               byte C in tmp_reg.  */
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          /* Overlap cases: read the byte that would clobber the base
             pointer via tmp_reg, in an order that keeps the base valid.  */
          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      /* Displacement beyond LDD range: only Y can be adjusted.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld r29,X" CR_TAB
                      "ld r28,-X" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1" CR_TAB
                      "ld r26,X" CR_TAB
                      "mov r27,__tmp_reg__");
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB
                    "ld r24,X+" CR_TAB
                    "ld r25,X+" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+" CR_TAB
                  "ld %B0,X+" CR_TAB
                  "ld %C0,X+" CR_TAB
                  "ld %D0,X" CR_TAB
                  "sbiw r26,%o1+3");
        }
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3275
/* Output asm to store the SImode register OP[1] into memory OP[0].
   INSN is the store insn.  If L is non-NULL, *L is set to the length
   of the emitted sequence in words; no code is changed, only an
   assembler template string is returned (with %-codes resolved later
   by output_asm_insn).  Templates are chosen so that the address
   register (X/Y) is restored unless reg_unused_after says it dies.  */

static const char*
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  /* Callers may pass L == NULL when only the template is wanted.  */
  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    /* Four 2-word STS instructions.  */
    return *l=8,("sts %m0,%A1" CR_TAB
                 "sts %m0+1,%B1" CR_TAB
                 "sts %m0+2,%C1" CR_TAB
                 "sts %m0+3,%D1");
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29" CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* Source overlaps the upper half of X's destination walk:
                 park %C1/%D1 in tmp/zero regs before they are clobbered.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__" CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1" CR_TAB
                        "sbiw r26,3");
        }
      else
        /* Y or Z base: STD with displacement needs no pointer update.  */
        return *l=4, ("st %0,%A1" CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement does not fit STD's 6-bit offset; only Y is
             expected here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "st X+,r28" CR_TAB
                      "st X,r29" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,r24" CR_TAB
                      "st X+,r25" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1" CR_TAB
                  "st X+,%B1" CR_TAB
                  "st X+,%C1" CR_TAB
                  "st X,%D1" CR_TAB
                  "sbiw r26,%o0+3");
        }
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: store high byte first.  */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
3430
/* Output a 4-byte move (SImode / SFmode).  INSN is the move insn,
   OPERANDS[0]/OPERANDS[1] are destination and source, and L, if
   non-NULL, receives the sequence length in words.  Dispatches to
   the flash (LPM), load, store and constant-reload helpers.  */

const char *
output_movsisf (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Accesses to flash address space go through the LPM output routine.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy in an order that cannot clobber still-unread source
             bytes when the register ranges overlap.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %C0,%C1" CR_TAB
                          "movw %A0,%A1");
                }
              *l = 4;
              return ("mov %D0,%D1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %A0,%A1");
            }
          else
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %A0,%A1" CR_TAB
                          "movw %C0,%C1");
                }
              *l = 4;
              return ("mov %A0,%A1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %D0,%D1");
            }
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_insisf (operands, NULL_RTX, real_l);
        }
      else if (GET_CODE (src) == MEM)
        return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      /* Storing zero: use __zero_reg__ as the source register.  */
      if (src == CONST0_RTX (GET_MODE (dest)))
          operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      /* Only emit when the caller asked for output, not just a length.  */
      if (!real_l)
        output_asm_insn (templ, operands);

      /* Restore the original source operand for the caller.  */
      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
3506
02d9a2c3 3507
/* Handle loads of 24-bit types from memory to register.

   INSN is the load insn, OP[0] the PSImode destination register and
   OP[1] the memory source.  If PLEN is non-NULL only the length (in
   words) is accumulated; otherwise asm is emitted via avr_asm_len.
   NOTE(review): the negative length arguments apparently make
   avr_asm_len reset *PLEN before adding -- confirm against
   avr_asm_len's definition.  */

static const char*
avr_out_load_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2" CR_TAB
                                "ld r28,X" CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1" CR_TAB
                                "ld r26,X" CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Restore X unless it is part of the destination or dead.  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Destination overlaps base: buffer the middle byte.  */
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1" CR_TAB
                                "ld %A0,%1" CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld %A0,%1" CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement exceeds LDD's range; only Y is expected here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %A0,Y" CR_TAB
                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB
                                  "ld r28,X" CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1" CR_TAB
                                  "ld r26,X" CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld r24,X+" CR_TAB
                       "ld r25,X+" CR_TAB
                       "ld r26,X", op, plen, -4);

          /* If dest is not r24..r26, X must be restored.  */
          if (reg_dest != REG_X - 2)
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1" CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    /* Three 2-word LDS instructions.  */
    return avr_asm_len ("lds %A0,%m1" CR_TAB
                        "lds %B0,%m1+1" CR_TAB
                        "lds %C0,%m1+2", op, plen, -6);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3637
3638/* Handle store of 24-bit type from register or zero to memory. */
3639
3640static const char*
3641avr_out_store_psi (rtx insn, rtx *op, int *plen)
3642{
3643 rtx dest = op[0];
3644 rtx src = op[1];
3645 rtx base = XEXP (dest, 0);
3646 int reg_base = true_regnum (base);
3647
3648 if (CONSTANT_ADDRESS_P (base))
3649 return avr_asm_len ("sts %m0,%A1" CR_TAB
3650 "sts %m0+1,%B1" CR_TAB
3651 "sts %m0+2,%C1", op, plen, -6);
3652
3653 if (reg_base > 0) /* (r) */
3654 {
3655 if (reg_base == REG_X) /* (R26) */
3656 {
3657 gcc_assert (!reg_overlap_mentioned_p (base, src));
3658
3659 avr_asm_len ("st %0+,%A1" CR_TAB
3660 "st %0+,%B1" CR_TAB
3661 "st %0,%C1", op, plen, -3);
3662
3663 if (!reg_unused_after (insn, base))
3664 avr_asm_len ("sbiw r26,2", op, plen, 1);
3665
3666 return "";
3667 }
3668 else
3669 return avr_asm_len ("st %0,%A1" CR_TAB
3670 "std %0+1,%B1" CR_TAB
3671 "std %0+2,%C1", op, plen, -3);
3672 }
3673 else if (GET_CODE (base) == PLUS) /* (R + i) */
3674 {
3675 int disp = INTVAL (XEXP (base, 1));
3676 reg_base = REGNO (XEXP (base, 0));
3677
3678 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3679 {
3680 if (reg_base != REG_Y)
3681 fatal_insn ("incorrect insn:",insn);
3682
3683 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3684 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3685 "std Y+61,%A1" CR_TAB
3686 "std Y+62,%B1" CR_TAB
3687 "std Y+63,%C1" CR_TAB
3688 "sbiw r28,%o0-60", op, plen, -5);
3689
3690 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3691 "sbci r29,hi8(-%o0)" CR_TAB
3692 "st Y,%A1" CR_TAB
3693 "std Y+1,%B1" CR_TAB
3694 "std Y+2,%C1" CR_TAB
3695 "subi r28,lo8(%o0)" CR_TAB
3696 "sbci r29,hi8(%o0)", op, plen, -7);
3697 }
3698 if (reg_base == REG_X)
3699 {
3700 /* (X + d) = R */
3701 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3702
3703 avr_asm_len ("adiw r26,%o0" CR_TAB
3704 "st X+,%A1" CR_TAB
3705 "st X+,%B1" CR_TAB
3706 "st X,%C1", op, plen, -4);
3707
3708 if (!reg_unused_after (insn, XEXP (base, 0)))
3709 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3710
3711 return "";
3712 }
3713
3714 return avr_asm_len ("std %A0,%A1" CR_TAB
3715 "std %B0,%B1" CR_TAB
3716 "std %C0,%C1", op, plen, -3);
3717 }
3718 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3719 return avr_asm_len ("st %0,%C1" CR_TAB
3720 "st %0,%B1" CR_TAB
3721 "st %0,%A1", op, plen, -3);
3722 else if (GET_CODE (base) == POST_INC) /* (R++) */
3723 return avr_asm_len ("st %0,%A1" CR_TAB
3724 "st %0,%B1" CR_TAB
3725 "st %0,%C1", op, plen, -3);
3726
3727 fatal_insn ("unknown move insn:",insn);
3728 return "";
3729}
3730
3731
/* Move around 24-bit stuff.

   Top-level output routine for PSImode moves: INSN is the move insn,
   OP[0]/OP[1] are destination and source, PLEN as usual (NULL to emit
   asm, non-NULL to accumulate length only).  Dispatches to the flash,
   load, store and constant-reload helpers.  */

const char *
avr_out_movpsi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];

  /* Flash address space accesses go through the LPM output routine.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, op, plen);
    }

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy in an order that cannot clobber still-unread source
             bytes when the register ranges overlap.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              avr_asm_len ("mov %C0,%C1", op, plen, -1);

              if (AVR_HAVE_MOVW)
                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
              else
                return avr_asm_len ("mov %B0,%B1"  CR_TAB
                                    "mov %A0,%A1", op, plen, 2);
            }
          else
            {
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %A0,%A1", op, plen, -1);
              else
                avr_asm_len ("mov %A0,%A1"  CR_TAB
                             "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
            }
        }
      else if (CONSTANT_P (src))
        {
          return avr_out_reload_inpsi (op, NULL_RTX, plen);
        }
      else if (MEM_P (src))
        return avr_out_load_psi (insn, op, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Storing zero: substitute __zero_reg__ as the source.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);
  return "";
}
3791
3792
/* Output asm to store the QImode register OP[1] into memory OP[0].
   INSN is the store insn; PLEN as usual (NULL emits, non-NULL only
   accumulates length in words).  Uses OUT for I/O addresses when
   optimizing, STS for other constant addresses.  */

static const char*
out_movqi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* OUT (1 word) for I/O space when optimizing, else STS (2 words).  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          /* Displacement exceeds STD's range; only Y is expected here.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement mode; adjust X, store, then restore.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              /* Source overlaps X: save it before ADIW clobbers it.  */
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x,0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
3855
0b6cf66f 3856
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first (XMEGA 16-bit I/O registers latch on the
   low byte).  INSN, OP and PLEN as in out_movhi_mr_r.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP. */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0,%A1" CR_TAB
                     "out %i0+1,%B1", op, plen, -2)

      : avr_asm_len ("sts %m0,%A1" CR_TAB
                     "sts %m0+1,%B1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined. */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X unless it dies with this insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's range; only Y is expected here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: decrement first, then write low byte before high.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
3967
3968
/* Output asm to store the HImode register OP[1] into memory OP[0].
   INSN is the store insn; PLEN as usual (NULL emits, non-NULL only
   accumulates length in words).  XMEGA targets are delegated to
   avr_out_movhi_mr_r_xmega.  */

static const char*
out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP. */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    /* High byte first for classic AVR 16-bit I/O registers.  */
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)

      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined. */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement exceeds STD's range; only Y is expected here.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      /* Volatile: write high byte first, then advance past both bytes.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4088
20c71901 4089/* Return 1 if frame pointer for current function required. */
a28e4651 4090
a45076aa 4091static bool
ebdd0478 4092avr_frame_pointer_required_p (void)
a28e4651 4093{
18d50ae6 4094 return (cfun->calls_alloca
a12b9b80 4095 || cfun->calls_setjmp
4096 || cfun->has_nonlocal_label
4097 || crtl->args.info.nregs == 0
4098 || get_frame_size () > 0);
a28e4651 4099}
4100
faf8f400 4101/* Returns the condition of compare insn INSN, or UNKNOWN. */
a28e4651 4102
faf8f400 4103static RTX_CODE
206a5129 4104compare_condition (rtx insn)
a28e4651 4105{
4106 rtx next = next_real_insn (insn);
cffa155c 4107
4108 if (next && JUMP_P (next))
a28e4651 4109 {
4110 rtx pat = PATTERN (next);
4111 rtx src = SET_SRC (pat);
cffa155c 4112
4113 if (IF_THEN_ELSE == GET_CODE (src))
4114 return GET_CODE (XEXP (src, 0));
a28e4651 4115 }
cffa155c 4116
4117 return UNKNOWN;
faf8f400 4118}
4119
faf8f400 4120
dfd52f2b 4121/* Returns true iff INSN is a tst insn that only tests the sign. */
4122
4123static bool
206a5129 4124compare_sign_p (rtx insn)
faf8f400 4125{
4126 RTX_CODE cond = compare_condition (insn);
4127 return (cond == GE || cond == LT);
4128}
4129
dfd52f2b 4130
4131/* Returns true iff the next insn is a JUMP_INSN with a condition
faf8f400 4132 that needs to be swapped (GT, GTU, LE, LEU). */
4133
dfd52f2b 4134static bool
206a5129 4135compare_diff_p (rtx insn)
faf8f400 4136{
4137 RTX_CODE cond = compare_condition (insn);
a28e4651 4138 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4139}
4140
dfd52f2b 4141/* Returns true iff INSN is a compare insn with the EQ or NE condition. */
a28e4651 4142
dfd52f2b 4143static bool
206a5129 4144compare_eq_p (rtx insn)
a28e4651 4145{
faf8f400 4146 RTX_CODE cond = compare_condition (insn);
a28e4651 4147 return (cond == EQ || cond == NE);
4148}
4149
4150
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for an HI/SI register XOP[0] and an integer XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.  */

const char*
avr_out_compare (rtx insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against. */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison. */
  enum machine_mode mode = GET_MODE (xreg);

  /* Number of bytes to operate on. */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
  int clobber_val = -1;

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that DImode comparisons are always against reg:DI 18
     and therefore don't use this. */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* x == 1  <=>  (x - 1) == 0: decrement low byte, OR the rest.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* x == -1: AND all bytes, then COM; result is 0 iff all 0xff.  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise. */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate. */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63. */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              /* SBIW handles both low bytes at once; skip the next byte.  */
              avr_asm_len ("sbiw %0,%1", xop, plen, 1);
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* (in)equality against a small negative: add its negation.  */
              return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy. */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register. */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* SBCI destroys the byte, legal only if the reg dies here.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register. */

      gcc_assert (REG_P (xop[2]));

      /* Reuse the scratch if it already holds this byte value.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
4304
4305
83921eda 4306/* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4307
4308const char*
4309avr_out_compare64 (rtx insn, rtx *op, int *plen)
4310{
4311 rtx xop[3];
4312
4313 xop[0] = gen_rtx_REG (DImode, 18);
4314 xop[1] = op[0];
4315 xop[2] = op[1];
4316
4317 return avr_out_compare (insn, xop, plen);
4318}
4319
20c71901 4320/* Output test instruction for HImode. */
a28e4651 4321
dfd52f2b 4322const char*
4323avr_out_tsthi (rtx insn, rtx *op, int *plen)
a28e4651 4324{
faf8f400 4325 if (compare_sign_p (insn))
a28e4651 4326 {
dfd52f2b 4327 avr_asm_len ("tst %B0", op, plen, -1);
a28e4651 4328 }
dfd52f2b 4329 else if (reg_unused_after (insn, op[0])
4330 && compare_eq_p (insn))
a28e4651 4331 {
20c71901 4332 /* Faster than sbiw if we can clobber the operand. */
dfd52f2b 4333 avr_asm_len ("or %A0,%B0", op, plen, -1);
a28e4651 4334 }
dfd52f2b 4335 else
a28e4651 4336 {
dfd52f2b 4337 avr_out_compare (insn, op, plen);
a28e4651 4338 }
dfd52f2b 4339
4340 return "";
a28e4651 4341}
4342
4343
02d9a2c3 4344/* Output test instruction for PSImode. */
4345
4346const char*
4347avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4348{
4349 if (compare_sign_p (insn))
4350 {
4351 avr_asm_len ("tst %C0", op, plen, -1);
4352 }
4353 else if (reg_unused_after (insn, op[0])
4354 && compare_eq_p (insn))
4355 {
4356 /* Faster than sbiw if we can clobber the operand. */
4357 avr_asm_len ("or %A0,%B0" CR_TAB
4358 "or %A0,%C0", op, plen, -2);
4359 }
4360 else
4361 {
4362 avr_out_compare (insn, op, plen);
4363 }
4364
4365 return "";
4366}
4367
4368
20c71901 4369/* Output test instruction for SImode. */
a28e4651 4370
dfd52f2b 4371const char*
4372avr_out_tstsi (rtx insn, rtx *op, int *plen)
a28e4651 4373{
faf8f400 4374 if (compare_sign_p (insn))
a28e4651 4375 {
dfd52f2b 4376 avr_asm_len ("tst %D0", op, plen, -1);
a28e4651 4377 }
dfd52f2b 4378 else if (reg_unused_after (insn, op[0])
4379 && compare_eq_p (insn))
a28e4651 4380 {
dfd52f2b 4381 /* Faster than sbiw if we can clobber the operand. */
4382 avr_asm_len ("or %A0,%B0" CR_TAB
4383 "or %A0,%C0" CR_TAB
4384 "or %A0,%D0", op, plen, -3);
4385 }
4386 else
4387 {
4388 avr_out_compare (insn, op, plen);
a28e4651 4389 }
dfd52f2b 4390
4391 return "";
a28e4651 4392}
4393
4394
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.

   If PLEN is non-NULL the generated instructions are only counted into
   *PLEN; otherwise they are printed (avr_asm_len convention).  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A scratch is only usable when the pattern is a PARALLEL and the
         scratch operand got a hard register (not SCRATCH).  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          /* Load the constant count into the scratch and loop below.  */
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* Count is known > 0 here, so no need to jump over the body.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Variable count living in memory: load it into __tmp_reg__.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if it is still live afterwards or overlaps the
         register(s) being shifted.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  /* With a runtime count the loop must be entered at the test so that a
     count of zero shifts nothing.  */
  if (second_label)
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
4516
4517
/* 8bit shift left ((char)x << i)

   INSN is the shift insn with destination/source %0 and shift count
   OPERANDS[2].  Returns the assembler template for a constant count,
   or "" after out_shift_with_cnt has produced the output.  If LEN is
   non-NULL, the instruction count is stored in *LEN.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;  /* Dummy so the cases below can always store a length.  */

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shifting by >= 8 clears the byte.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* swap+andi needs an upper (LD) register for andi.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0"  CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0"  CR_TAB
                      "lsl %0"  CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Move bit 0 to bit 7 via carry; rest becomes zero.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Non-constant (or small constant) count: emit a generic shift loop.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
4613
4614
/* 16bit shift left ((short)x << i)

   INSN is the shift insn; OPERANDS[2] holds the shift count and, for
   PARALLEL patterns, %3 names a QImode scratch register.  Returns the
   assembler template for a hand-optimized constant count, or "" after
   out_shift_with_cnt produced the output.  If LEN is non-NULL the
   instruction count of the returned template is stored in *LEN.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;  /* Dummy so the cases below can always store a length.  */

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shifting by >= 16 clears the word.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %A0"      CR_TAB
                      "swap %B0"      CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0"   CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0"    CR_TAB
                      "swap %B0"    CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsl %A0"       CR_TAB
                      "rol %B0"       CR_TAB
                      "swap %A0"      CR_TAB
                      "swap %B0"      CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0"   CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0"     CR_TAB
                      "rol %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "swap %B0"    CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0"         CR_TAB
                  "ror %A0"         CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0"         CR_TAB
                  "ror %A0"         CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0"     CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsr %B0"     CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "ror %B0"     CR_TAB
                  "ror %A0");

        case 8:
          /* Whole-byte move; source low byte may live elsewhere (%A1).  */
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "lsl %B0"     CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* mul by 0x20 == shift left by 5 into r1:r0.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3"  CR_TAB
                      "mov %B0,r0"  CR_TAB
                      "clr %A0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "lsl %B0"     CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x20 multiplier in __zero_reg__ via set/bld.  */
              *len = 6;
              return ("set"        CR_TAB
                      "bld r1,5"   CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0"    CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0"  CR_TAB
                      "mov %B0,r0"   CR_TAB
                      "clr %A0"      CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3"  CR_TAB
                      "mov %B0,r0"  CR_TAB
                      "clr %A0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: shift the high byte 6 more times.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0"     CR_TAB
                      "dec %A0"     CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      /* No hand-optimized template matched: restore the caller's LEN
         (possibly NULL) and fall back to the generic loop below.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
4870
4871
/* 24-bit shift left

   INSN is the shift insn, OP its operands with the count in OP[2].
   If PLEN is non-NULL the instruction count is accumulated in *PLEN
   instead of printing (avr_asm_len convention).  Returns "".  */

const char*
avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shifting by >= 24 clears all three bytes.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Order the byte moves so an overlapping source is not
               clobbered before it is read.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0"     CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 of the source survives, moved to bit 23.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
4933
4934
/* 32bit shift left ((long)x << i)

   INSN is the shift insn with the count in OPERANDS[2].  Returns the
   assembler template for a hand-optimized constant count, or "" after
   out_shift_with_cnt produced the output.  If LEN is non-NULL the
   instruction count is stored in *LEN.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;  /* Dummy so the cases below can always store a length.  */

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shifting by >= 32 clears all four bytes.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Order the byte moves so an overlapping source is not
               clobbered before it is read.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0"     CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* If the high word of the destination already is the low
               word of the source, only clearing remains.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0"      CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0"     CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0"     CR_TAB
                  "clr %B0"     CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 of the source survives, moved to bit 31.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      /* No hand-optimized template matched: restore the caller's LEN
         (possibly NULL) and fall back to the generic loop below.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
5023
/* 8bit arithmetic shift right ((signed char)x >> i)

   INSN is the shift insn with the count in OPERANDS[2].  Returns the
   assembler template for a constant count, or "" after
   out_shift_with_cnt produced the output.  If LEN is non-NULL the
   instruction count is stored in *LEN.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;  /* Dummy so the cases below can always store a length.  */

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Save bit 6, sign-extend bit 7 into the whole byte via
             carry, then restore bit 6 as bit 0.  */
          *len = 4;
          return ("bst %0,6"  CR_TAB
                  "lsl %0"    CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Result is 8 copies of the sign bit.  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
5094
5095
/* 16bit arithmetic shift right ((signed short)x >> i)

   INSN is the shift insn; OPERANDS[2] holds the shift count and, for
   PARALLEL patterns, %3 names a QImode scratch register.  Returns the
   assembler template for a hand-optimized constant count, or "" after
   out_shift_with_cnt produced the output.  If LEN is non-NULL the
   instruction count is stored in *LEN.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;  /* Dummy so the cases below can always store a length.  */

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0"         CR_TAB
                  "lsl __tmp_reg__"     CR_TAB
                  "rol %A0"             CR_TAB
                  "sbc %B0,%B0"         CR_TAB
                  "lsl __tmp_reg__"     CR_TAB
                  "rol %A0"             CR_TAB
                  "rol %B0");

        case 7:
          *len = 4;
          return ("lsl %A0"     CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0"     CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0"     CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0"     CR_TAB
                                "sbrc %A0,7"  CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* muls by 0x20: signed multiply keeps the sign in r1.  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 14:
          *len = 5;
          return ("lsl %B0"     CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Result is 16 copies of the sign bit.  */
          return *len = 3, ("lsl %B0"     CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      /* No hand-optimized template matched: restore the caller's LEN
         (possibly NULL) and fall back to the generic loop below.  */
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
5257
5258
/* 24-bit arithmetic shift right

   INSN is the shift insn, OP its operands with the count in OP[2].
   If PLEN is non-NULL the instruction count is accumulated in *PLEN
   instead of printing (avr_asm_len convention).  Returns "".  */

const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Order the byte moves so an overlapping source is not
             clobbered before it is read; sign-extend into %C0.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "sbrc %C1,7"  CR_TAB
                                "dec %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"     CR_TAB
                              "sbrc %A0,7"  CR_TAB
                              "com %B0"     CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Result is 24 copies of the sign bit.  */
          return avr_asm_len ("lsl %C0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
5316
5317
/* 32bit arithmetic shift right ((signed long)x >> i)

   INSN is the shift insn with the count in OPERANDS[2].  Returns the
   assembler template for a hand-optimized constant count, or "" after
   out_shift_with_cnt produced the output.  If LEN is non-NULL the
   instruction count is stored in *LEN.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;  /* Dummy so the cases below can always store a length.  */

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Order the byte moves so an overlapping source is not
               clobbered before it is read; sign-extend into %D0.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0"     CR_TAB
                      "sbrc %C0,7"  CR_TAB
                      "dec %D0");
            else
              return ("clr %D0"     CR_TAB
                      "sbrc %D1,7"  CR_TAB
                      "dec %D0"     CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "com %D0"     CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0"      CR_TAB
                                "sbrc %B0,7"   CR_TAB
                                "com %D0"      CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "com %D0"     CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0"     CR_TAB
                            "sbrc %A0,7"  CR_TAB
                            "com %D0"     CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Result is 32 copies of the sign bit.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      /* No hand-optimized template matched: restore the caller's LEN
         (possibly NULL) and fall back to the generic loop below.  */
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
5414
/* 8bit logic shift right ((unsigned char)x >> i)

   INSN is the shift insn with the count in OPERANDS[2].  Returns the
   assembler template for a constant count, or "" after
   out_shift_with_cnt produced the output.  If LEN is non-NULL the
   instruction count is stored in *LEN.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;  /* Dummy so the cases below can always store a length.  */

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shifting by >= 8 clears the byte.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsr %0";

        case 2:
          *len = 2;
          return ("lsr %0" CR_TAB
                  "lsr %0");
        case 3:
          *len = 3;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 4:
          /* swap+andi needs an upper (LD) register for andi.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len=2;
              return ("swap %0" CR_TAB
                      "andi %0,0x0f");
            }
          *len = 4;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsr %0"  CR_TAB
                      "andi %0,0x7");
            }
          *len = 5;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsr %0"  CR_TAB
                      "lsr %0"  CR_TAB
                      "andi %0,0x3");
            }
          *len = 6;
          return ("lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0" CR_TAB
                  "lsr %0");

        case 7:
          /* Move bit 7 to bit 0 via carry; rest becomes zero.  */
          *len = 3;
          return ("rol %0" CR_TAB
                  "clr %0" CR_TAB
                  "rol %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
5509
/* 16bit logic shift right ((unsigned short)x >> i)

   INSN is the shift insn; OPERANDS[2] holds the shift count and, for
   PARALLEL patterns, %3 names a QImode scratch register.  Returns the
   assembler template for a hand-optimized constant count, or "" after
   out_shift_with_cnt produced the output.  If LEN is non-NULL the
   instruction count is stored in *LEN.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;  /* Dummy so the cases below can always store a length.  */

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shifting by >= 16 clears the word.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %B0"      CR_TAB
                      "swap %A0"      CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0"   CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %B0"    CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsr %B0"       CR_TAB
                      "ror %A0"       CR_TAB
                      "swap %B0"      CR_TAB
                      "swap %A0"      CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0"   CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsr %B0"     CR_TAB
                      "ror %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsl %A0"         CR_TAB
                  "rol %B0"         CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "lsl %A0"         CR_TAB
                  "rol %B0"         CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0"     CR_TAB
                  "mov %B0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsl %A0"     CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "neg %B0");

        case 8:
          /* Whole-byte move; source high byte may live elsewhere (%B1).  */
          return *len = 2, ("mov %A0,%B1" CR_TAB
                            "clr %B0");

        case 9:
          *len = 3;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0");

        case 10:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 11:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "andi %A0,0x0f");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "lsr %A0"     CR_TAB
                      "andi %A0,0x07");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* mul by 0x08: high part r1 holds the wanted bits.  */
              *len = 5;
              return ("ldi %3,0x08" CR_TAB
                      "mul %B0,%3"  CR_TAB
                      "mov %A0,r1"  CR_TAB
                      "clr %B0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "lsr %A0"     CR_TAB
                      "ldi %3,0x07" CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x08 multiplier in __zero_reg__ via set/bld.  */
              *len = 6;
              return ("set"        CR_TAB
                      "bld r1,3"   CR_TAB
                      "mul %B0,r1" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0"    CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0"  CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "clr %B0"      CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x04" CR_TAB
                      "mul %B0,%3"  CR_TAB
                      "mov %A0,r1"  CR_TAB
                      "clr %B0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: shift the low byte 6 more times.  */
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
                      "lsr %A0"     CR_TAB
                      "dec %B0"     CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");

        case 15:
          *len = 4;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");
        }
      /* No hand-optimized template matched: restore the caller's LEN
         (possibly NULL) and fall back to the generic loop below.  */
      len = t;
    }
  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
5765
02d9a2c3 5766
/* 24-bit logic shift right */

/* Output a logical right shift of the 24-bit (PSImode) register OP[0] by
   the amount OP[2] and return "".  OP[1] is the source register.  If
   PLEN != NULL, don't print anything and set *PLEN to the sequence length
   in words instead.  Note: in the AVR backend's operand letters, %A0, %B0
   and %C0 are the low, middle and high byte of operand 0.  */

const char*
avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Shift by one whole byte: move bytes down.  Copy direction
             depends on how destination and source overlap.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0", op, plen, 3);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 3);

        case 16:
          /* Shift by two bytes: only the high byte of the source survives.  */
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"  CR_TAB
                              "clr %C0", op, plen, 2);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Shift by 23 (or more): result is the former bit 23, i.e. 0 or 1.  */
          return avr_asm_len ("clr %A0"    CR_TAB
                              "sbrc %C0,7" CR_TAB
                              "inc %A0"    CR_TAB
                              "clr %B0"    CR_TAB
                              "clr %C0", op, plen, 5);
        } /* switch */
    }

  /* General case: loop emitted by out_shift_with_cnt, one 3-insn
     byte-wise right shift per iteration.  */
  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
5819
5820
/* 32bit logic shift right ((unsigned int)x >> i) */

/* Output a 32-bit logical right shift of OPERANDS[0] (source OPERANDS[1])
   by OPERANDS[2] and return the assembler template.  If LEN != NULL, store
   the sequence length (in words) there as a side effect.  %A0..%D0 denote
   the bytes of operand 0, least significant first.  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Shift count >= 32: the result is zero.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, ("clr %D0" CR_TAB
			      "clr %C0" CR_TAB
			      "movw %A0,%C0");
	  *len = 4;
	  return ("clr %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");

	case 8:
	  {
	    /* Byte shift: move bytes down; ordering avoids clobbering
	       overlapping source bytes.  */
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0");
	    else
	      return ("clr %D0" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* If the destination already holds the upper word in place,
	       only the top half needs clearing.  */
	    if (reg0 == reg1 + 2)
	      return *len = 2, ("clr %C0" CR_TAB
				"clr %D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 3, ("movw %A0,%C1" CR_TAB
				"clr %C0"      CR_TAB
				"clr %D0");
	    else
	      return *len = 4, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %C0"     CR_TAB
				"clr %D0");
	  }

	case 24:
	  /* Only the most significant source byte survives.  */
	  return *len = 4, ("mov %A0,%D1" CR_TAB
			    "clr %B0"     CR_TAB
			    "clr %C0"     CR_TAB
			    "clr %D0");

	case 31:
	  /* Result is the former sign bit: 0 or 1.  */
	  *len = 6;
	  return ("clr %A0"    CR_TAB
		  "sbrc %D0,7" CR_TAB
		  "inc %A0"    CR_TAB
		  "clr %B0"    CR_TAB
		  "clr %C0"    CR_TAB
		  "clr %D0");
	}
      len = t;
    }
  /* General case: loop over the shift count, 4 insns per step.  */
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
5909
6be828c1 5910
/* Output addition of register XOP[0] and compile time constant XOP[2]:

       XOP[0] = XOP[0] + XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform the
   addition; otherwise, set *PLEN to the length of the instruction sequence (in
   words) printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register.
   CODE == PLUS:  perform addition by using ADD instructions.
   CODE == MINUS: perform addition by using SUB instructions.
   Set *PCC to effect on cc0 according to respective CC_* insn attribute.  */

static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
{
  /* MODE of the operation.  */
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, mode, xval, mode);

  op[2] = xop[3];

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      /* Try a 16-bit ADIW/SBIW on an even-aligned word of the value.  */
      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                      *pcc = CC_SET_ZN;
                }

              /* Word handled: skip the second byte of this word, too.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Adding 0 still needs the carry once the operation started.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && !started
               && i == n_bytes - 1)
        {
          /* Only the most significant byte changes and the per-byte value
             is +1 or -1: a single INC/DEC suffices (no carry to propagate).  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          break;
        }

      switch (code)
        {
        case PLUS:

          /* ADD/ADC have no immediate form, so the scratch is required.  */
          gcc_assert (plen != NULL || REG_P (op[2]));

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          /* SUBI/SBCI take an immediate, but only on R16..R31.  */
          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

  /* No output doesn't change cc0.  */

  if (plen && *plen == 0)
    *pcc = CC_NONE;
}
6070
6071
/* Output addition of register XOP[0] and compile time constant XOP[2]:

      XOP[0] = XOP[0] + XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform the
   addition; otherwise, set *PLEN to the length of the instruction sequence (in
   words) printed with PLEN == NULL.
   If PCC != 0 then set *PCC to the instruction sequence's effect on the
   condition code (with respect to XOP[0]).  */

const char*
avr_out_plus (rtx *xop, int *plen, int *pcc)
{
  int len_plus, len_minus;
  int cc_plus, cc_minus, cc_dummy;

  /* Callers that don't care about the cc0 effect may pass PCC == NULL.  */
  if (!pcc)
    pcc = &cc_dummy;

  /* Work out if  XOP[0] += XOP[2]  is better or  XOP[0] -= -XOP[2].
     First pass in length-computation mode only (no output).  */

  avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
  avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);

  /* Prefer MINUS over PLUS if size is equal because it sets cc0.  */

  if (plen)
    {
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (xop, NULL, MINUS, pcc);
  else
    avr_out_plus_1 (xop, NULL, PLUS, pcc);

  return "";
}
6110
6111
eac146f2 6112/* Same as above but XOP has just 3 entries.
6113 Supply a dummy 4th operand. */
6114
6115const char*
6116avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6117{
6118 rtx op[4];
6119
6120 op[0] = xop[0];
6121 op[1] = xop[1];
6122 op[2] = xop[2];
6123 op[3] = NULL_RTX;
6124
6125 return avr_out_plus (op, plen, pcc);
6126}
6127
83921eda 6128
6129/* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6130
6131const char*
6132avr_out_plus64 (rtx addend, int *plen)
6133{
6134 int cc_dummy;
6135 rtx op[4];
6136
6137 op[0] = gen_rtx_REG (DImode, 18);
6138 op[1] = op[0];
6139 op[2] = addend;
6140 op[3] = NULL_RTX;
6141
6142 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6143
6144 return "";
6145}
6146
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
   time constant XOP[2]:

      XOP[0] = XOP[0] <op> XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform the
   operation; otherwise, set *PLEN to the length of the instruction sequence
   (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
   register or SCRATCH if no clobber register is needed for the operation.  */

const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          /* OR with 0x00 is a no-op for this byte.  */
          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* A single bit can be set via T-flag; reuse a T set by a
                 previous byte if possible.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* OR with 0xff: load 0xff, reusing a register that already
                 holds it from a previous byte when available.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          /* AND with 0xff is a no-op for this byte.  */
          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Clearing a single bit via cleared T-flag.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          /* XOR with 0x00 is a no-op for this byte.  */
          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Flipping only the MSB equals subtracting 0x80 (mod 256).  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
6296
915f904b 6297
6298/* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6299 PLEN != NULL: Set *PLEN to the length of that sequence.
6300 Return "". */
6301
6302const char*
6303avr_out_addto_sp (rtx *op, int *plen)
6304{
6305 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6306 int addend = INTVAL (op[0]);
6307
6308 if (plen)
6309 *plen = 0;
6310
6311 if (addend < 0)
6312 {
6313 if (flag_verbose_asm || flag_print_asm_name)
6314 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6315
6316 while (addend <= -pc_len)
6317 {
6318 addend += pc_len;
6319 avr_asm_len ("rcall .", op, plen, 1);
6320 }
6321
6322 while (addend++ < 0)
6323 avr_asm_len ("push __zero_reg__", op, plen, 1);
6324 }
6325 else if (addend > 0)
6326 {
6327 if (flag_verbose_asm || flag_print_asm_name)
6328 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6329
6330 while (addend-- > 0)
6331 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6332 }
6333
6334 return "";
6335}
6336
6337
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.
   OPERANDS[0] is the destination, OPERANDS[1] the source, OPERANDS[2] the
   rotate count in bits (a multiple of 8), OPERANDS[3] a scratch register
   or SCRATCH.  Always returns true.  */

bool
avr_rotate_bytes (rtx operands[])
{
    int i, j;
    enum machine_mode mode = GET_MODE (operands[0]);
    bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
    bool same_reg = rtx_equal_p (operands[0], operands[1]);
    int num = INTVAL (operands[2]);
    rtx scratch = operands[3];
    /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
       Word move if no scratch is needed, otherwise use size of scratch.  */
    enum machine_mode move_mode = QImode;
    int move_size, offset, size;

    if (num & 0xf)
      move_mode = QImode;
    else if ((mode == SImode && !same_reg) || !overlapped)
      move_mode = HImode;
    else
      move_mode = GET_MODE (scratch);

    /* Force DI rotate to use QI moves since other DI moves are currently split
       into QI moves so forward propagation works better.  */
    if (mode == DImode)
      move_mode = QImode;
    /* Make scratch smaller if needed.  */
    if (SCRATCH != GET_CODE (scratch)
        && HImode == GET_MODE (scratch)
        && QImode == move_mode)
      scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

    move_size = GET_MODE_SIZE (move_mode);
    /* Number of bytes/words to rotate.  */
    offset = (num >> 3) / move_size;
    /* Number of moves needed.  */
    size = GET_MODE_SIZE (mode) / move_size;
    /* Himode byte swap is special case to avoid a scratch register.  */
    if (mode == HImode && same_reg)
      {
	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
	rtx src, dst;
	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
	if (!rtx_equal_p (dst, src))
	  {
	     /* Classic three-XOR in-place swap of the two bytes.  */
	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	  }
      }
    else
      {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
	/* Create linked list of moves to determine move order.  */
	struct {
	  rtx src, dst;
	  int links;
	} move[MAX_SIZE + 8];
	int blocked, moves;

	gcc_assert (size <= MAX_SIZE);
	/* Generate list of subreg moves.  */
	for (i = 0; i < size; i++)
	  {
	    int from = i;
	    int to = (from + offset) % size;
	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
						mode, from * move_size);
	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
						mode, to   * move_size);
	    move[i].links = -1;
	  }
	/* Mark dependence where a dst of one move is the src of another move.
	   The first move is a conflict as it must wait until second is
	   performed.  We ignore moves to self - we catch this later.  */
	if (overlapped)
	  for (i = 0; i < size; i++)
	    if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
	      for (j = 0; j < size; j++)
		if (j != i && rtx_equal_p (move[j].src, move[i].dst))
		  {
		    /* The dst of move i is the src of move j.  */
		    move[i].links = j;
		    break;
		  }

	blocked = -1;
	moves = 0;
	/* Go through move list and perform non-conflicting moves.  As each
	   non-overlapping move is made, it may remove other conflicts
	   so the process is repeated until no conflicts remain.  */
	do
	  {
	    blocked = -1;
	    moves = 0;
	    /* Emit move where dst is not also a src or we have used that
	       src already.  */
	    for (i = 0; i < size; i++)
	      if (move[i].src != NULL_RTX)
		{
		  if (move[i].links == -1
		      || move[move[i].links].src == NULL_RTX)
		    {
		      moves++;
		      /* Ignore NOP moves to self.  */
		      if (!rtx_equal_p (move[i].dst, move[i].src))
			emit_move_insn (move[i].dst, move[i].src);

		      /* Remove  conflict from list.  */
		      move[i].src = NULL_RTX;
		    }
		  else
		    blocked = i;
		}

	    /* Check for deadlock.  This is when no moves occurred and we have
	       at least one blocked move.  */
	    if (moves == 0 && blocked != -1)
	      {
		/* Need to use scratch register to break deadlock.
		   Add move to put dst of blocked move into scratch.
		   When this move occurs, it will break chain deadlock.
		   The scratch register is substituted for real move.  */

		gcc_assert (SCRATCH != GET_CODE (scratch));

		move[size].src = move[blocked].dst;
		move[size].dst =  scratch;
		/* Scratch move is never blocked.  */
		move[size].links = -1;
		/* Make sure we have valid link.  */
		gcc_assert (move[blocked].links != -1);
		/* Replace src of  blocking move with scratch reg.  */
		move[move[blocked].links].src = scratch;
		/* Make dependent on scratch move occurring.  */
		move[blocked].links = size;
		size=size+1;
	      }
	  }
	while (blocked != -1);
      }
  return true;
}
6485
/* Modifies the length assigned to instruction INSN
   LEN is the initially computed length of the insn.
   Returns the (possibly adjusted) length: for insns whose output function
   can compute an exact length, that exact value; otherwise LEN unchanged.  */

int
adjust_insn_length (rtx insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (-1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each output function stores the exact
     length through its &len argument (PLEN != NULL mode: no asm printed).  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
    case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
    case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
      avr_out_plus_noclobber (op, &len, NULL); break;

    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
6577
674a8f0b 6578/* Return nonzero if register REG dead after INSN. */
a28e4651 6579
6580int
206a5129 6581reg_unused_after (rtx insn, rtx reg)
a28e4651 6582{
e511e253 6583 return (dead_or_set_p (insn, reg)
a28e4651 6584 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6585}
6586
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Scan forward over the remaining insns.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      /* A jump may transfer control anywhere; conservatively assume
	 REG is still needed.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  int retval = 0;

	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	    {
	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
	      rtx set = single_set (this_insn);

	      if (GET_CODE (this_insn) == CALL_INSN)
		code = CALL_INSN;
	      else if (GET_CODE (this_insn) == JUMP_INSN)
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      /* Any read of REG within the sequence means it is live.  */
	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* REG may be passed to the callee via a USE in the call's
	     function usage list.  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  /* A call-clobbered register not used by the call is dead.  */
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  return 1;
}
6691
5bd39e93 6692
6693/* Return RTX that represents the lower 16 bits of a constant address.
6694 Unfortunately, simplify_gen_subreg does not handle this case. */
6695
6696static rtx
6697avr_const_address_lo16 (rtx x)
6698{
6699 rtx lo16;
6700
6701 switch (GET_CODE (x))
6702 {
6703 default:
6704 break;
6705
6706 case CONST:
6707 if (PLUS == GET_CODE (XEXP (x, 0))
6708 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6709 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6710 {
6711 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6712 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6713
6714 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6715 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6716
6717 return lo16;
6718 }
6719
6720 break;
6721
6722 case SYMBOL_REF:
6723 {
6724 const char *name = XSTR (x, 0);
6725
6726 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6727 }
6728 }
6729
6730 avr_edump ("\n%?: %r\n", x);
6731 gcc_unreachable();
6732}
6733
6734
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels.
   Return true if the object was handled here, false to fall back to the
   default handler.  */

static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  /* Pointer-sized references into the text segment are emitted with the
     gs() modifier (word addresses for program memory).  */
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode) )
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* 24-bit address: emit the low 16 bits normally ...  */
      default_assemble_integer (avr_const_address_lo16 (x),
                                GET_MODE_SIZE (HImode), aligned_p);

      /* ... and a zero high byte, flagged via .warning because binutils
         lacks an hh8() relocation to emit the real third byte.  */
      fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
             " extension for hh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\"\n", asm_out_file);

      fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }

  return default_assemble_integer (x, size, aligned_p);
}
6769
5bd39e93 6770
7846db34 6771/* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6772
6773void
6774avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6775{
6776
6777 /* If the function has the 'signal' or 'interrupt' attribute, test to
6778 make sure that the name of the function is "__vector_NN" so as to
6779 catch when the user misspells the interrupt vector name. */
6780
6781 if (cfun->machine->is_interrupt)
6782 {
53026b2c 6783 if (!STR_PREFIX_P (name, "__vector"))
7846db34 6784 {
6785 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6786 "%qs appears to be a misspelled interrupt handler",
6787 name);
6788 }
6789 }
6790 else if (cfun->machine->is_signal)
6791 {
53026b2c 6792 if (!STR_PREFIX_P (name, "__vector"))
7846db34 6793 {
6794 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6795 "%qs appears to be a misspelled signal handler",
6796 name);
6797 }
6798 }
6799
6800 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6801 ASM_OUTPUT_LABEL (file, name);
6802}
6803
a28e4651 6804
6805/* Return value is nonzero if pseudos that have been
6806 assigned to registers of class CLASS would likely be spilled
6807 because registers of CLASS are needed for spill registers. */
6808
cb3959cc 6809static bool
6810avr_class_likely_spilled_p (reg_class_t c)
a28e4651 6811{
6812 return (c != ALL_REGS && c != ADDW_REGS);
6813}
6814
/* Valid attributes:
   progmem - Put data into program memory.
   signal - Make a function a hardware interrupt handler; interrupts
     remain disabled after the function prologue.
   interrupt - Make a function a hardware interrupt handler; interrupts
     are re-enabled after the function prologue.
   naked - Don't generate a function prologue/epilogue or a `ret'
     instruction.

   Only the `progmem' attribute is valid for a type.  */
6824
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  NODE is the entity the attribute is
   attached to, NAME is the attribute's identifier.  Set *NO_ADD_ATTRS
   when the attribute has been consumed or must be dropped.  */
static tree
avr_handle_progmem_attribute (tree *node, tree name,
			      tree args ATTRIBUTE_UNUSED,
			      int flags ATTRIBUTE_UNUSED,
			      bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	{
	  /* This is really a decl attribute, not a type attribute,
	     but try to handle it for GCC 3.0 backwards compatibility.  */

	  /* Attach the attribute to a fresh variant of the underlying
	     type instead of to the TYPE_DECL itself.  */

	  tree type = TREE_TYPE (*node);
	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
	  tree newtype = build_type_attribute_variant (type, attr);

	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
	  TREE_TYPE (*node) = newtype;
	  *no_add_attrs = true;
	}
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
	{
	  /* Static-storage variable: keep the attribute on the decl.  */
	  *no_add_attrs = false;
	}
      else
	{
	  /* Automatic variables cannot live in program memory.  */
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
6862
e3c541f0 6863/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6864 struct attribute_spec.handler. */
206a5129 6865
e3c541f0 6866static tree
206a5129 6867avr_handle_fndecl_attribute (tree *node, tree name,
6868 tree args ATTRIBUTE_UNUSED,
6869 int flags ATTRIBUTE_UNUSED,
6870 bool *no_add_attrs)
e3c541f0 6871{
6872 if (TREE_CODE (*node) != FUNCTION_DECL)
6873 {
67a779df 6874 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6875 name);
e3c541f0 6876 *no_add_attrs = true;
6877 }
6878
6879 return NULL_TREE;
6880}
a28e4651 6881
f86b386b 6882static tree
6883avr_handle_fntype_attribute (tree *node, tree name,
6884 tree args ATTRIBUTE_UNUSED,
6885 int flags ATTRIBUTE_UNUSED,
6886 bool *no_add_attrs)
6887{
6888 if (TREE_CODE (*node) != FUNCTION_TYPE)
6889 {
67a779df 6890 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6891 name);
f86b386b 6892 *no_add_attrs = true;
6893 }
6894
6895 return NULL_TREE;
6896}
6897
a45076aa 6898
/* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */

  /* Put data into program memory (flash).  */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  /* Interrupt handler; interrupts stay disabled after the prologue.  */
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  /* Interrupt handler; interrupts are re-enabled after the prologue.  */
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  /* Suppress prologue/epilogue generation.  */
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
6919
4202ef11 6920
6921/* Look if DECL shall be placed in program memory space by
6922 means of attribute `progmem' or some address-space qualifier.
6923 Return non-zero if DECL is data that must end up in Flash and
6924 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6925
5bd39e93 6926 Return 2 if DECL is located in 24-bit flash address-space
4202ef11 6927 Return 1 if DECL is located in 16-bit flash address-space
6928 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6929 Return 0 otherwise */
a28e4651 6930
6931int
148b2ce0 6932avr_progmem_p (tree decl, tree attributes)
a28e4651 6933{
6934 tree a;
6935
6936 if (TREE_CODE (decl) != VAR_DECL)
6937 return 0;
6938
590da9f2 6939 if (avr_decl_memx_p (decl))
5bd39e93 6940 return 2;
6941
590da9f2 6942 if (avr_decl_flash_p (decl))
4202ef11 6943 return 1;
6944
a28e4651 6945 if (NULL_TREE
148b2ce0 6946 != lookup_attribute ("progmem", attributes))
4202ef11 6947 return -1;
a28e4651 6948
4202ef11 6949 a = decl;
6950
a28e4651 6951 do
6952 a = TREE_TYPE(a);
6953 while (TREE_CODE (a) == ARRAY_TYPE);
6954
faf8f400 6955 if (a == error_mark_node)
6956 return 0;
6957
a28e4651 6958 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4202ef11 6959 return -1;
a28e4651 6960
6961 return 0;
6962}
6963
4202ef11 6964
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  /* Arrays of pointers: look at the element type.  */

  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
        target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.
         Refuse address spaces outside the device's flash.  */

      as = TYPE_ADDR_SPACE (target);

      if (!ADDR_SPACE_GENERIC_P (as)
          && (!TYPE_READONLY (target)
              || avr_addrspace[as].segment >= avr_current_device->n_flash))
        {
          return as;
        }

      /* Scan pointer's target type.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
7010
7011
/* Sanity check NODE so that all pointers targeting non-generic address
   spaces go along with CONST qualifier.  Writing to these address spaces
   should be detected and complained about as early as possible.
   Return true if NODE is OK, false if an error was issued.  */

static bool
avr_pgm_check_var_decl (tree node)
{
  const char *reason = NULL;

  addr_space_t as = ADDR_SPACE_GENERIC;

  /* The comma-expressions below rely on a non-zero AS meaning
     "offending address space found", hence GENERIC must be 0.  */

  gcc_assert (as == 0);

  if (avr_log.progmem)
    avr_edump ("%?: %t\n", node);

  /* Determine which kind of entity NODE is and whether its type
     (or return type) refers to a non-const non-generic pointer.  */

  switch (TREE_CODE (node))
    {
    default:
      break;

    case VAR_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "variable";
      break;

    case PARM_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "function parameter";
      break;

    case FIELD_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "structure field";
      break;

    case FUNCTION_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
          as)
        reason = "return type of function";
      break;

    case POINTER_TYPE:
      if (as = avr_nonconst_pointer_addrspace (node), as)
        reason = "pointer";
      break;
    }

  if (reason)
    {
      avr_edump ("%?: %s, %d, %d\n",
                 avr_addrspace[as].name,
                 avr_addrspace[as].segment, avr_current_device->n_flash);

      /* Distinguish "address space does not exist on this device"
         from "pointer into a valid address space lacks const".  */

      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          if (TYPE_P (node))
            error ("%qT uses address space %qs beyond flash of %qs",
                   node, avr_addrspace[as].name, avr_current_device->name);
          else
            error ("%s %q+D uses address space %qs beyond flash of %qs",
                   reason, node, avr_addrspace[as].name,
                   avr_current_device->name);
        }
      else
        {
          if (TYPE_P (node))
            error ("pointer targeting address space %qs must be const in %qT",
                   avr_addrspace[as].name, node);
          else
            error ("pointer targeting address space %qs must be const"
                   " in %s %q+D",
                   avr_addrspace[as].name, reason, node);
        }
    }

  return reason == NULL;
}
7089
7090
/* Implement `TARGET_INSERT_ATTRIBUTES'.
   Add the section attribute if the variable is in progmem.  Also
   diagnose non-const data placed in flash and address spaces that
   the current device does not have.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      /* The address space must exist on the selected device.  */

      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          error ("variable %q+D located in address space %qs"
                 " beyond flash of %qs",
                 node, avr_addrspace[as].name, avr_current_device->name);
        }

      /* Data in flash must be const; diagnose and name the feature
         (attribute or address-space keyword) that caused placement.  */

      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          const char *reason = "__attribute__((progmem))";

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
a28e4651 7140
7c2339f8 7141
7142/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7143/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7144/* Track need of __do_clear_bss. */
7145
7146void
a45076aa 7147avr_asm_output_aligned_decl_common (FILE * stream,
7148 const_tree decl ATTRIBUTE_UNUSED,
7149 const char *name,
7150 unsigned HOST_WIDE_INT size,
7c2339f8 7151 unsigned int align, bool local_p)
7152{
5be63f82 7153 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7154 There is no need to trigger __do_clear_bss code for them. */
7155
7156 if (!STR_PREFIX_P (name, "__gnu_lto"))
7157 avr_need_clear_bss_p = true;
7c2339f8 7158
7159 if (local_p)
21440ca3 7160 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7161 else
7162 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7c2339f8 7163}
7164
7165
/* Unnamed section callback for data_section
   to track need of __do_copy_data.  Also installed on
   readonly_data_section in avr_asm_init_sections.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  /* Something is emitted into the section, so the startup code that
     copies initialized data from flash to RAM will be needed.  */
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
7177
7178
/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  /* Something is emitted into .bss, so the startup code that zeroes
     the BSS will be needed.  */
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
7190
7191
/* Unnamed section callback for progmem*.data sections.
   DATA is the section name passed when the section was created in
   avr_asm_init_sections (see progmem_section_prefix).  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  /* Allocatable, non-writable progbits section.  */
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
           (const char*) data);
}
7200
7201
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  unsigned int n;

  /* Set up a section for jump tables.  Alignment is handled by
     ASM_OUTPUT_BEFORE_CASE_LABEL.  */

  if (AVR_HAVE_JMP_CALL)
    {
      /* Devices with JMP/CALL read jump tables as data: section need
         not be executable.  */
      progmem_swtable_section
        = get_unnamed_section (0, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"a\",@progbits");
    }
  else
    {
      /* Without JMP/CALL, jump tables are entered with IJMP: mark the
         section as code ("ax").  */
      progmem_swtable_section
        = get_unnamed_section (SECTION_CODE, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"ax\",@progbits");
    }

  /* One progmem section per flash segment; the section name is passed
     to the callback as its DATA argument.  */

  for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
    {
      progmem_section[n]
        = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                               progmem_section_prefix[n]);
    }

  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  */

  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
7241
7242
/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */
  {
    /* Temporarily let flag_data_sections track flag_function_sections
       so the default hook makes a per-function decision.  */

    int fdata = flag_data_sections;

    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      /* Pairs of (old prefix, replacement prefix).  */

      static const char* const prefix[] =
        {
          ".rodata", ".progmem.gcc_sw_table",
          ".gnu.linkonce.r.", ".gnu.linkonce.t."
        };

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
        {
          const char * old_prefix = prefix[i];
          const char * new_prefix = prefix[i+1];
          const char * name = frodata->named.name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *rname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              /* Jump tables are code only on devices without JMP/CALL,
                 cf. avr_asm_init_sections.  */
              flags &= ~SECTION_CODE;
              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

              return get_section (rname, flags, frodata->named.decl);
            }
        }
    }

  return progmem_swtable_section;
}
7300
7301
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      /* The address space is encoded in the MACH_DEP section flag bits;
         map it back to the matching progmem section prefix.  */

      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      int segment = avr_addrspace[as].segment;
      const char *old_prefix = ".rodata";
      const char *new_prefix = progmem_section_prefix[segment];

      if (STR_PREFIX_P (name, old_prefix))
        {
          /* Keep any user-supplied suffix after ".rodata".  */
          const char *sname = ACONCAT ((new_prefix,
                                        name + strlen (old_prefix), NULL));
          default_elf_asm_named_section (sname, flags, decl);
          return;
        }

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  /* Remember whether initialized-data copy resp. BSS clearing startup
     code will be needed, cf. avr_file_end.  */

  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
                            || STR_PREFIX_P (name, ".rodata")
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
7337
/* Compute the section flags for DECL to be placed in section NAME with
   relocation info RELOC; starts from default_section_type_flags and
   adjusts for .noinit and progmem data.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (STR_PREFIX_P (name, ".noinit"))
    {
      /* Only uninitialized variables belong in .noinit; mark it as
         BSS-like so no bits are emitted for it.  */
      if (decl && TREE_CODE (decl) == VAR_DECL
	  && DECL_INITIAL (decl) == NULL_TREE)
	flags |= SECTION_BSS;  /* @nobits */
      else
	warning (0, "only uninitialized variables can be placed in the "
		 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
         Set section flags as if it was in __flash to get the right
         section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      /* Encode the address space in the MACH_DEP flag bits; flash data
         is neither writable nor BSS.  */

      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}
7372
7c2339f8 7373
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && NULL_TREE == DECL_INITIAL (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      warning (OPT_Wuninitialized,
               "uninitialized variable %q+D put into "
               "program memory area", decl);
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* Record the decl's address space on its SYMBOL_REF so later passes
     can tell which load instructions to use.  */

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
   {
      rtx sym = XEXP (rtl, 0);
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
    }
}
7413
7414
/* Implement `TARGET_ASM_SELECT_SECTION'.
   Redirect progmem data from the default .rodata* sections into the
   progmem section that matches the decl's flash segment.  */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
      int segment = avr_addrspace[as].segment;

      if (sect->common.flags & SECTION_NAMED)
        {
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = progmem_section_prefix[segment];

          /* Preserve any suffix beyond ".rodata" in the new name.  */

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname, sect->common.flags, sect->named.decl);
            }
        }

      return progmem_section[segment];
    }

  return sect;
}
7447
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  /* SFR addresses in avr_addr are memory addresses; subtract the
     arch's offset to get the I/O addresses used by IN/OUT.  */

  int sfr_offset = avr_current_arch->sfr_offset;

  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_current_device->name);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  */

  if (!AVR_HAVE_8BIT_SP)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);

  /* Register numbers, not I/O addresses.  */

  fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
}
7481
7c2339f8 7482
/* Implement `TARGET_ASM_FILE_END'.  */
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

static void
avr_file_end (void)
{
  /* Output these only if there is anything in the
     .data* / .rodata* / .gnu.linkonce.* resp. .bss*
     input section(s) - some code size can be saved by not
     linking in the initialization code from libgcc if resp.
     sections are empty.  */

  if (avr_need_copy_data_p)
    fputs (".global __do_copy_data\n", asm_out_file);

  if (avr_need_clear_bss_p)
    fputs (".global __do_clear_bss\n", asm_out_file);
}
7502
7503/* Choose the order in which to allocate hard registers for
7504 pseudo-registers local to a basic block.
7505
7506 Store the desired register order in the array `reg_alloc_order'.
7507 Element 0 should be the register to allocate first; element 1, the
7508 next register; and so on. */
7509
7510void
206a5129 7511order_regs_for_local_alloc (void)
a28e4651 7512{
7513 unsigned int i;
e99c3a1d 7514 static const int order_0[] = {
a28e4651 7515 24,25,
7516 18,19,
7517 20,21,
7518 22,23,
7519 30,31,
7520 26,27,
7521 28,29,
7522 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7523 0,1,
7524 32,33,34,35
7525 };
e99c3a1d 7526 static const int order_1[] = {
a28e4651 7527 18,19,
7528 20,21,
7529 22,23,
7530 24,25,
7531 30,31,
7532 26,27,
7533 28,29,
7534 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7535 0,1,
7536 32,33,34,35
7537 };
e99c3a1d 7538 static const int order_2[] = {
a28e4651 7539 25,24,
7540 23,22,
7541 21,20,
7542 19,18,
7543 30,31,
7544 26,27,
7545 28,29,
7546 17,16,
7547 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7548 1,0,
7549 32,33,34,35
7550 };
7551
e99c3a1d 7552 const int *order = (TARGET_ORDER_1 ? order_1 :
7553 TARGET_ORDER_2 ? order_2 :
7554 order_0);
5c62f199 7555 for (i=0; i < ARRAY_SIZE (order_0); ++i)
a28e4651 7556 reg_alloc_order[i] = order[i];
7557}
7558
433a5f02 7559
dfc1e3e4 7560/* Implement `TARGET_REGISTER_MOVE_COST' */
7561
7562static int
7563avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7564 reg_class_t from, reg_class_t to)
7565{
7566 return (from == STACK_REG ? 6
7567 : to == STACK_REG ? 12
7568 : 2);
7569}
7570
7571
7572/* Implement `TARGET_MEMORY_MOVE_COST' */
7573
7574static int
a45076aa 7575avr_memory_move_cost (enum machine_mode mode,
7576 reg_class_t rclass ATTRIBUTE_UNUSED,
dfc1e3e4 7577 bool in ATTRIBUTE_UNUSED)
7578{
7579 return (mode == QImode ? 2
7580 : mode == HImode ? 4
7581 : mode == SImode ? 8
7582 : mode == SFmode ? 8
7583 : 16);
7584}
7585
7586
433a5f02 7587/* Mutually recursive subroutine of avr_rtx_cost for calculating the
7588 cost of an RTX operand given its context. X is the rtx of the
7589 operand, MODE is its mode, and OUTER is the rtx_code of this
7590 operand's parent operator. */
a28e4651 7591
fab7adbf 7592static int
f529eb25 7593avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
20d892d1 7594 int opno, bool speed)
a28e4651 7595{
433a5f02 7596 enum rtx_code code = GET_CODE (x);
7597 int total;
7598
a28e4651 7599 switch (code)
7600 {
433a5f02 7601 case REG:
7602 case SUBREG:
7603 return 0;
7604
a28e4651 7605 case CONST_INT:
433a5f02 7606 case CONST_DOUBLE:
7607 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7608
a28e4651 7609 default:
7610 break;
7611 }
433a5f02 7612
7613 total = 0;
20d892d1 7614 avr_rtx_costs (x, code, outer, opno, &total, speed);
433a5f02 7615 return total;
a28e4651 7616}
7617
ae86bb47 7618/* Worker function for AVR backend's rtx_cost function.
7619 X is rtx expression whose cost is to be calculated.
7620 Return true if the complete cost has been computed.
7621 Return false if subexpressions should be scanned.
7622 In either case, *TOTAL contains the cost result. */
433a5f02 7623
fab7adbf 7624static bool
ae86bb47 7625avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7626 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
fab7adbf 7627{
ef51d1e3 7628 enum rtx_code code = (enum rtx_code) codearg;
433a5f02 7629 enum machine_mode mode = GET_MODE (x);
7630 HOST_WIDE_INT val;
fab7adbf 7631
7632 switch (code)
7633 {
7634 case CONST_INT:
433a5f02 7635 case CONST_DOUBLE:
9685fb69 7636 case SYMBOL_REF:
f9fb96f9 7637 case CONST:
7638 case LABEL_REF:
433a5f02 7639 /* Immediate constants are as cheap as registers. */
7640 *total = 0;
7641 return true;
7642
7643 case MEM:
433a5f02 7644 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7645 return true;
7646
7647 case NEG:
7648 switch (mode)
fab7adbf 7649 {
433a5f02 7650 case QImode:
7651 case SFmode:
7652 *total = COSTS_N_INSNS (1);
7653 break;
7654
02d9a2c3 7655 case HImode:
7656 case PSImode:
7657 case SImode:
7658 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7659 break;
433a5f02 7660
7661 default:
7662 return false;
fab7adbf 7663 }
20d892d1 7664 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 7665 return true;
7666
7667 case ABS:
7668 switch (mode)
fab7adbf 7669 {
433a5f02 7670 case QImode:
7671 case SFmode:
7672 *total = COSTS_N_INSNS (1);
7673 break;
7674
7675 default:
7676 return false;
fab7adbf 7677 }
20d892d1 7678 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 7679 return true;
fab7adbf 7680
433a5f02 7681 case NOT:
7682 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
20d892d1 7683 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
fab7adbf 7684 return true;
7685
433a5f02 7686 case ZERO_EXTEND:
7687 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7688 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
20d892d1 7689 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 7690 return true;
7691
7692 case SIGN_EXTEND:
7693 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7694 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
20d892d1 7695 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 7696 return true;
7697
7698 case PLUS:
7699 switch (mode)
7700 {
7701 case QImode:
37ee98f3 7702 if (AVR_HAVE_MUL
7703 && MULT == GET_CODE (XEXP (x, 0))
7704 && register_operand (XEXP (x, 1), QImode))
7705 {
7706 /* multiply-add */
7707 *total = COSTS_N_INSNS (speed ? 4 : 3);
7708 /* multiply-add with constant: will be split and load constant. */
7709 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7710 *total = COSTS_N_INSNS (1) + *total;
7711 return true;
7712 }
433a5f02 7713 *total = COSTS_N_INSNS (1);
7714 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 7715 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 7716 break;
7717
7718 case HImode:
73cd2c42 7719 if (AVR_HAVE_MUL
7720 && (MULT == GET_CODE (XEXP (x, 0))
7721 || ASHIFT == GET_CODE (XEXP (x, 0)))
7722 && register_operand (XEXP (x, 1), HImode)
7723 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7724 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7725 {
37ee98f3 7726 /* multiply-add */
73cd2c42 7727 *total = COSTS_N_INSNS (speed ? 5 : 4);
37ee98f3 7728 /* multiply-add with constant: will be split and load constant. */
7729 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7730 *total = COSTS_N_INSNS (1) + *total;
73cd2c42 7731 return true;
7732 }
433a5f02 7733 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7734 {
7735 *total = COSTS_N_INSNS (2);
20d892d1 7736 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7737 speed);
433a5f02 7738 }
7739 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7740 *total = COSTS_N_INSNS (1);
7741 else
7742 *total = COSTS_N_INSNS (2);
7743 break;
7744
02d9a2c3 7745 case PSImode:
7746 if (!CONST_INT_P (XEXP (x, 1)))
7747 {
7748 *total = COSTS_N_INSNS (3);
7749 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7750 speed);
7751 }
7752 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7753 *total = COSTS_N_INSNS (2);
7754 else
7755 *total = COSTS_N_INSNS (3);
7756 break;
7757
433a5f02 7758 case SImode:
7759 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7760 {
7761 *total = COSTS_N_INSNS (4);
20d892d1 7762 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7763 speed);
433a5f02 7764 }
7765 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7766 *total = COSTS_N_INSNS (1);
7767 else
7768 *total = COSTS_N_INSNS (4);
7769 break;
7770
7771 default:
7772 return false;
7773 }
20d892d1 7774 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 7775 return true;
7776
7777 case MINUS:
37ee98f3 7778 if (AVR_HAVE_MUL
7779 && QImode == mode
7780 && register_operand (XEXP (x, 0), QImode)
7781 && MULT == GET_CODE (XEXP (x, 1)))
7782 {
7783 /* multiply-sub */
7784 *total = COSTS_N_INSNS (speed ? 4 : 3);
7785 /* multiply-sub with constant: will be split and load constant. */
7786 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7787 *total = COSTS_N_INSNS (1) + *total;
7788 return true;
7789 }
73cd2c42 7790 if (AVR_HAVE_MUL
7791 && HImode == mode
7792 && register_operand (XEXP (x, 0), HImode)
7793 && (MULT == GET_CODE (XEXP (x, 1))
7794 || ASHIFT == GET_CODE (XEXP (x, 1)))
7795 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7796 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7797 {
37ee98f3 7798 /* multiply-sub */
73cd2c42 7799 *total = COSTS_N_INSNS (speed ? 5 : 4);
37ee98f3 7800 /* multiply-sub with constant: will be split and load constant. */
7801 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7802 *total = COSTS_N_INSNS (1) + *total;
73cd2c42 7803 return true;
7804 }
02d9a2c3 7805 /* FALLTHRU */
433a5f02 7806 case AND:
7807 case IOR:
7808 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
20d892d1 7809 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 7810 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 7811 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 7812 return true;
7813
7814 case XOR:
7815 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
20d892d1 7816 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7817 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 7818 return true;
7819
7820 case MULT:
7821 switch (mode)
7822 {
7823 case QImode:
8cc5a1af 7824 if (AVR_HAVE_MUL)
f529eb25 7825 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7826 else if (!speed)
4f0e2214 7827 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
433a5f02 7828 else
7829 return false;
ba92127f 7830 break;
433a5f02 7831
7832 case HImode:
8cc5a1af 7833 if (AVR_HAVE_MUL)
0b90fc76 7834 {
7835 rtx op0 = XEXP (x, 0);
7836 rtx op1 = XEXP (x, 1);
7837 enum rtx_code code0 = GET_CODE (op0);
7838 enum rtx_code code1 = GET_CODE (op1);
7839 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7840 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7841
7842 if (ex0
7843 && (u8_operand (op1, HImode)
7844 || s8_operand (op1, HImode)))
7845 {
7846 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7847 return true;
7848 }
7849 if (ex0
7850 && register_operand (op1, HImode))
7851 {
7852 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7853 return true;
7854 }
7855 else if (ex0 || ex1)
7856 {
7857 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7858 return true;
7859 }
7860 else if (register_operand (op0, HImode)
7861 && (u8_operand (op1, HImode)
7862 || s8_operand (op1, HImode)))
7863 {
7864 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7865 return true;
7866 }
7867 else
7868 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7869 }
f529eb25 7870 else if (!speed)
4f0e2214 7871 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
433a5f02 7872 else
7873 return false;
ba92127f 7874 break;
433a5f02 7875
02d9a2c3 7876 case PSImode:
7877 if (!speed)
7878 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7879 else
7880 *total = 10;
7881 break;
7882
713e2ad9 7883 case SImode:
7884 if (AVR_HAVE_MUL)
7885 {
7886 if (!speed)
7887 {
7888 /* Add some additional costs besides CALL like moves etc. */
7889
7890 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7891 }
7892 else
7893 {
7894 /* Just a rough estimate. Even with -O2 we don't want bulky
7895 code expanded inline. */
7896
7897 *total = COSTS_N_INSNS (25);
7898 }
7899 }
7900 else
7901 {
7902 if (speed)
7903 *total = COSTS_N_INSNS (300);
7904 else
7905 /* Add some additional costs besides CALL like moves etc. */
7906 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7907 }
7908
7909 return true;
7910
433a5f02 7911 default:
7912 return false;
7913 }
20d892d1 7914 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7915 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 7916 return true;
7917
7918 case DIV:
7919 case MOD:
7920 case UDIV:
7921 case UMOD:
f529eb25 7922 if (!speed)
fd2db4d6 7923 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
433a5f02 7924 else
fd2db4d6 7925 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
20d892d1 7926 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
fd2db4d6 7927 /* For div/mod with const-int divisor we have at least the cost of
7928 loading the divisor. */
7929 if (CONST_INT_P (XEXP (x, 1)))
7930 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
 7931 /* Add some overall penalty for clobbering and moving around registers */
7932 *total += COSTS_N_INSNS (2);
433a5f02 7933 return true;
7934
8f14d2e0 7935 case ROTATE:
7936 switch (mode)
7937 {
7938 case QImode:
7939 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7940 *total = COSTS_N_INSNS (1);
7941
7942 break;
7943
7944 case HImode:
7945 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7946 *total = COSTS_N_INSNS (3);
7947
7948 break;
7949
7950 case SImode:
7951 if (CONST_INT_P (XEXP (x, 1)))
7952 switch (INTVAL (XEXP (x, 1)))
7953 {
7954 case 8:
7955 case 24:
7956 *total = COSTS_N_INSNS (5);
7957 break;
7958 case 16:
7959 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7960 break;
7961 }
7962 break;
7963
7964 default:
7965 return false;
7966 }
20d892d1 7967 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8f14d2e0 7968 return true;
7969
433a5f02 7970 case ASHIFT:
7971 switch (mode)
7972 {
7973 case QImode:
7974 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7975 {
f529eb25 7976 *total = COSTS_N_INSNS (!speed ? 4 : 17);
20d892d1 7977 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7978 speed);
433a5f02 7979 }
7980 else
7981 {
7982 val = INTVAL (XEXP (x, 1));
7983 if (val == 7)
7984 *total = COSTS_N_INSNS (3);
7985 else if (val >= 0 && val <= 7)
7986 *total = COSTS_N_INSNS (val);
7987 else
7988 *total = COSTS_N_INSNS (1);
7989 }
7990 break;
7991
7992 case HImode:
0b90fc76 7993 if (AVR_HAVE_MUL)
7994 {
7995 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7996 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7997 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7998 {
7999 *total = COSTS_N_INSNS (!speed ? 4 : 6);
8000 return true;
8001 }
8002 }
8003
37ee98f3 8004 if (const1_rtx == (XEXP (x, 1))
8005 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
8006 {
8007 *total = COSTS_N_INSNS (2);
8008 return true;
8009 }
8010
433a5f02 8011 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8012 {
f529eb25 8013 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 8014 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8015 speed);
433a5f02 8016 }
8017 else
8018 switch (INTVAL (XEXP (x, 1)))
8019 {
8020 case 0:
8021 *total = 0;
8022 break;
8023 case 1:
8024 case 8:
8025 *total = COSTS_N_INSNS (2);
8026 break;
8027 case 9:
8028 *total = COSTS_N_INSNS (3);
8029 break;
8030 case 2:
8031 case 3:
8032 case 10:
8033 case 15:
8034 *total = COSTS_N_INSNS (4);
8035 break;
8036 case 7:
8037 case 11:
8038 case 12:
8039 *total = COSTS_N_INSNS (5);
8040 break;
8041 case 4:
f529eb25 8042 *total = COSTS_N_INSNS (!speed ? 5 : 8);
433a5f02 8043 break;
8044 case 6:
12564c56 8045 *total = COSTS_N_INSNS (!speed ? 5 : 9);
433a5f02 8046 break;
8047 case 5:
f529eb25 8048 *total = COSTS_N_INSNS (!speed ? 5 : 10);
433a5f02 8049 break;
8050 default:
f529eb25 8051 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 8052 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8053 speed);
433a5f02 8054 }
8055 break;
8056
02d9a2c3 8057 case PSImode:
8058 if (!CONST_INT_P (XEXP (x, 1)))
8059 {
8060 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8061 }
8062 else
8063 switch (INTVAL (XEXP (x, 1)))
8064 {
8065 case 0:
8066 *total = 0;
8067 break;
8068 case 1:
8069 case 8:
8070 case 16:
8071 *total = COSTS_N_INSNS (3);
8072 break;
8073 case 23:
8074 *total = COSTS_N_INSNS (5);
8075 break;
8076 default:
8077 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8078 break;
8079 }
8080 break;
8081
433a5f02 8082 case SImode:
8083 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8084 {
f529eb25 8085 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 8086 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8087 speed);
433a5f02 8088 }
8089 else
8090 switch (INTVAL (XEXP (x, 1)))
8091 {
8092 case 0:
8093 *total = 0;
8094 break;
8095 case 24:
8096 *total = COSTS_N_INSNS (3);
8097 break;
8098 case 1:
8099 case 8:
8100 case 16:
8101 *total = COSTS_N_INSNS (4);
8102 break;
8103 case 31:
8104 *total = COSTS_N_INSNS (6);
8105 break;
8106 case 2:
f529eb25 8107 *total = COSTS_N_INSNS (!speed ? 7 : 8);
433a5f02 8108 break;
8109 default:
f529eb25 8110 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 8111 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8112 speed);
433a5f02 8113 }
8114 break;
8115
8116 default:
8117 return false;
8118 }
20d892d1 8119 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8120 return true;
8121
8122 case ASHIFTRT:
8123 switch (mode)
8124 {
8125 case QImode:
8126 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8127 {
f529eb25 8128 *total = COSTS_N_INSNS (!speed ? 4 : 17);
20d892d1 8129 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8130 speed);
433a5f02 8131 }
8132 else
8133 {
8134 val = INTVAL (XEXP (x, 1));
8135 if (val == 6)
8136 *total = COSTS_N_INSNS (4);
8137 else if (val == 7)
8138 *total = COSTS_N_INSNS (2);
8139 else if (val >= 0 && val <= 7)
8140 *total = COSTS_N_INSNS (val);
8141 else
8142 *total = COSTS_N_INSNS (1);
8143 }
8144 break;
8145
8146 case HImode:
8147 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8148 {
f529eb25 8149 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 8150 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8151 speed);
433a5f02 8152 }
8153 else
8154 switch (INTVAL (XEXP (x, 1)))
8155 {
8156 case 0:
8157 *total = 0;
8158 break;
8159 case 1:
8160 *total = COSTS_N_INSNS (2);
8161 break;
8162 case 15:
8163 *total = COSTS_N_INSNS (3);
8164 break;
8165 case 2:
8166 case 7:
8167 case 8:
8168 case 9:
8169 *total = COSTS_N_INSNS (4);
8170 break;
8171 case 10:
8172 case 14:
8173 *total = COSTS_N_INSNS (5);
8174 break;
8175 case 11:
f529eb25 8176 *total = COSTS_N_INSNS (!speed ? 5 : 6);
433a5f02 8177 break;
8178 case 12:
f529eb25 8179 *total = COSTS_N_INSNS (!speed ? 5 : 7);
433a5f02 8180 break;
8181 case 6:
8182 case 13:
f529eb25 8183 *total = COSTS_N_INSNS (!speed ? 5 : 8);
433a5f02 8184 break;
8185 default:
f529eb25 8186 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 8187 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8188 speed);
433a5f02 8189 }
8190 break;
8191
02d9a2c3 8192 case PSImode:
8193 if (!CONST_INT_P (XEXP (x, 1)))
8194 {
8195 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8196 }
8197 else
8198 switch (INTVAL (XEXP (x, 1)))
8199 {
8200 case 0:
8201 *total = 0;
8202 break;
8203 case 1:
8204 *total = COSTS_N_INSNS (3);
8205 break;
8206 case 16:
8207 case 8:
8208 *total = COSTS_N_INSNS (5);
8209 break;
8210 case 23:
8211 *total = COSTS_N_INSNS (4);
8212 break;
8213 default:
8214 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8215 break;
8216 }
8217 break;
8218
433a5f02 8219 case SImode:
8220 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8221 {
f529eb25 8222 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 8223 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8224 speed);
433a5f02 8225 }
8226 else
8227 switch (INTVAL (XEXP (x, 1)))
8228 {
8229 case 0:
8230 *total = 0;
8231 break;
8232 case 1:
8233 *total = COSTS_N_INSNS (4);
8234 break;
8235 case 8:
8236 case 16:
8237 case 24:
8238 *total = COSTS_N_INSNS (6);
8239 break;
8240 case 2:
f529eb25 8241 *total = COSTS_N_INSNS (!speed ? 7 : 8);
433a5f02 8242 break;
8243 case 31:
0aab73c2 8244 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
433a5f02 8245 break;
8246 default:
f529eb25 8247 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 8248 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8249 speed);
433a5f02 8250 }
8251 break;
8252
8253 default:
8254 return false;
8255 }
20d892d1 8256 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8257 return true;
8258
8259 case LSHIFTRT:
8260 switch (mode)
8261 {
8262 case QImode:
8263 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8264 {
f529eb25 8265 *total = COSTS_N_INSNS (!speed ? 4 : 17);
20d892d1 8266 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8267 speed);
433a5f02 8268 }
8269 else
8270 {
8271 val = INTVAL (XEXP (x, 1));
8272 if (val == 7)
8273 *total = COSTS_N_INSNS (3);
8274 else if (val >= 0 && val <= 7)
8275 *total = COSTS_N_INSNS (val);
8276 else
8277 *total = COSTS_N_INSNS (1);
8278 }
8279 break;
8280
8281 case HImode:
8282 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8283 {
f529eb25 8284 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 8285 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8286 speed);
433a5f02 8287 }
8288 else
8289 switch (INTVAL (XEXP (x, 1)))
8290 {
8291 case 0:
8292 *total = 0;
8293 break;
8294 case 1:
8295 case 8:
8296 *total = COSTS_N_INSNS (2);
8297 break;
8298 case 9:
8299 *total = COSTS_N_INSNS (3);
8300 break;
8301 case 2:
8302 case 10:
8303 case 15:
8304 *total = COSTS_N_INSNS (4);
8305 break;
8306 case 7:
8307 case 11:
8308 *total = COSTS_N_INSNS (5);
8309 break;
8310 case 3:
8311 case 12:
8312 case 13:
8313 case 14:
f529eb25 8314 *total = COSTS_N_INSNS (!speed ? 5 : 6);
433a5f02 8315 break;
8316 case 4:
f529eb25 8317 *total = COSTS_N_INSNS (!speed ? 5 : 7);
433a5f02 8318 break;
8319 case 5:
8320 case 6:
f529eb25 8321 *total = COSTS_N_INSNS (!speed ? 5 : 9);
433a5f02 8322 break;
8323 default:
f529eb25 8324 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 8325 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8326 speed);
433a5f02 8327 }
8328 break;
8329
02d9a2c3 8330 case PSImode:
8331 if (!CONST_INT_P (XEXP (x, 1)))
8332 {
8333 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8334 }
8335 else
8336 switch (INTVAL (XEXP (x, 1)))
8337 {
8338 case 0:
8339 *total = 0;
8340 break;
8341 case 1:
8342 case 8:
8343 case 16:
8344 *total = COSTS_N_INSNS (3);
8345 break;
8346 case 23:
8347 *total = COSTS_N_INSNS (5);
8348 break;
8349 default:
8350 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8351 break;
8352 }
8353 break;
8354
433a5f02 8355 case SImode:
8356 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8357 {
f529eb25 8358 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 8359 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8360 speed);
433a5f02 8361 }
8362 else
8363 switch (INTVAL (XEXP (x, 1)))
8364 {
8365 case 0:
8366 *total = 0;
8367 break;
8368 case 1:
8369 *total = COSTS_N_INSNS (4);
8370 break;
8371 case 2:
f529eb25 8372 *total = COSTS_N_INSNS (!speed ? 7 : 8);
433a5f02 8373 break;
8374 case 8:
8375 case 16:
8376 case 24:
8377 *total = COSTS_N_INSNS (4);
8378 break;
8379 case 31:
8380 *total = COSTS_N_INSNS (6);
8381 break;
8382 default:
f529eb25 8383 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 8384 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8385 speed);
433a5f02 8386 }
8387 break;
8388
8389 default:
8390 return false;
8391 }
20d892d1 8392 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8393 return true;
8394
8395 case COMPARE:
8396 switch (GET_MODE (XEXP (x, 0)))
fab7adbf 8397 {
433a5f02 8398 case QImode:
8399 *total = COSTS_N_INSNS (1);
8400 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 8401 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 8402 break;
8403
8404 case HImode:
8405 *total = COSTS_N_INSNS (2);
8406 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 8407 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 8408 else if (INTVAL (XEXP (x, 1)) != 0)
8409 *total += COSTS_N_INSNS (1);
8410 break;
8411
02d9a2c3 8412 case PSImode:
8413 *total = COSTS_N_INSNS (3);
8414 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8415 *total += COSTS_N_INSNS (2);
8416 break;
8417
433a5f02 8418 case SImode:
8419 *total = COSTS_N_INSNS (4);
8420 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 8421 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 8422 else if (INTVAL (XEXP (x, 1)) != 0)
8423 *total += COSTS_N_INSNS (3);
8424 break;
8425
8426 default:
8427 return false;
fab7adbf 8428 }
20d892d1 8429 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8430 return true;
8431
12bf3919 8432 case TRUNCATE:
8433 if (AVR_HAVE_MUL
8434 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8435 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8436 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8437 {
8438 if (QImode == mode || HImode == mode)
8439 {
8440 *total = COSTS_N_INSNS (2);
8441 return true;
8442 }
8443 }
8444 break;
8445
433a5f02 8446 default:
8447 break;
fab7adbf 8448 }
433a5f02 8449 return false;
fab7adbf 8450}
8451
ae86bb47 8452
8453/* Implement `TARGET_RTX_COSTS'. */
8454
8455static bool
8456avr_rtx_costs (rtx x, int codearg, int outer_code,
8457 int opno, int *total, bool speed)
8458{
8459 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8460 opno, total, speed);
8461
8462 if (avr_log.rtx_costs)
8463 {
8464 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8465 done, speed ? "speed" : "size", *total, outer_code, x);
8466 }
8467
8468 return done;
8469}
8470
8c3bcbe3 8471
8472/* Implement `TARGET_ADDRESS_COST'. */
a28e4651 8473
ec0457a8 8474static int
f529eb25 8475avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
a28e4651 8476{
8c3bcbe3 8477 int cost = 4;
8478
a28e4651 8479 if (GET_CODE (x) == PLUS
8c3bcbe3 8480 && CONST_INT_P (XEXP (x, 1))
8481 && (REG_P (XEXP (x, 0))
8482 || GET_CODE (XEXP (x, 0)) == SUBREG))
37ac04dc 8483 {
8c3bcbe3 8484 if (INTVAL (XEXP (x, 1)) >= 61)
8485 cost = 18;
37ac04dc 8486 }
8c3bcbe3 8487 else if (CONSTANT_ADDRESS_P (x))
8488 {
8489 if (optimize > 0
8490 && io_address_operand (x, QImode))
8491 cost = 2;
8492 }
8493
8494 if (avr_log.address_cost)
8495 avr_edump ("\n%?: %d = %r\n", cost, x);
8496
8497 return cost;
a28e4651 8498}
8499
164f5b34 8500/* Test for extra memory constraint 'Q'.
8501 It's a memory address based on Y or Z pointer with valid displacement. */
a28e4651 8502
8503int
164f5b34 8504extra_constraint_Q (rtx x)
a28e4651 8505{
ae86bb47 8506 int ok = 0;
8507
164f5b34 8508 if (GET_CODE (XEXP (x,0)) == PLUS
8509 && REG_P (XEXP (XEXP (x,0), 0))
8510 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8511 && (INTVAL (XEXP (XEXP (x,0), 1))
8512 <= MAX_LD_OFFSET (GET_MODE (x))))
a28e4651 8513 {
164f5b34 8514 rtx xx = XEXP (XEXP (x,0), 0);
8515 int regno = REGNO (xx);
ae86bb47 8516
8517 ok = (/* allocate pseudos */
8518 regno >= FIRST_PSEUDO_REGISTER
8519 /* strictly check */
8520 || regno == REG_Z || regno == REG_Y
8521 /* XXX frame & arg pointer checks */
8522 || xx == frame_pointer_rtx
8523 || xx == arg_pointer_rtx);
8524
8525 if (avr_log.constraints)
8526 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8527 ok, reload_completed, reload_in_progress, x);
a28e4651 8528 }
ae86bb47 8529
8530 return ok;
a28e4651 8531}
8532
20c71901 8533/* Convert condition code CONDITION to the valid AVR condition code. */
a28e4651 8534
8535RTX_CODE
206a5129 8536avr_normalize_condition (RTX_CODE condition)
a28e4651 8537{
8538 switch (condition)
8539 {
8540 case GT:
8541 return GE;
8542 case GTU:
8543 return GEU;
8544 case LE:
8545 return LT;
8546 case LEU:
8547 return LTU;
8548 default:
8ef66241 8549 gcc_unreachable ();
a28e4651 8550 }
8551}
8552
cffa155c 8553/* Helper function for `avr_reorg'. */
8554
8555static rtx
8556avr_compare_pattern (rtx insn)
8557{
8558 rtx pattern = single_set (insn);
8559
8560 if (pattern
8561 && NONJUMP_INSN_P (insn)
8562 && SET_DEST (pattern) == cc0_rtx
83921eda 8563 && GET_CODE (SET_SRC (pattern)) == COMPARE
8564 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8565 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
cffa155c 8566 {
8567 return pattern;
8568 }
8569
8570 return NULL_RTX;
8571}
8572
8573/* Helper function for `avr_reorg'. */
8574
8575/* Expansion of switch/case decision trees leads to code like
8576
8577 cc0 = compare (Reg, Num)
8578 if (cc0 == 0)
8579 goto L1
8580
8581 cc0 = compare (Reg, Num)
8582 if (cc0 > 0)
8583 goto L2
8584
8585 The second comparison is superfluous and can be deleted.
8586 The second jump condition can be transformed from a
8587 "difficult" one to a "simple" one because "cc0 > 0" and
8588 "cc0 >= 0" will have the same effect here.
8589
 8590 This function relies on the way switch/case is being expanded
8591 as binary decision tree. For example code see PR 49903.
8592
8593 Return TRUE if optimization performed.
8594 Return FALSE if nothing changed.
8595
8596 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8597
8598 We don't want to do this in text peephole because it is
8599 tedious to work out jump offsets there and the second comparison
 8600 might have been transformed by `avr_reorg'.
8601
8602 RTL peephole won't do because peephole2 does not scan across
8603 basic blocks. */
8604
static bool
avr_reorg_remove_redundant_compare (rtx insn1)
{
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both compares must be identical RTL, and both branches must be
     conditional jumps, i.e. SETs of PC from an IF_THEN_ELSE.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* The first branch must test cc0 == 0 of a REG/CONST_INT compare, both
     branches must jump to plain labels with (pc) in the ELSE arm, and the
     second branch condition must also be a cc0-against-zero comparison.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

     if (x == VAL)   goto L1;
     if (x > VAL)    goto L2;

     with easy

     if (x == VAL)   goto L1;
     if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT: case LTU:
    case GE: case GEU:
      break;

    case LE: case LEU:
    case GT: case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
8741
8742
8743/* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8744/* Optimize conditional jumps. */
a28e4651 8745
2efea8c0 8746static void
206a5129 8747avr_reorg (void)
a28e4651 8748{
cffa155c 8749 rtx insn = get_insns();
a28e4651 8750
cffa155c 8751 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
a28e4651 8752 {
cffa155c 8753 rtx pattern = avr_compare_pattern (insn);
8754
8755 if (!pattern)
8756 continue;
a28e4651 8757
cffa155c 8758 if (optimize
8759 && avr_reorg_remove_redundant_compare (insn))
8760 {
8761 continue;
8762 }
a28e4651 8763
cffa155c 8764 if (compare_diff_p (insn))
a28e4651 8765 {
cffa155c 8766 /* Now we work under compare insn with difficult branch. */
8767
8768 rtx next = next_real_insn (insn);
8769 rtx pat = PATTERN (next);
8770
8771 pattern = SET_SRC (pattern);
8772
8773 if (true_regnum (XEXP (pattern, 0)) >= 0
8774 && true_regnum (XEXP (pattern, 1)) >= 0)
8775 {
8776 rtx x = XEXP (pattern, 0);
8777 rtx src = SET_SRC (pat);
8778 rtx t = XEXP (src,0);
8779 PUT_CODE (t, swap_condition (GET_CODE (t)));
8780 XEXP (pattern, 0) = XEXP (pattern, 1);
8781 XEXP (pattern, 1) = x;
8782 INSN_CODE (next) = -1;
8783 }
8784 else if (true_regnum (XEXP (pattern, 0)) >= 0
8785 && XEXP (pattern, 1) == const0_rtx)
8786 {
8787 /* This is a tst insn, we can reverse it. */
8788 rtx src = SET_SRC (pat);
8789 rtx t = XEXP (src,0);
74f4459c 8790
cffa155c 8791 PUT_CODE (t, swap_condition (GET_CODE (t)));
8792 XEXP (pattern, 1) = XEXP (pattern, 0);
8793 XEXP (pattern, 0) = const0_rtx;
8794 INSN_CODE (next) = -1;
8795 INSN_CODE (insn) = -1;
8796 }
8797 else if (true_regnum (XEXP (pattern, 0)) >= 0
8798 && CONST_INT_P (XEXP (pattern, 1)))
8799 {
8800 rtx x = XEXP (pattern, 1);
8801 rtx src = SET_SRC (pat);
8802 rtx t = XEXP (src,0);
8803 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8804
8805 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8806 {
8807 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8808 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8809 INSN_CODE (next) = -1;
8810 INSN_CODE (insn) = -1;
8811 }
8812 }
8813 }
a28e4651 8814 }
8815}
8816
8817/* Returns register number for function return value.*/
8818
1086ba15 8819static inline unsigned int
206a5129 8820avr_ret_register (void)
a28e4651 8821{
8822 return 24;
8823}
8824
73475e84 8825/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8826
8827static bool
8828avr_function_value_regno_p (const unsigned int regno)
8829{
8830 return (regno == avr_ret_register ());
8831}
8832
f2b32076 8833/* Create an RTX representing the place where a
a28e4651 8834 library function returns a value of mode MODE. */
8835
73475e84 8836static rtx
8837avr_libcall_value (enum machine_mode mode,
8838 const_rtx func ATTRIBUTE_UNUSED)
a28e4651 8839{
8840 int offs = GET_MODE_SIZE (mode);
02d9a2c3 8841
8842 if (offs <= 4)
8843 offs = (offs + 1) & ~1;
8844
73475e84 8845 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
a28e4651 8846}
8847
8848/* Create an RTX representing the place where a
8849 function returns a value of data type VALTYPE. */
8850
73475e84 8851static rtx
1086ba15 8852avr_function_value (const_tree type,
8853 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8854 bool outgoing ATTRIBUTE_UNUSED)
a28e4651 8855{
1cb39658 8856 unsigned int offs;
73475e84 8857
a28e4651 8858 if (TYPE_MODE (type) != BLKmode)
1086ba15 8859 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
a28e4651 8860
8861 offs = int_size_in_bytes (type);
8862 if (offs < 2)
8863 offs = 2;
8864 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8865 offs = GET_MODE_SIZE (SImode);
8866 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8867 offs = GET_MODE_SIZE (DImode);
8868
73475e84 8869 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
a28e4651 8870}
8871
8a2a7305 8872int
8deb3959 8873test_hard_reg_class (enum reg_class rclass, rtx x)
8a2a7305 8874{
8875 int regno = true_regnum (x);
8876 if (regno < 0)
8877 return 0;
cb39cd35 8878
8deb3959 8879 if (TEST_HARD_REG_CLASS (rclass, regno))
cb39cd35 8880 return 1;
8881
8882 return 0;
8a2a7305 8883}
8884
2ebcbfe8 8885
8ef28ef2 8886/* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8887 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8888
static bool
avr_2word_insn_p (rtx insn)
{
  /* Only consider insns when the device's errata_skip flag is unset
     and the insn is exactly 2 words long.  */

  if (avr_current_device->errata_skip
      || !insn
      || 2 != get_attr_length (insn))
    {
      return false;
    }

  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
      {
        rtx set = single_set (insn);
        rtx src = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn.  */

        /* A store of a register (or zero) to a constant address,
           or a load from a constant address, is the 2-word form.  */

        if (MEM_P (dest)
            && (REG_P (src) || src == const0_rtx))
          {
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          }
        else if (REG_P (dest)
                 && MEM_P (src))
          {
            return CONSTANT_ADDRESS_P (XEXP (src, 0));
          }

        return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      /* CALL insns of length 2 are always skippable here.  */
      return true;
    }
}
8931
8932
2ebcbfe8 8933int
206a5129 8934jump_over_one_insn_p (rtx insn, rtx dest)
2ebcbfe8 8935{
8936 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8937 ? XEXP (dest, 0)
8938 : dest);
47fc0706 8939 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8940 int dest_addr = INSN_ADDRESSES (uid);
8ef28ef2 8941 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8942
8943 return (jump_offset == 1
8944 || (jump_offset == 2
8945 && avr_2word_insn_p (next_active_insn (insn))));
2ebcbfe8 8946}
a7690ba9 8947
8948/* Returns 1 if a value of mode MODE can be stored starting with hard
0af74aa0 8949 register number REGNO. On the enhanced core, anything larger than
8950 1 byte must start in even numbered register for "movw" to work
8951 (this way we don't have to check for odd registers everywhere). */
a7690ba9 8952
8953int
206a5129 8954avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
a7690ba9 8955{
3d4d979d 8956 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8957 Disallowing QI et al. in these regs might lead to code like
8958 (set (subreg:QI (reg:HI 28) n) ...)
8959 which will result in wrong code because reload does not
8960 handle SUBREGs of hard regsisters like this.
8961 This could be fixed in reload. However, it appears
8962 that fixing reload is not wanted by reload people. */
8963
8964 /* Any GENERAL_REGS register can hold 8-bit values. */
8965
8966 if (GET_MODE_SIZE (mode) == 1)
a7690ba9 8967 return 1;
60d76236 8968
3d4d979d 8969 /* FIXME: Ideally, the following test is not needed.
8970 However, it turned out that it can reduce the number
8971 of spill fails. AVR and it's poor endowment with
8972 address registers is extreme stress test for reload. */
8973
8974 if (GET_MODE_SIZE (mode) >= 4
8975 && regno >= REG_X)
60d76236 8976 return 0;
8977
3d4d979d 8978 /* All modes larger than 8 bits should start in an even register. */
9f42c829 8979
a7690ba9 8980 return !(regno & 1);
8981}
e511e253 8982
e511e253 8983
8b0ecac5 8984/* Implement `MODE_CODE_BASE_REG_CLASS'. */
8985
8986reg_class_t
8987avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
4202ef11 8988 addr_space_t as, RTX_CODE outer_code,
8b0ecac5 8989 RTX_CODE index_code ATTRIBUTE_UNUSED)
8990{
4202ef11 8991 if (!ADDR_SPACE_GENERIC_P (as))
8992 {
8993 return POINTER_Z_REGS;
8994 }
8995
f9efb148 8996 if (!avr_strict_X)
8997 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8998
8999 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8b0ecac5 9000}
9001
9002
9003/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9004
9005bool
9006avr_regno_mode_code_ok_for_base_p (int regno,
9007 enum machine_mode mode ATTRIBUTE_UNUSED,
f8a8fc7b 9008 addr_space_t as ATTRIBUTE_UNUSED,
f9efb148 9009 RTX_CODE outer_code,
8b0ecac5 9010 RTX_CODE index_code ATTRIBUTE_UNUSED)
9011{
f9efb148 9012 bool ok = false;
9013
4202ef11 9014 if (!ADDR_SPACE_GENERIC_P (as))
9015 {
9016 if (regno < FIRST_PSEUDO_REGISTER
9017 && regno == REG_Z)
9018 {
9019 return true;
9020 }
9021
9022 if (reg_renumber)
9023 {
9024 regno = reg_renumber[regno];
9025
9026 if (regno == REG_Z)
9027 {
9028 return true;
9029 }
9030 }
9031
9032 return false;
9033 }
9034
8b0ecac5 9035 if (regno < FIRST_PSEUDO_REGISTER
9036 && (regno == REG_X
9037 || regno == REG_Y
9038 || regno == REG_Z
9f42c829 9039 || regno == ARG_POINTER_REGNUM))
8b0ecac5 9040 {
f9efb148 9041 ok = true;
8b0ecac5 9042 }
f9efb148 9043 else if (reg_renumber)
8b0ecac5 9044 {
9045 regno = reg_renumber[regno];
9046
9047 if (regno == REG_X
9048 || regno == REG_Y
9049 || regno == REG_Z
9f42c829 9050 || regno == ARG_POINTER_REGNUM)
8b0ecac5 9051 {
f9efb148 9052 ok = true;
8b0ecac5 9053 }
9054 }
f9efb148 9055
9056 if (avr_strict_X
9057 && PLUS == outer_code
9058 && regno == REG_X)
9059 {
9060 ok = false;
9061 }
9062
9063 return ok;
8b0ecac5 9064}
9065
9066
5bca95a8 9067/* A helper for `output_reload_insisf' and `output_reload_inhi'. */
28913f6b 9068/* Set 32-bit register OP[0] to compile-time constant OP[1].
9069 CLOBBER_REG is a QI clobber register or NULL_RTX.
9070 LEN == NULL: output instructions.
9071 LEN != NULL: set *LEN to the length of the instruction sequence
9072 (in words) printed with LEN = NULL.
 9073 If CLEAR_P is true, OP[0] had been cleared to Zero already.
33817c7e 9074 If CLEAR_P is false, nothing is known about OP[0].
9075
9076 The effect on cc0 is as follows:
9077
f4806884 9078 Load 0 to any register except ZERO_REG : NONE
9079 Load ld register with any value : NONE
9080 Anything else: : CLOBBER */
9ce2d202 9081
28913f6b 9082static void
2f2d376f 9083output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
e511e253 9084{
9ce2d202 9085 rtx src = op[1];
9086 rtx dest = op[0];
9087 rtx xval, xdest[4];
9088 int ival[4];
9089 int clobber_val = 1234;
9090 bool cooked_clobber_p = false;
9091 bool set_p = false;
9ce2d202 9092 enum machine_mode mode = GET_MODE (dest);
4202ef11 9093 int n, n_bytes = GET_MODE_SIZE (mode);
9ce2d202 9094
a49907f9 9095 gcc_assert (REG_P (dest)
9096 && CONSTANT_P (src));
37ac04dc 9097
9098 if (len)
9ce2d202 9099 *len = 0;
9100
9101 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9102 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9103
02d9a2c3 9104 if (REGNO (dest) < 16
9105 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
37ac04dc 9106 {
4202ef11 9107 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
37ac04dc 9108 }
e511e253 9109
a49907f9 9110 /* We might need a clobber reg but don't have one. Look at the value to
9111 be loaded more closely. A clobber is only needed if it is a symbol
9112 or contains a byte that is neither 0, -1 or a power of 2. */
9ce2d202 9113
9114 if (NULL_RTX == clobber_reg
2f2d376f 9115 && !test_hard_reg_class (LD_REGS, dest)
a49907f9 9116 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
9117 || !avr_popcount_each_byte (src, n_bytes,
9118 (1 << 0) | (1 << 1) | (1 << 8))))
e511e253 9119 {
2f2d376f 9120 /* We have no clobber register but need one. Cook one up.
9121 That's cheaper than loading from constant pool. */
9122
9123 cooked_clobber_p = true;
4202ef11 9124 clobber_reg = all_regs_rtx[REG_Z + 1];
2f2d376f 9125 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
e511e253 9126 }
9ce2d202 9127
9128 /* Now start filling DEST from LSB to MSB. */
9129
a49907f9 9130 for (n = 0; n < n_bytes; n++)
e511e253 9131 {
a49907f9 9132 int ldreg_p;
9ce2d202 9133 bool done_byte = false;
4202ef11 9134 int j;
9ce2d202 9135 rtx xop[3];
9136
a49907f9 9137 /* Crop the n-th destination byte. */
9138
9ce2d202 9139 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
a49907f9 9140 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
9141
9142 if (!CONST_INT_P (src)
9143 && !CONST_DOUBLE_P (src))
9144 {
9145 static const char* const asm_code[][2] =
9146 {
9147 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9148 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9149 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9150 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9151 };
9152
9153 xop[0] = xdest[n];
9154 xop[1] = src;
9155 xop[2] = clobber_reg;
9156
ed2541ea 9157 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9158
a49907f9 9159 continue;
9160 }
9161
9162 /* Crop the n-th source byte. */
9163
9164 xval = simplify_gen_subreg (QImode, src, mode, n);
9ce2d202 9165 ival[n] = INTVAL (xval);
9166
9167 /* Look if we can reuse the low word by means of MOVW. */
9168
9169 if (n == 2
a49907f9 9170 && n_bytes >= 4
9ce2d202 9171 && AVR_HAVE_MOVW)
9172 {
9173 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9174 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9175
9176 if (INTVAL (lo16) == INTVAL (hi16))
9177 {
28913f6b 9178 if (0 != INTVAL (lo16)
9179 || !clear_p)
9180 {
9181 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9182 }
9183
9ce2d202 9184 break;
9185 }
9186 }
9187
33817c7e 9188 /* Don't use CLR so that cc0 is set as expected. */
9ce2d202 9189
9190 if (ival[n] == 0)
9191 {
28913f6b 9192 if (!clear_p)
f4806884 9193 avr_asm_len (ldreg_p ? "ldi %0,0"
9194 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9195 : "mov %0,__zero_reg__",
33817c7e 9196 &xdest[n], len, 1);
9ce2d202 9197 continue;
9198 }
9199
9200 if (clobber_val == ival[n]
9201 && REGNO (clobber_reg) == REGNO (xdest[n]))
9202 {
9203 continue;
9204 }
9205
9206 /* LD_REGS can use LDI to move a constant value */
9207
a49907f9 9208 if (ldreg_p)
9ce2d202 9209 {
9210 xop[0] = xdest[n];
9211 xop[1] = xval;
9212 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9213 continue;
9214 }
9215
9216 /* Try to reuse value already loaded in some lower byte. */
9217
9218 for (j = 0; j < n; j++)
9219 if (ival[j] == ival[n])
9220 {
9221 xop[0] = xdest[n];
9222 xop[1] = xdest[j];
9223
9224 avr_asm_len ("mov %0,%1", xop, len, 1);
9225 done_byte = true;
9226 break;
9227 }
9228
9229 if (done_byte)
9230 continue;
9231
9232 /* Need no clobber reg for -1: Use CLR/DEC */
9233
9234 if (-1 == ival[n])
9235 {
28913f6b 9236 if (!clear_p)
9237 avr_asm_len ("clr %0", &xdest[n], len, 1);
9238
9239 avr_asm_len ("dec %0", &xdest[n], len, 1);
9240 continue;
9241 }
9242 else if (1 == ival[n])
9243 {
9244 if (!clear_p)
9245 avr_asm_len ("clr %0", &xdest[n], len, 1);
9246
9247 avr_asm_len ("inc %0", &xdest[n], len, 1);
9ce2d202 9248 continue;
9249 }
9250
9251 /* Use T flag or INC to manage powers of 2 if we have
9252 no clobber reg. */
9253
9254 if (NULL_RTX == clobber_reg
9255 && single_one_operand (xval, QImode))
9256 {
9ce2d202 9257 xop[0] = xdest[n];
9258 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9259
9260 gcc_assert (constm1_rtx != xop[1]);
9261
9262 if (!set_p)
9263 {
9264 set_p = true;
9265 avr_asm_len ("set", xop, len, 1);
9266 }
9267
28913f6b 9268 if (!clear_p)
9269 avr_asm_len ("clr %0", xop, len, 1);
9270
9271 avr_asm_len ("bld %0,%1", xop, len, 1);
9ce2d202 9272 continue;
9273 }
9274
9275 /* We actually need the LD_REGS clobber reg. */
9276
9277 gcc_assert (NULL_RTX != clobber_reg);
9278
9279 xop[0] = xdest[n];
9280 xop[1] = xval;
9281 xop[2] = clobber_reg;
9282 clobber_val = ival[n];
9283
9284 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9285 "mov %0,%2", xop, len, 2);
e511e253 9286 }
9ce2d202 9287
9288 /* If we cooked up a clobber reg above, restore it. */
9289
9290 if (cooked_clobber_p)
e511e253 9291 {
9ce2d202 9292 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
e511e253 9293 }
28913f6b 9294}
9295
9296
2f2d376f 9297/* Reload the constant OP[1] into the HI register OP[0].
9298 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9299 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9300 need a clobber reg or have to cook one up.
9301
9302 PLEN == NULL: Output instructions.
9303 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9304 by the insns printed.
9305
9306 Return "". */
9307
9308const char*
9309output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9310{
a49907f9 9311 output_reload_in_const (op, clobber_reg, plen, false);
2f2d376f 9312 return "";
9313}
9314
9315
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
   by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      /* Dry-run both variants (LEN != NULL prints nothing).  */
      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          /* Account for the 3-word clearing preamble emitted above.  */
          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
b681d971 9375
02d9a2c3 9376const char *
9377avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9378{
02d9a2c3 9379 output_reload_in_const (op, clobber_reg, len, false);
9380 return "";
9381}
9382
b681d971 9383
91b18013 9384void
206a5129 9385avr_output_addr_vec_elt (FILE *stream, int value)
91b18013 9386{
90ef7269 9387 if (AVR_HAVE_JMP_CALL)
9388 fprintf (stream, "\t.word gs(.L%d)\n", value);
91b18013 9389 else
9390 fprintf (stream, "\trjmp .L%d\n", value);
91b18013 9391}
9392
5431d4c2 9393/* Returns true if SCRATCH are safe to be allocated as a scratch
51fe7379 9394 registers (for a define_peephole2) in the current function. */
9395
a45076aa 9396static bool
5431d4c2 9397avr_hard_regno_scratch_ok (unsigned int regno)
51fe7379 9398{
5431d4c2 9399 /* Interrupt functions can only use registers that have already been saved
9400 by the prologue, even if they would normally be call-clobbered. */
51fe7379 9401
5431d4c2 9402 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9403 && !df_regs_ever_live_p (regno))
9404 return false;
9405
3d4d979d 9406 /* Don't allow hard registers that might be part of the frame pointer.
9407 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9408 and don't care for a frame pointer that spans more than one register. */
9409
9410 if ((!reload_completed || frame_pointer_needed)
9411 && (regno == REG_Y || regno == REG_Y + 1))
9412 {
9413 return false;
9414 }
9415
5431d4c2 9416 return true;
51fe7379 9417}
4af90ac0 9418
afe7695c 9419/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9420
9421int
3d4d979d 9422avr_hard_regno_rename_ok (unsigned int old_reg,
afe7695c 9423 unsigned int new_reg)
9424{
9425 /* Interrupt functions can only use registers that have already been
9426 saved by the prologue, even if they would normally be
9427 call-clobbered. */
9428
9429 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9430 && !df_regs_ever_live_p (new_reg))
9431 return 0;
9432
3d4d979d 9433 /* Don't allow hard registers that might be part of the frame pointer.
9434 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9435 and don't care for a frame pointer that spans more than one register. */
9436
9437 if ((!reload_completed || frame_pointer_needed)
9438 && (old_reg == REG_Y || old_reg == REG_Y + 1
9439 || new_reg == REG_Y || new_reg == REG_Y + 1))
9440 {
9441 return 0;
9442 }
9443
afe7695c 9444 return 1;
9445}
9446
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  /* 4+ words means the target is out of RJMP/branch range: use JMP.  */
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on the sign bit degenerate to EQ/NE on that bit.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  /* When skipping over the jump instead of taking it, invert the test.  */

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      /* Low I/O addresses can be tested directly with SBIS/SBIC;
         others must be read into __tmp_reg__ first.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      /* %T1%T2 prints the register byte and bit selected by operand 2.  */

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
9aa7484c 9514
6644435d 9515/* Worker function for TARGET_ASM_CONSTRUCTOR. */
9516
9aa7484c 9517static void
206a5129 9518avr_asm_out_ctor (rtx symbol, int priority)
9aa7484c 9519{
9520 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9521 default_ctor_section_asm_out_constructor (symbol, priority);
9522}
9523
6644435d 9524/* Worker function for TARGET_ASM_DESTRUCTOR. */
9525
9aa7484c 9526static void
206a5129 9527avr_asm_out_dtor (rtx symbol, int priority)
9aa7484c 9528{
9529 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9530 default_dtor_section_asm_out_destructor (symbol, priority);
9531}
9532
6644435d 9533/* Worker function for TARGET_RETURN_IN_MEMORY. */
9534
cfd55026 9535static bool
fb80456a 9536avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
cfd55026 9537{
39cc9599 9538 if (TYPE_MODE (type) == BLKmode)
9539 {
9540 HOST_WIDE_INT size = int_size_in_bytes (type);
9541 return (size == -1 || size > 8);
9542 }
9543 else
9544 return false;
cfd55026 9545}
9546
b4a3be2d 9547/* Worker function for CASE_VALUES_THRESHOLD. */
9548
a45076aa 9549static unsigned int
9550avr_case_values_threshold (void)
b4a3be2d 9551{
9552 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9553}
9554
4202ef11 9555
9556/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9557
9558static enum machine_mode
5bd39e93 9559avr_addr_space_address_mode (addr_space_t as)
4202ef11 9560{
9d734fa8 9561 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
4202ef11 9562}
9563
9564
9565/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9566
9567static enum machine_mode
5bd39e93 9568avr_addr_space_pointer_mode (addr_space_t as)
4202ef11 9569{
9d734fa8 9570 return avr_addr_space_address_mode (as);
4202ef11 9571}
9572
9573
9574/* Helper for following function. */
9575
9576static bool
9577avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9578{
9579 gcc_assert (REG_P (reg));
9580
9581 if (strict)
9582 {
9583 return REGNO (reg) == REG_Z;
9584 }
9585
9586 /* Avoid combine to propagate hard regs. */
9587
9588 if (can_create_pseudo_p()
9589 && REGNO (reg) < REG_Z)
9590 {
9591 return false;
9592 }
9593
9594 return true;
9595}
9596
9597
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.
   Generic space defers to avr_legitimate_address_p; flash spaces accept
   (REG Z) and (POST_INC Z); 24-bit MEMX accepts a pseudo or a LO_SUM
   of a high-byte register with Z.  */

static bool
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      /* Plain registers are only ok before register allocation.  */

      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional -mlog= diagnostics; no effect on the result.  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
9678
9679
9680/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9681
9682static rtx
9683avr_addr_space_legitimize_address (rtx x, rtx old_x,
9684 enum machine_mode mode, addr_space_t as)
9685{
9686 if (ADDR_SPACE_GENERIC_P (as))
9687 return avr_legitimize_address (x, old_x, mode);
9688
9689 if (avr_log.legitimize_address)
9690 {
9691 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9692 }
9693
9694 return old_x;
9695}
9696
9697
/* Implement `TARGET_ADDR_SPACE_CONVERT'.
   Convert SRC from the address space of TYPE_FROM to that of TYPE_TO.
   16-bit -> 24-bit casts are linearized by stuffing a segment byte in
   the MSB; 24-bit -> 16-bit casts simply drop the MSB.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST/PLUS wrappers to get at the bare symbol, if any.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: no conversion needed.  */

  return src;
}
9765
9766
9767/* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9768
9769static bool
ed2541ea 9770avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9771 addr_space_t superset ATTRIBUTE_UNUSED)
4202ef11 9772{
ed2541ea 9773 /* Allow any kind of pointer mess. */
5bd39e93 9774
9775 return true;
9776}
9777
9778
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      " "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  enum machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Flash is read-only: cannot be the destination.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only constant, positive byte counts are handled.  */

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: split into 16-bit base + high byte.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      /* NOTE(review): this branch uses `count < 0x100' while the branch
         below uses `count <= 0x100' — presumably intentional because the
         movmemx loop counter lives in a fixed register, but confirm.  */

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      /* Multi-segment flash: route the segment through RAMPZ.  */

      if (segment
          && avr_current_device->n_flash > 1)
        {
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      /* High address byte travels in R23 by convention of movmemx_*.  */

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
9884
9885
5bd39e93 9886/* Print assembler for movmem_qi, movmem_hi insns...
f1222c71 9887 $0 : Address Space
9888 $1, $2 : Loop register
9889 Z : Source address
9890 X : Destination address
5bd39e93 9891*/
9892
9893const char*
f1222c71 9894avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
5bd39e93 9895{
f1222c71 9896 addr_space_t as = (addr_space_t) INTVAL (op[0]);
9897 enum machine_mode loop_mode = GET_MODE (op[1]);
9898 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
9899 rtx xop[3];
5bd39e93 9900
9901 if (plen)
9902 *plen = 0;
9903
f1222c71 9904 xop[0] = op[0];
9905 xop[1] = op[1];
9906 xop[2] = tmp_reg_rtx;
9907
5bd39e93 9908 /* Loop label */
9909
9910 avr_asm_len ("0:", xop, plen, 0);
9911
9912 /* Load with post-increment */
9913
9914 switch (as)
9915 {
9916 default:
9917 gcc_unreachable();
9918
9919 case ADDR_SPACE_GENERIC:
9920
f1222c71 9921 avr_asm_len ("ld %2,Z+", xop, plen, 1);
5bd39e93 9922 break;
9923
590da9f2 9924 case ADDR_SPACE_FLASH:
5bd39e93 9925
9926 if (AVR_HAVE_LPMX)
f1222c71 9927 avr_asm_len ("lpm %2,%Z+", xop, plen, 1);
5bd39e93 9928 else
9929 avr_asm_len ("lpm" CR_TAB
f1222c71 9930 "adiw r30,1", xop, plen, 2);
5bd39e93 9931 break;
9932
590da9f2 9933 case ADDR_SPACE_FLASH1:
9934 case ADDR_SPACE_FLASH2:
9935 case ADDR_SPACE_FLASH3:
9936 case ADDR_SPACE_FLASH4:
9937 case ADDR_SPACE_FLASH5:
5bd39e93 9938
9939 if (AVR_HAVE_ELPMX)
f1222c71 9940 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
5bd39e93 9941 else
9942 avr_asm_len ("elpm" CR_TAB
f1222c71 9943 "adiw r30,1", xop, plen, 2);
5bd39e93 9944 break;
9945 }
9946
9947 /* Store with post-increment */
9948
f1222c71 9949 avr_asm_len ("st X+,%2", xop, plen, 1);
5bd39e93 9950
9951 /* Decrement loop-counter and set Z-flag */
9952
9953 if (QImode == loop_mode)
9954 {
f1222c71 9955 avr_asm_len ("dec %1", xop, plen, 1);
5bd39e93 9956 }
9957 else if (sbiw_p)
9958 {
f1222c71 9959 avr_asm_len ("sbiw %1,1", xop, plen, 1);
5bd39e93 9960 }
9961 else
9962 {
f1222c71 9963 avr_asm_len ("subi %A1,1" CR_TAB
9964 "sbci %B1,0", xop, plen, 2);
5bd39e93 9965 }
9966
9967 /* Loop until zero */
9968
9969 return avr_asm_len ("brne 0b", xop, plen, 1);
9970}
9971
9972
9973\f
/* Helper for __builtin_avr_delay_cycles: emit a sequence of delay loops
   plus trailing NOPs that together burn exactly OPERANDS0 CPU cycles.
   Each delay_cycles_N pattern consumes a fixed overhead plus a per-
   iteration cost; the constants below encode those costs.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit counter loop: 6 cycles/iteration + 9 cycles overhead.  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 24-bit counter loop: 5 cycles/iteration + 7 cycles overhead.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 16-bit counter loop: 4 cycles/iteration + 5 cycles overhead.  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
      cycles -= cycles_used;
    }

  /* 8-bit counter loop: 3 cycles/iteration.  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
      cycles -= cycles_used;
    }

  /* Mop up the remainder with 2-cycle and 1-cycle NOPs.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
10033
384f6361 10034
10035/* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10036
10037static double_int
10038avr_double_int_push_digit (double_int val, int base,
10039 unsigned HOST_WIDE_INT digit)
10040{
10041 val = 0 == base
10042 ? double_int_lshift (val, 32, 64, false)
10043 : double_int_mul (val, uhwi_to_double_int (base));
10044
10045 return double_int_add (val, uhwi_to_double_int (digit));
10046}
10047
10048
10049/* Compute the image of x under f, i.e. perform x --> f(x) */
10050
10051static int
10052avr_map (double_int f, int x)
10053{
10054 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
10055}
10056
10057
/* Return some metrics of map A.  */

/* Selector values for avr_map_metric's MODE argument; each names one
   statistic computed over the nibbles 0..7 of a bit-map.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };
10077
10078static unsigned
10079avr_map_metric (double_int a, int mode)
384f6361 10080{
15b84087 10081 unsigned i, metric = 0;
384f6361 10082
15b84087 10083 for (i = 0; i < 8; i++)
10084 {
10085 unsigned ai = avr_map (a, i);
384f6361 10086
15b84087 10087 if (mode == MAP_FIXED_0_7)
10088 metric += ai == i;
10089 else if (mode == MAP_NONFIXED_0_7)
10090 metric += ai < 8 && ai != i;
10091 else if (mode == MAP_MASK_FIXED_0_7)
10092 metric |= ((unsigned) (ai == i)) << i;
10093 else if (mode == MAP_PREIMAGE_0_7)
10094 metric += ai < 8;
10095 else if (mode == MAP_MASK_PREIMAGE_F)
10096 metric |= ((unsigned) (ai == 0xf)) << i;
10097 else
10098 gcc_unreachable();
10099 }
10100
10101 return metric;
384f6361 10102}
10103
10104
15b84087 10105/* Return true if IVAL has a 0xf in its hexadecimal representation
10106 and false, otherwise. Only nibbles 0..7 are taken into account.
10107 Used as constraint helper for C0f and Cxf. */
384f6361 10108
15b84087 10109bool
10110avr_has_nibble_0xf (rtx ival)
10111{
10112 return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
10113}
384f6361 10114
384f6361 10115
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example: Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  double_int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
384f6361 10151
/* Candidate operations G for avr_map_decompose: rotations and shifts of
   a QI value, each with its inverse nibble-map GINV and its instruction
   cost.  The MAP member is scratch, filled in per decomposition.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
  };
10170
10171
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.
   VAL_CONST_P says whether the value operand of the insertion
   is a compile-time constant (affects costing only).  */

static avr_map_op_t
avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  double_int ginv = uhwi_to_double_int (g->ginv);

  /* Pre-set to "no decomposition found".  */
  f_ginv.cost = -1;

  /* Step 1: Computing F o G^-1 */

  /* Built MSB-first so pushing digits yields nibble i at position i.  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1) */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
    }

  /* Step 2: Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
     the cost of the insertion plus the cost of the operation.  */

  /* Step 2a: Compute cost of F o G^-1 */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3.  */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (double_int_to_uhwi (f_ginv.map), SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b: Add cost of G */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
10245
10246
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
                      is different to its source position.
   PLEN, if non-NULL, accumulates the code length instead of printing.  */

static void
avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1] */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.
     That way the source bit has to be loaded into T only once per
     distinct bit, no matter how many destination bits use it.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
10290
10291
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]: Result
   OP[1]: The mapping composed of nibbles.  If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...  ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  double_int map = rtx_to_double_int (op[1]);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Reorder operands: the map nibble string is consumed directly and
     the remaining operands become the %-operands of the templates.  */

  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file,
             ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
             double_int_to_uhwi (map) & GET_MODE_MASK (SImode));

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* Masking out the fixed bits with EOR/ANDI/EOR costs 3 insns;
         only worth it if it saves more than 3 bit moves.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
10380
10381
/* IDs for all the AVR builtins.  Each DEF_BUILTIN entry in builtins.def
   contributes one enumerator; AVR_BUILTIN_COUNT is the total number.  */

enum avr_builtin_id
  {

#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };
10393
02d9a2c3 10394static void
10395avr_init_builtin_int24 (void)
10396{
10397 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10398 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10399
10400 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10401 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10402}
10403
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  The function-type
   trees declared below are referenced by name from the TYPE column of
   the DEF_BUILTIN entries in builtins.def, expanded at the bottom.  */

static void
avr_init_builtins (void)
{
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* A const void in the __memx address space, and a pointer to it:
     used for the builtin that reads from 24-bit flash addresses.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE)                       \
  add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
10463
c5be380e 10464
10465struct avr_builtin_description
10466{
c19a2f5f 10467 enum insn_code icode;
10468 const char *name;
10469 enum avr_builtin_id id;
10470 int n_args;
c5be380e 10471};
10472
10473static const struct avr_builtin_description
c19a2f5f 10474avr_bdesc[] =
c5be380e 10475 {
c5be380e 10476
c19a2f5f 10477#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
10478 { ICODE, NAME, ID, N_ARGS },
10479#include "builtins.def"
10480#undef DEF_BUILTIN
15b84087 10481
c19a2f5f 10482 { CODE_FOR_nothing, NULL, 0, -1 }
c5be380e 10483 };
10484
c19a2f5f 10485
c5be380e 10486/* Subroutine of avr_expand_builtin to take care of unop insns. */
10487
10488static rtx
10489avr_expand_unop_builtin (enum insn_code icode, tree exp,
10490 rtx target)
10491{
10492 rtx pat;
10493 tree arg0 = CALL_EXPR_ARG (exp, 0);
1086ba15 10494 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
c5be380e 10495 enum machine_mode op0mode = GET_MODE (op0);
10496 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10497 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10498
10499 if (! target
10500 || GET_MODE (target) != tmode
10501 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10502 {
10503 target = gen_reg_rtx (tmode);
10504 }
10505
10506 if (op0mode == SImode && mode0 == HImode)
10507 {
10508 op0mode = HImode;
10509 op0 = gen_lowpart (HImode, op0);
10510 }
10511
10512 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10513
10514 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10515 op0 = copy_to_mode_reg (mode0, op0);
10516
10517 pat = GEN_FCN (icode) (target, op0);
10518 if (! pat)
10519 return 0;
10520
10521 emit_insn (pat);
10522
10523 return target;
10524}
10525
10526
10527/* Subroutine of avr_expand_builtin to take care of binop insns. */
10528
10529static rtx
10530avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10531{
10532 rtx pat;
10533 tree arg0 = CALL_EXPR_ARG (exp, 0);
10534 tree arg1 = CALL_EXPR_ARG (exp, 1);
1086ba15 10535 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10536 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
c5be380e 10537 enum machine_mode op0mode = GET_MODE (op0);
10538 enum machine_mode op1mode = GET_MODE (op1);
10539 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10540 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10541 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10542
10543 if (! target
10544 || GET_MODE (target) != tmode
10545 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10546 {
10547 target = gen_reg_rtx (tmode);
10548 }
10549
10550 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10551 {
10552 op0mode = HImode;
10553 op0 = gen_lowpart (HImode, op0);
10554 }
10555
10556 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10557 {
10558 op1mode = HImode;
10559 op1 = gen_lowpart (HImode, op1);
10560 }
10561
10562 /* In case the insn wants input operands in modes different from
10563 the result, abort. */
10564
10565 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10566 && (op1mode == mode1 || op1mode == VOIDmode));
10567
10568 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10569 op0 = copy_to_mode_reg (mode0, op0);
10570
10571 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10572 op1 = copy_to_mode_reg (mode1, op1);
10573
10574 pat = GEN_FCN (icode) (target, op0, op1);
10575
10576 if (! pat)
10577 return 0;
10578
10579 emit_insn (pat);
10580 return target;
10581}
10582
/* Subroutine of avr_expand_builtin to take care of 3-operand insns.
   ICODE is the insn to emit, EXP the CALL_EXPR and TARGET a suggested
   place for the result.  Returns the RTX holding the result, or 0 if
   the insn could not be generated.  */

static rtx
avr_expand_triop_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  tree arg2 = CALL_EXPR_ARG (exp, 2);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode op2mode = GET_MODE (op2);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  enum machine_mode mode2 = insn_data[icode].operand[3].mode;

  /* Make sure there is a suitable place for the result.  */

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  /* Narrow SImode (or mode-less constant) inputs when the insn only
     wants HImode operands.  */

  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }

  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  if ((op2mode == SImode || op2mode == VOIDmode) && mode2 == HImode)
    {
      op2mode = HImode;
      op2 = gen_lowpart (HImode, op2);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */

  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
              && (op1mode == mode1 || op1mode == VOIDmode)
              && (op2mode == mode2 || op2mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
    op2 = copy_to_mode_reg (mode2, op2);

  pat = GEN_FCN (icode) (target, op0, op1, op2);

  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
10652
c5be380e 10653
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  tree arg0;
  rtx op0;

  /* Builtins with special expansion are handled directly; all other
     IDs drop out of the switch and are looked up in avr_bdesc below.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return 0;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        /* Only diagnose a non-constant map here; on success fall
           through to the generic table-driven expansion.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }
      }
    }

  /* Expand the builtin by its arity as recorded in avr_bdesc.  */

  for (i = 0; avr_bdesc[i].name; i++)
    {
      const struct avr_builtin_description *d = &avr_bdesc[i];

      if (d->id == id)
        switch (d->n_args)
          {
          case 0:
            emit_insn ((GEN_FCN (d->icode)) (target));
            return 0;

          case 1:
            return avr_expand_unop_builtin (d->icode, exp, target);

          case 2:
            return avr_expand_binop_builtin (d->icode, exp, target);

          case 3:
            return avr_expand_triop_builtin (d->icode, exp, target);

          default:
            gcc_unreachable();
          }
    }

  gcc_unreachable ();
}
10733
15b84087 10734
/* Implement `TARGET_FOLD_BUILTIN'.
   Fold calls to AVR builtins at tree level where possible.  Returns
   the folded tree, or NULL_TREE if no folding was performed.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is just a rotate by 4.  */

        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        double_int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        map = tree_to_double_int (arg[0]);
        tmap = double_int_to_tree (map_type, map);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted.  If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try decomposing the map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %X\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = double_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
10890
15b84087 10891\f
1602e4b0 10892
10893/* Initialize the GCC target structure. */
10894
10895#undef TARGET_ASM_ALIGNED_HI_OP
10896#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
10897#undef TARGET_ASM_ALIGNED_SI_OP
10898#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
10899#undef TARGET_ASM_UNALIGNED_HI_OP
10900#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
10901#undef TARGET_ASM_UNALIGNED_SI_OP
10902#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
10903#undef TARGET_ASM_INTEGER
10904#define TARGET_ASM_INTEGER avr_assemble_integer
10905#undef TARGET_ASM_FILE_START
10906#define TARGET_ASM_FILE_START avr_file_start
10907#undef TARGET_ASM_FILE_END
10908#define TARGET_ASM_FILE_END avr_file_end
10909
10910#undef TARGET_ASM_FUNCTION_END_PROLOGUE
10911#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
10912#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
10913#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
10914
10915#undef TARGET_FUNCTION_VALUE
10916#define TARGET_FUNCTION_VALUE avr_function_value
10917#undef TARGET_LIBCALL_VALUE
10918#define TARGET_LIBCALL_VALUE avr_libcall_value
10919#undef TARGET_FUNCTION_VALUE_REGNO_P
10920#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
10921
10922#undef TARGET_ATTRIBUTE_TABLE
10923#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
10924#undef TARGET_INSERT_ATTRIBUTES
10925#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
10926#undef TARGET_SECTION_TYPE_FLAGS
10927#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
10928
10929#undef TARGET_ASM_NAMED_SECTION
10930#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
10931#undef TARGET_ASM_INIT_SECTIONS
10932#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
10933#undef TARGET_ENCODE_SECTION_INFO
10934#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
10935#undef TARGET_ASM_SELECT_SECTION
10936#define TARGET_ASM_SELECT_SECTION avr_asm_select_section
10937
10938#undef TARGET_REGISTER_MOVE_COST
10939#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
10940#undef TARGET_MEMORY_MOVE_COST
10941#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
10942#undef TARGET_RTX_COSTS
10943#define TARGET_RTX_COSTS avr_rtx_costs
10944#undef TARGET_ADDRESS_COST
10945#define TARGET_ADDRESS_COST avr_address_cost
10946#undef TARGET_MACHINE_DEPENDENT_REORG
10947#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
10948#undef TARGET_FUNCTION_ARG
10949#define TARGET_FUNCTION_ARG avr_function_arg
10950#undef TARGET_FUNCTION_ARG_ADVANCE
10951#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
10952
10953#undef TARGET_RETURN_IN_MEMORY
10954#define TARGET_RETURN_IN_MEMORY avr_return_in_memory
10955
10956#undef TARGET_STRICT_ARGUMENT_NAMING
10957#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
10958
10959#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
10960#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
10961
10962#undef TARGET_HARD_REGNO_SCRATCH_OK
10963#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
10964#undef TARGET_CASE_VALUES_THRESHOLD
10965#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
10966
10967#undef TARGET_FRAME_POINTER_REQUIRED
10968#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
10969#undef TARGET_CAN_ELIMINATE
10970#define TARGET_CAN_ELIMINATE avr_can_eliminate
10971
10972#undef TARGET_CLASS_LIKELY_SPILLED_P
10973#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
10974
10975#undef TARGET_OPTION_OVERRIDE
10976#define TARGET_OPTION_OVERRIDE avr_option_override
10977
10978#undef TARGET_CANNOT_MODIFY_JUMPS_P
10979#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
10980
10981#undef TARGET_FUNCTION_OK_FOR_SIBCALL
10982#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
10983
10984#undef TARGET_INIT_BUILTINS
10985#define TARGET_INIT_BUILTINS avr_init_builtins
10986
10987#undef TARGET_EXPAND_BUILTIN
10988#define TARGET_EXPAND_BUILTIN avr_expand_builtin
10989
10990#undef TARGET_FOLD_BUILTIN
10991#define TARGET_FOLD_BUILTIN avr_fold_builtin
10992
10993#undef TARGET_ASM_FUNCTION_RODATA_SECTION
10994#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
10995
10996#undef TARGET_SCALAR_MODE_SUPPORTED_P
10997#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
10998
10999#undef TARGET_ADDR_SPACE_SUBSET_P
11000#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
11001
11002#undef TARGET_ADDR_SPACE_CONVERT
11003#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
11004
11005#undef TARGET_ADDR_SPACE_ADDRESS_MODE
11006#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
11007
11008#undef TARGET_ADDR_SPACE_POINTER_MODE
11009#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
11010
11011#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
11012#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
11013 avr_addr_space_legitimate_address_p
11014
11015#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
11016#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
11017
11018#undef TARGET_PRINT_OPERAND
11019#define TARGET_PRINT_OPERAND avr_print_operand
11020#undef TARGET_PRINT_OPERAND_ADDRESS
11021#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
11022#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
11023#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
11024
a45076aa 11025struct gcc_target targetm = TARGET_INITIALIZER;
c5be380e 11026
1602e4b0 11027\f
c84f2269 11028#include "gt-avr.h"