]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/avr/avr.c
Update Copyright years for files modified in 2011 and/or 2012.
[thirdparty/gcc.git] / gcc / config / avr / avr.c
CommitLineData
a28e4651 1/* Subroutines for insn-output.c for ATMEL AVR micro controllers
a64bd5e4 2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
71e45bc2 3 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
947dd720 4 Contributed by Denis Chertykov (chertykov@gmail.com)
a28e4651 5
187b36cf 6 This file is part of GCC.
a28e4651 7
187b36cf 8 GCC is free software; you can redistribute it and/or modify
a28e4651 9 it under the terms of the GNU General Public License as published by
038d1e19 10 the Free Software Foundation; either version 3, or (at your option)
a28e4651 11 any later version.
12
187b36cf 13 GCC is distributed in the hope that it will be useful,
a28e4651 14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
038d1e19 19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
a28e4651 21
22#include "config.h"
3337ec92 23#include "system.h"
805e22b2 24#include "coretypes.h"
25#include "tm.h"
a28e4651 26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
a28e4651 29#include "insn-config.h"
30#include "conditions.h"
a28e4651 31#include "insn-attr.h"
c5be380e 32#include "insn-codes.h"
a28e4651 33#include "flags.h"
34#include "reload.h"
35#include "tree.h"
9bfdb494 36#include "output.h"
a28e4651 37#include "expr.h"
4202ef11 38#include "c-family/c-common.h"
0b205f4c 39#include "diagnostic-core.h"
a28e4651 40#include "obstack.h"
41#include "function.h"
42#include "recog.h"
c5be380e 43#include "optabs.h"
c84f2269 44#include "ggc.h"
c5be380e 45#include "langhooks.h"
a28e4651 46#include "tm_p.h"
a767736d 47#include "target.h"
48#include "target-def.h"
9c12cc94 49#include "params.h"
a4c6e6a2 50#include "df.h"
a28e4651 51
1cb39658 52/* Maximal allowed offset for an address in the LD command */
53#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
a28e4651 54
53026b2c 55/* Return true if STR starts with PREFIX and false, otherwise. */
56#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57
9d734fa8 58/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
5bd39e93 63#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
bf412f98 64
ed2541ea 65/* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
67
68/* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
71 do { \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
74 } while (0)
75
76/* Read address-space from SYMBOL_REF_FLAGS of SYM */
77#define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
80
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).
   NOTE(review): field layout is declared by avr_addrspace_t in avr.h; the
   last column appears to be the 64KiB flash segment number used to index
   progmem_section_prefix -- confirm against avr.h.  */
const avr_addrspace_t avr_addrspace[] =
{
    { ADDR_SPACE_RAM, 0, 2, "" , 0 },
    { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
    { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
    { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
    { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
    { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
    { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
    { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
    { 0 , 0, 0, NULL, 0 }
};
95
/* Map 64-k Flash segment to section prefix.
   Indexed by the 64KiB flash segment number; one entry per segment 0..5.  */
static const char* const progmem_section_prefix[6] =
  {
    ".progmem.data",
    ".progmem1.data",
    ".progmem2.data",
    ".progmem3.data",
    ".progmem4.data",
    ".progmem5.data"
  };
106
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.
   The actual addresses are computed in avr_option_override from the
   architecture's SFR offset.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Filled in by avr_option_override; read by avr_init_expanders.  */
static avr_addr_t avr_addr;
130
a45076aa 131
132/* Prototypes for local helper functions. */
133
644ac9c5 134static const char* out_movqi_r_mr (rtx, rtx[], int*);
135static const char* out_movhi_r_mr (rtx, rtx[], int*);
136static const char* out_movsi_r_mr (rtx, rtx[], int*);
137static const char* out_movqi_mr_r (rtx, rtx[], int*);
138static const char* out_movhi_mr_r (rtx, rtx[], int*);
139static const char* out_movsi_mr_r (rtx, rtx[], int*);
140
58f62c92 141static int get_sequence_length (rtx insns);
206a5129 142static int sequent_regs_live (void);
143static const char *ptrreg_to_str (int);
144static const char *cond_string (enum rtx_code);
36f949a2 145static int avr_num_arg_regs (enum machine_mode, const_tree);
20d892d1 146static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
a49907f9 147 int, bool);
148static void output_reload_in_const (rtx*, rtx, int*, bool);
df3d6232 149static struct machine_function * avr_init_machine_status (void);
a45076aa 150
151
152/* Prototypes for hook implementors if needed before their implementation. */
153
154static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
155
b1665fa2 156
20c71901 157/* Allocate registers from r25 to r8 for parameters for function calls. */
a28e4651 158#define FIRST_CUM_REG 26
159
4202ef11 160/* Implicit target register of LPM instruction (R0) */
2d86450c 161extern GTY(()) rtx lpm_reg_rtx;
162rtx lpm_reg_rtx;
4202ef11 163
164/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
2d86450c 165extern GTY(()) rtx lpm_addr_reg_rtx;
166rtx lpm_addr_reg_rtx;
4202ef11 167
2d86450c 168/* Temporary register RTX (reg:QI TMP_REGNO) */
169extern GTY(()) rtx tmp_reg_rtx;
170rtx tmp_reg_rtx;
a28e4651 171
2d86450c 172/* Zeroed register RTX (reg:QI ZERO_REGNO) */
173extern GTY(()) rtx zero_reg_rtx;
174rtx zero_reg_rtx;
175
176/* RTXs for all general purpose registers as QImode */
177extern GTY(()) rtx all_regs_rtx[32];
178rtx all_regs_rtx[32];
e511e253 179
0b6cf66f 180/* SREG, the processor status */
181extern GTY(()) rtx sreg_rtx;
182rtx sreg_rtx;
183
184/* RAMP* special function registers */
185extern GTY(()) rtx rampd_rtx;
186extern GTY(()) rtx rampx_rtx;
187extern GTY(()) rtx rampy_rtx;
2d86450c 188extern GTY(()) rtx rampz_rtx;
0b6cf66f 189rtx rampd_rtx;
190rtx rampx_rtx;
191rtx rampy_rtx;
2d86450c 192rtx rampz_rtx;
5bd39e93 193
194/* RTX containing the strings "" and "e", respectively */
195static GTY(()) rtx xstring_empty;
196static GTY(()) rtx xstring_e;
197
b1eb5c83 198/* Current architecture. */
199const struct base_arch_s *avr_current_arch;
200
795cff42 201/* Current device. */
202const struct mcu_type_s *avr_current_device;
e511e253 203
c3f18f18 204/* Section to put switch tables in. */
205static GTY(()) section *progmem_swtable_section;
a28e4651 206
9d734fa8 207/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
590da9f2 208 or to address space __flash*. */
5bd39e93 209static GTY(()) section *progmem_section[6];
210
83921eda 211/* Condition for insns/expanders from avr-dimode.md. */
212bool avr_have_dimode = true;
213
7c2339f8 214/* To track if code will use .bss and/or .data. */
215bool avr_need_clear_bss_p = false;
216bool avr_need_copy_data_p = false;
217
a767736d 218\f
bf412f98 219
/* Custom function to count number of set bits in VAL.
   Uses Kernighan's trick: each iteration clears the lowest set bit,
   so the loop runs once per 1-bit.  */

static inline int
avr_popcount (unsigned int val)
{
  int count = 0;

  for (; val != 0; val &= val - 1)
    count++;

  return count;
}
235
236
2f2d376f 237/* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
238 Return true if the least significant N_BYTES bytes of XVAL all have a
239 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
240 of integers which contains an integer N iff bit N of POP_MASK is set. */
6be828c1 241
242bool
243avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
244{
245 int i;
246
2f2d376f 247 enum machine_mode mode = GET_MODE (xval);
248
249 if (VOIDmode == mode)
250 mode = SImode;
251
6be828c1 252 for (i = 0; i < n_bytes; i++)
253 {
2f2d376f 254 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
6be828c1 255 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
256
257 if (0 == (pop_mask & (1 << avr_popcount (val8))))
258 return false;
259 }
260
261 return true;
262}
263
017c5b98 264
265/* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
266 the bit representation of X by "casting" it to CONST_INT. */
267
268rtx
269avr_to_int_mode (rtx x)
270{
271 enum machine_mode mode = GET_MODE (x);
272
273 return VOIDmode == mode
274 ? x
275 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
276}
277
278
/* Implement `TARGET_OPTION_OVERRIDE'.  */
/* Validate and adjust command-line flags, pick the current device and
   architecture, and record the RAM addresses of the SFRs the backend
   needs.  Runs once, after option processing.  */

static void
avr_option_override (void)
{
  /* Address 0 is a valid RAM address on AVR, so this optimization
     must stay off.  */
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries so save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];

  /* RAM addresses of some SFRs common to all Devices in respective Arch. */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
332
/* Function to set up the backend function structure.
   Returns a GC-allocated, zero-initialized machine_function; fields are
   filled in later, e.g. by avr_set_current_function.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
340
5bd39e93 341
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton.  */

void
avr_init_expanders (void)
{
  int regno;

  /* Pre-build QImode REG rtxes for all 32 general purpose registers.  */
  for (regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* MEM rtxes for the SFRs whose RAM addresses were computed in
     avr_option_override.  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
}
368
369
20c71901 370/* Return register class for register R. */
a28e4651 371
372enum reg_class
206a5129 373avr_regno_reg_class (int r)
a28e4651 374{
7104fbe4 375 static const enum reg_class reg_class_tab[] =
376 {
377 R0_REG,
378 /* r1 - r15 */
379 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
380 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
381 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
382 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
383 /* r16 - r23 */
384 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
385 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
386 /* r24, r25 */
387 ADDW_REGS, ADDW_REGS,
388 /* X: r26, 27 */
389 POINTER_X_REGS, POINTER_X_REGS,
390 /* Y: r28, r29 */
391 POINTER_Y_REGS, POINTER_Y_REGS,
392 /* Z: r30, r31 */
393 POINTER_Z_REGS, POINTER_Z_REGS,
394 /* SP: SPL, SPH */
395 STACK_REG, STACK_REG
396 };
397
a28e4651 398 if (r <= 33)
399 return reg_class_tab[r];
7104fbe4 400
a28e4651 401 return ALL_REGS;
402}
403
02d9a2c3 404
017c5b98 405/* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
406
02d9a2c3 407static bool
408avr_scalar_mode_supported_p (enum machine_mode mode)
409{
017c5b98 410 if (ALL_FIXED_POINT_MODE_P (mode))
411 return true;
412
02d9a2c3 413 if (PSImode == mode)
414 return true;
415
416 return default_scalar_mode_supported_p (mode);
417}
418
419
4202ef11 420/* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
421
422static bool
590da9f2 423avr_decl_flash_p (tree decl)
4202ef11 424{
5bd39e93 425 if (TREE_CODE (decl) != VAR_DECL
426 || TREE_TYPE (decl) == error_mark_node)
427 {
428 return false;
429 }
4202ef11 430
431 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
432}
433
434
5bd39e93 435/* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
436 address space and FALSE, otherwise. */
437
438static bool
590da9f2 439avr_decl_memx_p (tree decl)
5bd39e93 440{
441 if (TREE_CODE (decl) != VAR_DECL
442 || TREE_TYPE (decl) == error_mark_node)
443 {
444 return false;
445 }
446
590da9f2 447 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
5bd39e93 448}
449
450
4202ef11 451/* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
452
453bool
590da9f2 454avr_mem_flash_p (rtx x)
4202ef11 455{
456 return (MEM_P (x)
457 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
458}
459
460
5bd39e93 461/* Return TRUE if X is a MEM rtx located in the 24-bit Flash
462 address space and FALSE, otherwise. */
463
464bool
590da9f2 465avr_mem_memx_p (rtx x)
5bd39e93 466{
467 return (MEM_P (x)
590da9f2 468 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
5bd39e93 469}
470
471
32969c63 472/* A helper for the subsequent function attribute used to dig for
473 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
474
475static inline int
476avr_lookup_function_attribute1 (const_tree func, const char *name)
477{
478 if (FUNCTION_DECL == TREE_CODE (func))
479 {
480 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
481 {
482 return true;
483 }
484
485 func = TREE_TYPE (func);
486 }
487
488 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
489 || TREE_CODE (func) == METHOD_TYPE);
490
491 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
492}
493
/* Return nonzero if FUNC is a naked function, i.e. carries the
   "naked" attribute on its decl or type.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}
501
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}
510
/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}
519
/* Return nonzero if FUNC is an OS_task function (carries the
   "OS_task" attribute).  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}
527
/* Return nonzero if FUNC is an OS_main function (carries the
   "OS_main" attribute).  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
535
a12b9b80 536
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes: cache them in
   cfun->machine and diagnose conflicting or suspicious combinations.
   Runs at most once per function (guarded by attributes_checked_p).  */

static void
avr_set_current_function (tree decl)
{
  location_t loc;
  const char *isr;

  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  loc = DECL_SOURCE_LOCATION (decl);

  /* Cache the attribute lookups for later use by prologue/epilogue code.  */
  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);

  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting features. */

  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
              " exclusive", "OS_task", "OS_main", isr);

  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */

  if (cfun->machine->is_naked
      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
                " no effect on %qs function", "OS_task", "OS_main", "naked");

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC started
         using this when it switched from SIGNAL and INTERRUPT to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* Interrupt handlers must be void __vector (void) functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
                    name, isr);
    }

  /* Avoid the above diagnosis to be printed more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
610
611
a12b9b80 612/* Implement `ACCUMULATE_OUTGOING_ARGS'. */
c529cd37 613
614int
a12b9b80 615avr_accumulate_outgoing_args (void)
616{
617 if (!cfun)
618 return TARGET_ACCUMULATE_OUTGOING_ARGS;
619
620 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
621 what offset is correct. In some cases it is relative to
622 virtual_outgoing_args_rtx and in others it is relative to
623 virtual_stack_vars_rtx. For example code see
624 gcc.c-torture/execute/built-in-setjmp.c
625 gcc.c-torture/execute/builtins/sprintf-chk.c */
626
627 return (TARGET_ACCUMULATE_OUTGOING_ARGS
628 && !(cfun->calls_setjmp
629 || cfun->has_nonlocal_label));
630}
631
632
633/* Report contribution of accumulated outgoing arguments to stack size. */
634
635static inline int
636avr_outgoing_args_size (void)
637{
638 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
639}
640
641
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  Accumulated outgoing
   arguments, if any, sit between the frame pointer and that slot.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
651
652
f0973368 653/* Return the number of hard registers to push/pop in the prologue/epilogue
654 of the current function, and optionally store these registers in SET. */
655
656static int
206a5129 657avr_regs_to_save (HARD_REG_SET *set)
f0973368 658{
659 int reg, count;
ea679361 660 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
f2323747 661
f0973368 662 if (set)
663 CLEAR_HARD_REG_SET (*set);
664 count = 0;
c3bcd5a9 665
ba8273a8 666 /* No need to save any registers if the function never returns or
b0e2b973 667 has the "OS_task" or "OS_main" attribute. */
ba8273a8 668 if (TREE_THIS_VOLATILE (current_function_decl)
a6e595be 669 || cfun->machine->is_OS_task
670 || cfun->machine->is_OS_main)
c3bcd5a9 671 return 0;
672
f0973368 673 for (reg = 0; reg < 32; reg++)
674 {
675 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
a12b9b80 676 any global register variables. */
f0973368 677 if (fixed_regs[reg])
a12b9b80 678 continue;
f0973368 679
d5bf7b64 680 if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
a12b9b80 681 || (df_regs_ever_live_p (reg)
682 && (int_or_sig_p || !call_used_regs[reg])
683 /* Don't record frame pointer registers here. They are treated
684 indivitually in prologue. */
685 && !(frame_pointer_needed
686 && (reg == REG_Y || reg == (REG_Y+1)))))
687 {
688 if (set)
689 SET_HARD_REG_BIT (*set, reg);
690 count++;
691 }
f0973368 692 }
693 return count;
694}
695
a28e3283 696
/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
/* Naked functions are implemented entirely in assembly and manage their
   own frame, so no argument stack slots may be allocated for them.  */

static bool
avr_allocate_stack_slots_for_args (void)
{
  return !cfun->machine->is_naked;
}
704
705
/* Return true if register FROM can be eliminated via register TO.
   The frame pointer (and its high byte) can be eliminated only when
   no frame pointer is needed; eliminations towards the frame pointer
   are allowed from the arg pointer, or whenever the frame pointer
   is in use.  */

static bool
avr_can_eliminate (const int from, const int to)
{
  return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
          || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
          || ((from == FRAME_POINTER_REGNUM
               || from == FRAME_POINTER_REGNUM + 1)
              && !frame_pointer_needed));
}
717
08c6cbd2 718
/* Implement TARGET_WARN_FUNC_RETURN.  */

static bool
avr_warn_func_return (tree decl)
{
  /* Naked functions are implemented entirely in assembly, including the
     return sequence, so suppress warnings about this.  */
  return !avr_naked_function_p (decl);
}
728
/* Compute offset between arg_pointer and frame_pointer.
   The offset accounts for the saved frame pointer (2 bytes when pushed),
   all registers saved by the prologue, the frame itself, accumulated
   outgoing args, and the return address (2 or 3 bytes of PC).  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      int offset = frame_pointer_needed ? 2 : 0;
      /* Devices with EIJMP/EICALL push a 3-byte return address.  */
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size()
              + avr_pc_size + 1 + offset);
    }
}
746
017c5b98 747
/* Helper for the function below.  */
/* Rebuild *NODE as a fixed-point type of machine mode MODE; SAT_P
   selects the saturating variant.  */

static void
avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
{
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  TYPE_ALIGN (*node) = 8;
  SET_TYPE_MODE (*node, mode);

  layout_type (*node);
}
764
765
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
796
797
/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  rtx xval = gen_reg_rtx (Pmode);
  emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
                         gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
  return xval;
}
812
017c5b98 813
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
   This is return address of function.  COUNT must be 0; TEM is the base
   register (frame pointer) to which the offset is added.  */
rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported. */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* NOTE(review): the ROTATE by 8 swaps the two bytes of the loaded
     address -- presumably because of the byte order the CALL insn pushes;
     confirm against the AVR call/return conventions.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
838
/* Return 1 if the function epilogue is just a single "ret".
   True only when nothing was saved/allocated by the prologue and the
   function is an ordinary (non-ISR, non-naked, returning) function.  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_outgoing_args_size() == 0
          && avr_regs_to_save (NULL) == 0
          && ! cfun->machine->is_interrupt
          && ! cfun->machine->is_signal
          && ! cfun->machine->is_naked
          && ! TREE_THIS_VOLATILE (current_function_decl));
}
853
/* This function checks sequence of live registers.
   Count the live call-saved registers among r0..r17 plus the Y pair and
   return that count if they form one contiguous run suitable for the
   prologue_saves sequence, otherwise return 0.  LIVE_SEQ is the total,
   CUR_SEQ the length of the current run; they match only if the run is
   unbroken up to the end.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* The frame pointer pair is saved unconditionally.  */
      cur_seq += 2;
      live_seq += 2;
    }
  return (cur_seq == live_seq) ? live_seq : 0;
}
913
58f62c92 914/* Obtain the length sequence of insns. */
915
916int
917get_sequence_length (rtx insns)
918{
919 rtx insn;
920 int length;
921
922 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
923 length += get_attr_length (insn);
924
925 return length;
926}
927
/* Implement INCOMING_RETURN_ADDR_RTX.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
}
937
/* Helper for expand_prologue.  Emit a push of a byte register.
   REGNO is the hard register number to push; FRAME_RELATED_P marks
   the emitted insn as frame-related for dwarf2 CFI.  Also bumps the
   per-function stack usage counter.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  /* PUSH on AVR stores via post-decrement of SP.  */
  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}
955
0b6cf66f 956
/* Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
   SFR is a MEM representing the memory location of the SFR.
   If CLR_P then clear the SFR after the push using zero_reg.
   FRAME_RELATED_P marks the emitted insns for dwarf2 CFI.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
984
/* Helper for expand_prologue.  Save the hard registers given in SET and
   set up a stack frame of SIZE bytes for the current function.  Either
   uses the compact call-prologues saving sequence (-mcall-prologues) or
   emits explicit pushes plus an SP/FP adjustment, and attaches the CFA
   notes needed by the dwarf2 unwind machinery.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  /* Largest frame we can address: 8-bit SP limits us to QImode range.  */
  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  /* Whether the compact library-call style prologue may be used.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* The saves routine expects the frame size in R26/R27 (X).  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      /* One REG_CFA_OFFSET note per saved byte register.  */
      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      /* Push all registers that need saving, lowest regno first.  */
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer.  These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             The optimum method depends on function type, stack and
             frame size.  To avoid complex logic, both methods are
             tested and the shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed.
             We use the X register as scratch.  This is safe because X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */

          int irq_state = -1;
          /* Un-saturated frame size, reported to the CFA machinery.  */
          HOST_WIDE_INT size_cfa = size;
          rtx fp_plus_insns, fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, -size));
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (Pmode, fp,
                                                        -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1249
e484266f 1250
/* Output function prologue.  Expand the prologue RTL for the current
   function: nothing for naked functions; for interrupt/signal handlers
   first save and set up the special function registers (SREG, RAMPD/X/Y/Z)
   and __zero_reg__/__tmp_reg__; then save call-saved registers and set up
   the frame via avr_prologue_setup_frame.  */

void
expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          /* On devices with RAMPD, RAMPZ is cleared as well.  */
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
1327
df3d6232 1328/* Output summary at end of function prologue. */
a28e4651 1329
17d9b0c3 1330static void
df3d6232 1331avr_asm_function_end_prologue (FILE *file)
a28e4651 1332{
df3d6232 1333 if (cfun->machine->is_naked)
a28e4651 1334 {
c3bcd5a9 1335 fputs ("/* prologue: naked */\n", file);
a28e4651 1336 }
df3d6232 1337 else
a28e4651 1338 {
df3d6232 1339 if (cfun->machine->is_interrupt)
1340 {
1341 fputs ("/* prologue: Interrupt */\n", file);
1342 }
1343 else if (cfun->machine->is_signal)
1344 {
1345 fputs ("/* prologue: Signal */\n", file);
1346 }
a28e4651 1347 else
df3d6232 1348 fputs ("/* prologue: function */\n", file);
a28e4651 1349 }
a12b9b80 1350
1351 if (ACCUMULATE_OUTGOING_ARGS)
1352 fprintf (file, "/* outgoing args size = %d */\n",
1353 avr_outgoing_args_size());
1354
df3d6232 1355 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1356 get_frame_size());
34413b10 1357 fprintf (file, "/* stack size = %d */\n",
1358 cfun->machine->stack_usage);
1359 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1360 usage for offset so that SP + .L__stack_offset = return address. */
1361 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
df3d6232 1362}
f0973368 1363
e067eab2 1364
df3d6232 1365/* Implement EPILOGUE_USES. */
c3bcd5a9 1366
df3d6232 1367int
1368avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1369{
1370 if (reload_completed
1371 && cfun->machine
1372 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1373 return 1;
1374 return 0;
a28e4651 1375}
1376
e484266f 1377/* Helper for expand_epilogue. Emit a pop of a byte register. */
1378
1379static void
1380emit_pop_byte (unsigned regno)
1381{
1382 rtx mem, reg;
1383
1384 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1385 mem = gen_frame_mem (QImode, mem);
1386 reg = gen_rtx_REG (QImode, regno);
1387
1388 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1389}
1390
/* Output RTL epilogue.  Expand the epilogue of the current function:
   tear down the frame, restore saved registers (mirroring the order
   used in expand_prologue), and for interrupt/signal handlers restore
   the special function registers.  SIBCALL_P true means this epilogue
   precedes a sibling call, so no return insn is emitted.  */

void
expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* Whether the compact library-call style epilogue may be used.  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
1592
df3d6232 1593/* Output summary messages at beginning of function epilogue. */
1594
1595static void
1596avr_asm_function_begin_epilogue (FILE *file)
1597{
1598 fprintf (file, "/* epilogue start */\n");
1599}
a28e4651 1600
1f959902 1601
1602/* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1603
1604static bool
1605avr_cannot_modify_jumps_p (void)
1606{
1607
1608 /* Naked Functions must not have any instructions after
1609 their epilogue, see PR42240 */
1610
1611 if (reload_completed
1612 && cfun->machine
1613 && cfun->machine->is_naked)
1614 {
1615 return true;
1616 }
1617
1618 return false;
1619}
1620
1621
002565f0 1622/* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1623
002565f0 1624static bool
be6d8823 1625avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
002565f0 1626{
be6d8823 1627 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1628 This hook just serves to hack around PR rtl-optimization/52543 by
1629 claiming that non-generic addresses were mode-dependent so that
1630 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1631 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1632 generic address space which is not true. */
1633
1634 return !ADDR_SPACE_GENERIC_P (as);
002565f0 1635}
1636
1637
8b0ecac5 1638/* Helper function for `avr_legitimate_address_p'. */
1639
1640static inline bool
f8a8fc7b 1641avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
f9efb148 1642 RTX_CODE outer_code, bool strict)
8b0ecac5 1643{
1644 return (REG_P (reg)
f8a8fc7b 1645 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1646 as, outer_code, UNKNOWN)
8b0ecac5 1647 || (!strict
1648 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1649}
1650
1651
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  STRICT means only hard
   registers acceptable as base are allowed (reload semantics).  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  /* Constant addresses are always legitimate unless refined below.  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* Multi-byte accesses > 4 bytes through X (R26/R27) are rejected
         in strict mode.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        /* Only (base-reg + non-negative const) is a candidate.  */
        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* Displacement must fit the LD/ST offset range for MODE.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                /* Frame/arg pointer bases are always acceptable here.  */
                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Big offsets from a real frame pointer are legitimized
                   later by reload.  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  /* Optional debug dump controlled by -mlog=legitimate_address_p.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
1738
4202ef11 1739
/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
   now only a helper for avr_addr_space_legitimize_address.  */
/* Attempts to replace X with a valid
   memory address for an operand of mode MODE.  OLDX is the address as
   it came in; it is returned unchanged unless it is a reg+reg sum or a
   reg+const with an offset too big for a displacement addressing mode,
   in which case the whole sum is forced into a register.  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        /* (reg + reg) has no direct addressing mode: compute into a reg.  */
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          /* Offsets from the frame pointer are handled by reload;
             others must fit the LD/ST displacement range.  */
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  /* Optional debug dump controlled by -mlog=legitimize_address.  */
  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
1779
1780
68a79dfc 1781/* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1782/* This will allow register R26/27 to be used where it is no worse than normal
1783 base pointers R28/29 or R30/31. For example, if base offset is greater
1784 than 63 bytes or for R++ or --R addressing. */
1785
1786rtx
1f46ee39 1787avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
68a79dfc 1788 int opnum, int type, int addr_type,
1789 int ind_levels ATTRIBUTE_UNUSED,
1790 rtx (*mk_memloc)(rtx,int))
1791{
1f46ee39 1792 rtx x = *px;
1793
68a79dfc 1794 if (avr_log.legitimize_reload_address)
1795 avr_edump ("\n%?:%m %r\n", mode, x);
1796
1797 if (1 && (GET_CODE (x) == POST_INC
1798 || GET_CODE (x) == PRE_DEC))
1799 {
1800 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1801 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1802 opnum, RELOAD_OTHER);
1803
1804 if (avr_log.legitimize_reload_address)
1f46ee39 1805 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
68a79dfc 1806 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1807
1808 return x;
1809 }
1810
1811 if (GET_CODE (x) == PLUS
1812 && REG_P (XEXP (x, 0))
1813 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1814 && CONST_INT_P (XEXP (x, 1))
1815 && INTVAL (XEXP (x, 1)) >= 1)
1816 {
1817 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1818
1819 if (fit)
1820 {
1821 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1822 {
1823 int regno = REGNO (XEXP (x, 0));
1824 rtx mem = mk_memloc (x, regno);
1825
1826 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1827 POINTER_REGS, Pmode, VOIDmode, 0, 0,
95dcc8ad 1828 1, (enum reload_type) addr_type);
68a79dfc 1829
1830 if (avr_log.legitimize_reload_address)
1f46ee39 1831 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
68a79dfc 1832 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1833
1834 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1835 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
95dcc8ad 1836 opnum, (enum reload_type) type);
68a79dfc 1837
1838 if (avr_log.legitimize_reload_address)
1f46ee39 1839 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
68a79dfc 1840 BASE_POINTER_REGS, mem, NULL_RTX);
1841
1842 return x;
1843 }
1844 }
1845 else if (! (frame_pointer_needed
9f42c829 1846 && XEXP (x, 0) == frame_pointer_rtx))
68a79dfc 1847 {
1f46ee39 1848 push_reload (x, NULL_RTX, px, NULL,
68a79dfc 1849 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
95dcc8ad 1850 opnum, (enum reload_type) type);
68a79dfc 1851
1852 if (avr_log.legitimize_reload_address)
1f46ee39 1853 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
68a79dfc 1854 POINTER_REGS, x, NULL_RTX);
1855
1856 return x;
1857 }
1858 }
1859
1860 return NULL_RTX;
1861}
1862
1863
/* Implement `TARGET_SECONDARY_RELOAD'.  Loads from the non-generic
   16-bit address spaces (other than MEMX) need a d-class scratch
   register; select the matching reload_in<mode> expander for MODE.
   No secondary register class is ever required, so return NO_REGS.  */

static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      enum machine_mode mode, secondary_reload_info *sri)
{
  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch.  */

      switch (mode)
        {
        default:
          gcc_unreachable();

        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  return NO_REGS;
}
1906
1907
9ce2d202 1908/* Helper function to print assembler resp. track instruction
02d9a2c3 1909 sequence lengths. Always return "".
9ce2d202 1910
1911 If PLEN == NULL:
1912 Output assembler code from template TPL with operands supplied
1913 by OPERANDS. This is just forwarding to output_asm_insn.
1914
1915 If PLEN != NULL:
dfd52f2b 1916 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1917 If N_WORDS < 0 Set *PLEN to -N_WORDS.
9ce2d202 1918 Don't output anything.
1919*/
1920
02d9a2c3 1921static const char*
9ce2d202 1922avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1923{
1924 if (NULL == plen)
1925 {
1926 output_asm_insn (tpl, operands);
1927 }
1928 else
1929 {
dfd52f2b 1930 if (n_words < 0)
1931 *plen = -n_words;
1932 else
1933 *plen += n_words;
9ce2d202 1934 }
02d9a2c3 1935
1936 return "";
9ce2d202 1937}
1938
1939
20c71901 1940/* Return a pointer register name as a string. */
a28e4651 1941
37ac04dc 1942static const char *
206a5129 1943ptrreg_to_str (int regno)
a28e4651 1944{
1945 switch (regno)
1946 {
1947 case REG_X: return "X";
1948 case REG_Y: return "Y";
1949 case REG_Z: return "Z";
1950 default:
a45076aa 1951 output_operand_lossage ("address operand requires constraint for"
1952 " X, Y, or Z register");
a28e4651 1953 }
1954 return NULL;
1955}
1956
1957/* Return the condition name as a string.
1958 Used in conditional jump constructing */
1959
37ac04dc 1960static const char *
206a5129 1961cond_string (enum rtx_code code)
a28e4651 1962{
1963 switch (code)
1964 {
1965 case NE:
1966 return "ne";
1967 case EQ:
1968 return "eq";
1969 case GE:
1970 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1971 return "pl";
1972 else
1973 return "ge";
a28e4651 1974 case LT:
1975 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1976 return "mi";
1977 else
1978 return "lt";
1979 case GEU:
1980 return "sh";
a28e4651 1981 case LTU:
1982 return "lo";
1983 default:
8ef66241 1984 gcc_unreachable ();
a28e4651 1985 }
02d9a2c3 1986
1987 return "";
a28e4651 1988}
1989
62a6a7ee 1990
/* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
/* Output ADDR to FILE as address.  Pointer registers print as X/Y/Z,
   optionally decorated for pre-decrement / post-increment; program
   memory addresses are wrapped in the assembler's gs() operator.  */

static void
avr_print_operand_address (FILE *file, rtx addr)
{
  /* NOTE(review): ptrreg_to_str returns NULL after diagnosing a bad
     regno, and that NULL is passed to fprintf as the format string in
     the three cases below — confirm this path is unreachable for valid
     insns before relying on it.  */

  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          /* Look through a CONST wrapper to find (plus sym const).  */
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and lower devices this is ok.
                 For large devices it will create a Trampoline to offset
                 from symbol which may not be what the user really wanted.  */

              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x,0));
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
                       2 * INTVAL (XEXP (x, 1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol maybe incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf(stderr,"\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
2048
2049
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  Only '~' and '!'
   are valid punctuation codes in operand output.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
2057
2058
2059/* Implement `TARGET_PRINT_OPERAND'. */
384f6361 2060/* Output X as assembler operand to file FILE.
2061 For a description of supported %-codes, see top of avr.md. */
2062
62a6a7ee 2063static void
2064avr_print_operand (FILE *file, rtx x, int code)
a28e4651 2065{
2066 int abcd = 0;
2067
2068 if (code >= 'A' && code <= 'D')
2069 abcd = code - 'A';
2070
3b351734 2071 if (code == '~')
2072 {
4f0e2214 2073 if (!AVR_HAVE_JMP_CALL)
3b351734 2074 fputc ('r', file);
2075 }
90ef7269 2076 else if (code == '!')
2077 {
2078 if (AVR_HAVE_EIJMP_EICALL)
2079 fputc ('e', file);
2080 }
384f6361 2081 else if (code == 't'
2082 || code == 'T')
2083 {
2084 static int t_regno = -1;
2085 static int t_nbits = -1;
2086
2087 if (REG_P (x) && t_regno < 0 && code == 'T')
2088 {
2089 t_regno = REGNO (x);
2090 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2091 }
2092 else if (CONST_INT_P (x) && t_regno >= 0
2093 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2094 {
2095 int bpos = INTVAL (x);
2096
2097 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2098 if (code == 'T')
2099 fprintf (file, ",%d", bpos % 8);
2100
2101 t_regno = -1;
2102 }
2103 else
2104 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2105 }
3b351734 2106 else if (REG_P (x))
a28e4651 2107 {
2108 if (x == zero_reg_rtx)
b4ebb666 2109 fprintf (file, "__zero_reg__");
2110 else if (code == 'r' && REGNO (x) < 32)
2111 fprintf (file, "%d", (int) REGNO (x));
a28e4651 2112 else
b4ebb666 2113 fprintf (file, reg_names[REGNO (x) + abcd]);
a28e4651 2114 }
96871982 2115 else if (CONST_INT_P (x))
2116 {
2117 HOST_WIDE_INT ival = INTVAL (x);
2118
2119 if ('i' != code)
2120 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2121 else if (low_io_address_operand (x, VOIDmode)
2122 || high_io_address_operand (x, VOIDmode))
2123 {
0b6cf66f 2124 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2125 fprintf (file, "__RAMPZ__");
2126 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2127 fprintf (file, "__RAMPY__");
2128 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2129 fprintf (file, "__RAMPX__");
2130 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2131 fprintf (file, "__RAMPD__");
2132 else if (AVR_XMEGA && ival == avr_addr.ccp)
2133 fprintf (file, "__CCP__");
72851b68 2134 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2135 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2136 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2137 else
96871982 2138 {
96871982 2139 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2140 ival - avr_current_arch->sfr_offset);
96871982 2141 }
2142 }
2143 else
2144 fatal_insn ("bad address, not an I/O address:", x);
2145 }
2146 else if (MEM_P (x))
a28e4651 2147 {
a45076aa 2148 rtx addr = XEXP (x, 0);
2149
310f64db 2150 if (code == 'm')
a28e4651 2151 {
a45076aa 2152 if (!CONSTANT_P (addr))
644ac9c5 2153 fatal_insn ("bad address, not a constant:", addr);
a45076aa 2154 /* Assembler template with m-code is data - not progmem section */
2155 if (text_segment_operand (addr, VOIDmode))
2156 if (warning (0, "accessing data memory with"
2157 " program memory address"))
2158 {
2159 output_addr_const (stderr, addr);
2160 fprintf(stderr,"\n");
2161 }
2162 output_addr_const (file, addr);
a28e4651 2163 }
644ac9c5 2164 else if (code == 'i')
2165 {
62a6a7ee 2166 avr_print_operand (file, addr, 'i');
644ac9c5 2167 }
3b351734 2168 else if (code == 'o')
2169 {
2170 if (GET_CODE (addr) != PLUS)
68435912 2171 fatal_insn ("bad address, not (reg+disp):", addr);
3b351734 2172
62a6a7ee 2173 avr_print_operand (file, XEXP (addr, 1), 0);
3b351734 2174 }
f43bae99 2175 else if (code == 'p' || code == 'r')
2176 {
2177 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2178 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2179
2180 if (code == 'p')
62a6a7ee 2181 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
f43bae99 2182 else
62a6a7ee 2183 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
f43bae99 2184 }
a28e4651 2185 else if (GET_CODE (addr) == PLUS)
2186 {
62a6a7ee 2187 avr_print_operand_address (file, XEXP (addr,0));
a28e4651 2188 if (REGNO (XEXP (addr, 0)) == REG_X)
68435912 2189 fatal_insn ("internal compiler error. Bad address:"
a28e4651 2190 ,addr);
2191 fputc ('+', file);
62a6a7ee 2192 avr_print_operand (file, XEXP (addr,1), code);
a28e4651 2193 }
2194 else
62a6a7ee 2195 avr_print_operand_address (file, addr);
a28e4651 2196 }
96871982 2197 else if (code == 'i')
2198 {
2199 fatal_insn ("bad address, not an I/O address:", x);
2200 }
310f64db 2201 else if (code == 'x')
2202 {
2203 /* Constant progmem address - like used in jmp or call */
2204 if (0 == text_segment_operand (x, VOIDmode))
a45076aa 2205 if (warning (0, "accessing program memory"
2206 " with data memory address"))
310f64db 2207 {
2208 output_addr_const (stderr, x);
2209 fprintf(stderr,"\n");
2210 }
2211 /* Use normal symbol for direct address no linker trampoline needed */
2212 output_addr_const (file, x);
2213 }
b4ebb666 2214 else if (CONST_FIXED_P (x))
017c5b98 2215 {
2216 HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2217 if (code != 0)
2218 output_operand_lossage ("Unsupported code '%c'for fixed-point:",
2219 code);
2220 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2221 }
a28e4651 2222 else if (GET_CODE (x) == CONST_DOUBLE)
2223 {
2224 long val;
2225 REAL_VALUE_TYPE rv;
2226 if (GET_MODE (x) != SFmode)
68435912 2227 fatal_insn ("internal compiler error. Unknown mode:", x);
a28e4651 2228 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2229 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
7fe1d31c 2230 fprintf (file, "0x%lx", val);
a28e4651 2231 }
5bd39e93 2232 else if (GET_CODE (x) == CONST_STRING)
2233 fputs (XSTR (x, 0), file);
a28e4651 2234 else if (code == 'j')
7fe1d31c 2235 fputs (cond_string (GET_CODE (x)), file);
a28e4651 2236 else if (code == 'k')
7fe1d31c 2237 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
a28e4651 2238 else
62a6a7ee 2239 avr_print_operand_address (file, x);
a28e4651 2240}
2241
/* Update the condition code in the INSN.

   First map insn attribute CC values that need insn-specific treatment
   (CC_PLUS, CC_LDI) onto one of the standard CC_* values, then update
   the global cc_status accordingly.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands. */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            /* Let the plus-output worker tell us the effective CC.  */
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0. */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here. */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all. */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE. */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state. */
      CC_STATUS_INIT;
      break;
    }
}
2343
a28e4651 2344/* Choose mode for jump insn:
2345 1 - relative jump in range -63 <= x <= 62 ;
2346 2 - relative jump in range -2046 <= x <= 2045 ;
2347 3 - absolute jump (only for ATmega[16]03). */
2348
2349int
206a5129 2350avr_jump_mode (rtx x, rtx insn)
a28e4651 2351{
ef51d1e3 2352 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
47fc0706 2353 ? XEXP (x, 0) : x));
2354 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
a28e4651 2355 int jump_distance = cur_addr - dest_addr;
2356
2357 if (-63 <= jump_distance && jump_distance <= 62)
2358 return 1;
2359 else if (-2046 <= jump_distance && jump_distance <= 2045)
2360 return 2;
4f0e2214 2361 else if (AVR_HAVE_JMP_CALL)
a28e4651 2362 return 3;
2363
2364 return 2;
2365}
2366
/* Return the assembler template for a conditional branch.
   X is a comparison RTX.
   LEN is the jump mode as returned by avr_jump_mode: 1 = short branch,
   2 = branch around an RJMP, 3 = branch around an absolute JMP.
   If REVERSE is nonzero, the condition code in X must be reversed.

   GT/GTU/LE/LEU need two branch instructions because AVR only has
   single-flag conditional branches; the signed variants additionally
   depend on whether the V flag from the previous comparison is usable
   (cc_prev_status).  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        /* V unusable: test N instead of the signed GE condition.  */
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* All other conditions map directly to a single branch insn,
         selected via the %j (direct) / %k (reversed) print codes.  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
2468
fe74bc77 2469/* Output insn cost for next insn. */
a28e4651 2470
2471void
206a5129 2472final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
114786e6 2473 int num_operands ATTRIBUTE_UNUSED)
a28e4651 2474{
ae86bb47 2475 if (avr_log.rtx_costs)
a28e4651 2476 {
114786e6 2477 rtx set = single_set (insn);
2478
2479 if (set)
2480 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
7013e87c 2481 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
114786e6 2482 else
2483 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
20d892d1 2484 rtx_cost (PATTERN (insn), INSN, 0,
2485 optimize_insn_for_speed_p()));
a28e4651 2486 }
a28e4651 2487}
2488
37ac04dc 2489/* Return 0 if undefined, 1 if always true or always false. */
a28e4651 2490
2491int
8deb3959 2492avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
a28e4651 2493{
2494 unsigned int max = (mode == QImode ? 0xff :
2495 mode == HImode ? 0xffff :
02d9a2c3 2496 mode == PSImode ? 0xffffff :
30435bf8 2497 mode == SImode ? 0xffffffff : 0);
8deb3959 2498 if (max && op && GET_CODE (x) == CONST_INT)
a28e4651 2499 {
8deb3959 2500 if (unsigned_condition (op) != op)
a28e4651 2501 max >>= 1;
2502
2503 if (max != (INTVAL (x) & max)
2504 && INTVAL (x) != 0xff)
2505 return 1;
2506 }
2507 return 0;
2508}
2509
2510
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR these are r8 .. r25.  */

int
function_arg_regno_p(int r)
{
  return 8 <= r && r <= 25;
}
2519
2520/* Initializing the variable cum for the state at the beginning
2521 of the argument list. */
2522
2523void
206a5129 2524init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2525 tree fndecl ATTRIBUTE_UNUSED)
a28e4651 2526{
2527 cum->nregs = 18;
2528 cum->regno = FIRST_CUM_REG;
257d99c3 2529 if (!libname && stdarg_p (fntype))
2530 cum->nregs = 0;
32969c63 2531
2532 /* Assume the calle may be tail called */
2533
2534 cfun->machine->sibcall_fails = 0;
a28e4651 2535}
2536
0af74aa0 2537/* Returns the number of registers to allocate for a function argument. */
2538
2539static int
36f949a2 2540avr_num_arg_regs (enum machine_mode mode, const_tree type)
0af74aa0 2541{
2542 int size;
2543
2544 if (mode == BLKmode)
2545 size = int_size_in_bytes (type);
2546 else
2547 size = GET_MODE_SIZE (mode);
2548
b681d971 2549 /* Align all function arguments to start in even-numbered registers.
2550 Odd-sized arguments leave holes above them. */
0af74aa0 2551
b681d971 2552 return (size + 1) & ~1;
0af74aa0 2553}
2554
a28e4651 2555/* Controls whether a function argument is passed
20c71901 2556 in a register, and which register. */
a28e4651 2557
e8509bef 2558static rtx
39cba157 2559avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
e8509bef 2560 const_tree type, bool named ATTRIBUTE_UNUSED)
a28e4651 2561{
39cba157 2562 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
0af74aa0 2563 int bytes = avr_num_arg_regs (mode, type);
a28e4651 2564
2565 if (cum->nregs && bytes <= cum->nregs)
1a83b3ff 2566 return gen_rtx_REG (mode, cum->regno - bytes);
0af74aa0 2567
a28e4651 2568 return NULL_RTX;
2569}
2570
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  Registers are consumed downwards: both
   nregs and regno decrease by the (even-aligned) byte count.  */

static void
avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register. As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called. */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used. targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted. */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available. If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register. See PR45099 for an example. */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  /* Registers exhausted: remaining arguments go on the stack.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
2623
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
/* Decide whether we can make a sibling call to a function. DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.

   Returns false when arguments were passed in call-saved regs (see
   avr_function_arg_advance), with -mcall-prologues, or when caller and
   callee have incompatible epilogues (interrupt/signal/naked/OS_task/
   OS_main attributes).  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args. We must not tail-call when `epilogue_restores'
     is used. Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* Indirect call: dig the FUNCTION_TYPE/METHOD_TYPE out of the
         call expression's function pointer type.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (cfun->machine->is_interrupt
      || cfun->machine->is_signal
      || cfun->machine->is_naked
      || avr_naked_function_p (decl_callee)
      /* FIXME: For OS_task and OS_main, we are over-conservative.
         This is due to missing documentation of these attributes
         and what they actually should do and should not do. */
      || (avr_OS_task_function_p (decl_callee)
          != cfun->machine->is_OS_task)
      || (avr_OS_main_function_p (decl_callee)
          != cfun->machine->is_OS_main))
    {
      return false;
    }

  return true;
}
2682
a28e4651 2683/***********************************************************************
2684 Functions for outputting various mov's for a various modes
2685************************************************************************/
4202ef11 2686
2687/* Return true if a value of mode MODE is read from flash by
2688 __load_* function from libgcc. */
2689
2690bool
2691avr_load_libgcc_p (rtx op)
2692{
2693 enum machine_mode mode = GET_MODE (op);
2694 int n_bytes = GET_MODE_SIZE (mode);
2695
2696 return (n_bytes > 2
2697 && !AVR_HAVE_LPMX
be6d8823 2698 && avr_mem_flash_p (op));
4202ef11 2699}
2700
5bd39e93 2701/* Return true if a value of mode MODE is read by __xload_* function. */
2702
2703bool
2704avr_xload_libgcc_p (enum machine_mode mode)
2705{
2706 int n_bytes = GET_MODE_SIZE (mode);
2707
2708 return (n_bytes > 1
e508bf98 2709 || avr_current_device->n_flash > 1);
5bd39e93 2710}
2711
2712
/* Fixme: This is a hack because secondary reloads don't works as expected.

   Find an unused d-register to be used as scratch in INSN.
   EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
   is a register, skip all possible return values that overlap EXCLUDE.
   The policy for the returned register is similar to that of
   `reg_unused_after', i.e. the returned register may overlap the SET_DEST
   of INSN.

   Return a QImode d-register or NULL_RTX if nothing found. */

static rtx
avr_find_unused_d_reg (rtx insn, rtx exclude)
{
  int regno;
  /* ISRs must not silently clobber call-used regs, hence the extra
     conditions on the "non-live register" case below.  */
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  /* d-registers are r16 .. r31.  */
  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed. */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
2765
2766
/* Helper function for the next function in the case where only restricted
   version of LPM instruction is available.

   XOP[] is the operand array set up by avr_out_lpm: XOP[0] = destination
   register, XOP[1] = address, XOP[2] = Z register, XOP[4] = ""/"e" LPM
   prefix.  Emits LPM sequences through the implicit r0 target and counts
   instruction words into *PLEN (if non-NULL).  Returns "".  */

static const char*
avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM. */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          /* Only move away from r0 if the destination differs.  */
          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* Destination overlaps Z: buffer the low byte on the stack
             so the second LPM still sees an intact address.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm" CR_TAB
                                "push %3" CR_TAB
                                "adiw %2,1" CR_TAB
                                "%4lpm" CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm" CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1" CR_TAB
                       "%4lpm" CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if the caller still needs it.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      /* Byte 0; skip the extra mov when r0 is the destination.  */
      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
2862
2863
/* If PLEN == NULL: Output instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
   Return "".

   Sets up RAMPZ for extended (ELPM) address spaces, then emits either
   the comfortable [E]LPMX sequences or falls back to
   avr_out_lpm_no_lpmx on devices without LPMX.  */

const char*
avr_out_lpm (rtx insn, rtx *op, int *plen)
{
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Stores into flash address spaces are not supported; warn and
     emit nothing rather than wrong code.  */
  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  /* Operand layout shared with avr_out_lpm_no_lpmx:
     %0 = dest, %1 = addr, %2 = Z, %4 = ""/"e" prefix (or segment number
     while setting RAMPZ), %5 = tmp reg, %6 = RAMPZ address.  */
  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed. */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          /* Free d-reg available: load the segment number directly.  */
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* Segment 1 can be built in the tmp reg via clr/inc.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          /* No scratch: borrow ZL, restoring it afterwards.  */
          avr_asm_len ("mov %5,%2" CR_TAB
                       "ldi %2,%4" CR_TAB
                       "out %i6,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* From here on, %4 selects the "e"(xtended) LPM variants.  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way. */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* Destination overlaps Z: buffer the low byte in tmp reg.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              /* Restore Z if the caller still needs it.  */
              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* Bytes 2..3 overlap Z: buffer byte 3 in tmp reg.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
3036
3037
/* Worker function for xload_8 insn.

   Emits a sequence that reads one byte from either RAM or flash
   depending on bit 7 of the address high byte in OP[1]: "ld" when the
   bit is set (RAM), "lpm" otherwise (flash).  Counts words into *PLEN
   if non-NULL.  Returns "".  */

const char*
avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  /* Without LPMX the load must go through the implicit r0.  */
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  if (plen)
    *plen = 0;

  /* Skip the LD when bit 7 is clear, skip the LPM when it is set.  */
  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2" CR_TAB
               "sbrs %1,7", xop, plen, 3);

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);

  /* Move the byte out of r0 when needed (no-LPMX path).  */
  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
3064
3065
/* Output assembler for a QImode (1-byte) move OPERANDS[0] := OPERANDS[1].
   Flash reads are delegated to avr_out_lpm; register/constant/memory
   combinations are dispatched to the matching helper.  If PLEN is
   non-NULL it receives the length in words.  */

const char*
output_movqi (rtx insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);
    }

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          /* Moves involving SP go through I/O space.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return avr_asm_len ("out %0,%1", operands, plen, -1);
          else if (test_hard_reg_class (STACK_REG, src))
            return avr_asm_len ("in %0,%1", operands, plen, -1);

          return avr_asm_len ("mov %0,%1", operands, plen, -1);
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, plen, false);
          return "";
        }
      else if (MEM_P (src))
        return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Storing 0 reuses __zero_reg__ instead of loading a constant.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
    }

  return "";
}
3111
3112
/* Output assembler for a HImode (2-byte) move XOP[0] := XOP[1].
   Flash reads go through avr_out_lpm.  SP moves need special care:
   the write order and interrupt masking depend on the device family.
   If PLEN is non-NULL it receives the length in words.

   NOTE(review): the size of DEST is asserted twice (== 2 both times);
   the second gcc_assert is redundant.  Also, the MEM_P(dest) branch
   declares a local `xop' that shadows the parameter — intentional, to
   avoid clobbering the caller's operand array.  */

const char *
output_movhi (rtx insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              /* 8-bit SP: only the low byte exists.  */
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              /* XMEGA updates SP atomically: low byte first.  */
              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are used. */

              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)
                : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                               "cli" CR_TAB
                               "out __SP_H__,%B1" CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              /* Devices without SPH read 0 for the high byte.  */
              return !AVR_HAVE_SPH
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Storing 0 reuses __zero_reg__ instead of loading a constant.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
3192
/* Output assembler to load a QImode value OP[0] from memory OP[1].
   Handles constant addresses (IN for I/O space, LDS otherwise),
   reg+disp addresses — including displacements beyond the 63-byte LDD
   reach, which require temporarily adjusting Y or X — and plain
   register-indirect loads.  *PLEN receives the length in words.  */

static const char*
out_movqi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* I/O addresses use the shorter IN when optimizing.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          /* Displacement beyond LDD reach: only Y supports the
             adjust-access-restore pattern below.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          /* Very large displacement: add/subtract it in full.  */
          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options. */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          /* Restore X unless it is dead or overwritten by the load.  */
          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
3252
/* Output a HImode (2-byte) load from memory SRC = OP[1] into register
   DEST = OP[0].  INSN is the move insn; PLEN as in out_movqi_r_mr.  */

static const char*
out_movhi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers. */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
        /* Loading %A0 directly would clobber the base register, so the
           low byte goes through __tmp_reg__.  */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1" CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD form: post-increment, then undo if X lives on.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Out of LDD offset range: only Y may be adjusted.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y" CR_TAB
                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "ld %B0,X" CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+" CR_TAB
                         "ld %B0,X" CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile access: read the low byte first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ld %A0,%p1" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3372
/* Output a 4-byte (SImode/SFmode) load from memory SRC = OP[1] into
   registers DEST = OP[0].  INSN is the move insn.  If L is non-null,
   only the instruction count is stored in *L (legacy length API).  */

static const char*
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld r29,X" CR_TAB
                          "ld r28,-X" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1" CR_TAB
                          "ld r26,X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* DEST overlaps the top half of X (r26/r27 = %C0/%D0):
               buffer byte 2 in __tmp_reg__ so %C0 = r26 is not
               clobbered before the final read through X.  */
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          if (reg_dest == reg_base)
            /* DEST starts at the base register: load top-down, with
               byte 1 buffered so the base survives until the last read.  */
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld r29,X" CR_TAB
                      "ld r28,-X" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1" CR_TAB
                      "ld r26,X" CR_TAB
                      "mov r27,__tmp_reg__");
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB
                    "ld r24,X+" CR_TAB
                    "ld r25,X+" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+" CR_TAB
                  "ld %B0,X+" CR_TAB
                  "ld %C0,X+" CR_TAB
                  "ld %D0,X" CR_TAB
                  "sbiw r26,%o1+3");
        }
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3533
/* Output a 4-byte (SImode/SFmode) store from registers SRC = OP[1] to
   memory DEST = OP[0].  INSN is the move insn.  If L is non-null, only
   the instruction count is stored in *L (legacy length API).  */

static const char*
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,("sts %m0,%A1" CR_TAB
                 "sts %m0+1,%B1" CR_TAB
                 "sts %m0+2,%C1" CR_TAB
                 "sts %m0+3,%D1");
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26" CR_TAB
                              "adiw r26,1" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB
                              "st X+,r28" CR_TAB
                              "st X,r29" CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* SRC's top half overlaps X: copy bytes C and D aside
                 before the pointer increments walk over them.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,%A1" CR_TAB
                              "st %0+,%B1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__" CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1" CR_TAB
                        "sbiw r26,3");
        }
      else
        return *l=4, ("st %0,%A1" CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "st X+,r28" CR_TAB
                      "st X,r29" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              *l = 9;
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,r24" CR_TAB
                      "st X+,r25" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB
                      "sbiw r26,%o0+3");
            }
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1" CR_TAB
                  "st X+,%B1" CR_TAB
                  "st X+,%C1" CR_TAB
                  "st X,%D1" CR_TAB
                  "sbiw r26,%o0+3");
        }
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
3688
/* Output a 4-byte move (SImode or SFmode) for INSN with OPERANDS.
   Dispatches to the flash (LPM), register-register, constant-reload
   and memory sub-cases.  If L is non-null, only the insn count is
   computed into *L.  */

const char *
output_movsisf (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      /* Flash accesses need LPM/ELPM sequences.  */
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          /* Copy in an order that cannot clobber not-yet-read bytes
             when the register ranges overlap.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %C0,%C1" CR_TAB
                          "movw %A0,%A1");
                }
              *l = 4;
              return ("mov %D0,%D1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %A0,%A1");
            }
          else
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %A0,%A1" CR_TAB
                          "movw %C0,%C1");
                }
              *l = 4;
              return ("mov %A0,%A1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %D0,%D1");
            }
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_insisf (operands, NULL_RTX, real_l);
        }
      else if (MEM_P (src))
        return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      const char *templ;

      /* A zero store can use the fixed zero register as source.  */
      if (src == CONST0_RTX (GET_MODE (dest)))
        operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      /* Undo the operand substitution before returning.  */
      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
3765
02d9a2c3 3766
/* Handle loads of 24-bit types from memory to register.
   OP[0] is the destination register, OP[1] the memory source;
   PLEN as in out_movqi_r_mr.  */

static const char*
avr_out_load_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2" CR_TAB
                                "ld r28,X" CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1" CR_TAB
                                "ld r26,X" CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  /* Restore X unless the load overwrote it or it dies.  */
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Load top-down with byte 1 buffered so the base pointer
               survives until the last read.  */
            return avr_asm_len ("ldd %C0,%1+2" CR_TAB
                                "ldd __tmp_reg__,%1+1" CR_TAB
                                "ld %A0,%1" CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld %A0,%1" CR_TAB
                                "ldd %B0,%1+1" CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61" CR_TAB
                                "ldd %B0,Y+62" CR_TAB
                                "ldd %C0,Y+63" CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %A0,Y" CR_TAB
                              "ldd %B0,Y+1" CR_TAB
                              "ldd %C0,Y+2" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2" CR_TAB
                                  "ld r28,X" CR_TAB
                                  "ld __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1" CR_TAB
                                  "ld r26,X" CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X+" CR_TAB
                       "ld %C0,X", op, plen, -4);

          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1" CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("lds %A0,%m1" CR_TAB
                        "lds %B0,%m1+1" CR_TAB
                        "lds %C0,%m1+2", op, plen , -6);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3897
3898/* Handle store of 24-bit type from register or zero to memory. */
3899
3900static const char*
3901avr_out_store_psi (rtx insn, rtx *op, int *plen)
3902{
3903 rtx dest = op[0];
3904 rtx src = op[1];
3905 rtx base = XEXP (dest, 0);
3906 int reg_base = true_regnum (base);
3907
3908 if (CONSTANT_ADDRESS_P (base))
3909 return avr_asm_len ("sts %m0,%A1" CR_TAB
3910 "sts %m0+1,%B1" CR_TAB
3911 "sts %m0+2,%C1", op, plen, -6);
3912
3913 if (reg_base > 0) /* (r) */
3914 {
3915 if (reg_base == REG_X) /* (R26) */
3916 {
3917 gcc_assert (!reg_overlap_mentioned_p (base, src));
3918
3919 avr_asm_len ("st %0+,%A1" CR_TAB
3920 "st %0+,%B1" CR_TAB
3921 "st %0,%C1", op, plen, -3);
3922
3923 if (!reg_unused_after (insn, base))
3924 avr_asm_len ("sbiw r26,2", op, plen, 1);
3925
3926 return "";
3927 }
3928 else
3929 return avr_asm_len ("st %0,%A1" CR_TAB
3930 "std %0+1,%B1" CR_TAB
3931 "std %0+2,%C1", op, plen, -3);
3932 }
3933 else if (GET_CODE (base) == PLUS) /* (R + i) */
3934 {
3935 int disp = INTVAL (XEXP (base, 1));
3936 reg_base = REGNO (XEXP (base, 0));
3937
3938 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3939 {
3940 if (reg_base != REG_Y)
3941 fatal_insn ("incorrect insn:",insn);
3942
3943 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3944 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3945 "std Y+61,%A1" CR_TAB
3946 "std Y+62,%B1" CR_TAB
3947 "std Y+63,%C1" CR_TAB
3948 "sbiw r28,%o0-60", op, plen, -5);
3949
3950 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3951 "sbci r29,hi8(-%o0)" CR_TAB
3952 "st Y,%A1" CR_TAB
3953 "std Y+1,%B1" CR_TAB
3954 "std Y+2,%C1" CR_TAB
3955 "subi r28,lo8(%o0)" CR_TAB
3956 "sbci r29,hi8(%o0)", op, plen, -7);
3957 }
3958 if (reg_base == REG_X)
3959 {
3960 /* (X + d) = R */
3961 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3962
3963 avr_asm_len ("adiw r26,%o0" CR_TAB
3964 "st X+,%A1" CR_TAB
3965 "st X+,%B1" CR_TAB
3966 "st X,%C1", op, plen, -4);
3967
3968 if (!reg_unused_after (insn, XEXP (base, 0)))
3969 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3970
3971 return "";
3972 }
3973
3974 return avr_asm_len ("std %A0,%A1" CR_TAB
3975 "std %B0,%B1" CR_TAB
3976 "std %C0,%C1", op, plen, -3);
3977 }
3978 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3979 return avr_asm_len ("st %0,%C1" CR_TAB
3980 "st %0,%B1" CR_TAB
3981 "st %0,%A1", op, plen, -3);
3982 else if (GET_CODE (base) == POST_INC) /* (R++) */
3983 return avr_asm_len ("st %0,%A1" CR_TAB
3984 "st %0,%B1" CR_TAB
3985 "st %0,%C1", op, plen, -3);
3986
3987 fatal_insn ("unknown move insn:",insn);
3988 return "";
3989}
3990
3991
/* Move around 24-bit stuff.
   OP[0] is the destination, OP[1] the source of a PSImode move for
   INSN; dispatches to the flash, register, constant and memory cases.
   PLEN as in out_movqi_r_mr.  */

const char *
avr_out_movpsi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      /* Flash accesses need LPM/ELPM sequences.  */
      return avr_out_lpm (insn, op, plen);
    }

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy in an order that cannot clobber not-yet-read bytes
             of an overlapping source.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              avr_asm_len ("mov %C0,%C1", op, plen, -1);

              if (AVR_HAVE_MOVW)
                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
              else
                return avr_asm_len ("mov %B0,%B1" CR_TAB
                                    "mov %A0,%A1", op, plen, 2);
            }
          else
            {
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %A0,%A1", op, plen, -1);
              else
                avr_asm_len ("mov %A0,%A1" CR_TAB
                             "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
            }
        }
      else if (CONSTANT_P (src))
        {
          return avr_out_reload_inpsi (op, NULL_RTX, plen);
        }
      else if (MEM_P (src))
        return avr_out_load_psi (insn, op, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* A zero store can use the fixed zero register as source.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);
  return "";
}
4051
4052
/* Output a QImode store from register SRC = OP[1] to memory
   DEST = OP[0].  INSN is the move insn; PLEN as in out_movqi_r_mr.  */

static const char*
out_movqi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* OUT is shorter and faster than STS for I/O addresses, but only
         use it when optimizing.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          /* Displacement exceeds STD's offset range: only Y may be
             adjusted temporarily.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1" CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1" CR_TAB
                              "subi r28,lo8(%o0)" CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              /* SRC overlaps X: copy it aside before adjusting X.  */
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0" CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x,0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
4115
0b6cf66f 4116
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  OP[0] is the memory destination,
   OP[1] the source register; PLEN as in out_movqi_r_mr.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP. */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0,%A1" CR_TAB
                     "out %i0+1,%B1", op, plen, -2)

      : avr_asm_len ("sts %m0,%A1" CR_TAB
                     "sts %m0+1,%B1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined. */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X unless it is dead after this insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Out of STD offset range: only Y may be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: write the low byte first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4227
4228
/* Output a HImode store from register SRC = OP[1] to memory
   DEST = OP[0].  INSN is the move insn; PLEN as in out_movqi_r_mr.  */

static const char*
out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP. */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)

      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined. */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Out of STD offset range: only Y may be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      /* Volatile: write the high byte first despite post-increment.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4348
20c71901 4349/* Return 1 if frame pointer for current function required. */
a28e4651 4350
a45076aa 4351static bool
ebdd0478 4352avr_frame_pointer_required_p (void)
a28e4651 4353{
18d50ae6 4354 return (cfun->calls_alloca
a12b9b80 4355 || cfun->calls_setjmp
4356 || cfun->has_nonlocal_label
4357 || crtl->args.info.nregs == 0
4358 || get_frame_size () > 0);
a28e4651 4359}
4360
faf8f400 4361/* Returns the condition of compare insn INSN, or UNKNOWN. */
a28e4651 4362
faf8f400 4363static RTX_CODE
206a5129 4364compare_condition (rtx insn)
a28e4651 4365{
4366 rtx next = next_real_insn (insn);
cffa155c 4367
4368 if (next && JUMP_P (next))
a28e4651 4369 {
4370 rtx pat = PATTERN (next);
4371 rtx src = SET_SRC (pat);
cffa155c 4372
4373 if (IF_THEN_ELSE == GET_CODE (src))
4374 return GET_CODE (XEXP (src, 0));
a28e4651 4375 }
cffa155c 4376
4377 return UNKNOWN;
faf8f400 4378}
4379
faf8f400 4380
dfd52f2b 4381/* Returns true iff INSN is a tst insn that only tests the sign. */
4382
4383static bool
206a5129 4384compare_sign_p (rtx insn)
faf8f400 4385{
4386 RTX_CODE cond = compare_condition (insn);
4387 return (cond == GE || cond == LT);
4388}
4389
dfd52f2b 4390
4391/* Returns true iff the next insn is a JUMP_INSN with a condition
faf8f400 4392 that needs to be swapped (GT, GTU, LE, LEU). */
4393
dfd52f2b 4394static bool
206a5129 4395compare_diff_p (rtx insn)
faf8f400 4396{
4397 RTX_CODE cond = compare_condition (insn);
a28e4651 4398 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4399}
4400
dfd52f2b 4401/* Returns true iff INSN is a compare insn with the EQ or NE condition. */
a28e4651 4402
dfd52f2b 4403static bool
206a5129 4404compare_eq_p (rtx insn)
a28e4651 4405{
faf8f400 4406 RTX_CODE cond = compare_condition (insn);
a28e4651 4407 return (cond == EQ || cond == NE);
4408}
4409
4410
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   XOP[0] is 2 or 4 bytes wide (HI/SI and the fixed-point modes of the
   same size); 8-byte comparisons arrive here via avr_out_compare64 and
   are always against the ACC_A register group (see assertion below).

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.  */

const char*
avr_out_compare (rtx insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against.  */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  enum machine_mode mode;

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);
    }

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* x == 1  <=>  (x - 1) == 0: decrement the low byte, then OR
             all bytes together; Z is set iff the whole value is 0.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* x == -1: AND all bytes; result is 0xff iff every byte was
             0xff, so COM (one's complement) sets Z exactly then.  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  /* Generic case: compare byte 0 with CP/CPI and the remaining bytes
     with CPC/SBCI so the carry chains through the whole value.  */

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              /* SBIW handles the low 16 bits in one instruction; skip
                 the next byte in the loop.  */
              avr_asm_len ("sbiw %0,%1", xop, plen, 1);
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* x == -N  <=>  x + N == 0 for small N; ADIW sets Z.  */
              return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy.  */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* There is no CPC-with-immediate; SBCI clobbers the
                 register, which is fine if it is dead afterwards.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
4575
4576
83921eda 4577/* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4578
4579const char*
4580avr_out_compare64 (rtx insn, rtx *op, int *plen)
4581{
4582 rtx xop[3];
4583
4584 xop[0] = gen_rtx_REG (DImode, 18);
4585 xop[1] = op[0];
4586 xop[2] = op[1];
4587
4588 return avr_out_compare (insn, xop, plen);
4589}
4590
20c71901 4591/* Output test instruction for HImode. */
a28e4651 4592
dfd52f2b 4593const char*
4594avr_out_tsthi (rtx insn, rtx *op, int *plen)
a28e4651 4595{
faf8f400 4596 if (compare_sign_p (insn))
a28e4651 4597 {
dfd52f2b 4598 avr_asm_len ("tst %B0", op, plen, -1);
a28e4651 4599 }
dfd52f2b 4600 else if (reg_unused_after (insn, op[0])
4601 && compare_eq_p (insn))
a28e4651 4602 {
20c71901 4603 /* Faster than sbiw if we can clobber the operand. */
dfd52f2b 4604 avr_asm_len ("or %A0,%B0", op, plen, -1);
a28e4651 4605 }
dfd52f2b 4606 else
a28e4651 4607 {
dfd52f2b 4608 avr_out_compare (insn, op, plen);
a28e4651 4609 }
dfd52f2b 4610
4611 return "";
a28e4651 4612}
4613
4614
02d9a2c3 4615/* Output test instruction for PSImode. */
4616
4617const char*
4618avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4619{
4620 if (compare_sign_p (insn))
4621 {
4622 avr_asm_len ("tst %C0", op, plen, -1);
4623 }
4624 else if (reg_unused_after (insn, op[0])
4625 && compare_eq_p (insn))
4626 {
4627 /* Faster than sbiw if we can clobber the operand. */
4628 avr_asm_len ("or %A0,%B0" CR_TAB
4629 "or %A0,%C0", op, plen, -2);
4630 }
4631 else
4632 {
4633 avr_out_compare (insn, op, plen);
4634 }
4635
4636 return "";
4637}
4638
4639
20c71901 4640/* Output test instruction for SImode. */
a28e4651 4641
dfd52f2b 4642const char*
4643avr_out_tstsi (rtx insn, rtx *op, int *plen)
a28e4651 4644{
faf8f400 4645 if (compare_sign_p (insn))
a28e4651 4646 {
dfd52f2b 4647 avr_asm_len ("tst %D0", op, plen, -1);
a28e4651 4648 }
dfd52f2b 4649 else if (reg_unused_after (insn, op[0])
4650 && compare_eq_p (insn))
a28e4651 4651 {
dfd52f2b 4652 /* Faster than sbiw if we can clobber the operand. */
4653 avr_asm_len ("or %A0,%B0" CR_TAB
4654 "or %A0,%C0" CR_TAB
4655 "or %A0,%D0", op, plen, -3);
4656 }
4657 else
4658 {
4659 avr_out_compare (insn, op, plen);
a28e4651 4660 }
dfd52f2b 4661
4662 return "";
a28e4651 4663}
4664
4665
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length of the sequence; don't output.

   The general strategy is: load the shift count into a counter register
   (%3), then emit TEMPL once inside a decrement-and-branch loop.  For
   small constant counts the template is instead repeated inline.  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *plen, int t_len)
{
  /* Whether the loop needs a second label: true when the count comes
     from a register/memory and may be zero, so we must jump straight
     to the loop test.  */
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A PARALLEL pattern means a scratch register %3 was provided.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          /* Load the loop counter into the scratch register.  */
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* Constant count is known non-zero here, so the loop body can be
         entered directly without jumping to the test first.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Count in memory: fetch it into __tmp_reg__ first.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if decrementing it in place would clobber a
         live register or one of the registers being shifted.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
      avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* With the zero-reg trick the single set bit is shifted right until
     the counter becomes zero (sign stays clear), hence LSR + BRPL;
     otherwise a plain decrement-and-branch-if-not-zero.  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
4787
4788
/* 8bit shift left ((char)x << i)

   Output instructions for %0 <<= %2 on a QImode register.  The shift is
   performed in place on %0 (operands 0 and 1 presumably tied by the md
   constraints -- only %0 appears in the templates; verify against
   avr.md).  If LEN is non-NULL, set *LEN to the sequence length in
   instructions instead of outputting code.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          /* Counts >= 8 shift everything out: result is 0.  */
          if (INTVAL (operands[2]) < 8)
            break;

          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* Upper registers can use SWAP + ANDI: 2 insns instead of 4.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Only bit 0 survives: rotate it into carry, clear, rotate
             the carry into bit 7.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Non-constant count: emit a generic shift loop.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
4884
4885
/* 16bit shift left ((short)x << i)

   Output instructions for %0 = %1 << %2 on HImode operands; most
   templates operate in place on %0 (case 8 reads %A1 explicitly).
   If LEN is non-NULL, set *LEN to the sequence length in instructions
   instead of outputting code.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means an 8-bit scratch register %3 exists.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* Upper registers accept immediates (ANDI/LDI).  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          /* Counts < 16 not handled below fall through to the generic
             loop; counts >= 16 clear the whole value.  */
          if (INTVAL (operands[2]) < 16)
            break;

          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes, then mask/merge with EOR.  */
              *len = 6;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              /* Shift once, then use the 4-bit swap trick for the rest.  */
              *len = 8;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* x << 6 == x >> 2 shifted into the adjacent byte pair:
             shift right twice through __tmp_reg__, then move up.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          /* x << 7 == (x >> 1) moved up one byte.  */
          *len = 5;
          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "ror %A0");

        case 8:
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x20 == shift left by 5, result byte lands
                 in r0; r1 (__zero_reg__) is clobbered by MUL.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x20 multiplier in r1 via SET + BLD.  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,5" CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Multiply by 0x40 == shift left by 6.  */
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: shift the high byte 6 times.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0" CR_TAB
                      "dec %A0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* x << 14 == (x >> 2) placed into the top two bits.  */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          /* Only bit 0 survives, in bit 15.  */
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
5141
5142
02d9a2c3 5143/* 24-bit shift left */
5144
5145const char*
5146avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
5147{
5148 if (plen)
5149 *plen = 0;
5150
5151 if (CONST_INT_P (op[2]))
5152 {
5153 switch (INTVAL (op[2]))
5154 {
5155 default:
5156 if (INTVAL (op[2]) < 24)
5157 break;
5158
5159 return avr_asm_len ("clr %A0" CR_TAB
5160 "clr %B0" CR_TAB
5161 "clr %C0", op, plen, 3);
5162
5163 case 8:
5164 {
5165 int reg0 = REGNO (op[0]);
5166 int reg1 = REGNO (op[1]);
5167
5168 if (reg0 >= reg1)
5169 return avr_asm_len ("mov %C0,%B1" CR_TAB
5170 "mov %B0,%A1" CR_TAB
5171 "clr %A0", op, plen, 3);
5172 else
5173 return avr_asm_len ("clr %A0" CR_TAB
5174 "mov %B0,%A1" CR_TAB
5175 "mov %C0,%B1", op, plen, 3);
5176 }
5177
5178 case 16:
5179 {
5180 int reg0 = REGNO (op[0]);
5181 int reg1 = REGNO (op[1]);
5182
5183 if (reg0 + 2 != reg1)
5184 avr_asm_len ("mov %C0,%A0", op, plen, 1);
5185
5186 return avr_asm_len ("clr %B0" CR_TAB
5187 "clr %A0", op, plen, 2);
5188 }
5189
5190 case 23:
5191 return avr_asm_len ("clr %C0" CR_TAB
5192 "lsr %A0" CR_TAB
5193 "ror %C0" CR_TAB
5194 "clr %B0" CR_TAB
5195 "clr %A0", op, plen, 5);
5196 }
5197 }
5198
5199 out_shift_with_cnt ("lsl %A0" CR_TAB
5200 "rol %B0" CR_TAB
5201 "rol %C0", insn, op, plen, 3);
5202 return "";
5203}
5204
5205
/* 32bit shift left ((long)x << i)

   Output instructions for %0 = %1 << %2 on SImode operands.  If LEN is
   non-NULL, set *LEN to the sequence length in instructions instead of
   outputting code.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          /* Counts < 32 not handled below use the generic loop;
             counts >= 32 clear the whole value.  */
          if (INTVAL (operands[2]) < 32)
            break;

          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Copy order chosen so overlapping registers are read
               before they are overwritten.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* If the destination's high word already is the source's
               low word, just clear the low word.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, in bit 31.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
5294
/* 8bit arithmetic shift right ((signed char)x >> i)

   Output instructions for the in-place arithmetic right shift of the
   QImode register %0 by the constant or variable count %2.  If LEN is
   non-NULL, set *LEN to the sequence length instead of outputting.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Save bit 6 in T, smear the sign over the byte with
             LSL + SBC, then put bit 6 back as bit 0.  */
          *len = 4;
          return ("bst %0,6" CR_TAB
                  "lsl %0" CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Counts >= 7 leave only the sign: LSL moves the sign bit
             into carry, SBC %0,%0 yields 0x00 or 0xff.  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Non-constant count: generic shift loop.  */
  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
5365
5366
/* 16bit arithmetic shift right ((signed short)x >> i)

   Output instructions for %0 = %1 >> %2 (sign-propagating) on HImode
   operands; most templates work in place on %0 (case 8 reads %A1 when
   the registers differ).  If LEN is non-NULL, set *LEN to the sequence
   length in instructions instead of outputting code.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means an 8-bit scratch register %3 exists.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* Upper registers accept immediates (LDI).  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Implemented as a left shift by 2 of the widened value,
             keeping the high bits.  */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "rol %B0");

        case 7:
          /* Shift left once, move the high byte down, sign-extend
             with SBC.  */
          *len = 4;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0" CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "sbrc %A0,7" CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Signed multiply by 0x20 == arithmetic shift by 11 with
                 the result byte in r1; MUL clobbers __zero_reg__.  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 14:
          /* Smear the sign with LSL + SBC, then rotate the remaining
             data bit into place.  */
          *len = 5;
          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Counts >= 15 leave only the sign in every bit.  */
          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
5528
5529
/* 24-bit arithmetic shift right

   Output instructions for OP[0] = OP[1] >> OP[2] (sign-propagating) on
   PSImode registers.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length of the sequence; don't output.
   Return "".  */

const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Copy order depends on how destination and source overlap;
             the sign extension of byte C is built from %C1 resp. the
             already-moved %B0.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0" CR_TAB
                                "sbrc %C1,7" CR_TAB
                                "dec %C0" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          /* Sign-extend the moved byte into B and C.  */
          return avr_asm_len ("clr %B0" CR_TAB
                              "sbrc %A0,7" CR_TAB
                              "com %B0" CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Counts >= 23 leave only the sign in every bit.  */
          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  /* Variable or small shift count: generic one-bit-at-a-time loop.  */
  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
5587
5588
a28e4651 5589/* 32bit arithmetic shift right ((signed long)x >> i) */
 5590
/* Output an arithmetic right shift of the 32-bit value in OPERANDS[0]
   by OPERANDS[2]; OPERANDS[1] is the source.  If LEN == NULL, print the
   instructions; otherwise store the length (in words) in *LEN.
   Returns "".  */
37ac04dc 5591const char *
206a5129 5592ashrsi3_out (rtx insn, rtx operands[], int *len)
a28e4651 5593{
 5594 if (GET_CODE (operands[2]) == CONST_INT)
 5595 {
 5596 int k;
 5597 int *t = len;
1cb39658 5598
a28e4651 5599 if (!len)
 5600 len = &k;
1cb39658 5601
a28e4651 5602 switch (INTVAL (operands[2]))
 5603 {
 /* Byte shifts: move registers, then sign-extend the top byte.  */
a28e4651 5604 case 8:
 5605 {
 5606 int reg0 = true_regnum (operands[0]);
 5607 int reg1 = true_regnum (operands[1]);
 5608 *len=6;
 5609 if (reg0 <= reg1)
02a011e9 5610 return ("mov %A0,%B1" CR_TAB
 5611 "mov %B0,%C1" CR_TAB
 5612 "mov %C0,%D1" CR_TAB
 5613 "clr %D0" CR_TAB
 5614 "sbrc %C0,7" CR_TAB
 5615 "dec %D0");
a28e4651 5616 else
02a011e9 5617 return ("clr %D0" CR_TAB
 5618 "sbrc %D1,7" CR_TAB
 5619 "dec %D0" CR_TAB
 5620 "mov %C0,%D1" CR_TAB
 5621 "mov %B0,%C1" CR_TAB
 5622 "mov %A0,%B1");
a28e4651 5623 }
1cb39658 5624
a28e4651 5625 case 16:
 5626 {
 5627 int reg0 = true_regnum (operands[0]);
 5628 int reg1 = true_regnum (operands[1]);
ab3a6ef8 5629
 5630 if (reg0 == reg1 + 2)
02a011e9 5631 return *len = 4, ("clr %D0" CR_TAB
 5632 "sbrc %B0,7" CR_TAB
 5633 "com %D0" CR_TAB
 5634 "mov %C0,%D0");
ab3a6ef8 5635 if (AVR_HAVE_MOVW)
02a011e9 5636 return *len = 5, ("movw %A0,%C1" CR_TAB
 5637 "clr %D0" CR_TAB
 5638 "sbrc %B0,7" CR_TAB
 5639 "com %D0" CR_TAB
 5640 "mov %C0,%D0");
ab3a6ef8 5641 else
02a011e9 5642 return *len = 6, ("mov %B0,%D1" CR_TAB
 5643 "mov %A0,%C1" CR_TAB
 5644 "clr %D0" CR_TAB
 5645 "sbrc %B0,7" CR_TAB
 5646 "com %D0" CR_TAB
 5647 "mov %C0,%D0");
a28e4651 5648 }
1cb39658 5649
a28e4651 5650 case 24:
02a011e9 5651 return *len = 6, ("mov %A0,%D1" CR_TAB
 5652 "clr %D0" CR_TAB
 5653 "sbrc %A0,7" CR_TAB
 5654 "com %D0" CR_TAB
 5655 "mov %B0,%D0" CR_TAB
 5656 "mov %C0,%D0");
28f5cc4d 5657
5667001b 5658 default:
 5659 if (INTVAL (operands[2]) < 32)
 5660 break;
 5661
 5662 /* fall through */
 5663
 /* Shift by 31 (or more): result is 0 or -1 from the sign bit.  */
28f5cc4d 5664 case 31:
0aab73c2 5665 if (AVR_HAVE_MOVW)
02a011e9 5666 return *len = 4, ("lsl %D0" CR_TAB
 5667 "sbc %A0,%A0" CR_TAB
 5668 "mov %B0,%A0" CR_TAB
 5669 "movw %C0,%A0");
28f5cc4d 5670 else
02a011e9 5671 return *len = 5, ("lsl %D0" CR_TAB
 5672 "sbc %A0,%A0" CR_TAB
 5673 "mov %B0,%A0" CR_TAB
 5674 "mov %C0,%A0" CR_TAB
 5675 "mov %D0,%A0");
a28e4651 5676 }
28f5cc4d 5677 len = t;
a28e4651 5678 }
 /* Generic fallback: loop over single-bit arithmetic shifts.  */
02a011e9 5679 out_shift_with_cnt ("asr %D0" CR_TAB
 5680 "ror %C0" CR_TAB
 5681 "ror %B0" CR_TAB
 5682 "ror %A0", insn, operands, len, 4);
a28e4651 5683 return "";
 5684}
5685
 5686/* 8bit logic shift right ((unsigned char)x >> i) */
 5687
/* Output a logical right shift of the 8-bit value in OPERANDS[0] by
   OPERANDS[2].  If LEN == NULL, print the instructions; otherwise store
   the length (in words) in *LEN.  Returns "".  */
37ac04dc 5688const char *
206a5129 5689lshrqi3_out (rtx insn, rtx operands[], int *len)
a28e4651 5690{
 5691 if (GET_CODE (operands[2]) == CONST_INT)
 5692 {
 5693 int k;
1cb39658 5694
a28e4651 5695 if (!len)
 5696 len = &k;
1cb39658 5697
a28e4651 5698 switch (INTVAL (operands[2]))
 5699 {
 /* Shifting by 8 or more clears the register entirely.  */
1cb39658 5700 default:
5667001b 5701 if (INTVAL (operands[2]) < 8)
 5702 break;
 5703
1cb39658 5704 *len = 1;
02a011e9 5705 return "clr %0";
1cb39658 5706
a28e4651 5707 case 1:
1cb39658 5708 *len = 1;
02a011e9 5709 return "lsr %0";
1cb39658 5710
a28e4651 5711 case 2:
1cb39658 5712 *len = 2;
02a011e9 5713 return ("lsr %0" CR_TAB
 5714 "lsr %0");
a28e4651 5715 case 3:
1cb39658 5716 *len = 3;
02a011e9 5717 return ("lsr %0" CR_TAB
 5718 "lsr %0" CR_TAB
 5719 "lsr %0");
1cb39658 5720
 /* For shifts of 4..6 a SWAP-based sequence is shorter, but the ANDI
    mask needs an upper register (LD_REGS).  */
a28e4651 5721 case 4:
0af74aa0 5722 if (test_hard_reg_class (LD_REGS, operands[0]))
a28e4651 5723 {
 5724 *len=2;
02a011e9 5725 return ("swap %0" CR_TAB
 5726 "andi %0,0x0f");
a28e4651 5727 }
1cb39658 5728 *len = 4;
02a011e9 5729 return ("lsr %0" CR_TAB
 5730 "lsr %0" CR_TAB
 5731 "lsr %0" CR_TAB
 5732 "lsr %0");
1cb39658 5733
a28e4651 5734 case 5:
0af74aa0 5735 if (test_hard_reg_class (LD_REGS, operands[0]))
a28e4651 5736 {
1cb39658 5737 *len = 3;
02a011e9 5738 return ("swap %0" CR_TAB
 5739 "lsr %0" CR_TAB
 5740 "andi %0,0x7");
a28e4651 5741 }
1cb39658 5742 *len = 5;
02a011e9 5743 return ("lsr %0" CR_TAB
 5744 "lsr %0" CR_TAB
 5745 "lsr %0" CR_TAB
 5746 "lsr %0" CR_TAB
 5747 "lsr %0");
1cb39658 5748
a28e4651 5749 case 6:
0af74aa0 5750 if (test_hard_reg_class (LD_REGS, operands[0]))
a28e4651 5751 {
1cb39658 5752 *len = 4;
02a011e9 5753 return ("swap %0" CR_TAB
 5754 "lsr %0" CR_TAB
 5755 "lsr %0" CR_TAB
 5756 "andi %0,0x3");
a28e4651 5757 }
1cb39658 5758 *len = 6;
02a011e9 5759 return ("lsr %0" CR_TAB
 5760 "lsr %0" CR_TAB
 5761 "lsr %0" CR_TAB
 5762 "lsr %0" CR_TAB
 5763 "lsr %0" CR_TAB
 5764 "lsr %0");
1cb39658 5765
 /* Shift by 7: rotate bit 7 into carry, clear, rotate carry back in
    as bit 0.  */
a28e4651 5766 case 7:
1cb39658 5767 *len = 3;
02a011e9 5768 return ("rol %0" CR_TAB
 5769 "clr %0" CR_TAB
 5770 "rol %0");
a28e4651 5771 }
 5772 }
1cb39658 5773 else if (CONSTANT_P (operands[2]))
68435912 5774 fatal_insn ("internal compiler error. Incorrect shift:", insn);
1cb39658 5775
 /* Generic fallback: loop over single-bit logical shifts.  */
02a011e9 5776 out_shift_with_cnt ("lsr %0",
 5777 insn, operands, len, 1);
a28e4651 5778 return "";
 5779}
5780
 5781/* 16bit logic shift right ((unsigned short)x >> i) */
 5782
/* Output a logical right shift of the 16-bit value in OPERANDS[0] by
   OPERANDS[2].  If LEN == NULL, print the instructions; otherwise store
   the length (in words) in *LEN.  OPERANDS[3], when the insn pattern is
   a PARALLEL, is an upper scratch register usable for LDI masks.
   Returns "".  */
37ac04dc 5783const char *
206a5129 5784lshrhi3_out (rtx insn, rtx operands[], int *len)
a28e4651 5785{
 5786 if (GET_CODE (operands[2]) == CONST_INT)
 5787 {
b681d971 5788 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
 5789 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
a28e4651 5790 int k;
1cb39658 5791 int *t = len;
b681d971 5792
a28e4651 5793 if (!len)
 5794 len = &k;
1cb39658 5795
a28e4651 5796 switch (INTVAL (operands[2]))
 5797 {
 /* Shifting by 16 or more clears both bytes.  */
5667001b 5798 default:
 5799 if (INTVAL (operands[2]) < 16)
 5800 break;
 5801
 5802 *len = 2;
02a011e9 5803 return ("clr %B0" CR_TAB
 5804 "clr %A0");
5667001b 5805
 /* SWAP-based nibble shuffle; needs ANDI (upper regs) or a scratch.  */
b681d971 5806 case 4:
 5807 if (optimize_size && scratch)
 5808 break; /* 5 */
 5809 if (ldi_ok)
 5810 {
 5811 *len = 6;
02a011e9 5812 return ("swap %B0" CR_TAB
 5813 "swap %A0" CR_TAB
 5814 "andi %A0,0x0f" CR_TAB
 5815 "eor %A0,%B0" CR_TAB
 5816 "andi %B0,0x0f" CR_TAB
 5817 "eor %A0,%B0");
b681d971 5818 }
 5819 if (scratch)
 5820 {
 5821 *len = 7;
02a011e9 5822 return ("swap %B0" CR_TAB
 5823 "swap %A0" CR_TAB
 5824 "ldi %3,0x0f" CR_TAB
ef51d1e3 5825 "and %A0,%3" CR_TAB
02a011e9 5826 "eor %A0,%B0" CR_TAB
ef51d1e3 5827 "and %B0,%3" CR_TAB
02a011e9 5828 "eor %A0,%B0");
b681d971 5829 }
 5830 break; /* optimize_size ? 6 : 8 */
 5831
 /* One single-bit shift, then the case-4 nibble shuffle.  */
 5832 case 5:
 5833 if (optimize_size)
 5834 break; /* scratch ? 5 : 6 */
 5835 if (ldi_ok)
 5836 {
 5837 *len = 8;
02a011e9 5838 return ("lsr %B0" CR_TAB
 5839 "ror %A0" CR_TAB
 5840 "swap %B0" CR_TAB
 5841 "swap %A0" CR_TAB
 5842 "andi %A0,0x0f" CR_TAB
 5843 "eor %A0,%B0" CR_TAB
 5844 "andi %B0,0x0f" CR_TAB
 5845 "eor %A0,%B0");
b681d971 5846 }
 5847 if (scratch)
 5848 {
 5849 *len = 9;
02a011e9 5850 return ("lsr %B0" CR_TAB
 5851 "ror %A0" CR_TAB
 5852 "swap %B0" CR_TAB
 5853 "swap %A0" CR_TAB
 5854 "ldi %3,0x0f" CR_TAB
ef51d1e3 5855 "and %A0,%3" CR_TAB
02a011e9 5856 "eor %A0,%B0" CR_TAB
ef51d1e3 5857 "and %B0,%3" CR_TAB
02a011e9 5858 "eor %A0,%B0");
b681d971 5859 }
 5860 break; /* 10 */
 5861
 /* Shift right by 6 via two left shifts into __tmp_reg__.  */
 5862 case 6:
 5863 if (optimize_size)
 5864 break; /* scratch ? 5 : 6 */
 5865 *len = 9;
02a011e9 5866 return ("clr __tmp_reg__" CR_TAB
 5867 "lsl %A0" CR_TAB
 5868 "rol %B0" CR_TAB
 5869 "rol __tmp_reg__" CR_TAB
 5870 "lsl %A0" CR_TAB
 5871 "rol %B0" CR_TAB
 5872 "rol __tmp_reg__" CR_TAB
 5873 "mov %A0,%B0" CR_TAB
 5874 "mov %B0,__tmp_reg__");
28f5cc4d 5875
 5876 case 7:
 5877 *len = 5;
02a011e9 5878 return ("lsl %A0" CR_TAB
 5879 "mov %A0,%B0" CR_TAB
 5880 "rol %A0" CR_TAB
 5881 "sbc %B0,%B0" CR_TAB
 5882 "neg %B0");
28f5cc4d 5883
 /* Whole-byte moves for shifts 8..15, plus residual bit shifts.  */
a28e4651 5884 case 8:
02a011e9 5885 return *len = 2, ("mov %A0,%B1" CR_TAB
 5886 "clr %B0");
28f5cc4d 5887
 5888 case 9:
 5889 *len = 3;
02a011e9 5890 return ("mov %A0,%B0" CR_TAB
 5891 "clr %B0" CR_TAB
 5892 "lsr %A0");
28f5cc4d 5893
 5894 case 10:
 5895 *len = 4;
02a011e9 5896 return ("mov %A0,%B0" CR_TAB
 5897 "clr %B0" CR_TAB
 5898 "lsr %A0" CR_TAB
 5899 "lsr %A0");
28f5cc4d 5900
 5901 case 11:
 5902 *len = 5;
02a011e9 5903 return ("mov %A0,%B0" CR_TAB
 5904 "clr %B0" CR_TAB
 5905 "lsr %A0" CR_TAB
 5906 "lsr %A0" CR_TAB
 5907 "lsr %A0");
28f5cc4d 5908
 5909 case 12:
b681d971 5910 if (ldi_ok)
28f5cc4d 5911 {
 5912 *len = 4;
02a011e9 5913 return ("mov %A0,%B0" CR_TAB
 5914 "clr %B0" CR_TAB
 5915 "swap %A0" CR_TAB
 5916 "andi %A0,0x0f");
28f5cc4d 5917 }
b681d971 5918 if (scratch)
 5919 {
 5920 *len = 5;
02a011e9 5921 return ("mov %A0,%B0" CR_TAB
 5922 "clr %B0" CR_TAB
 5923 "swap %A0" CR_TAB
 5924 "ldi %3,0x0f" CR_TAB
ef51d1e3 5925 "and %A0,%3");
b681d971 5926 }
 5927 *len = 6;
02a011e9 5928 return ("mov %A0,%B0" CR_TAB
 5929 "clr %B0" CR_TAB
 5930 "lsr %A0" CR_TAB
 5931 "lsr %A0" CR_TAB
 5932 "lsr %A0" CR_TAB
 5933 "lsr %A0");
28f5cc4d 5934
 /* For 13/14 a hardware multiply by a power of 2 can do the shift;
    the product's high byte (r1) is the result.  */
 5935 case 13:
b681d971 5936 if (ldi_ok)
28f5cc4d 5937 {
 5938 *len = 5;
02a011e9 5939 return ("mov %A0,%B0" CR_TAB
 5940 "clr %B0" CR_TAB
 5941 "swap %A0" CR_TAB
 5942 "lsr %A0" CR_TAB
 5943 "andi %A0,0x07");
28f5cc4d 5944 }
8cc5a1af 5945 if (AVR_HAVE_MUL && scratch)
28f5cc4d 5946 {
 5947 *len = 5;
02a011e9 5948 return ("ldi %3,0x08" CR_TAB
 5949 "mul %B0,%3" CR_TAB
 5950 "mov %A0,r1" CR_TAB
 5951 "clr %B0" CR_TAB
 5952 "clr __zero_reg__");
28f5cc4d 5953 }
b681d971 5954 if (optimize_size && scratch)
 5955 break; /* 5 */
 5956 if (scratch)
 5957 {
 5958 *len = 6;
02a011e9 5959 return ("mov %A0,%B0" CR_TAB
 5960 "clr %B0" CR_TAB
 5961 "swap %A0" CR_TAB
 5962 "lsr %A0" CR_TAB
 5963 "ldi %3,0x07" CR_TAB
ef51d1e3 5964 "and %A0,%3");
b681d971 5965 }
8cc5a1af 5966 if (AVR_HAVE_MUL)
b681d971 5967 {
 5968 *len = 6;
 5969 return ("set" CR_TAB
02a011e9 5970 "bld r1,3" CR_TAB
 5971 "mul %B0,r1" CR_TAB
 5972 "mov %A0,r1" CR_TAB
 5973 "clr %B0" CR_TAB
 5974 "clr __zero_reg__");
b681d971 5975 }
 5976 *len = 7;
02a011e9 5977 return ("mov %A0,%B0" CR_TAB
 5978 "clr %B0" CR_TAB
 5979 "lsr %A0" CR_TAB
 5980 "lsr %A0" CR_TAB
 5981 "lsr %A0" CR_TAB
 5982 "lsr %A0" CR_TAB
 5983 "lsr %A0");
28f5cc4d 5984
 5985 case 14:
8cc5a1af 5986 if (AVR_HAVE_MUL && ldi_ok)
b681d971 5987 {
 5988 *len = 5;
02a011e9 5989 return ("ldi %A0,0x04" CR_TAB
 5990 "mul %B0,%A0" CR_TAB
 5991 "mov %A0,r1" CR_TAB
 5992 "clr %B0" CR_TAB
 5993 "clr __zero_reg__");
b681d971 5994 }
8cc5a1af 5995 if (AVR_HAVE_MUL && scratch)
28f5cc4d 5996 {
 5997 *len = 5;
02a011e9 5998 return ("ldi %3,0x04" CR_TAB
 5999 "mul %B0,%3" CR_TAB
 6000 "mov %A0,r1" CR_TAB
 6001 "clr %B0" CR_TAB
 6002 "clr __zero_reg__");
28f5cc4d 6003 }
b681d971 6004 if (optimize_size && ldi_ok)
 6005 {
 6006 *len = 5;
02a011e9 6007 return ("mov %A0,%B0" CR_TAB
 6008 "ldi %B0,6" "\n1:\t"
 6009 "lsr %A0" CR_TAB
 6010 "dec %B0" CR_TAB
 6011 "brne 1b");
b681d971 6012 }
 6013 if (optimize_size && scratch)
 6014 break; /* 5 */
 6015 *len = 6;
02a011e9 6016 return ("clr %A0" CR_TAB
 6017 "lsl %B0" CR_TAB
 6018 "rol %A0" CR_TAB
 6019 "lsl %B0" CR_TAB
 6020 "rol %A0" CR_TAB
 6021 "clr %B0");
28f5cc4d 6022
 /* Shift by 15: only the MSB survives; rotate it through carry.  */
8a2a7305 6023 case 15:
1cb39658 6024 *len = 4;
02a011e9 6025 return ("clr %A0" CR_TAB
 6026 "lsl %B0" CR_TAB
 6027 "rol %A0" CR_TAB
 6028 "clr %B0");
a28e4651 6029 }
28f5cc4d 6030 len = t;
a28e4651 6031 }
 /* Generic fallback: loop over single-bit logical shifts.  */
02a011e9 6032 out_shift_with_cnt ("lsr %B0" CR_TAB
 6033 "ror %A0", insn, operands, len, 2);
a28e4651 6034 return "";
 6035}
6036
02d9a2c3 6037
 6038/* 24-bit logic shift right */
 6039
/* Output a logical right shift of the 24-bit (PSImode) value in OP[0]
   by OP[2]; OP[1] is the source register.  If PLEN == NULL, print the
   instructions; otherwise set *PLEN to the sequence length in words.
   Returns "".  */
 6040const char*
 6041avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
 6042{
 6043 int dest = REGNO (op[0]);
 6044 int src = REGNO (op[1]);
 6045
 6046 if (CONST_INT_P (op[2]))
 6047 {
 6048 if (plen)
 6049 *plen = 0;
 6050
 6051 switch (INTVAL (op[2]))
 6052 {
 /* Byte-granular shifts are plain register moves plus clears; copy
    order depends on DEST/SRC overlap.  */
 6053 case 8:
 6054 if (dest <= src)
 6055 return avr_asm_len ("mov %A0,%B1" CR_TAB
 6056 "mov %B0,%C1" CR_TAB
 6057 "clr %C0", op, plen, 3);
 6058 else
 6059 return avr_asm_len ("clr %C0" CR_TAB
 6060 "mov %B0,%C1" CR_TAB
 6061 "mov %A0,%B1", op, plen, 3);
 6062
 6063 case 16:
 6064 if (dest != src + 2)
 6065 avr_asm_len ("mov %A0,%C1", op, plen, 1);
 6066
 6067 return avr_asm_len ("clr %B0" CR_TAB
 6068 "clr %C0", op, plen, 2);
 6069
 6070 default:
 6071 if (INTVAL (op[2]) < 24)
 6072 break;
 6073
 6074 /* fall through */
 6075
 /* Shift by 23 (or more): result is just the former sign bit.  */
 6076 case 23:
 6077 return avr_asm_len ("clr %A0" CR_TAB
 6078 "sbrc %C0,7" CR_TAB
 6079 "inc %A0" CR_TAB
 6080 "clr %B0" CR_TAB
 6081 "clr %C0", op, plen, 5);
 6082 } /* switch */
 6083 }
 6084
 /* Generic fallback: loop over single-bit logical shifts.  */
 6085 out_shift_with_cnt ("lsr %C0" CR_TAB
 6086 "ror %B0" CR_TAB
 6087 "ror %A0", insn, op, plen, 3);
 6088 return "";
 6089}
6090
6091
a28e4651 6092/* 32bit logic shift right ((unsigned int)x >> i) */
 6093
/* Output a logical right shift of the 32-bit value in OPERANDS[0] by
   OPERANDS[2]; OPERANDS[1] is the source.  If LEN == NULL, print the
   instructions; otherwise store the length (in words) in *LEN.
   Returns "".  */
37ac04dc 6094const char *
206a5129 6095lshrsi3_out (rtx insn, rtx operands[], int *len)
a28e4651 6096{
 6097 if (GET_CODE (operands[2]) == CONST_INT)
 6098 {
 6099 int k;
1cb39658 6100 int *t = len;
 6101
a28e4651 6102 if (!len)
 6103 len = &k;
1cb39658 6104
a28e4651 6105 switch (INTVAL (operands[2]))
 6106 {
 /* Shifting by 32 or more clears the whole register.  */
5667001b 6107 default:
 6108 if (INTVAL (operands[2]) < 32)
 6109 break;
 6110
0aab73c2 6111 if (AVR_HAVE_MOVW)
02a011e9 6112 return *len = 3, ("clr %D0" CR_TAB
 6113 "clr %C0" CR_TAB
 6114 "movw %A0,%C0");
5667001b 6115 *len = 4;
02a011e9 6116 return ("clr %D0" CR_TAB
 6117 "clr %C0" CR_TAB
 6118 "clr %B0" CR_TAB
 6119 "clr %A0");
5667001b 6120
 /* Byte-granular shifts: register moves and clears only.  */
a28e4651 6121 case 8:
 6122 {
 6123 int reg0 = true_regnum (operands[0]);
 6124 int reg1 = true_regnum (operands[1]);
1cb39658 6125 *len = 4;
a28e4651 6126 if (reg0 <= reg1)
02a011e9 6127 return ("mov %A0,%B1" CR_TAB
 6128 "mov %B0,%C1" CR_TAB
 6129 "mov %C0,%D1" CR_TAB
 6130 "clr %D0");
a28e4651 6131 else
02a011e9 6132 return ("clr %D0" CR_TAB
 6133 "mov %C0,%D1" CR_TAB
 6134 "mov %B0,%C1" CR_TAB
 6135 "mov %A0,%B1");
a28e4651 6136 }
1cb39658 6137
a28e4651 6138 case 16:
 6139 {
 6140 int reg0 = true_regnum (operands[0]);
 6141 int reg1 = true_regnum (operands[1]);
ab3a6ef8 6142
 6143 if (reg0 == reg1 + 2)
02a011e9 6144 return *len = 2, ("clr %C0" CR_TAB
 6145 "clr %D0");
ab3a6ef8 6146 if (AVR_HAVE_MOVW)
02a011e9 6147 return *len = 3, ("movw %A0,%C1" CR_TAB
 6148 "clr %C0" CR_TAB
 6149 "clr %D0");
a28e4651 6150 else
02a011e9 6151 return *len = 4, ("mov %B0,%D1" CR_TAB
 6152 "mov %A0,%C1" CR_TAB
 6153 "clr %C0" CR_TAB
 6154 "clr %D0");
a28e4651 6155 }
1cb39658 6156
a28e4651 6157 case 24:
02a011e9 6158 return *len = 4, ("mov %A0,%D1" CR_TAB
 6159 "clr %B0" CR_TAB
 6160 "clr %C0" CR_TAB
 6161 "clr %D0");
28f5cc4d 6162
 /* Shift by 31: only the MSB survives as bit 0.  */
 6163 case 31:
 6164 *len = 6;
02a011e9 6165 return ("clr %A0" CR_TAB
 6166 "sbrc %D0,7" CR_TAB
 6167 "inc %A0" CR_TAB
 6168 "clr %B0" CR_TAB
 6169 "clr %C0" CR_TAB
 6170 "clr %D0");
a28e4651 6171 }
28f5cc4d 6172 len = t;
a28e4651 6173 }
 /* Generic fallback: loop over single-bit logical shifts.  */
02a011e9 6174 out_shift_with_cnt ("lsr %D0" CR_TAB
 6175 "ror %C0" CR_TAB
 6176 "ror %B0" CR_TAB
 6177 "ror %A0", insn, operands, len, 4);
a28e4651 6178 return "";
 6179}
6180
6be828c1 6181
b4ebb666 6182/* Output addition of register XOP[0] and compile time constant XOP[2].
6183 CODE == PLUS: perform addition by using ADD instructions or
6184 CODE == MINUS: perform addition by using SUB instructions:
6185
37bcc7b9 6186 XOP[0] = XOP[0] + XOP[2]
b4ebb666 6187
6188 Or perform addition/subtraction with register XOP[2] depending on CODE:
6189
6190 XOP[0] = XOP[0] +/- XOP[2]
37bcc7b9 6191
b4ebb666 6192 If PLEN == NULL, print assembler instructions to perform the operation;
6193 otherwise, set *PLEN to the length of the instruction sequence (in words)
6194 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
6195 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
6196
6197 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6198 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6199 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
6200 the subtrahend in the original insn, provided it is a compile time constant.
6201 In all other cases, SIGN is 0.
6202
6203 Return "". */
37bcc7b9 6204
 6205static void
b4ebb666 6206avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
 6207 enum rtx_code code_sat = UNKNOWN, int sign = 0)
37bcc7b9 6208{
 6209 /* MODE of the operation. */
 6210 enum machine_mode mode = GET_MODE (xop[0]);
 6211
017c5b98 6212 /* INT_MODE of the same size. */
 6213 enum machine_mode imode = int_mode_for_mode (mode);
 6214
37bcc7b9 6215 /* Number of bytes to operate on. */
 6216 int i, n_bytes = GET_MODE_SIZE (mode);
 6217
 6218 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
 6219 int clobber_val = -1;
 6220
 6221 /* op[0]: 8-bit destination register
 6222 op[1]: 8-bit const int
 6223 op[2]: 8-bit scratch register */
 6224 rtx op[3];
 6225
 6226 /* Started the operation? Before starting the operation we may skip
 6227 adding 0. This is no more true after the operation started because
 6228 carry must be taken into account. */
 6229 bool started = false;
 6230
 6231 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
 6232 rtx xval = xop[2];
 6233
b4ebb666 6234 /* Output a BRVC instruction. Only needed with saturation. */
 6235 bool out_brvc = true;
 6236
 6237 if (plen)
 6238 *plen = 0;
 6239
 /* Register-register case: straightforward byte-wise ADD/ADC
    resp. SUB/SBC chain.  */
 6240 if (REG_P (xop[2]))
 6241 {
 6242 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
 6243
 6244 for (i = 0; i < n_bytes; i++)
 6245 {
 6246 /* We operate byte-wise on the destination. */
 6247 op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
 6248 op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
 6249
 6250 if (i == 0)
 6251 avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
 6252 op, plen, 1);
 6253 else
 6254 avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
 6255 op, plen, 1);
 6256 }
 6257
 6258 if (reg_overlap_mentioned_p (xop[0], xop[2]))
 6259 {
 6260 gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
 6261
 6262 if (MINUS == code)
 6263 return;
 6264 }
 6265
 6266 goto saturate;
 6267 }
 6268
eac146f2 6269 /* Except in the case of ADIW with 16-bit register (see below)
 6270 addition does not set cc0 in a usable way. */
bcad9375 6271
 6272 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
 6273
017c5b98 6274 if (CONST_FIXED_P (xval))
 6275 xval = avr_to_int_mode (xval);
 6276
b4ebb666 6277 /* Adding/Subtracting zero is a no-op. */
 6278
 6279 if (xval == const0_rtx)
 6280 {
 6281 *pcc = CC_NONE;
 6282 return;
 6283 }
 6284
37bcc7b9 6285 if (MINUS == code)
017c5b98 6286 xval = simplify_unary_operation (NEG, imode, xval, imode);
37bcc7b9 6287
 6288 op[2] = xop[3];
 6289
b4ebb666 6290 if (SS_PLUS == code_sat && MINUS == code
 6291 && sign < 0
 6292 && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
 6293 & GET_MODE_MASK (QImode)))
 6294 {
 6295 /* We compute x + 0x80 by means of SUB instructions. We negated the
 6296 constant subtrahend above and are left with x - (-128) so that we
 6297 need something like SUBI r,128 which does not exist because SUBI sets
 6298 V according to the sign of the subtrahend. Notice the only case
 6299 where this must be done is when NEG overflowed in case [2s] because
 6300 the V computation needs the right sign of the subtrahend. */
 6301
 6302 rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
 6303
 6304 avr_asm_len ("subi %0,128" CR_TAB
 6305 "brmi 0f", &msb, plen, 2);
 6306 out_brvc = false;
 6307
 6308 goto saturate;
 6309 }
37bcc7b9 6310
 /* Register-constant case: byte-wise, exploiting ADIW/SBIW, DEC/INC
    and skipping zero bytes where the carry allows.  */
 6311 for (i = 0; i < n_bytes; i++)
 6312 {
 6313 /* We operate byte-wise on the destination. */
 6314 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
017c5b98 6315 rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
37bcc7b9 6316
 6317 /* 8-bit value to operate with this byte. */
 6318 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
 6319
 6320 /* Registers R16..R31 can operate with immediate. */
 6321 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
 6322
 6323 op[0] = reg8;
644ac9c5 6324 op[1] = gen_int_mode (val8, QImode);
bcad9375 6325
 6326 /* To get usable cc0 no low-bytes must have been skipped. */
 6327
 6328 if (i && !started)
 6329 *pcc = CC_CLOBBER;
37bcc7b9 6330
02d9a2c3 6331 if (!started
 6332 && i % 2 == 0
 6333 && i + 2 <= n_bytes
37bcc7b9 6334 && test_hard_reg_class (ADDW_REGS, reg8))
 6335 {
017c5b98 6336 rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
37bcc7b9 6337 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
 6338
 6339 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
 6340 i.e. operate word-wise. */
 6341
 6342 if (val16 < 64)
 6343 {
 6344 if (val16 != 0)
 6345 {
 6346 started = true;
 6347 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
 6348 op, plen, 1);
eac146f2 6349
 6350 if (n_bytes == 2 && PLUS == code)
b4ebb666 6351 *pcc = CC_SET_ZN;
37bcc7b9 6352 }
 6353
 6354 i++;
 6355 continue;
 6356 }
 6357 }
 6358
 6359 if (val8 == 0)
 6360 {
 6361 if (started)
 6362 avr_asm_len (code == PLUS
 6363 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
 6364 op, plen, 1);
 6365 continue;
 6366 }
eac146f2 6367 else if ((val8 == 1 || val8 == 0xff)
b4ebb666 6368 && UNKNOWN == code_sat
eac146f2 6369 && !started
 6370 && i == n_bytes - 1)
02d9a2c3 6371 {
eac146f2 6372 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
 6373 op, plen, 1);
 6374 break;
02d9a2c3 6375 }
37bcc7b9 6376
 6377 switch (code)
 6378 {
 6379 case PLUS:
 6380
b4ebb666 6381 gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
 6382
 6383 if (plen != NULL && UNKNOWN != code_sat)
 6384 {
 6385 /* This belongs to the x + 0x80 corner case. The code with
 6386 ADD instruction is not smaller, thus make this case
 6387 expensive so that the caller won't pick it. */
 6388
 6389 *plen += 10;
 6390 break;
 6391 }
37bcc7b9 6392
 6393 if (clobber_val != (int) val8)
 6394 avr_asm_len ("ldi %2,%1", op, plen, 1);
 6395 clobber_val = (int) val8;
 6396
 6397 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
 6398
 6399 break; /* PLUS */
 6400
 6401 case MINUS:
 6402
 6403 if (ld_reg_p)
 6404 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
 6405 else
 6406 {
 6407 gcc_assert (plen != NULL || REG_P (op[2]));
 6408
 6409 if (clobber_val != (int) val8)
 6410 avr_asm_len ("ldi %2,%1", op, plen, 1);
 6411 clobber_val = (int) val8;
 6412
 6413 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
 6414 }
 6415
 6416 break; /* MINUS */
 6417
 6418 default:
 6419 /* Unknown code */
 6420 gcc_unreachable();
 6421 }
 6422
 6423 started = true;
 6424
 6425 } /* for all sub-bytes */
bcad9375 6426
b4ebb666 6427 saturate:
37bcc7b9 6428
b4ebb666 6429 if (UNKNOWN == code_sat)
 6430 return;
37bcc7b9 6431
b4ebb666 6432 *pcc = (int) CC_CLOBBER;
37bcc7b9 6433
b4ebb666 6434 /* Vanilla addition/subtraction is done. We are left with saturation.
 6435
 6436 We have to compute A = A <op> B where A is a register and
 6437 B is a register or a non-zero compile time constant CONST.
 6438 A is register class "r" if unsigned && B is REG. Otherwise, A is in "d".
 6439 B stands for the original operand $2 in INSN. In the case of B = CONST
 6440 SIGN in { -1, 1 } is the sign of B. Otherwise, SIGN is 0.
 6441
 6442 CODE is the instruction flavor we use in the asm sequence to perform <op>.
 6443
 6444
 6445 unsigned
 6446 operation | code | sat if | b is | sat value | case
 6447 -----------------+-------+----------+--------------+-----------+-------
 6448 + as a + b | add | C == 1 | const, reg | u+ = 0xff | [1u]
 6449 + as a - (-b) | sub | C == 0 | const | u+ = 0xff | [2u]
 6450 - as a - b | sub | C == 1 | const, reg | u- = 0 | [3u]
 6451 - as a + (-b) | add | C == 0 | const | u- = 0 | [4u]
 6452
 6453
 6454 signed
 6455 operation | code | sat if | b is | sat value | case
 6456 -----------------+-------+----------+--------------+-----------+-------
 6457 + as a + b | add | V == 1 | const, reg | s+ | [1s]
 6458 + as a - (-b) | sub | V == 1 | const | s+ | [2s]
 6459 - as a - b | sub | V == 1 | const, reg | s- | [3s]
 6460 - as a + (-b) | add | V == 1 | const | s- | [4s]
 6461
 6462 s+ = b < 0 ? -0x80 : 0x7f
 6463 s- = b < 0 ? 0x7f : -0x80
 6464
 6465 The cases a - b actually perform a - (-(-b)) if B is CONST.
 6466 */
37bcc7b9 6467
 /* op[0] is the MSB of the destination; op[1] the byte below it (or
    NULL for 1-byte operations).  */
b4ebb666 6468 op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
 6469 op[1] = n_bytes > 1
 6470 ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
 6471 : NULL_RTX;
37bcc7b9 6472
b4ebb666 6473 bool need_copy = true;
 6474 int len_call = 1 + AVR_HAVE_JMP_CALL;
37bcc7b9 6475
b4ebb666 6476 switch (code_sat)
 6477 {
 6478 default:
 6479 gcc_unreachable();
 6480
 6481 case SS_PLUS:
 6482 case SS_MINUS:
b4ebb666 6483
 6484 if (out_brvc)
 6485 avr_asm_len ("brvc 0f", op, plen, 1);
 6486
 6487 if (reg_overlap_mentioned_p (xop[0], xop[2]))
 6488 {
 6489 /* [1s,reg] */
 6490
 6491 if (n_bytes == 1)
 6492 avr_asm_len ("ldi %0,0x7f" CR_TAB
 6493 "adc %0,__zero_reg__", op, plen, 2);
 6494 else
 6495 avr_asm_len ("ldi %0,0x7f" CR_TAB
 6496 "ldi %1,0xff" CR_TAB
 6497 "adc %1,__zero_reg__" CR_TAB
 6498 "adc %0,__zero_reg__", op, plen, 4);
 6499 }
 6500 else if (sign == 0 && PLUS == code)
 6501 {
 6502 /* [1s,reg] */
 6503
 6504 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
 6505
 6506 if (n_bytes == 1)
 6507 avr_asm_len ("ldi %0,0x80" CR_TAB
 6508 "sbrs %2,7" CR_TAB
 6509 "dec %0", op, plen, 3);
 6510 else
 6511 avr_asm_len ("ldi %0,0x80" CR_TAB
 6512 "cp %2,%0" CR_TAB
 6513 "sbc %1,%1" CR_TAB
 6514 "sbci %0,0", op, plen, 4);
 6515 }
 6516 else if (sign == 0 && MINUS == code)
 6517 {
 6518 /* [3s,reg] */
 6519
 6520 op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
 6521
 6522 if (n_bytes == 1)
 6523 avr_asm_len ("ldi %0,0x7f" CR_TAB
 6524 "sbrs %2,7" CR_TAB
 6525 "inc %0", op, plen, 3);
 6526 else
 6527 avr_asm_len ("ldi %0,0x7f" CR_TAB
 6528 "cp %0,%2" CR_TAB
 6529 "sbc %1,%1" CR_TAB
 6530 "sbci %0,-1", op, plen, 4);
 6531 }
 6532 else if ((sign < 0) ^ (SS_MINUS == code_sat))
 6533 {
 6534 /* [1s,const,B < 0] [2s,B < 0] */
 6535 /* [3s,const,B > 0] [4s,B > 0] */
 6536
 6537 if (n_bytes == 8)
 6538 {
 6539 avr_asm_len ("%~call __clr_8", op, plen, len_call);
 6540 need_copy = false;
 6541 }
 6542
 6543 avr_asm_len ("ldi %0,0x80", op, plen, 1);
 6544 if (n_bytes > 1 && need_copy)
 6545 avr_asm_len ("clr %1", op, plen, 1);
 6546 }
 6547 else if ((sign > 0) ^ (SS_MINUS == code_sat))
 6548 {
 6549 /* [1s,const,B > 0] [2s,B > 0] */
 6550 /* [3s,const,B < 0] [4s,B < 0] */
 6551
 6552 if (n_bytes == 8)
 6553 {
 6554 avr_asm_len ("sec" CR_TAB
 6555 "%~call __sbc_8", op, plen, 1 + len_call);
 6556 need_copy = false;
 6557 }
 6558
 6559 avr_asm_len ("ldi %0,0x7f", op, plen, 1);
 6560 if (n_bytes > 1 && need_copy)
 6561 avr_asm_len ("ldi %1,0xff", op, plen, 1);
 6562 }
 6563 else
 6564 gcc_unreachable();
 6565
 6566 break;
 6567
 6568 case US_PLUS:
 6569 /* [1u] : [2u] */
 6570
 6571 avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
 6572
 6573 if (n_bytes == 8)
 6574 {
 6575 if (MINUS == code)
 6576 avr_asm_len ("sec", op, plen, 1);
 6577 avr_asm_len ("%~call __sbc_8", op, plen, len_call);
 6578
 6579 need_copy = false;
 6580 }
 6581 else
 6582 {
 6583 if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
 6584 avr_asm_len ("sec" CR_TAB "sbc %0,%0", op, plen, 2);
 6585 else
 6586 avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
 6587 op, plen, 1);
 6588 }
 6589 break; /* US_PLUS */
 6590
 6591 case US_MINUS:
 6592 /* [4u] : [3u] */
 6593
 6594 avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
 6595
 6596 if (n_bytes == 8)
 6597 {
 6598 avr_asm_len ("%~call __clr_8", op, plen, len_call);
 6599 need_copy = false;
 6600 }
 6601 else
 6602 avr_asm_len ("clr %0", op, plen, 1);
 6603
 6604 break;
 6605 }
37bcc7b9 6606
b4ebb666 6607 /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
 6608 Now copy the right value to the LSBs. */
bcad9375 6609
b4ebb666 6610 if (need_copy && n_bytes > 1)
bcad9375 6611 {
b4ebb666 6612 if (US_MINUS == code_sat || US_PLUS == code_sat)
 6613 {
 6614 avr_asm_len ("mov %1,%0", op, plen, 1);
 6615
 6616 if (n_bytes > 2)
 6617 {
 6618 op[0] = xop[0];
 6619 if (AVR_HAVE_MOVW)
 6620 avr_asm_len ("movw %0,%1", op, plen, 1);
 6621 else
 6622 avr_asm_len ("mov %A0,%1" CR_TAB
 6623 "mov %B0,%1", op, plen, 2);
 6624 }
 6625 }
 6626 else if (n_bytes > 2)
 6627 {
 6628 op[0] = xop[0];
 6629 avr_asm_len ("mov %A0,%1" CR_TAB
 6630 "mov %B0,%1", op, plen, 2);
 6631 }
bcad9375 6632 }
37bcc7b9 6633
b4ebb666 6634 if (need_copy && n_bytes == 8)
 6635 {
 6636 if (AVR_HAVE_MOVW)
 6637 avr_asm_len ("movw %r0+2,%0" CR_TAB
 6638 "movw %r0+4,%0", xop, plen, 2);
 6639 else
 6640 avr_asm_len ("mov %r0+2,%0" CR_TAB
 6641 "mov %r0+3,%0" CR_TAB
 6642 "mov %r0+4,%0" CR_TAB
 6643 "mov %r0+5,%0", xop, plen, 4);
 6644 }
 6645
 /* Local label 0: target of the saturation-skip branches above.  */
 6646 avr_asm_len ("0:", op, plen, 0);
37bcc7b9 6647}
6648
6649
b4ebb666 6650/* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
 6651 is not a compile-time constant:
eac146f2 6652
b4ebb666 6653 XOP[0] = XOP[0] +/- XOP[2]
 6654
 6655 This is a helper for the function below. The only insns that need this
 6656 are additions/subtraction for pointer modes, i.e. HImode and PSImode.
 CODE selects PLUS or MINUS; PLEN/PCC report length resp. cc0 effect as
 in avr_out_plus_1. Returns "". */
 6657
 6658static const char*
 6659avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
eac146f2 6660{
b4ebb666 6661 enum machine_mode mode = GET_MODE (xop[0]);
eac146f2 6662
b4ebb666 6663 /* Only pointer modes want to add symbols. */
 6664
 6665 gcc_assert (mode == HImode || mode == PSImode);
eac146f2 6666
b4ebb666 6667 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
 6668
 /* Addition of a symbol is done as subtraction of its negative since
    there is no ADDI with immediate on AVR.  */
 6669 avr_asm_len (PLUS == code
 6670 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
 6671 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
 6672 xop, plen, -2);
 6673
4e07e83e 6674 if (PSImode == mode)
b4ebb666 6675 avr_asm_len (PLUS == code
4e07e83e 6676 ? "sbci %C0,hlo8(-(%2))"
b4ebb666 6677 : "sbci %C0,hlo8(%2)", xop, plen, 1);
 6678 return "";
eac146f2 6679}
6680
83921eda 6681
b4ebb666 6682/* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6683
6684 INSN is a single_set insn with a binary operation as SET_SRC that is
6685 one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6686
6687 XOP are the operands of INSN. In the case of 64-bit operations with
6688 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
6689 The non-saturating insns up to 32 bits may or may not supply a "d" class
6690 scratch as XOP[3].
6691
6692 If PLEN == NULL output the instructions.
6693 If PLEN != NULL set *PLEN to the length of the sequence in words.
017c5b98 6694
b4ebb666 6695 PCC is a pointer to store the instructions' effect on cc0.
6696 PCC may be NULL.
017c5b98 6697
b4ebb666 6698 PLEN and PCC default to NULL.
6699
6700 Return "" */
017c5b98 6701
6702const char*
b4ebb666 6703avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc)
017c5b98 6704{
b4ebb666 6705 int cc_plus, cc_minus, cc_dummy;
6706 int len_plus, len_minus;
017c5b98 6707 rtx op[4];
b4ebb666 6708 rtx xdest = SET_DEST (single_set (insn));
6709 enum machine_mode mode = GET_MODE (xdest);
6710 enum machine_mode imode = int_mode_for_mode (mode);
6711 int n_bytes = GET_MODE_SIZE (mode);
6712 enum rtx_code code_sat = GET_CODE (SET_SRC (single_set (insn)));
6713 enum rtx_code code
6714 = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
6715 ? PLUS : MINUS);
017c5b98 6716
b4ebb666 6717 if (!pcc)
6718 pcc = &cc_dummy;
017c5b98 6719
b4ebb666 6720 /* PLUS and MINUS don't saturate: Use modular wrap-around. */
017c5b98 6721
b4ebb666 6722 if (PLUS == code_sat || MINUS == code_sat)
6723 code_sat = UNKNOWN;
017c5b98 6724
b4ebb666 6725 if (n_bytes <= 4 && REG_P (xop[2]))
6726 {
6727 avr_out_plus_1 (xop, plen, code, pcc, code_sat);
6728 return "";
6729 }
017c5b98 6730
b4ebb666 6731 if (8 == n_bytes)
6732 {
6733 op[0] = gen_rtx_REG (DImode, ACC_A);
6734 op[1] = gen_rtx_REG (DImode, ACC_A);
6735 op[2] = avr_to_int_mode (xop[0]);
6736 }
6737 else
6738 {
6739 if (!REG_P (xop[2])
6740 && !CONST_INT_P (xop[2])
6741 && !CONST_FIXED_P (xop[2]))
6742 {
6743 return avr_out_plus_symbol (xop, code, plen, pcc);
6744 }
6745
6746 op[0] = avr_to_int_mode (xop[0]);
6747 op[1] = avr_to_int_mode (xop[1]);
6748 op[2] = avr_to_int_mode (xop[2]);
6749 }
017c5b98 6750
b4ebb666 6751 /* Saturations and 64-bit operations don't have a clobber operand.
6752 For the other cases, the caller will provide a proper XOP[3]. */
6753
6754 op[3] = PARALLEL == GET_CODE (PATTERN (insn)) ? xop[3] : NULL_RTX;
83921eda 6755
b4ebb666 6756 /* Saturation will need the sign of the original operand. */
83921eda 6757
b4ebb666 6758 rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
6759 int sign = INTVAL (xmsb) < 0 ? -1 : 1;
83921eda 6760
b4ebb666 6761 /* If we subtract and the subtrahend is a constant, then negate it
6762 so that avr_out_plus_1 can be used. */
83921eda 6763
b4ebb666 6764 if (MINUS == code)
6765 op[2] = simplify_unary_operation (NEG, imode, op[2], imode);
83921eda 6766
b4ebb666 6767 /* Work out the shortest sequence. */
017c5b98 6768
b4ebb666 6769 avr_out_plus_1 (op, &len_minus, MINUS, &cc_plus, code_sat, sign);
6770 avr_out_plus_1 (op, &len_plus, PLUS, &cc_minus, code_sat, sign);
017c5b98 6771
b4ebb666 6772 if (plen)
6773 {
6774 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6775 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6776 }
6777 else if (len_minus <= len_plus)
6778 avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign);
6779 else
6780 avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign);
017c5b98 6781
b4ebb666 6782 return "";
017c5b98 6783}
6784
6785
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
   time constant XOP[2]:

      XOP[0] = XOP[0] <op> XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform the
   operation; otherwise, set *PLEN to the length of the instruction sequence
   (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
   register or SCRATCH if no clobber register is needed for the operation.
   INSN is the insn being output; its single_set supplies the rtx code of
   the operation (IOR, AND or XOR).  */

const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  Tracked across bytes so
     that SET / CLT is only emitted when the T-flag actually changes.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[2] or -1 if unknown.
     Tracked so that an LDI is only emitted when the value changes.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant. */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate. */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* Exactly one bit to set: use T-flag + BLD, which works on
                 any register.  Skip SET if the T-flag is already 1.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* OR with 0xff sets all bits.  Reuse a register already
                 holding 0xff if we have one, else produce 0xff with
                 CLR + DEC (works on non-LD registers too) and remember
                 it in op[3] for later bytes.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              /* General case: load the constant into the clobber register
                 (unless it already holds it) and OR register-register.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Exactly one bit to clear: clear the T-flag and BLD it
                 into the single zero position of the mask.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* XOR with 0x80 flips only the MSB; SUBI 0x80 computes
               x - 0x80 (mod 256) which has the same effect.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
6935
915f904b 6936
6937/* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6938 PLEN != NULL: Set *PLEN to the length of that sequence.
6939 Return "". */
6940
6941const char*
6942avr_out_addto_sp (rtx *op, int *plen)
6943{
6944 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6945 int addend = INTVAL (op[0]);
6946
6947 if (plen)
6948 *plen = 0;
6949
6950 if (addend < 0)
6951 {
6952 if (flag_verbose_asm || flag_print_asm_name)
6953 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6954
6955 while (addend <= -pc_len)
6956 {
6957 addend += pc_len;
6958 avr_asm_len ("rcall .", op, plen, 1);
6959 }
6960
6961 while (addend++ < 0)
6962 avr_asm_len ("push __zero_reg__", op, plen, 1);
6963 }
6964 else if (addend > 0)
6965 {
6966 if (flag_verbose_asm || flag_print_asm_name)
6967 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6968
6969 while (addend-- > 0)
6970 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6971 }
6972
6973 return "";
6974}
6975
6976
/* Outputs instructions needed for fixed point type conversion.
   This includes converting between any fixed point type, as well
   as converting to any integer type.  Conversion between integer
   types is not supported.

   Converting signed fractional types requires a bit shift if converting
   to or from any unsigned fractional type because the decimal place is
   shifted by 1 bit.  When the destination is a signed fractional, the sign
   is stored in either the carry or T bit.

   INSN is the conversion insn; OPERANDS[0] / OPERANDS[1] are the
   destination / source hard registers.  INTSIGNED tells whether an
   integer-mode operand is to be treated as signed.  If PLEN == NULL
   print the instructions, otherwise set *PLEN to their length in words.
   Returns "".  */

const char*
avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)
{
  size_t i;
  rtx xop[6];
  RTX_CODE shift = UNKNOWN;
  bool sign_in_carry = false;
  bool msb_in_carry = false;
  bool lsb_in_carry = false;
  const char *code_ashift = "lsl %0";


#define MAY_CLOBBER(RR)                                               \
  /* Shorthand used below: true iff hard register RR may be clobbered \
     (it is a to-be-overwritten sign byte, or it is dead after INSN   \
     and not part of the destination).  */                            \
  ((sign_bytes                                                        \
    && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
   || (reg_unused_after (insn, all_regs_rtx[RR])                      \
       && !IN_RANGE (RR, dest.regno, dest.regno_msb)))

  struct
  {
    /* bytes : Length of operand in bytes.
       ibyte : Length of integral part in bytes.
       fbyte, fbit : Length of fractional part in bytes, bits.  */

    bool sbit;
    unsigned fbit, bytes, ibyte, fbyte;
    unsigned regno, regno_msb;
  } dest, src, *val[2] = { &dest, &src };

  if (plen)
    *plen = 0;

  /* Step 0:  Determine information on source and destination operand we
     ======   will need in the remainder.  */

  for (i = 0; i < sizeof (val) / sizeof (*val); i++)
    {
      enum machine_mode mode;

      xop[i] = operands[i];

      mode = GET_MODE (xop[i]);

      val[i]->bytes = GET_MODE_SIZE (mode);
      val[i]->regno = REGNO (xop[i]);
      val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;

      if (SCALAR_INT_MODE_P (mode))
        {
          val[i]->sbit = intsigned;
          val[i]->fbit = 0;
        }
      else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
        {
          val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
          val[i]->fbit = GET_MODE_FBIT (mode);
        }
      else
        fatal_insn ("unsupported fixed-point conversion", insn);

      val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
      val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
    }

  // Byte offset of the decimal point taking into account different place
  // of the decimal point in input and output and different register numbers
  // of input and output.
  int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;

  // Number of destination bytes that will come from sign / zero extension.
  int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);

  // Number of bytes at the low end to be filled with zeros.
  int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);

  // Do we have a 16-bit register that is cleared?  (Used to replace two
  // CLRs by one MOVW where available.)
  rtx clrw = NULL_RTX;

  bool sign_extend = src.sbit && sign_bytes;

  // A signed<->unsigned fractional conversion moves the decimal point
  // by one bit: shift left, shift right, or no shift at all.
  if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
    shift = ASHIFT;
  else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
    shift = ASHIFTRT;
  else if (dest.fbit % 8 == src.fbit % 8)
    shift = UNKNOWN;
  else
    gcc_unreachable();

  /* Step 1:  Clear bytes at the low end and copy payload bits from source
     ======   to destination.  */

  int step = offset < 0 ? 1 : -1;
  unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;

  // We cleared at least that number of registers so far.
  int clr_n = 0;

  for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
    {
      // Next regno of destination is needed for MOVW
      unsigned d1 = d0 + step;

      // Current and next regno of source
      unsigned s0 = d0 - offset;
      unsigned s1 = s0 + step;

      // Must current resp. next regno be CLRed?  This applies to the low
      // bytes of the destination that have no associated source bytes.
      bool clr0 = s0 < src.regno;
      bool clr1 = s1 < src.regno && d1 >= dest.regno;

      // First gather what code to emit (if any) and additional step to
      // apply if a MOVW is in use.  xop[2] is destination rtx and xop[3]
      // is the source rtx for the current loop iteration.
      const char *code = NULL;
      int stepw = 0;

      if (clr0)
        {
          if (AVR_HAVE_MOVW && clr1 && clrw)
            {
              // Clear two bytes at once by copying an already-zeroed pair.
              xop[2] = all_regs_rtx[d0 & ~1];
              xop[3] = clrw;
              code = "movw %2,%3";
              stepw = step;
            }
          else
            {
              xop[2] = all_regs_rtx[d0];
              code = "clr %2";

              if (++clr_n >= 2
                  && !clrw
                  && d0 % 2 == (step > 0))
                {
                  // Remember this even-aligned zeroed pair for MOVW reuse.
                  clrw = all_regs_rtx[d0 & ~1];
                }
            }
        }
      else if (offset && s0 <= src.regno_msb)
        {
          // Use MOVW when the pair alignment of both source and
          // destination allows it.
          int movw = AVR_HAVE_MOVW && offset % 2 == 0
            && d0 % 2 == (offset > 0)
            && d1 <= dest.regno_msb && d1 >= dest.regno
            && s1 <= src.regno_msb && s1 >= src.regno;

          xop[2] = all_regs_rtx[d0 & ~movw];
          xop[3] = all_regs_rtx[s0 & ~movw];
          code = movw ? "movw %2,%3" : "mov %2,%3";
          stepw = step * movw;
        }

      if (code)
        {
          if (sign_extend && shift != ASHIFT && !sign_in_carry
              && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
            {
              /* We are going to override the sign bit.  If we sign-extend,
                 store the sign in the Carry flag.  This is not needed if
                 the remaining code performs an ASHIFT because the ASHIFT
                 will set Carry without extra instruction.  */

              avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
              sign_in_carry = true;
            }

          unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;

          if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
              && src.ibyte > dest.ibyte
              && (d0 == src_msb || d0 + stepw == src_msb))
            {
              /* We are going to override the MSB.  If we shift right,
                 store the MSB in the Carry flag.  This is only needed if
                 we don't sign-extend because with sign-extension the MSB
                 (the sign) will be produced by the sign extension.  */

              avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
              msb_in_carry = true;
            }

          unsigned src_lsb = dest.regno - offset -1;

          if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
              && (d0 == src_lsb || d0 + stepw == src_lsb))
            {
              /* We are going to override the new LSB; store it into carry.  */

              avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
              code_ashift = "rol %0";
              lsb_in_carry = true;
            }

          avr_asm_len (code, xop, plen, 1);
          d0 += stepw;
        }
    }

  /* Step 2:  Shift destination left by 1 bit position.  This might be needed
     ======   for signed input and unsigned output.  */

  if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
    {
      unsigned s0 = dest.regno - offset -1;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        // Don't clobber a live register; shift a copy just to get Carry.
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      code_ashift = "rol %0";
      lsb_in_carry = true;
    }

  if (shift == ASHIFT)
    {
      for (d0 = dest.regno + zero_bytes;
           d0 <= dest.regno_msb - sign_bytes; d0++)
        {
          avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
          code_ashift = "rol %0";
        }

      lsb_in_carry = false;
      sign_in_carry = true;
    }

  /* Step 4a:  Store MSB in carry if we don't already have it or will produce
     =======   it in sign-extension below.  */

  if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
      && src.ibyte > dest.ibyte)
    {
      unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      msb_in_carry = true;
    }

  /* Step 3:  Sign-extend or zero-extend the destination as needed.
     ======   */

  if (sign_extend && !sign_in_carry)
    {
      unsigned s0 = src.regno_msb;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      sign_in_carry = true;
    }

  // At most one of the three flags-in-carry trackers may be active.
  gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);

  unsigned copies = 0;
  rtx movw = sign_extend ? NULL_RTX : clrw;

  // Fill the sign bytes: SBC Rd,Rd replicates Carry (the sign) into a
  // whole byte; CLR zero-extends.  Reuse MOVW on even pairs when possible.
  for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
    {
      if (AVR_HAVE_MOVW && movw
          && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
        {
          xop[2] = all_regs_rtx[d0];
          xop[3] = movw;
          avr_asm_len ("movw %2,%3", xop, plen, 1);
          d0++;
        }
      else
        {
          avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
                       &all_regs_rtx[d0], plen, 1);

          if (++copies >= 2 && !movw && d0 % 2 == 1)
            movw = all_regs_rtx[d0-1];
        }
    } /* for */


  /* Step 4:  Right shift the destination.  This might be needed for
     ======   conversions from unsigned to signed.  */

  if (shift == ASHIFTRT)
    {
      const char *code_ashiftrt = "lsr %0";

      if (sign_extend || msb_in_carry)
        code_ashiftrt = "ror %0";

      if (src.sbit && src.ibyte == dest.ibyte)
        code_ashiftrt = "asr %0";

      for (d0 = dest.regno_msb - sign_bytes;
           d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
        {
          avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
          code_ashiftrt = "ror %0";
        }
    }

#undef MAY_CLOBBER

  return "";
}
7301
7302
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.

   OPERANDS[0] is the destination, OPERANDS[1] the source, OPERANDS[2] the
   rotate count in bits (a CONST_INT), OPERANDS[3] a scratch register or
   SCRATCH.  Always returns true.  */

bool
avr_rotate_bytes (rtx operands[])
{
  int i, j;
  enum machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  enum machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* HImode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          /* Classic three-XOR in-place swap of the two bytes.  */
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  */
      struct {
        rtx src, dst;
        int links;
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
        for (i = 0; i < size; i++)
          if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
            for (j = 0; j < size; j++)
              if (j != i && rtx_equal_p (move[j].src, move[i].dst))
                {
                  /* The dst of move i is the src of move j.  */
                  move[i].links = j;
                  break;
                }

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst = scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;
              size=size+1;
            }
        }
      while (blocked != -1);
    }
  return true;
}
7450
017c5b98 7451
/* Modifies the length assigned to instruction INSN.
   LEN is the initially computed length of the insn.  Returns the
   adjusted length, obtained by dispatching to the output function
   named by the insn's "adjust_len" attribute with PLEN != NULL.  */

int
adjust_insn_length (rtx insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (-1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each output function stores the
     sequence length into LEN because it is called with PLEN != NULL.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
7543
674a8f0b 7544/* Return nonzero if register REG dead after INSN. */
a28e4651 7545
7546int
206a5129 7547reg_unused_after (rtx insn, rtx reg)
a28e4651 7548{
e511e253 7549 return (dead_or_set_p (insn, reg)
a28e4651 7550 || (REG_P(reg) && _reg_unused_after (insn, reg)));
7551}
7552
e3e08e7f 7553/* Return nonzero if REG is not used after INSN.
a28e4651 7554 We assume REG is a reload reg, and therefore does
7555 not live past labels. It may live past calls or jumps though. */
7556
7557int
206a5129 7558_reg_unused_after (rtx insn, rtx reg)
a28e4651 7559{
7560 enum rtx_code code;
7561 rtx set;
7562
7563 /* If the reg is set by this instruction, then it is safe for our
7564 case. Disregard the case where this is a store to memory, since
7565 we are checking a register used in the store address. */
7566 set = single_set (insn);
7567 if (set && GET_CODE (SET_DEST (set)) != MEM
7568 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7569 return 1;
7570
7571 while ((insn = NEXT_INSN (insn)))
7572 {
6720e96c 7573 rtx set;
a28e4651 7574 code = GET_CODE (insn);
7575
7576#if 0
7577 /* If this is a label that existed before reload, then the register
7578 if dead here. However, if this is a label added by reorg, then
7579 the register may still be live here. We can't tell the difference,
7580 so we just ignore labels completely. */
7581 if (code == CODE_LABEL)
7582 return 1;
7583 /* else */
7584#endif
7585
6720e96c 7586 if (!INSN_P (insn))
7587 continue;
7588
a28e4651 7589 if (code == JUMP_INSN)
7590 return 0;
7591
7592 /* If this is a sequence, we must handle them all at once.
7593 We could have for instance a call that sets the target register,
ebb11c7b 7594 and an insn in a delay slot that uses the register. In this case,
a28e4651 7595 we must return 0. */
7596 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
7597 {
7598 int i;
7599 int retval = 0;
7600
7601 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7602 {
7603 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
7604 rtx set = single_set (this_insn);
7605
7606 if (GET_CODE (this_insn) == CALL_INSN)
7607 code = CALL_INSN;
7608 else if (GET_CODE (this_insn) == JUMP_INSN)
7609 {
7610 if (INSN_ANNULLED_BRANCH_P (this_insn))
7611 return 0;
7612 code = JUMP_INSN;
7613 }
7614
7615 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7616 return 0;
7617 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7618 {
7619 if (GET_CODE (SET_DEST (set)) != MEM)
7620 retval = 1;
7621 else
7622 return 0;
7623 }
7624 if (set == 0
7625 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
7626 return 0;
7627 }
7628 if (retval == 1)
7629 return 1;
7630 else if (code == JUMP_INSN)
7631 return 0;
7632 }
7633
7634 if (code == CALL_INSN)
7635 {
7636 rtx tem;
7637 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
7638 if (GET_CODE (XEXP (tem, 0)) == USE
7639 && REG_P (XEXP (XEXP (tem, 0), 0))
7640 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
7641 return 0;
7642 if (call_used_regs[REGNO (reg)])
7643 return 1;
7644 }
7645
6720e96c 7646 set = single_set (insn);
a28e4651 7647
6720e96c 7648 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
7649 return 0;
7650 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
7651 return GET_CODE (SET_DEST (set)) != MEM;
7652 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
7653 return 0;
a28e4651 7654 }
7655 return 1;
7656}
7657
5bd39e93 7658
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels.
   X is the value, SIZE its size in bytes, ALIGNED_P nonzero if the
   location is aligned.  Return true if this hook handled the output,
   false to fall back to the default.  */

static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      /* Code addresses: emit as word program-memory address via gs().  */
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* 24-bit value: emit the three bytes individually.
         This needs binutils 2.23+, see PR binutils/13503  */

      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      return true;
    }
  else if (CONST_FIXED_P (x))
    {
      unsigned n;

      /* varasm fails to handle big fixed modes that don't fit in hwi;
         emit the constant byte by byte instead.  */

      for (n = 0; n < size; n++)
        {
          rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
          default_assemble_integer (xn, 1, aligned_p);
        }

      return true;
    }

  return default_assemble_integer (x, size, aligned_p);
}
7709
5bd39e93 7710
a28e4651 7711/* Return value is nonzero if pseudos that have been
7712 assigned to registers of class CLASS would likely be spilled
7713 because registers of CLASS are needed for spill registers. */
7714
cb3959cc 7715static bool
7716avr_class_likely_spilled_p (reg_class_t c)
a28e4651 7717{
7718 return (c != ALL_REGS && c != ADDW_REGS);
7719}
7720
e3c541f0 7721/* Valid attributes:
a28e4651 7722 progmem - put data to program memory;
7723 signal - make a function to be hardware interrupt. After function
0af74aa0 7724 prologue interrupts are disabled;
a28e4651 7725 interrupt - make a function to be hardware interrupt. After function
0af74aa0 7726 prologue interrupts are enabled;
e3c541f0 7727 naked - don't generate function prologue/epilogue and `ret' command.
a28e4651 7728
e3c541f0 7729 Only `progmem' attribute valid for type. */
7730
e3c541f0 7731/* Handle a "progmem" attribute; arguments as in
7732 struct attribute_spec.handler. */
7733static tree
206a5129 7734avr_handle_progmem_attribute (tree *node, tree name,
7735 tree args ATTRIBUTE_UNUSED,
7736 int flags ATTRIBUTE_UNUSED,
7737 bool *no_add_attrs)
e3c541f0 7738{
7739 if (DECL_P (*node))
a28e4651 7740 {
68e7ca0a 7741 if (TREE_CODE (*node) == TYPE_DECL)
7742 {
7743 /* This is really a decl attribute, not a type attribute,
7744 but try to handle it for GCC 3.0 backwards compatibility. */
7745
7746 tree type = TREE_TYPE (*node);
7747 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
7748 tree newtype = build_type_attribute_variant (type, attr);
7749
7750 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
7751 TREE_TYPE (*node) = newtype;
7752 *no_add_attrs = true;
7753 }
7754 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
a28e4651 7755 {
b44e24e6 7756 *no_add_attrs = false;
e3c541f0 7757 }
7758 else
7759 {
67a779df 7760 warning (OPT_Wattributes, "%qE attribute ignored",
7761 name);
e3c541f0 7762 *no_add_attrs = true;
a28e4651 7763 }
a28e4651 7764 }
e3c541f0 7765
7766 return NULL_TREE;
a28e4651 7767}
7768
e3c541f0 7769/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
7770 struct attribute_spec.handler. */
206a5129 7771
e3c541f0 7772static tree
206a5129 7773avr_handle_fndecl_attribute (tree *node, tree name,
7774 tree args ATTRIBUTE_UNUSED,
7775 int flags ATTRIBUTE_UNUSED,
7776 bool *no_add_attrs)
e3c541f0 7777{
7778 if (TREE_CODE (*node) != FUNCTION_DECL)
7779 {
67a779df 7780 warning (OPT_Wattributes, "%qE attribute only applies to functions",
7781 name);
e3c541f0 7782 *no_add_attrs = true;
7783 }
7784
7785 return NULL_TREE;
7786}
a28e4651 7787
f86b386b 7788static tree
7789avr_handle_fntype_attribute (tree *node, tree name,
7790 tree args ATTRIBUTE_UNUSED,
7791 int flags ATTRIBUTE_UNUSED,
7792 bool *no_add_attrs)
7793{
7794 if (TREE_CODE (*node) != FUNCTION_TYPE)
7795 {
67a779df 7796 warning (OPT_Wattributes, "%qE attribute only applies to functions",
7797 name);
f86b386b 7798 *no_add_attrs = true;
7799 }
7800
7801 return NULL_TREE;
7802}
7803
a45076aa 7804
/* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */

  /* "progmem" may appear on decls or types; the handler sorts out both.  */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  /* "signal" and "interrupt" require a FUNCTION_DECL (decl_req).  */
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  /* The remaining attributes must sit on a FUNCTION_TYPE
     (type_req and fn_type_req set).  */
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  /* Table terminator.  */
  { NULL,        0, 0, false, false, false, NULL, false }
};
7825
4202ef11 7826
/* Look if DECL shall be placed in program memory space by
   means of attribute `progmem' or some address-space qualifier.
   Return non-zero if DECL is data that must end up in Flash and
   zero if the data lives in RAM (.bss, .data, .rodata, ...).

   Return 2 if DECL is located in 24-bit flash address-space
   Return 1 if DECL is located in 16-bit flash address-space
   Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
   Return 0 otherwise  */

int
avr_progmem_p (tree decl, tree attributes)
{
  tree a;

  /* Only variables can live in flash.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return 0;

  /* 24-bit address space (__memx) takes precedence.  */
  if (avr_decl_memx_p (decl))
    return 2;

  /* 16-bit flash address space (__flash etc.).  */
  if (avr_decl_flash_p (decl))
    return 1;

  /* The attribute may be attached to the decl itself ...  */
  if (NULL_TREE
      != lookup_attribute ("progmem", attributes))
    return -1;

  a = decl;

  /* ... or to its type.  Strip array dimensions so that the element
     type's attributes are inspected.  */
  do
    a = TREE_TYPE(a);
  while (TREE_CODE (a) == ARRAY_TYPE);

  if (a == error_mark_node)
    return 0;

  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
    return -1;

  return 0;
}
7869
4202ef11 7870
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  /* Arrays (of arrays ...) of pointers: inspect the element type.  */
  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
        target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.
         Refuse address spaces outside the device's flash.  */

      as = TYPE_ADDR_SPACE (target);

      if (!ADDR_SPACE_GENERIC_P (as)
          && (!TYPE_READONLY (target)
              || avr_addrspace[as].segment >= avr_current_device->n_flash))
        {
          /* Offending address space found: report it to the caller.  */
          return as;
        }

      /* Scan pointer's target type.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
7916
7917
/* Sanity check NODE so that all pointers targeting non-generic address spaces
   go along with CONST qualifier.  Writing to these address spaces should
   be detected and complained about as early as possible.

   Return true if NODE is fine, false if an error has been issued.  */

static bool
avr_pgm_check_var_decl (tree node)
{
  /* Set to a short description of the offending entity as soon as a
     problematic pointer is found; drives the diagnostics below.  */
  const char *reason = NULL;

  addr_space_t as = ADDR_SPACE_GENERIC;

  /* The logic below relies on ADDR_SPACE_GENERIC being 0.  */
  gcc_assert (as == 0);

  if (avr_log.progmem)
    avr_edump ("%?: %t\n", node);

  switch (TREE_CODE (node))
    {
    default:
      break;

    /* The comma expressions below store the scan result in AS and test
       it for non-zero in one go.  */

    case VAR_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "variable";
      break;

    case PARM_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "function parameter";
      break;

    case FIELD_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "structure field";
      break;

    case FUNCTION_DECL:
      /* For a function decl, check the return type.  */
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
          as)
        reason = "return type of function";
      break;

    case POINTER_TYPE:
      if (as = avr_nonconst_pointer_addrspace (node), as)
        reason = "pointer";
      break;
    }

  if (reason)
    {
      /* Distinguish "address space beyond the device's flash" from
         "missing const qualifier".  */
      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          if (TYPE_P (node))
            error ("%qT uses address space %qs beyond flash of %qs",
                   node, avr_addrspace[as].name, avr_current_device->name);
          else
            error ("%s %q+D uses address space %qs beyond flash of %qs",
                   reason, node, avr_addrspace[as].name,
                   avr_current_device->name);
        }
      else
        {
          if (TYPE_P (node))
            error ("pointer targeting address space %qs must be const in %qT",
                   avr_addrspace[as].name, node);
          else
            error ("pointer targeting address space %qs must be const"
                   " in %s %q+D",
                   avr_addrspace[as].name, reason, node);
        }
    }

  return reason == NULL;
}
7992
7993
/* Add the section attribute if the variable is in progmem.
   Implements the `TARGET_INSERT_ATTRIBUTES' hook (registered elsewhere
   in this file).  Also performs early diagnostics for progmem data.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  /* Diagnose non-const pointers into non-generic address spaces.  */
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      /* Reject address spaces the device's flash cannot reach.  */
      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          error ("variable %q+D located in address space %qs"
                 " beyond flash of %qs",
                 node, avr_addrspace[as].name, avr_current_device->name);
        }

      /* Data destined for a read-only section must be const.  */
      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          const char *reason = "__attribute__((progmem))";

          /* Name the construct that put the data into flash.  */
          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
a28e4651 8043
7c2339f8 8044
8045/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
8046/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
8047/* Track need of __do_clear_bss. */
8048
8049void
a45076aa 8050avr_asm_output_aligned_decl_common (FILE * stream,
8051 const_tree decl ATTRIBUTE_UNUSED,
8052 const char *name,
8053 unsigned HOST_WIDE_INT size,
7c2339f8 8054 unsigned int align, bool local_p)
8055{
5be63f82 8056 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
8057 There is no need to trigger __do_clear_bss code for them. */
8058
8059 if (!STR_PREFIX_P (name, "__gnu_lto"))
8060 avr_need_clear_bss_p = true;
7c2339f8 8061
8062 if (local_p)
21440ca3 8063 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
8064 else
8065 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7c2339f8 8066}
8067
8068
/* Unnamed section callback for data_section
   to track need of __do_copy_data.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  /* Something is emitted into a data-like section, so the
     __do_copy_data startup code will be needed.  */
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
8080
8081
/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  /* Something is emitted into .bss, so the __do_clear_bss startup
     code will be needed.  */
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
8093
8094
/* Unnamed section callback for progmem*.data sections.  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  /* DATA is the section name string, set up in avr_asm_init_sections.  */
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
           (const char*) data);
}
8103
8104
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  unsigned int n;

  /* Set up a section for jump tables.  Alignment is handled by
     ASM_OUTPUT_BEFORE_CASE_LABEL.  */

  if (AVR_HAVE_JMP_CALL)
    {
      /* Allocatable only ("a"), no SECTION_CODE flag.  */
      progmem_swtable_section
        = get_unnamed_section (0, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"a\",@progbits");
    }
  else
    {
      /* Allocatable and executable ("ax") with SECTION_CODE; mirrors
         the flag handling in avr_asm_function_rodata_section.  */
      progmem_swtable_section
        = get_unnamed_section (SECTION_CODE, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"ax\",@progbits");
    }

  /* One progmem section per flash segment; the callback prints the
     .section directive using the prefix as the section name.  */
  for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
    {
      progmem_section[n]
        = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                               progmem_section_prefix[n]);
    }

  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  */

  /* NOTE(review): .rodata is routed to the copy-data callback too —
     presumably because read-only data is copied to RAM on AVR; confirm
     against the linker script.  */
  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
8144
8145
/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */
  {
    int fdata = flag_data_sections;

    /* Temporarily let -ffunction-sections drive data sectioning so the
       default hook yields a per-function section when requested.  */
    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      /* Pairs of { old-prefix, replacement-prefix }; the loop below
         steps over the table two entries at a time.  */
      static const char* const prefix[] =
        {
          ".rodata", ".progmem.gcc_sw_table",
          ".gnu.linkonce.r.", ".gnu.linkonce.t."
        };

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
        {
          const char * old_prefix = prefix[i];
          const char * new_prefix = prefix[i+1];
          const char * name = frodata->named.name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *rname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              /* Set SECTION_CODE only on devices without JMP/CALL,
                 matching the section flags in avr_asm_init_sections.  */
              flags &= ~SECTION_CODE;
              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

              return get_section (rname, flags, frodata->named.decl);
            }
        }
    }

  /* Fall back to the shared jump-table section.  */
  return progmem_swtable_section;
}
8203
8204
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      /* The address space is encoded in the section flags as a multiple
         of SECTION_MACH_DEP; see avr_section_type_flags.  */
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      int segment = avr_addrspace[as].segment;
      const char *old_prefix = ".rodata";
      const char *new_prefix = progmem_section_prefix[segment];

      /* Rename .rodata* into the progmem section of this flash segment.  */
      if (STR_PREFIX_P (name, old_prefix))
        {
          const char *sname = ACONCAT ((new_prefix,
                                        name + strlen (old_prefix), NULL));
          default_elf_asm_named_section (sname, flags, decl);
          return;
        }

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  /* Output into data-like / bss-like sections means the respective
     libgcc startup code will be needed.  */
  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
                            || STR_PREFIX_P (name, ".rodata")
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
8240
/* Implement `TARGET_SECTION_TYPE_FLAGS'.
   Compute section flags for DECL going into section NAME.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (STR_PREFIX_P (name, ".noinit"))
    {
      /* Only variables without an initializer may go into .noinit.  */
      if (decl && TREE_CODE (decl) == VAR_DECL
          && DECL_INITIAL (decl) == NULL_TREE)
        flags |= SECTION_BSS;  /* @nobits */
      else
        warning (0, "only uninitialized variables can be placed in the "
                 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
         Set section flags as if it was in __flash to get the right
         section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      /* Encode the address space in the machine-dependent flag bits;
         decoded again in avr_asm_named_section.  */
      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}
8275
7c2339f8 8276
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && NULL_TREE == DECL_INITIAL (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      warning (OPT_Wuninitialized,
               "uninitialized variable %q+D put into "
               "program memory area", decl);
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* Tag the SYMBOL_REF of data decls with their address space.  */
  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
    }
}
8316
8317
/* Implement `TARGET_ASM_SELECT_SECTION' */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  /* Start from the default ELF choice and redirect progmem data into
     the matching progmem section.  */
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
      int segment = avr_addrspace[as].segment;

      if (sect->common.flags & SECTION_NAMED)
        {
          /* Named section: swap the ".rodata" prefix for the progmem
             prefix of the decl's flash segment.  */
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = progmem_section_prefix[segment];

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname, sect->common.flags, sect->named.decl);
            }
        }

      /* Otherwise use the generic progmem section of the segment.  */
      return progmem_section[segment];
    }

  return sect;
}
8350
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  int sfr_offset = avr_current_arch->sfr_offset;

  /* Some MCUs are only usable from assembler, not from C.  */
  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_current_device->name);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.
     Subtracting sfr_offset maps the data addresses in avr_addr to
     I/O addresses.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  /* RAMP registers and CCP exist only on some devices.  */
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
}
8384
7c2339f8 8385
8386/* Implement `TARGET_ASM_FILE_END'. */
a28e4651 8387/* Outputs to the stdio stream FILE some
8388 appropriate text to go at the end of an assembler file. */
8389
f6940372 8390static void
206a5129 8391avr_file_end (void)
a28e4651 8392{
7c2339f8 8393 /* Output these only if there is anything in the
8394 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
8395 input section(s) - some code size can be saved by not
8396 linking in the initialization code from libgcc if resp.
8397 sections are empty. */
8398
8399 if (avr_need_copy_data_p)
8400 fputs (".global __do_copy_data\n", asm_out_file);
8401
8402 if (avr_need_clear_bss_p)
8403 fputs (".global __do_clear_bss\n", asm_out_file);
a28e4651 8404}
8405
8406/* Choose the order in which to allocate hard registers for
8407 pseudo-registers local to a basic block.
8408
8409 Store the desired register order in the array `reg_alloc_order'.
8410 Element 0 should be the register to allocate first; element 1, the
8411 next register; and so on. */
8412
8413void
206a5129 8414order_regs_for_local_alloc (void)
a28e4651 8415{
8416 unsigned int i;
e99c3a1d 8417 static const int order_0[] = {
a28e4651 8418 24,25,
8419 18,19,
8420 20,21,
8421 22,23,
8422 30,31,
8423 26,27,
8424 28,29,
8425 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8426 0,1,
8427 32,33,34,35
8428 };
e99c3a1d 8429 static const int order_1[] = {
a28e4651 8430 18,19,
8431 20,21,
8432 22,23,
8433 24,25,
8434 30,31,
8435 26,27,
8436 28,29,
8437 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8438 0,1,
8439 32,33,34,35
8440 };
e99c3a1d 8441 static const int order_2[] = {
a28e4651 8442 25,24,
8443 23,22,
8444 21,20,
8445 19,18,
8446 30,31,
8447 26,27,
8448 28,29,
8449 17,16,
8450 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
8451 1,0,
8452 32,33,34,35
8453 };
8454
e99c3a1d 8455 const int *order = (TARGET_ORDER_1 ? order_1 :
8456 TARGET_ORDER_2 ? order_2 :
8457 order_0);
5c62f199 8458 for (i=0; i < ARRAY_SIZE (order_0); ++i)
a28e4651 8459 reg_alloc_order[i] = order[i];
8460}
8461
433a5f02 8462
dfc1e3e4 8463/* Implement `TARGET_REGISTER_MOVE_COST' */
8464
8465static int
8466avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8467 reg_class_t from, reg_class_t to)
8468{
8469 return (from == STACK_REG ? 6
8470 : to == STACK_REG ? 12
8471 : 2);
8472}
8473
8474
8475/* Implement `TARGET_MEMORY_MOVE_COST' */
8476
8477static int
a45076aa 8478avr_memory_move_cost (enum machine_mode mode,
8479 reg_class_t rclass ATTRIBUTE_UNUSED,
dfc1e3e4 8480 bool in ATTRIBUTE_UNUSED)
8481{
8482 return (mode == QImode ? 2
8483 : mode == HImode ? 4
8484 : mode == SImode ? 8
8485 : mode == SFmode ? 8
8486 : 16);
8487}
8488
8489
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.  OPNO is the operand number within OUTER;
   SPEED selects speed (true) vs. size (false) costs.  */

static int
avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
                      int opno, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    case REG:
    case SUBREG:
      /* Operands already in registers are free.  */
      return 0;

    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
      /* Loading an N-byte constant costs roughly N insns.  */
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  /* Anything else: let avr_rtx_costs compute the full cost.  */
  total = 0;
  avr_rtx_costs (x, code, outer, opno, &total, speed);
  return total;
}
8521
ae86bb47 8522/* Worker function for AVR backend's rtx_cost function.
8523 X is rtx expression whose cost is to be calculated.
8524 Return true if the complete cost has been computed.
8525 Return false if subexpressions should be scanned.
8526 In either case, *TOTAL contains the cost result. */
433a5f02 8527
fab7adbf 8528static bool
ae86bb47 8529avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
8530 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
fab7adbf 8531{
ef51d1e3 8532 enum rtx_code code = (enum rtx_code) codearg;
433a5f02 8533 enum machine_mode mode = GET_MODE (x);
8534 HOST_WIDE_INT val;
fab7adbf 8535
8536 switch (code)
8537 {
8538 case CONST_INT:
017c5b98 8539 case CONST_FIXED:
433a5f02 8540 case CONST_DOUBLE:
9685fb69 8541 case SYMBOL_REF:
f9fb96f9 8542 case CONST:
8543 case LABEL_REF:
433a5f02 8544 /* Immediate constants are as cheap as registers. */
8545 *total = 0;
8546 return true;
8547
8548 case MEM:
433a5f02 8549 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8550 return true;
8551
8552 case NEG:
8553 switch (mode)
fab7adbf 8554 {
433a5f02 8555 case QImode:
8556 case SFmode:
8557 *total = COSTS_N_INSNS (1);
8558 break;
8559
02d9a2c3 8560 case HImode:
8561 case PSImode:
8562 case SImode:
8563 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
8564 break;
433a5f02 8565
8566 default:
8567 return false;
fab7adbf 8568 }
20d892d1 8569 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8570 return true;
8571
8572 case ABS:
8573 switch (mode)
fab7adbf 8574 {
433a5f02 8575 case QImode:
8576 case SFmode:
8577 *total = COSTS_N_INSNS (1);
8578 break;
8579
8580 default:
8581 return false;
fab7adbf 8582 }
20d892d1 8583 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8584 return true;
fab7adbf 8585
433a5f02 8586 case NOT:
8587 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
20d892d1 8588 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
fab7adbf 8589 return true;
8590
433a5f02 8591 case ZERO_EXTEND:
8592 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
8593 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
20d892d1 8594 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8595 return true;
8596
8597 case SIGN_EXTEND:
8598 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
8599 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
20d892d1 8600 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8601 return true;
8602
8603 case PLUS:
8604 switch (mode)
8605 {
8606 case QImode:
37ee98f3 8607 if (AVR_HAVE_MUL
8608 && MULT == GET_CODE (XEXP (x, 0))
8609 && register_operand (XEXP (x, 1), QImode))
8610 {
8611 /* multiply-add */
8612 *total = COSTS_N_INSNS (speed ? 4 : 3);
8613 /* multiply-add with constant: will be split and load constant. */
8614 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8615 *total = COSTS_N_INSNS (1) + *total;
8616 return true;
8617 }
433a5f02 8618 *total = COSTS_N_INSNS (1);
8619 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 8620 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 8621 break;
8622
8623 case HImode:
73cd2c42 8624 if (AVR_HAVE_MUL
8625 && (MULT == GET_CODE (XEXP (x, 0))
8626 || ASHIFT == GET_CODE (XEXP (x, 0)))
8627 && register_operand (XEXP (x, 1), HImode)
8628 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
8629 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
8630 {
37ee98f3 8631 /* multiply-add */
73cd2c42 8632 *total = COSTS_N_INSNS (speed ? 5 : 4);
37ee98f3 8633 /* multiply-add with constant: will be split and load constant. */
8634 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8635 *total = COSTS_N_INSNS (1) + *total;
73cd2c42 8636 return true;
8637 }
433a5f02 8638 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8639 {
8640 *total = COSTS_N_INSNS (2);
20d892d1 8641 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8642 speed);
433a5f02 8643 }
8644 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8645 *total = COSTS_N_INSNS (1);
8646 else
8647 *total = COSTS_N_INSNS (2);
8648 break;
8649
02d9a2c3 8650 case PSImode:
8651 if (!CONST_INT_P (XEXP (x, 1)))
8652 {
8653 *total = COSTS_N_INSNS (3);
8654 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8655 speed);
8656 }
8657 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8658 *total = COSTS_N_INSNS (2);
8659 else
8660 *total = COSTS_N_INSNS (3);
8661 break;
8662
433a5f02 8663 case SImode:
8664 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8665 {
8666 *total = COSTS_N_INSNS (4);
20d892d1 8667 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8668 speed);
433a5f02 8669 }
8670 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8671 *total = COSTS_N_INSNS (1);
8672 else
8673 *total = COSTS_N_INSNS (4);
8674 break;
8675
8676 default:
8677 return false;
8678 }
20d892d1 8679 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8680 return true;
8681
8682 case MINUS:
37ee98f3 8683 if (AVR_HAVE_MUL
8684 && QImode == mode
8685 && register_operand (XEXP (x, 0), QImode)
8686 && MULT == GET_CODE (XEXP (x, 1)))
8687 {
8688 /* multiply-sub */
8689 *total = COSTS_N_INSNS (speed ? 4 : 3);
8690 /* multiply-sub with constant: will be split and load constant. */
8691 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
8692 *total = COSTS_N_INSNS (1) + *total;
8693 return true;
8694 }
73cd2c42 8695 if (AVR_HAVE_MUL
8696 && HImode == mode
8697 && register_operand (XEXP (x, 0), HImode)
8698 && (MULT == GET_CODE (XEXP (x, 1))
8699 || ASHIFT == GET_CODE (XEXP (x, 1)))
8700 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
8701 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
8702 {
37ee98f3 8703 /* multiply-sub */
73cd2c42 8704 *total = COSTS_N_INSNS (speed ? 5 : 4);
37ee98f3 8705 /* multiply-sub with constant: will be split and load constant. */
8706 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
8707 *total = COSTS_N_INSNS (1) + *total;
73cd2c42 8708 return true;
8709 }
02d9a2c3 8710 /* FALLTHRU */
433a5f02 8711 case AND:
8712 case IOR:
8713 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
20d892d1 8714 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 8715 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 8716 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 8717 return true;
8718
8719 case XOR:
8720 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
20d892d1 8721 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8722 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 8723 return true;
8724
8725 case MULT:
8726 switch (mode)
8727 {
8728 case QImode:
8cc5a1af 8729 if (AVR_HAVE_MUL)
f529eb25 8730 *total = COSTS_N_INSNS (!speed ? 3 : 4);
8731 else if (!speed)
4f0e2214 8732 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
433a5f02 8733 else
8734 return false;
ba92127f 8735 break;
433a5f02 8736
8737 case HImode:
8cc5a1af 8738 if (AVR_HAVE_MUL)
0b90fc76 8739 {
8740 rtx op0 = XEXP (x, 0);
8741 rtx op1 = XEXP (x, 1);
8742 enum rtx_code code0 = GET_CODE (op0);
8743 enum rtx_code code1 = GET_CODE (op1);
8744 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
8745 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
8746
8747 if (ex0
8748 && (u8_operand (op1, HImode)
8749 || s8_operand (op1, HImode)))
8750 {
8751 *total = COSTS_N_INSNS (!speed ? 4 : 6);
8752 return true;
8753 }
8754 if (ex0
8755 && register_operand (op1, HImode))
8756 {
8757 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8758 return true;
8759 }
8760 else if (ex0 || ex1)
8761 {
8762 *total = COSTS_N_INSNS (!speed ? 3 : 5);
8763 return true;
8764 }
8765 else if (register_operand (op0, HImode)
8766 && (u8_operand (op1, HImode)
8767 || s8_operand (op1, HImode)))
8768 {
8769 *total = COSTS_N_INSNS (!speed ? 6 : 9);
8770 return true;
8771 }
8772 else
8773 *total = COSTS_N_INSNS (!speed ? 7 : 10);
8774 }
f529eb25 8775 else if (!speed)
4f0e2214 8776 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
433a5f02 8777 else
8778 return false;
ba92127f 8779 break;
433a5f02 8780
02d9a2c3 8781 case PSImode:
8782 if (!speed)
8783 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8784 else
8785 *total = 10;
8786 break;
8787
713e2ad9 8788 case SImode:
8789 if (AVR_HAVE_MUL)
8790 {
8791 if (!speed)
8792 {
8793 /* Add some additional costs besides CALL like moves etc. */
8794
8795 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8796 }
8797 else
8798 {
8799 /* Just a rough estimate. Even with -O2 we don't want bulky
8800 code expanded inline. */
8801
8802 *total = COSTS_N_INSNS (25);
8803 }
8804 }
8805 else
8806 {
8807 if (speed)
8808 *total = COSTS_N_INSNS (300);
8809 else
8810 /* Add some additional costs besides CALL like moves etc. */
8811 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8812 }
8813
8814 return true;
8815
433a5f02 8816 default:
8817 return false;
8818 }
20d892d1 8819 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8820 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 8821 return true;
8822
8823 case DIV:
8824 case MOD:
8825 case UDIV:
8826 case UMOD:
f529eb25 8827 if (!speed)
fd2db4d6 8828 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
433a5f02 8829 else
fd2db4d6 8830 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
20d892d1 8831 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
fd2db4d6 8832 /* For div/mod with const-int divisor we have at least the cost of
8833 loading the divisor. */
8834 if (CONST_INT_P (XEXP (x, 1)))
8835 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
8836 /* Add some overall penaly for clobbering and moving around registers */
8837 *total += COSTS_N_INSNS (2);
433a5f02 8838 return true;
8839
8f14d2e0 8840 case ROTATE:
8841 switch (mode)
8842 {
8843 case QImode:
8844 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
8845 *total = COSTS_N_INSNS (1);
8846
8847 break;
8848
8849 case HImode:
8850 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
8851 *total = COSTS_N_INSNS (3);
8852
8853 break;
8854
8855 case SImode:
8856 if (CONST_INT_P (XEXP (x, 1)))
8857 switch (INTVAL (XEXP (x, 1)))
8858 {
8859 case 8:
8860 case 24:
8861 *total = COSTS_N_INSNS (5);
8862 break;
8863 case 16:
8864 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
8865 break;
8866 }
8867 break;
8868
8869 default:
8870 return false;
8871 }
20d892d1 8872 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8f14d2e0 8873 return true;
8874
433a5f02 8875 case ASHIFT:
8876 switch (mode)
8877 {
8878 case QImode:
8879 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8880 {
f529eb25 8881 *total = COSTS_N_INSNS (!speed ? 4 : 17);
20d892d1 8882 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8883 speed);
433a5f02 8884 }
8885 else
8886 {
8887 val = INTVAL (XEXP (x, 1));
8888 if (val == 7)
8889 *total = COSTS_N_INSNS (3);
8890 else if (val >= 0 && val <= 7)
8891 *total = COSTS_N_INSNS (val);
8892 else
8893 *total = COSTS_N_INSNS (1);
8894 }
8895 break;
8896
8897 case HImode:
0b90fc76 8898 if (AVR_HAVE_MUL)
8899 {
8900 if (const_2_to_7_operand (XEXP (x, 1), HImode)
8901 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
8902 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
8903 {
8904 *total = COSTS_N_INSNS (!speed ? 4 : 6);
8905 return true;
8906 }
8907 }
8908
37ee98f3 8909 if (const1_rtx == (XEXP (x, 1))
8910 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
8911 {
8912 *total = COSTS_N_INSNS (2);
8913 return true;
8914 }
8915
433a5f02 8916 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8917 {
f529eb25 8918 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 8919 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8920 speed);
433a5f02 8921 }
8922 else
8923 switch (INTVAL (XEXP (x, 1)))
8924 {
8925 case 0:
8926 *total = 0;
8927 break;
8928 case 1:
8929 case 8:
8930 *total = COSTS_N_INSNS (2);
8931 break;
8932 case 9:
8933 *total = COSTS_N_INSNS (3);
8934 break;
8935 case 2:
8936 case 3:
8937 case 10:
8938 case 15:
8939 *total = COSTS_N_INSNS (4);
8940 break;
8941 case 7:
8942 case 11:
8943 case 12:
8944 *total = COSTS_N_INSNS (5);
8945 break;
8946 case 4:
f529eb25 8947 *total = COSTS_N_INSNS (!speed ? 5 : 8);
433a5f02 8948 break;
8949 case 6:
12564c56 8950 *total = COSTS_N_INSNS (!speed ? 5 : 9);
433a5f02 8951 break;
8952 case 5:
f529eb25 8953 *total = COSTS_N_INSNS (!speed ? 5 : 10);
433a5f02 8954 break;
8955 default:
f529eb25 8956 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 8957 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8958 speed);
433a5f02 8959 }
8960 break;
8961
02d9a2c3 8962 case PSImode:
8963 if (!CONST_INT_P (XEXP (x, 1)))
8964 {
8965 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8966 }
8967 else
8968 switch (INTVAL (XEXP (x, 1)))
8969 {
8970 case 0:
8971 *total = 0;
8972 break;
8973 case 1:
8974 case 8:
8975 case 16:
8976 *total = COSTS_N_INSNS (3);
8977 break;
8978 case 23:
8979 *total = COSTS_N_INSNS (5);
8980 break;
8981 default:
8982 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8983 break;
8984 }
8985 break;
8986
433a5f02 8987 case SImode:
8988 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8989 {
f529eb25 8990 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 8991 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8992 speed);
433a5f02 8993 }
8994 else
8995 switch (INTVAL (XEXP (x, 1)))
8996 {
8997 case 0:
8998 *total = 0;
8999 break;
9000 case 24:
9001 *total = COSTS_N_INSNS (3);
9002 break;
9003 case 1:
9004 case 8:
9005 case 16:
9006 *total = COSTS_N_INSNS (4);
9007 break;
9008 case 31:
9009 *total = COSTS_N_INSNS (6);
9010 break;
9011 case 2:
f529eb25 9012 *total = COSTS_N_INSNS (!speed ? 7 : 8);
433a5f02 9013 break;
9014 default:
f529eb25 9015 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 9016 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9017 speed);
433a5f02 9018 }
9019 break;
9020
9021 default:
9022 return false;
9023 }
20d892d1 9024 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 9025 return true;
9026
9027 case ASHIFTRT:
9028 switch (mode)
9029 {
9030 case QImode:
9031 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9032 {
f529eb25 9033 *total = COSTS_N_INSNS (!speed ? 4 : 17);
20d892d1 9034 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9035 speed);
433a5f02 9036 }
9037 else
9038 {
9039 val = INTVAL (XEXP (x, 1));
9040 if (val == 6)
9041 *total = COSTS_N_INSNS (4);
9042 else if (val == 7)
9043 *total = COSTS_N_INSNS (2);
9044 else if (val >= 0 && val <= 7)
9045 *total = COSTS_N_INSNS (val);
9046 else
9047 *total = COSTS_N_INSNS (1);
9048 }
9049 break;
9050
9051 case HImode:
9052 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9053 {
f529eb25 9054 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 9055 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9056 speed);
433a5f02 9057 }
9058 else
9059 switch (INTVAL (XEXP (x, 1)))
9060 {
9061 case 0:
9062 *total = 0;
9063 break;
9064 case 1:
9065 *total = COSTS_N_INSNS (2);
9066 break;
9067 case 15:
9068 *total = COSTS_N_INSNS (3);
9069 break;
9070 case 2:
9071 case 7:
9072 case 8:
9073 case 9:
9074 *total = COSTS_N_INSNS (4);
9075 break;
9076 case 10:
9077 case 14:
9078 *total = COSTS_N_INSNS (5);
9079 break;
9080 case 11:
f529eb25 9081 *total = COSTS_N_INSNS (!speed ? 5 : 6);
433a5f02 9082 break;
9083 case 12:
f529eb25 9084 *total = COSTS_N_INSNS (!speed ? 5 : 7);
433a5f02 9085 break;
9086 case 6:
9087 case 13:
f529eb25 9088 *total = COSTS_N_INSNS (!speed ? 5 : 8);
433a5f02 9089 break;
9090 default:
f529eb25 9091 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 9092 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9093 speed);
433a5f02 9094 }
9095 break;
9096
02d9a2c3 9097 case PSImode:
9098 if (!CONST_INT_P (XEXP (x, 1)))
9099 {
9100 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9101 }
9102 else
9103 switch (INTVAL (XEXP (x, 1)))
9104 {
9105 case 0:
9106 *total = 0;
9107 break;
9108 case 1:
9109 *total = COSTS_N_INSNS (3);
9110 break;
9111 case 16:
9112 case 8:
9113 *total = COSTS_N_INSNS (5);
9114 break;
9115 case 23:
9116 *total = COSTS_N_INSNS (4);
9117 break;
9118 default:
9119 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9120 break;
9121 }
9122 break;
9123
433a5f02 9124 case SImode:
9125 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9126 {
f529eb25 9127 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 9128 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9129 speed);
433a5f02 9130 }
9131 else
9132 switch (INTVAL (XEXP (x, 1)))
9133 {
9134 case 0:
9135 *total = 0;
9136 break;
9137 case 1:
9138 *total = COSTS_N_INSNS (4);
9139 break;
9140 case 8:
9141 case 16:
9142 case 24:
9143 *total = COSTS_N_INSNS (6);
9144 break;
9145 case 2:
f529eb25 9146 *total = COSTS_N_INSNS (!speed ? 7 : 8);
433a5f02 9147 break;
9148 case 31:
0aab73c2 9149 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
433a5f02 9150 break;
9151 default:
f529eb25 9152 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 9153 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9154 speed);
433a5f02 9155 }
9156 break;
9157
9158 default:
9159 return false;
9160 }
20d892d1 9161 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 9162 return true;
9163
9164 case LSHIFTRT:
9165 switch (mode)
9166 {
9167 case QImode:
9168 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9169 {
f529eb25 9170 *total = COSTS_N_INSNS (!speed ? 4 : 17);
20d892d1 9171 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9172 speed);
433a5f02 9173 }
9174 else
9175 {
9176 val = INTVAL (XEXP (x, 1));
9177 if (val == 7)
9178 *total = COSTS_N_INSNS (3);
9179 else if (val >= 0 && val <= 7)
9180 *total = COSTS_N_INSNS (val);
9181 else
9182 *total = COSTS_N_INSNS (1);
9183 }
9184 break;
9185
9186 case HImode:
9187 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9188 {
f529eb25 9189 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 9190 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9191 speed);
433a5f02 9192 }
9193 else
9194 switch (INTVAL (XEXP (x, 1)))
9195 {
9196 case 0:
9197 *total = 0;
9198 break;
9199 case 1:
9200 case 8:
9201 *total = COSTS_N_INSNS (2);
9202 break;
9203 case 9:
9204 *total = COSTS_N_INSNS (3);
9205 break;
9206 case 2:
9207 case 10:
9208 case 15:
9209 *total = COSTS_N_INSNS (4);
9210 break;
9211 case 7:
9212 case 11:
9213 *total = COSTS_N_INSNS (5);
9214 break;
9215 case 3:
9216 case 12:
9217 case 13:
9218 case 14:
f529eb25 9219 *total = COSTS_N_INSNS (!speed ? 5 : 6);
433a5f02 9220 break;
9221 case 4:
f529eb25 9222 *total = COSTS_N_INSNS (!speed ? 5 : 7);
433a5f02 9223 break;
9224 case 5:
9225 case 6:
f529eb25 9226 *total = COSTS_N_INSNS (!speed ? 5 : 9);
433a5f02 9227 break;
9228 default:
f529eb25 9229 *total = COSTS_N_INSNS (!speed ? 5 : 41);
20d892d1 9230 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9231 speed);
433a5f02 9232 }
9233 break;
9234
02d9a2c3 9235 case PSImode:
9236 if (!CONST_INT_P (XEXP (x, 1)))
9237 {
9238 *total = COSTS_N_INSNS (!speed ? 6 : 73);
9239 }
9240 else
9241 switch (INTVAL (XEXP (x, 1)))
9242 {
9243 case 0:
9244 *total = 0;
9245 break;
9246 case 1:
9247 case 8:
9248 case 16:
9249 *total = COSTS_N_INSNS (3);
9250 break;
9251 case 23:
9252 *total = COSTS_N_INSNS (5);
9253 break;
9254 default:
9255 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9256 break;
9257 }
9258 break;
9259
433a5f02 9260 case SImode:
9261 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9262 {
f529eb25 9263 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 9264 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9265 speed);
433a5f02 9266 }
9267 else
9268 switch (INTVAL (XEXP (x, 1)))
9269 {
9270 case 0:
9271 *total = 0;
9272 break;
9273 case 1:
9274 *total = COSTS_N_INSNS (4);
9275 break;
9276 case 2:
f529eb25 9277 *total = COSTS_N_INSNS (!speed ? 7 : 8);
433a5f02 9278 break;
9279 case 8:
9280 case 16:
9281 case 24:
9282 *total = COSTS_N_INSNS (4);
9283 break;
9284 case 31:
9285 *total = COSTS_N_INSNS (6);
9286 break;
9287 default:
f529eb25 9288 *total = COSTS_N_INSNS (!speed ? 7 : 113);
20d892d1 9289 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9290 speed);
433a5f02 9291 }
9292 break;
9293
9294 default:
9295 return false;
9296 }
20d892d1 9297 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 9298 return true;
9299
9300 case COMPARE:
9301 switch (GET_MODE (XEXP (x, 0)))
fab7adbf 9302 {
433a5f02 9303 case QImode:
9304 *total = COSTS_N_INSNS (1);
9305 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 9306 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 9307 break;
9308
9309 case HImode:
9310 *total = COSTS_N_INSNS (2);
9311 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 9312 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 9313 else if (INTVAL (XEXP (x, 1)) != 0)
9314 *total += COSTS_N_INSNS (1);
9315 break;
9316
02d9a2c3 9317 case PSImode:
9318 *total = COSTS_N_INSNS (3);
9319 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
9320 *total += COSTS_N_INSNS (2);
9321 break;
9322
433a5f02 9323 case SImode:
9324 *total = COSTS_N_INSNS (4);
9325 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
20d892d1 9326 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
433a5f02 9327 else if (INTVAL (XEXP (x, 1)) != 0)
9328 *total += COSTS_N_INSNS (3);
9329 break;
9330
9331 default:
9332 return false;
fab7adbf 9333 }
20d892d1 9334 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
433a5f02 9335 return true;
9336
12bf3919 9337 case TRUNCATE:
9338 if (AVR_HAVE_MUL
9339 && LSHIFTRT == GET_CODE (XEXP (x, 0))
9340 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
9341 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9342 {
9343 if (QImode == mode || HImode == mode)
9344 {
9345 *total = COSTS_N_INSNS (2);
9346 return true;
9347 }
9348 }
9349 break;
9350
433a5f02 9351 default:
9352 break;
fab7adbf 9353 }
433a5f02 9354 return false;
fab7adbf 9355}
9356
ae86bb47 9357
9358/* Implement `TARGET_RTX_COSTS'. */
9359
9360static bool
9361avr_rtx_costs (rtx x, int codearg, int outer_code,
9362 int opno, int *total, bool speed)
9363{
9364 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
9365 opno, total, speed);
9366
9367 if (avr_log.rtx_costs)
9368 {
9369 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9370 done, speed ? "speed" : "size", *total, outer_code, x);
9371 }
9372
9373 return done;
9374}
9375
8c3bcbe3 9376
9377/* Implement `TARGET_ADDRESS_COST'. */
a28e4651 9378
ec0457a8 9379static int
d9c5e5f4 9380avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
b4ebb666 9381 addr_space_t as ATTRIBUTE_UNUSED,
9382 bool speed ATTRIBUTE_UNUSED)
a28e4651 9383{
8c3bcbe3 9384 int cost = 4;
9385
a28e4651 9386 if (GET_CODE (x) == PLUS
8c3bcbe3 9387 && CONST_INT_P (XEXP (x, 1))
9388 && (REG_P (XEXP (x, 0))
9389 || GET_CODE (XEXP (x, 0)) == SUBREG))
37ac04dc 9390 {
8c3bcbe3 9391 if (INTVAL (XEXP (x, 1)) >= 61)
9392 cost = 18;
37ac04dc 9393 }
8c3bcbe3 9394 else if (CONSTANT_ADDRESS_P (x))
9395 {
9396 if (optimize > 0
9397 && io_address_operand (x, QImode))
9398 cost = 2;
9399 }
9400
9401 if (avr_log.address_cost)
9402 avr_edump ("\n%?: %d = %r\n", cost, x);
9403
9404 return cost;
a28e4651 9405}
9406
164f5b34 9407/* Test for extra memory constraint 'Q'.
9408 It's a memory address based on Y or Z pointer with valid displacement. */
a28e4651 9409
9410int
164f5b34 9411extra_constraint_Q (rtx x)
a28e4651 9412{
ae86bb47 9413 int ok = 0;
9414
164f5b34 9415 if (GET_CODE (XEXP (x,0)) == PLUS
9416 && REG_P (XEXP (XEXP (x,0), 0))
9417 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
9418 && (INTVAL (XEXP (XEXP (x,0), 1))
9419 <= MAX_LD_OFFSET (GET_MODE (x))))
a28e4651 9420 {
164f5b34 9421 rtx xx = XEXP (XEXP (x,0), 0);
9422 int regno = REGNO (xx);
ae86bb47 9423
9424 ok = (/* allocate pseudos */
9425 regno >= FIRST_PSEUDO_REGISTER
9426 /* strictly check */
9427 || regno == REG_Z || regno == REG_Y
9428 /* XXX frame & arg pointer checks */
9429 || xx == frame_pointer_rtx
9430 || xx == arg_pointer_rtx);
9431
9432 if (avr_log.constraints)
9433 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9434 ok, reload_completed, reload_in_progress, x);
a28e4651 9435 }
ae86bb47 9436
9437 return ok;
a28e4651 9438}
9439
20c71901 9440/* Convert condition code CONDITION to the valid AVR condition code. */
a28e4651 9441
9442RTX_CODE
206a5129 9443avr_normalize_condition (RTX_CODE condition)
a28e4651 9444{
9445 switch (condition)
9446 {
9447 case GT:
9448 return GE;
9449 case GTU:
9450 return GEU;
9451 case LE:
9452 return LT;
9453 case LEU:
9454 return LTU;
9455 default:
8ef66241 9456 gcc_unreachable ();
a28e4651 9457 }
9458}
9459
cffa155c 9460/* Helper function for `avr_reorg'. */
9461
9462static rtx
9463avr_compare_pattern (rtx insn)
9464{
9465 rtx pattern = single_set (insn);
9466
9467 if (pattern
9468 && NONJUMP_INSN_P (insn)
9469 && SET_DEST (pattern) == cc0_rtx
017c5b98 9470 && GET_CODE (SET_SRC (pattern)) == COMPARE)
cffa155c 9471 {
017c5b98 9472 enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
9473 enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
9474
9475 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9476 They must not be swapped, thus skip them. */
9477
9478 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
9479 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
9480 return pattern;
cffa155c 9481 }
9482
9483 return NULL_RTX;
9484}
9485
9486/* Helper function for `avr_reorg'. */
9487
9488/* Expansion of switch/case decision trees leads to code like
9489
9490 cc0 = compare (Reg, Num)
9491 if (cc0 == 0)
9492 goto L1
9493
9494 cc0 = compare (Reg, Num)
9495 if (cc0 > 0)
9496 goto L2
9497
9498 The second comparison is superfluous and can be deleted.
9499 The second jump condition can be transformed from a
9500 "difficult" one to a "simple" one because "cc0 > 0" and
9501 "cc0 >= 0" will have the same effect here.
9502
   This function relies on the way switch/case is being expanded
   as a binary decision tree.  For example code see PR 49903.
9505
9506 Return TRUE if optimization performed.
9507 Return FALSE if nothing changed.
9508
9509 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9510
9511 We don't want to do this in text peephole because it is
9512 tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.
9514
9515 RTL peephole won't do because peephole2 does not scan across
9516 basic blocks. */
9517
static bool
avr_reorg_remove_redundant_compare (rtx insn1)
{
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  /* Extract the two compare patterns and the two branch sets.  */

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both compares must be identical and both branches must be plain
     conditional jumps (IF_THEN_ELSE setting pc).  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* The first branch must test cc0 == 0 (i.e. EQ), the compare must be
     reg-against-const-int, and both branches must fall through on the
     ELSE arm and jump to a label on the THEN arm.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL) goto L1;
        if (x > VAL) goto L2;

     with easy

         if (x == VAL) goto L1;
         if (x >= VAL) goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT: case LTU:
    case GE: case GEU:
      break;

    case LE: case LEU:
    case GT: case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
9654
9655
9656/* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
9657/* Optimize conditional jumps. */
a28e4651 9658
2efea8c0 9659static void
206a5129 9660avr_reorg (void)
a28e4651 9661{
cffa155c 9662 rtx insn = get_insns();
a28e4651 9663
cffa155c 9664 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
a28e4651 9665 {
cffa155c 9666 rtx pattern = avr_compare_pattern (insn);
9667
9668 if (!pattern)
9669 continue;
a28e4651 9670
cffa155c 9671 if (optimize
9672 && avr_reorg_remove_redundant_compare (insn))
9673 {
9674 continue;
9675 }
a28e4651 9676
cffa155c 9677 if (compare_diff_p (insn))
a28e4651 9678 {
cffa155c 9679 /* Now we work under compare insn with difficult branch. */
9680
9681 rtx next = next_real_insn (insn);
9682 rtx pat = PATTERN (next);
9683
9684 pattern = SET_SRC (pattern);
9685
9686 if (true_regnum (XEXP (pattern, 0)) >= 0
9687 && true_regnum (XEXP (pattern, 1)) >= 0)
9688 {
9689 rtx x = XEXP (pattern, 0);
9690 rtx src = SET_SRC (pat);
9691 rtx t = XEXP (src,0);
9692 PUT_CODE (t, swap_condition (GET_CODE (t)));
9693 XEXP (pattern, 0) = XEXP (pattern, 1);
9694 XEXP (pattern, 1) = x;
9695 INSN_CODE (next) = -1;
9696 }
9697 else if (true_regnum (XEXP (pattern, 0)) >= 0
9698 && XEXP (pattern, 1) == const0_rtx)
9699 {
9700 /* This is a tst insn, we can reverse it. */
9701 rtx src = SET_SRC (pat);
9702 rtx t = XEXP (src,0);
74f4459c 9703
cffa155c 9704 PUT_CODE (t, swap_condition (GET_CODE (t)));
9705 XEXP (pattern, 1) = XEXP (pattern, 0);
9706 XEXP (pattern, 0) = const0_rtx;
9707 INSN_CODE (next) = -1;
9708 INSN_CODE (insn) = -1;
9709 }
9710 else if (true_regnum (XEXP (pattern, 0)) >= 0
9711 && CONST_INT_P (XEXP (pattern, 1)))
9712 {
9713 rtx x = XEXP (pattern, 1);
9714 rtx src = SET_SRC (pat);
9715 rtx t = XEXP (src,0);
9716 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
9717
9718 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
9719 {
9720 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
9721 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
9722 INSN_CODE (next) = -1;
9723 INSN_CODE (insn) = -1;
9724 }
9725 }
9726 }
a28e4651 9727 }
9728}
9729
9730/* Returns register number for function return value.*/
9731
1086ba15 9732static inline unsigned int
206a5129 9733avr_ret_register (void)
a28e4651 9734{
9735 return 24;
9736}
9737
73475e84 9738/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
9739
9740static bool
9741avr_function_value_regno_p (const unsigned int regno)
9742{
9743 return (regno == avr_ret_register ());
9744}
9745
f2b32076 9746/* Create an RTX representing the place where a
a28e4651 9747 library function returns a value of mode MODE. */
9748
73475e84 9749static rtx
9750avr_libcall_value (enum machine_mode mode,
9751 const_rtx func ATTRIBUTE_UNUSED)
a28e4651 9752{
9753 int offs = GET_MODE_SIZE (mode);
02d9a2c3 9754
9755 if (offs <= 4)
9756 offs = (offs + 1) & ~1;
9757
73475e84 9758 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
a28e4651 9759}
9760
9761/* Create an RTX representing the place where a
9762 function returns a value of data type VALTYPE. */
9763
73475e84 9764static rtx
1086ba15 9765avr_function_value (const_tree type,
9766 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
9767 bool outgoing ATTRIBUTE_UNUSED)
a28e4651 9768{
1cb39658 9769 unsigned int offs;
73475e84 9770
a28e4651 9771 if (TYPE_MODE (type) != BLKmode)
1086ba15 9772 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
a28e4651 9773
9774 offs = int_size_in_bytes (type);
9775 if (offs < 2)
9776 offs = 2;
9777 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
9778 offs = GET_MODE_SIZE (SImode);
9779 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
9780 offs = GET_MODE_SIZE (DImode);
9781
73475e84 9782 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
a28e4651 9783}
9784
8a2a7305 9785int
8deb3959 9786test_hard_reg_class (enum reg_class rclass, rtx x)
8a2a7305 9787{
9788 int regno = true_regnum (x);
9789 if (regno < 0)
9790 return 0;
cb39cd35 9791
8deb3959 9792 if (TEST_HARD_REG_CLASS (rclass, regno))
cb39cd35 9793 return 1;
9794
9795 return 0;
8a2a7305 9796}
9797
2ebcbfe8 9798
8ef28ef2 9799/* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
9800 and thus is suitable to be skipped by CPSE, SBRC, etc. */
9801
9802static bool
9803avr_2word_insn_p (rtx insn)
9804{
9805 if (avr_current_device->errata_skip
9806 || !insn
9807 || 2 != get_attr_length (insn))
9808 {
9809 return false;
9810 }
9811
9812 switch (INSN_CODE (insn))
9813 {
9814 default:
9815 return false;
9816
9817 case CODE_FOR_movqi_insn:
017c5b98 9818 case CODE_FOR_movuqq_insn:
9819 case CODE_FOR_movqq_insn:
8ef28ef2 9820 {
9821 rtx set = single_set (insn);
9822 rtx src = SET_SRC (set);
9823 rtx dest = SET_DEST (set);
9824
9825 /* Factor out LDS and STS from movqi_insn. */
9826
9827 if (MEM_P (dest)
017c5b98 9828 && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
8ef28ef2 9829 {
9830 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
9831 }
9832 else if (REG_P (dest)
9833 && MEM_P (src))
9834 {
9835 return CONSTANT_ADDRESS_P (XEXP (src, 0));
9836 }
9837
9838 return false;
9839 }
9840
9841 case CODE_FOR_call_insn:
9842 case CODE_FOR_call_value_insn:
9843 return true;
9844 }
9845}
9846
9847
2ebcbfe8 9848int
206a5129 9849jump_over_one_insn_p (rtx insn, rtx dest)
2ebcbfe8 9850{
9851 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
9852 ? XEXP (dest, 0)
9853 : dest);
47fc0706 9854 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
9855 int dest_addr = INSN_ADDRESSES (uid);
8ef28ef2 9856 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
9857
9858 return (jump_offset == 1
9859 || (jump_offset == 2
9860 && avr_2word_insn_p (next_active_insn (insn))));
2ebcbfe8 9861}
a7690ba9 9862
9863/* Returns 1 if a value of mode MODE can be stored starting with hard
0af74aa0 9864 register number REGNO. On the enhanced core, anything larger than
9865 1 byte must start in even numbered register for "movw" to work
9866 (this way we don't have to check for odd registers everywhere). */
a7690ba9 9867
9868int
206a5129 9869avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
a7690ba9 9870{
3d4d979d 9871 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9872 Disallowing QI et al. in these regs might lead to code like
9873 (set (subreg:QI (reg:HI 28) n) ...)
9874 which will result in wrong code because reload does not
9875 handle SUBREGs of hard regsisters like this.
9876 This could be fixed in reload. However, it appears
9877 that fixing reload is not wanted by reload people. */
9878
9879 /* Any GENERAL_REGS register can hold 8-bit values. */
9880
9881 if (GET_MODE_SIZE (mode) == 1)
a7690ba9 9882 return 1;
60d76236 9883
3d4d979d 9884 /* FIXME: Ideally, the following test is not needed.
9885 However, it turned out that it can reduce the number
9886 of spill fails. AVR and it's poor endowment with
9887 address registers is extreme stress test for reload. */
9888
9889 if (GET_MODE_SIZE (mode) >= 4
9890 && regno >= REG_X)
60d76236 9891 return 0;
9892
3d4d979d 9893 /* All modes larger than 8 bits should start in an even register. */
9f42c829 9894
a7690ba9 9895 return !(regno & 1);
9896}
e511e253 9897
e511e253 9898
f55cb01e 9899/* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
9900
9901int
9902avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
9903{
9904 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
9905 represent valid hard registers like, e.g. HI:29. Returning TRUE
9906 for such registers can lead to performance degradation as mentioned
9907 in PR53595. Thus, report invalid hard registers as FALSE. */
9908
9909 if (!avr_hard_regno_mode_ok (regno, mode))
9910 return 0;
9911
9912 /* Return true if any of the following boundaries is crossed:
9913 17/18, 27/28 and 29/30. */
9914
9915 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
9916 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
9917 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
9918}
9919
9920
8b0ecac5 9921/* Implement `MODE_CODE_BASE_REG_CLASS'. */
9922
63477dcc 9923enum reg_class
8b0ecac5 9924avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
4202ef11 9925 addr_space_t as, RTX_CODE outer_code,
8b0ecac5 9926 RTX_CODE index_code ATTRIBUTE_UNUSED)
9927{
4202ef11 9928 if (!ADDR_SPACE_GENERIC_P (as))
9929 {
9930 return POINTER_Z_REGS;
9931 }
9932
f9efb148 9933 if (!avr_strict_X)
9934 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
9935
9936 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8b0ecac5 9937}
9938
9939
9940/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
9941
9942bool
9943avr_regno_mode_code_ok_for_base_p (int regno,
9944 enum machine_mode mode ATTRIBUTE_UNUSED,
f8a8fc7b 9945 addr_space_t as ATTRIBUTE_UNUSED,
f9efb148 9946 RTX_CODE outer_code,
8b0ecac5 9947 RTX_CODE index_code ATTRIBUTE_UNUSED)
9948{
f9efb148 9949 bool ok = false;
9950
4202ef11 9951 if (!ADDR_SPACE_GENERIC_P (as))
9952 {
9953 if (regno < FIRST_PSEUDO_REGISTER
9954 && regno == REG_Z)
9955 {
9956 return true;
9957 }
9958
9959 if (reg_renumber)
9960 {
9961 regno = reg_renumber[regno];
9962
9963 if (regno == REG_Z)
9964 {
9965 return true;
9966 }
9967 }
9968
9969 return false;
9970 }
9971
8b0ecac5 9972 if (regno < FIRST_PSEUDO_REGISTER
9973 && (regno == REG_X
9974 || regno == REG_Y
9975 || regno == REG_Z
9f42c829 9976 || regno == ARG_POINTER_REGNUM))
8b0ecac5 9977 {
f9efb148 9978 ok = true;
8b0ecac5 9979 }
f9efb148 9980 else if (reg_renumber)
8b0ecac5 9981 {
9982 regno = reg_renumber[regno];
9983
9984 if (regno == REG_X
9985 || regno == REG_Y
9986 || regno == REG_Z
9f42c829 9987 || regno == ARG_POINTER_REGNUM)
8b0ecac5 9988 {
f9efb148 9989 ok = true;
8b0ecac5 9990 }
9991 }
f9efb148 9992
9993 if (avr_strict_X
9994 && PLUS == outer_code
9995 && regno == REG_X)
9996 {
9997 ok = false;
9998 }
9999
10000 return ok;
8b0ecac5 10001}
10002
10003
/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
/* Set register OP[0] (1, 2, 3 or 4 bytes wide) to compile-time
   constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.
   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   The effect on cc0 is as follows:

   Load 0 to any register except ZERO_REG : NONE
   Load ld register with any value        : NONE
   Anything else:                         : CLOBBER  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  /* Byte value currently held in CLOBBER_REG; 1234 is an impossible
     byte value, i.e. "nothing cached yet".  */
  int clobber_val = 1234;
  /* True iff we invented a clobber reg below and must restore it.  */
  bool cooked_clobber_p = false;
  /* True once the T flag has been SET for the bld sequence.  */
  bool set_p = false;
  enum machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Symbolic constants: emit lo8/hi8/hlo8/hhi8 relocations per byte,
         through the clobber reg when the destination cannot take LDI.  */

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              /* A MOVW of zero onto an already-cleared register would
                 be redundant.  */

              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The clobber reg already holds this very byte value and is the
         destination: nothing to do.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          /* SET is only needed once; the T flag survives across the
             following bytes.  */

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
10233
10234
2f2d376f 10235/* Reload the constant OP[1] into the HI register OP[0].
10236 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10237 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10238 need a clobber reg or have to cook one up.
10239
10240 PLEN == NULL: Output instructions.
10241 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
10242 by the insns printed.
10243
10244 Return "". */
10245
10246const char*
10247output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
10248{
a49907f9 10249 output_reload_in_const (op, clobber_reg, plen, false);
2f2d376f 10250 return "";
10251}
10252
10253
28913f6b 10254/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10255 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10256 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
10257 need a clobber reg or have to cook one up.
10258
10259 LEN == NULL: Output instructions.
10260
b0e2b973 10261 LEN != NULL: Output nothing. Set *LEN to number of words occupied
28913f6b 10262 by the insns printed.
10263
10264 Return "". */
10265
10266const char *
5bca95a8 10267output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
28913f6b 10268{
28913f6b 10269 if (AVR_HAVE_MOVW
644ac9c5 10270 && !test_hard_reg_class (LD_REGS, op[0])
10271 && (CONST_INT_P (op[1])
017c5b98 10272 || CONST_FIXED_P (op[1])
644ac9c5 10273 || CONST_DOUBLE_P (op[1])))
28913f6b 10274 {
10275 int len_clr, len_noclr;
10276
10277 /* In some cases it is better to clear the destination beforehand, e.g.
10278
10279 CLR R2 CLR R3 MOVW R4,R2 INC R2
10280
10281 is shorther than
10282
10283 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
10284
10285 We find it too tedious to work that out in the print function.
10286 Instead, we call the print function twice to get the lengths of
10287 both methods and use the shortest one. */
10288
2f2d376f 10289 output_reload_in_const (op, clobber_reg, &len_clr, true);
10290 output_reload_in_const (op, clobber_reg, &len_noclr, false);
28913f6b 10291
10292 if (len_noclr - len_clr == 4)
10293 {
10294 /* Default needs 4 CLR instructions: clear register beforehand. */
10295
f4806884 10296 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
10297 "mov %B0,__zero_reg__" CR_TAB
28913f6b 10298 "movw %C0,%A0", &op[0], len, 3);
10299
2f2d376f 10300 output_reload_in_const (op, clobber_reg, len, true);
28913f6b 10301
10302 if (len)
10303 *len += 3;
10304
10305 return "";
10306 }
10307 }
10308
10309 /* Default: destination not pre-cleared. */
10310
2f2d376f 10311 output_reload_in_const (op, clobber_reg, len, false);
e511e253 10312 return "";
10313}
b681d971 10314
02d9a2c3 10315const char *
10316avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
10317{
02d9a2c3 10318 output_reload_in_const (op, clobber_reg, len, false);
10319 return "";
10320}
10321
b681d971 10322
91b18013 10323void
206a5129 10324avr_output_addr_vec_elt (FILE *stream, int value)
91b18013 10325{
90ef7269 10326 if (AVR_HAVE_JMP_CALL)
10327 fprintf (stream, "\t.word gs(.L%d)\n", value);
91b18013 10328 else
10329 fprintf (stream, "\trjmp .L%d\n", value);
91b18013 10330}
10331
5431d4c2 10332/* Returns true if SCRATCH are safe to be allocated as a scratch
51fe7379 10333 registers (for a define_peephole2) in the current function. */
10334
a45076aa 10335static bool
5431d4c2 10336avr_hard_regno_scratch_ok (unsigned int regno)
51fe7379 10337{
5431d4c2 10338 /* Interrupt functions can only use registers that have already been saved
10339 by the prologue, even if they would normally be call-clobbered. */
51fe7379 10340
5431d4c2 10341 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10342 && !df_regs_ever_live_p (regno))
10343 return false;
10344
3d4d979d 10345 /* Don't allow hard registers that might be part of the frame pointer.
10346 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10347 and don't care for a frame pointer that spans more than one register. */
10348
10349 if ((!reload_completed || frame_pointer_needed)
10350 && (regno == REG_Y || regno == REG_Y + 1))
10351 {
10352 return false;
10353 }
10354
5431d4c2 10355 return true;
51fe7379 10356}
4af90ac0 10357
afe7695c 10358/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
10359
10360int
3d4d979d 10361avr_hard_regno_rename_ok (unsigned int old_reg,
afe7695c 10362 unsigned int new_reg)
10363{
10364 /* Interrupt functions can only use registers that have already been
10365 saved by the prologue, even if they would normally be
10366 call-clobbered. */
10367
10368 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10369 && !df_regs_ever_live_p (new_reg))
10370 return 0;
10371
3d4d979d 10372 /* Don't allow hard registers that might be part of the frame pointer.
10373 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10374 and don't care for a frame pointer that spans more than one register. */
10375
10376 if ((!reload_completed || frame_pointer_needed)
10377 && (old_reg == REG_Y || old_reg == REG_Y + 1
10378 || new_reg == REG_Y || new_reg == REG_Y + 1))
10379 {
10380 return 0;
10381 }
10382
afe7695c 10383 return 1;
10384}
10385
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  /* A jump of 4+ bytes needs JMP and hence an inverted skip.  */
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* Testing the sign bit: GE/LT degenerate to EQ/NE on that bit.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      /* Bit in I/O space: SBIS/SBIC only reach the low I/O addresses;
         otherwise read the port into the temp reg and use SBRS/SBRC.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      /* Bit in a register: skip-if-bit-set / skip-if-bit-clear.  */

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    } /* switch */

  /* The skip instruction above jumps over the branch emitted here.  */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
9aa7484c 10453
6644435d 10454/* Worker function for TARGET_ASM_CONSTRUCTOR. */
10455
9aa7484c 10456static void
206a5129 10457avr_asm_out_ctor (rtx symbol, int priority)
9aa7484c 10458{
10459 fputs ("\t.global __do_global_ctors\n", asm_out_file);
10460 default_ctor_section_asm_out_constructor (symbol, priority);
10461}
10462
6644435d 10463/* Worker function for TARGET_ASM_DESTRUCTOR. */
10464
9aa7484c 10465static void
206a5129 10466avr_asm_out_dtor (rtx symbol, int priority)
9aa7484c 10467{
10468 fputs ("\t.global __do_global_dtors\n", asm_out_file);
10469 default_dtor_section_asm_out_destructor (symbol, priority);
10470}
10471
6644435d 10472/* Worker function for TARGET_RETURN_IN_MEMORY. */
10473
cfd55026 10474static bool
fb80456a 10475avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
cfd55026 10476{
39cc9599 10477 if (TYPE_MODE (type) == BLKmode)
10478 {
10479 HOST_WIDE_INT size = int_size_in_bytes (type);
10480 return (size == -1 || size > 8);
10481 }
10482 else
10483 return false;
cfd55026 10484}
10485
6d8ed506 10486
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0 */

static unsigned int
avr_case_values_threshold (void)
{
  /* Where the true break-even between a jump table and an if-else tree
     lies depends on factors unknown here (whether 8-bit compares apply,
     the case value range, value reuse, register allocation, ...).
     Empirically, 7 is a good choice.  */

  return 7;
}
10501
4202ef11 10502
10503/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
10504
10505static enum machine_mode
5bd39e93 10506avr_addr_space_address_mode (addr_space_t as)
4202ef11 10507{
9d734fa8 10508 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
4202ef11 10509}
10510
10511
10512/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
10513
10514static enum machine_mode
5bd39e93 10515avr_addr_space_pointer_mode (addr_space_t as)
4202ef11 10516{
9d734fa8 10517 return avr_addr_space_address_mode (as);
4202ef11 10518}
10519
10520
10521/* Helper for following function. */
10522
10523static bool
10524avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
10525{
be6d8823 10526 gcc_assert (REG_P (reg));
4202ef11 10527
10528 if (strict)
10529 {
10530 return REGNO (reg) == REG_Z;
10531 }
10532
10533 /* Avoid combine to propagate hard regs. */
10534
10535 if (can_create_pseudo_p()
10536 && REGNO (reg) < REG_Z)
10537 {
10538 return false;
10539 }
10540
10541 return true;
10542}
10543
10544
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.
   Return true iff X is a legitimate address for MODE in address
   space AS, applying strict register checking if STRICT.  */

static bool
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      /* Ordinary RAM addresses: use the generic legitimizer.  */
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is addressed through Z ([E]LPM): plain register or
         post-increment only.  */

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      /* 24-bit space: a pseudo register, or LO_SUM (hi8-reg, Z).  */

      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional debug dump of the decision (-mlog=legitimate_address_p).  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
10625
10626
10627/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
10628
10629static rtx
10630avr_addr_space_legitimize_address (rtx x, rtx old_x,
10631 enum machine_mode mode, addr_space_t as)
10632{
10633 if (ADDR_SPACE_GENERIC_P (as))
10634 return avr_legitimize_address (x, old_x, mode);
10635
10636 if (avr_log.legitimize_address)
10637 {
10638 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
10639 }
10640
10641 return old_x;
10642}
10643
10644
/* Implement `TARGET_ADDR_SPACE_CONVERT'.
   Expand the conversion of pointer SRC from the address space of
   TYPE_FROM to that of TYPE_TO and return the converted value.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST/PLUS wrappers to reach a possible SYMBOL_REF.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* msb == 0 means flash segment 0: a plain zero-extension
         suffices; otherwise splice the segment byte on top.  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: nothing to do.  */

  return src;
}
10712
10713
10714/* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
10715
10716static bool
ed2541ea 10717avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
10718 addr_space_t superset ATTRIBUTE_UNUSED)
4202ef11 10719{
ed2541ea 10720 /* Allow any kind of pointer mess. */
5bd39e93 10721
10722 return true;
10723}
10724
10725
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  enum machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Flash is read-only: cannot be a destination.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only fixed-size copies are expanded here.  */

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: only the MEMX space uses those.
         Split it into a 16-bit address and a high segment byte.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      /* A flash segment > 0 on a multi-flash device needs RAMPZ set
         up before the copy loop runs.  */

      if (segment
          && avr_current_device->n_flash > 1)
        {
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
     on its own.  Thus, we allocate the pointer registers by hand:
     Z = source address
     X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
     register(s) inside the loop leading to additional move instruction
     to/from stack which could clobber tmp_reg.  Thus, do *not* emit
     load and store as separate insns.  Instead, we perform the copy
     by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* MEMX: the load insn depends on the runtime segment byte,
         which is passed in R23 to an out-of-line loop.  */

      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
10831
10832
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   PLEN == NULL: output instructions; PLEN != NULL: only count words.  */

const char*
avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  enum machine_mode loop_mode = GET_MODE (op[1]);
  /* SBIW only works on r24..r30 register pairs.  */
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      /* Without LPMX the post-increment must be done by hand.  */

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
10918
10919
10920\f
c5be380e 10921/* Helper for __builtin_avr_delay_cycles */
10922
5af5ea69 10923static rtx
10924avr_mem_clobber (void)
10925{
10926 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
10927 MEM_VOLATILE_P (mem) = 1;
10928 return mem;
10929}
10930
/* Expand __builtin_avr_delay_cycles: emit loops and NOPs that burn
   exactly OPERANDS0 (a 32-bit constant) CPU cycles.  Each tier below
   handles the residue left by the previous one; the loop-count
   formulas invert "cycles consumed = (count - 1) * per-iteration + overhead".  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit counter loop: 6 cycles per iteration, 9 cycles overhead.  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit counter loop: 5 cycles per iteration, 7 cycles overhead.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit counter loop: 4 cycles per iteration, 5 cycles overhead.  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit counter loop: 3 cycles per iteration.  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Remaining cycles: RJMP .+0 burns 2 cycles, NOP burns 1.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
10992
384f6361 10993
10994/* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
10995
10996static double_int
10997avr_double_int_push_digit (double_int val, int base,
10998 unsigned HOST_WIDE_INT digit)
10999{
11000 val = 0 == base
d67b7119 11001 ? val.llshift (32, 64)
11002 : val * double_int::from_uhwi (base);
384f6361 11003
d67b7119 11004 return val + double_int::from_uhwi (digit);
384f6361 11005}
11006
11007
11008/* Compute the image of x under f, i.e. perform x --> f(x) */
11009
11010static int
11011avr_map (double_int f, int x)
11012{
d67b7119 11013 return 0xf & f.lrshift (4*x, 64).to_uhwi ();
384f6361 11014}
11015
11016
15b84087 11017/* Return some metrics of map A. */
384f6361 11018
15b84087 11019enum
11020 {
11021 /* Number of fixed points in { 0 ... 7 } */
11022 MAP_FIXED_0_7,
384f6361 11023
15b84087 11024 /* Size of preimage of non-fixed points in { 0 ... 7 } */
11025 MAP_NONFIXED_0_7,
11026
11027 /* Mask representing the fixed points in { 0 ... 7 } */
11028 MAP_MASK_FIXED_0_7,
11029
11030 /* Size of the preimage of { 0 ... 7 } */
11031 MAP_PREIMAGE_0_7,
11032
11033 /* Mask that represents the preimage of { f } */
11034 MAP_MASK_PREIMAGE_F
11035 };
11036
11037static unsigned
11038avr_map_metric (double_int a, int mode)
384f6361 11039{
15b84087 11040 unsigned i, metric = 0;
384f6361 11041
15b84087 11042 for (i = 0; i < 8; i++)
11043 {
11044 unsigned ai = avr_map (a, i);
384f6361 11045
15b84087 11046 if (mode == MAP_FIXED_0_7)
11047 metric += ai == i;
11048 else if (mode == MAP_NONFIXED_0_7)
11049 metric += ai < 8 && ai != i;
11050 else if (mode == MAP_MASK_FIXED_0_7)
11051 metric |= ((unsigned) (ai == i)) << i;
11052 else if (mode == MAP_PREIMAGE_0_7)
11053 metric += ai < 8;
11054 else if (mode == MAP_MASK_PREIMAGE_F)
11055 metric |= ((unsigned) (ai == 0xf)) << i;
11056 else
11057 gcc_unreachable();
11058 }
11059
11060 return metric;
384f6361 11061}
11062
11063
15b84087 11064/* Return true if IVAL has a 0xf in its hexadecimal representation
11065 and false, otherwise. Only nibbles 0..7 are taken into account.
11066 Used as constraint helper for C0f and Cxf. */
384f6361 11067
15b84087 11068bool
11069avr_has_nibble_0xf (rtx ival)
11070{
11071 return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
11072}
384f6361 11073
384f6361 11074
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  double_int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
384f6361 11110
/* Candidate operations G for the decomposition above: identity, the
   seven non-trivial byte rotations, and small left/right shifts, each
   with its inverse map and instruction cost.  MAP fields start out
   zero and are filled in by the decomposition.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
  };
11129
11130
15b84087 11131/* Try to decompose F as F = (F o G^-1) o G as described above.
11132 The result is a struct representing F o G^-1 and G.
11133 If result.cost < 0 then such a decomposition does not exist. */
11134
11135static avr_map_op_t
11136avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
384f6361 11137{
15b84087 11138 int i;
11139 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
11140 avr_map_op_t f_ginv = *g;
a85c0a2e 11141 double_int ginv = double_int::from_uhwi (g->ginv);
384f6361 11142
15b84087 11143 f_ginv.cost = -1;
384f6361 11144
15b84087 11145 /* Step 1: Computing F o G^-1 */
384f6361 11146
15b84087 11147 for (i = 7; i >= 0; i--)
11148 {
11149 int x = avr_map (f, i);
11150
11151 if (x <= 7)
11152 {
11153 x = avr_map (ginv, x);
384f6361 11154
15b84087 11155 /* The bit is no element of the image of G: no avail (cost = -1) */
11156
11157 if (x > 7)
11158 return f_ginv;
11159 }
11160
11161 f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
11162 }
384f6361 11163
15b84087 11164 /* Step 2: Compute the cost of the operations.
11165 The overall cost of doing an operation prior to the insertion is
11166 the cost of the insertion plus the cost of the operation. */
384f6361 11167
15b84087 11168 /* Step 2a: Compute cost of F o G^-1 */
384f6361 11169
15b84087 11170 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
11171 {
11172 /* The mapping consists only of fixed points and can be folded
11173 to AND/OR logic in the remainder. Reasonable cost is 3. */
384f6361 11174
15b84087 11175 f_ginv.cost = 2 + (val_used_p && !val_const_p);
11176 }
11177 else
11178 {
11179 rtx xop[4];
384f6361 11180
15b84087 11181 /* Get the cost of the insn by calling the output worker with some
11182 fake values. Mimic effect of reloading xop[3]: Unused operands
11183 are mapped to 0 and used operands are reloaded to xop[0]. */
384f6361 11184
15b84087 11185 xop[0] = all_regs_rtx[24];
d67b7119 11186 xop[1] = gen_int_mode (f_ginv.map.to_uhwi (), SImode);
15b84087 11187 xop[2] = all_regs_rtx[25];
11188 xop[3] = val_used_p ? xop[0] : const0_rtx;
384f6361 11189
15b84087 11190 avr_out_insert_bits (xop, &f_ginv.cost);
11191
11192 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
11193 }
11194
11195 /* Step 2b: Add cost of G */
384f6361 11196
15b84087 11197 f_ginv.cost += g->cost;
384f6361 11198
15b84087 11199 if (avr_log.builtin)
11200 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
11201
11202 return f_ginv;
384f6361 11203}
11204
11205
15b84087 11206/* Insert bits from XOP[1] into XOP[0] according to MAP.
11207 XOP[0] and XOP[1] don't overlap.
11208 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
11209 If FIXP_P = false: Just move the bit if its position in the destination
11210 is different to its source position. */
384f6361 11211
11212static void
15b84087 11213avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
384f6361 11214{
15b84087 11215 int bit_dest, b;
384f6361 11216
11217 /* T-flag contains this bit of the source, i.e. of XOP[1] */
11218 int t_bit_src = -1;
11219
384f6361 11220 /* We order the operations according to the requested source bit b. */
11221
15b84087 11222 for (b = 0; b < 8; b++)
11223 for (bit_dest = 0; bit_dest < 8; bit_dest++)
384f6361 11224 {
11225 int bit_src = avr_map (map, bit_dest);
11226
11227 if (b != bit_src
15b84087 11228 || bit_src >= 8
11229 /* Same position: No need to copy as requested by FIXP_P. */
11230 || (bit_dest == bit_src && !fixp_p))
384f6361 11231 continue;
11232
11233 if (t_bit_src != bit_src)
11234 {
11235 /* Source bit is not yet in T: Store it to T. */
11236
11237 t_bit_src = bit_src;
11238
15b84087 11239 xop[3] = GEN_INT (bit_src);
11240 avr_asm_len ("bst %T1%T3", xop, plen, 1);
384f6361 11241 }
11242
11243 /* Load destination bit with T. */
11244
15b84087 11245 xop[3] = GEN_INT (bit_dest);
11246 avr_asm_len ("bld %T0%T3", xop, plen, 1);
384f6361 11247 }
11248}
11249
11250
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]: Result
   OP[1]: The mapping composed of nibbles.  If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...  ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value

   Returns "" so the caller can use it directly as an asm template.  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  double_int map = rtx_to_double_int (op[1]);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Reorder operands for avr_move_bits: result, bits, target value.  */
  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    /* Emit the map as an asm comment for readability of -fverbose-asm
       output; only the low 32 bits (SImode) are meaningful.  */
    fprintf (asm_out_file,
             ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
             map.to_uhwi () & GET_MODE_MASK (SImode));

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* If handling the fixed points by single-bit moves costs more
         than 3 instructions extra, merge them with an EOR/ANDI/EOR
         masking sequence instead and skip them below.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
11339
11340
/* IDs for all the AVR builtins.  Each DEF_BUILTIN entry of builtins.def
   contributes its ID enumerator, so the IDs appear in the same order as
   the corresponding entries of avr_bdesc[] below.  */

enum avr_builtin_id
  {

#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
#include "builtins.def"
#undef DEF_BUILTIN

    /* Total number of builtins; used as array size and for bounds checks.  */
    AVR_BUILTIN_COUNT
  };
11352
/* Description of one AVR built-in function: the insn code used to expand
   it, its user-visible name, its argument count, and the FUNCTION_DECL
   registered for it.  Marked GTY(()) so the garbage collector scans the
   FNDECL field.  */

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;
  const char *name;
  int n_args;
  tree fndecl;
};
11360
11361
/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID] */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
    /* FNDECL starts out NULL_TREE; it is filled in by avr_init_builtins.  */

#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE)      \
  { (enum insn_code) ICODE, NAME, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
11375
11376
11377/* Implement `TARGET_BUILTIN_DECL'. */
11378
11379static tree
11380avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
11381{
11382 if (id < AVR_BUILTIN_COUNT)
11383 return avr_bdesc[id].fndecl;
11384
11385 return error_mark_node;
11386}
11387
11388
02d9a2c3 11389static void
11390avr_init_builtin_int24 (void)
11391{
11392 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
11393 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
11394
d0acfa59 11395 lang_hooks.types.register_builtin_type (int24_type, "__int24");
11396 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
02d9a2c3 11397}
11398
58cf0091 11399
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target. */

static void
avr_init_builtins (void)
{
  /* Function-type trees for the builtins.  NOTE(review): most of these
     locals are not referenced in this function body; presumably they are
     consumed by the TYPE argument of the DEF_BUILTIN entries expanded
     from builtins.def below -- confirm there.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* "const void" qualified with the MEMX address space, and a PSImode
     (24-bit) pointer to it.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* Register every builtin from builtins.def and remember its decl in
     avr_bdesc[] so TARGET_BUILTIN_DECL can look it up by ID.  */

#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE)                       \
  gcc_assert (ID < AVR_BUILTIN_COUNT);                                  \
  avr_bdesc[ID].fndecl                                                  \
    = add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
#include "builtins.def"
#undef DEF_BUILTIN

  /* Also register the __int24 / __uint24 scalar types.  */

  avr_init_builtin_int24 ();
}
11461
c5be380e 11462
d0acfa59 11463/* Subroutine of avr_expand_builtin to expand vanilla builtins
11464 with non-void result and 1 ... 3 arguments. */
c5be380e 11465
11466static rtx
d0acfa59 11467avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
c5be380e 11468{
d0acfa59 11469 rtx pat, xop[3];
11470 int n, n_args = call_expr_nargs (exp);
c5be380e 11471 enum machine_mode tmode = insn_data[icode].operand[0].mode;
c5be380e 11472
d0acfa59 11473 gcc_assert (n_args >= 1 && n_args <= 3);
11474
11475 if (target == NULL_RTX
c5be380e 11476 || GET_MODE (target) != tmode
d0acfa59 11477 || !insn_data[icode].operand[0].predicate (target, tmode))
c5be380e 11478 {
11479 target = gen_reg_rtx (tmode);
11480 }
11481
d0acfa59 11482 for (n = 0; n < n_args; n++)
c5be380e 11483 {
d0acfa59 11484 tree arg = CALL_EXPR_ARG (exp, n);
11485 rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11486 enum machine_mode opmode = GET_MODE (op);
11487 enum machine_mode mode = insn_data[icode].operand[n+1].mode;
c5be380e 11488
d0acfa59 11489 if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
11490 {
11491 opmode = HImode;
11492 op = gen_lowpart (HImode, op);
11493 }
c5be380e 11494
d0acfa59 11495 /* In case the insn wants input operands in modes different from
11496 the result, abort. */
c5be380e 11497
d0acfa59 11498 gcc_assert (opmode == mode || opmode == VOIDmode);
c5be380e 11499
d0acfa59 11500 if (!insn_data[icode].operand[n+1].predicate (op, mode))
11501 op = copy_to_mode_reg (mode, op);
c5be380e 11502
d0acfa59 11503 xop[n] = op;
c5be380e 11504 }
11505
d0acfa59 11506 switch (n_args)
c5be380e 11507 {
d0acfa59 11508 case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
11509 case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
11510 case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
c5be380e 11511
d0acfa59 11512 default:
11513 gcc_unreachable();
15b84087 11514 }
11515
d0acfa59 11516 if (pat == NULL_RTX)
11517 return NULL_RTX;
15b84087 11518
11519 emit_insn (pat);
d0acfa59 11520
15b84087 11521 return target;
11522}
11523
c5be380e 11524
58cf0091 11525/* Implement `TARGET_EXPAND_BUILTIN'. */
c5be380e 11526/* Expand an expression EXP that calls a built-in function,
11527 with result going to TARGET if that's convenient
11528 (and in mode MODE if that's convenient).
11529 SUBTARGET may be used as the target for computing one of EXP's operands.
11530 IGNORE is nonzero if the value is to be ignored. */
11531
11532static rtx
11533avr_expand_builtin (tree exp, rtx target,
11534 rtx subtarget ATTRIBUTE_UNUSED,
11535 enum machine_mode mode ATTRIBUTE_UNUSED,
11536 int ignore ATTRIBUTE_UNUSED)
11537{
c5be380e 11538 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
d0acfa59 11539 const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
c5be380e 11540 unsigned int id = DECL_FUNCTION_CODE (fndecl);
58cf0091 11541 const struct avr_builtin_description *d = &avr_bdesc[id];
c5be380e 11542 tree arg0;
11543 rtx op0;
11544
58cf0091 11545 gcc_assert (id < AVR_BUILTIN_COUNT);
11546
c5be380e 11547 switch (id)
11548 {
11549 case AVR_BUILTIN_NOP:
11550 emit_insn (gen_nopv (GEN_INT(1)));
11551 return 0;
11552
c5be380e 11553 case AVR_BUILTIN_DELAY_CYCLES:
11554 {
11555 arg0 = CALL_EXPR_ARG (exp, 0);
1086ba15 11556 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
c5be380e 11557
c19a2f5f 11558 if (!CONST_INT_P (op0))
384f6361 11559 error ("%s expects a compile time integer constant", bname);
c19a2f5f 11560 else
11561 avr_expand_delay_cycles (op0);
c5be380e 11562
d0acfa59 11563 return NULL_RTX;
c5be380e 11564 }
384f6361 11565
15b84087 11566 case AVR_BUILTIN_INSERT_BITS:
384f6361 11567 {
11568 arg0 = CALL_EXPR_ARG (exp, 0);
11569 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11570
11571 if (!CONST_INT_P (op0))
11572 {
11573 error ("%s expects a compile time long integer constant"
11574 " as first argument", bname);
11575 return target;
11576 }
11577 }
c5be380e 11578 }
11579
58cf0091 11580 /* No special treatment needed: vanilla expand. */
d0acfa59 11581
11582 gcc_assert (d->n_args == call_expr_nargs (exp));
11583
11584 if (d->n_args == 0)
c19a2f5f 11585 {
58cf0091 11586 emit_insn ((GEN_FCN (d->icode)) (target));
d0acfa59 11587 return NULL_RTX;
c19a2f5f 11588 }
d0acfa59 11589
11590 return avr_default_expand_builtin (d->icode, exp, target);
c5be380e 11591}
11592
15b84087 11593
/* Implement `TARGET_FOLD_BUILTIN'. */
/* Fold calls to AVR builtins at the tree level.  Returns the folded
   tree, or NULL_TREE if no folding applies.  Folding is only done
   when optimizing.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is a rotate left by 4.  */
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        double_int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin. */

            break;
          }

        map = tree_to_double_int (arg[0]);
        tmap = double_int_to_tree (map_type, map);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused. */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0. */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks. */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi)) mask_ior |= (1 << i);
                    else mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments. */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (tbits ^ tval) & mask ^ tval merges the two operands.  */
            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decompose the map to reduce overall cost. */

        if (avr_log.builtin)
          avr_edump ("\n%?: %X\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Scan all candidate operations G for the cheapest decomposition.  */

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument. */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift. */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G. */

        tmap = double_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
11749
15b84087 11750\f
1602e4b0 11751
/* Initialize the GCC target structure. */

/* Assembler output: integer directives and file framing.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function values and argument passing.  */

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

/* Section handling.  */

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Cost model and machine-dependent reorg.  */

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Built-in functions (registered/expanded/folded by the routines above).  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

/* Named address spaces (e.g. the MEMX space used above).  */

#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-avr.h"