]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/avr/avr.c
2015-06-17 Andrew MacLeod <amacleod@redhat.com>
[thirdparty/gcc.git] / gcc / config / avr / avr.c
CommitLineData
a28e4651 1/* Subroutines for insn-output.c for ATMEL AVR micro controllers
d353bf18 2 Copyright (C) 1998-2015 Free Software Foundation, Inc.
947dd720 3 Contributed by Denis Chertykov (chertykov@gmail.com)
a28e4651 4
187b36cf 5 This file is part of GCC.
a28e4651 6
187b36cf 7 GCC is free software; you can redistribute it and/or modify
a28e4651 8 it under the terms of the GNU General Public License as published by
038d1e19 9 the Free Software Foundation; either version 3, or (at your option)
a28e4651 10 any later version.
11
187b36cf 12 GCC is distributed in the hope that it will be useful,
a28e4651 13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
038d1e19 18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
a28e4651 20
21#include "config.h"
3337ec92 22#include "system.h"
805e22b2 23#include "coretypes.h"
24#include "tm.h"
a28e4651 25#include "rtl.h"
26#include "regs.h"
27#include "hard-reg-set.h"
a28e4651 28#include "insn-config.h"
29#include "conditions.h"
a28e4651 30#include "insn-attr.h"
c5be380e 31#include "insn-codes.h"
a28e4651 32#include "flags.h"
33#include "reload.h"
b20a8bb4 34#include "alias.h"
35#include "symtab.h"
a28e4651 36#include "tree.h"
b20a8bb4 37#include "fold-const.h"
c0d7a1d0 38#include "varasm.h"
9ed99284 39#include "print-tree.h"
40#include "calls.h"
41#include "stor-layout.h"
42#include "stringpool.h"
9bfdb494 43#include "output.h"
d53441c8 44#include "function.h"
d53441c8 45#include "expmed.h"
46#include "dojump.h"
47#include "explow.h"
48#include "emit-rtl.h"
49#include "stmt.h"
a28e4651 50#include "expr.h"
4202ef11 51#include "c-family/c-common.h"
0b205f4c 52#include "diagnostic-core.h"
a28e4651 53#include "obstack.h"
a28e4651 54#include "recog.h"
c5be380e 55#include "optabs.h"
c5be380e 56#include "langhooks.h"
a28e4651 57#include "tm_p.h"
a767736d 58#include "target.h"
59#include "target-def.h"
9c12cc94 60#include "params.h"
94ea8568 61#include "dominance.h"
62#include "cfg.h"
63#include "cfgrtl.h"
64#include "cfganal.h"
65#include "lcm.h"
66#include "cfgbuild.h"
67#include "cfgcleanup.h"
68#include "predict.h"
69#include "basic-block.h"
a4c6e6a2 70#include "df.h"
f7715905 71#include "builtins.h"
40114021 72#include "context.h"
73#include "tree-pass.h"
a28e4651 74
1cb39658 75/* Maximal allowed offset for an address in the LD command */
76#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
a28e4651 77
53026b2c 78/* Return true if STR starts with PREFIX and false, otherwise. */
79#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
80
9d734fa8 81/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
82 address space where data is to be located.
0dff9558 83 As the only non-generic address spaces are all located in flash,
9d734fa8 84 this can be used to test if data shall go into some .progmem* section.
85 This must be the rightmost field of machine dependent section flags. */
5bd39e93 86#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
bf412f98 87
ed2541ea 88/* Similar 4-bit region for SYMBOL_REF_FLAGS. */
89#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
90
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Note: The expansion now uses the macro parameter SYM; the previous
   version hard-coded the identifier `sym' and only worked when the
   actual argument happened to be a variable of that exact name.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
103
b4e6d2e2 104#define TINY_ADIW(REG1, REG2, I) \
1a96adb9 105 "subi " #REG1 ",lo8(-(" #I "))" CR_TAB \
106 "sbci " #REG2 ",hi8(-(" #I "))"
b4e6d2e2 107
108#define TINY_SBIW(REG1, REG2, I) \
1a96adb9 109 "subi " #REG1 ",lo8((" #I "))" CR_TAB \
110 "sbci " #REG2 ",hi8((" #I "))"
b4e6d2e2 111
112#define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
113#define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
114
9d734fa8 115/* Known address spaces. The order must be the same as in the respective
116 enum from avr.h (or designated initialized must be used). */
0558f5da 117const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
118{
119 { ADDR_SPACE_RAM, 0, 2, "", 0, NULL },
120 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0, ".progmem.data" },
121 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
122 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
123 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
124 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
125 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
126 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0, ".progmemx.data" },
9d734fa8 127};
128
9d734fa8 129
72851b68 130/* Holding RAM addresses of some SFRs used by the compiler and that
131 are unique over all devices in an architecture like 'avr4'. */
0dff9558 132
72851b68 133typedef struct
134{
0dff9558 135 /* SREG: The processor status */
72851b68 136 int sreg;
137
0b6cf66f 138 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
139 int ccp;
140 int rampd;
141 int rampx;
142 int rampy;
143
0dff9558 144 /* RAMPZ: The high byte of 24-bit address used with ELPM */
72851b68 145 int rampz;
146
147 /* SP: The stack pointer and its low and high byte */
148 int sp_l;
149 int sp_h;
150} avr_addr_t;
151
152static avr_addr_t avr_addr;
153
a45076aa 154
155/* Prototypes for local helper functions. */
156
375204de 157static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
158static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
159static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
160static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
161static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
162static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);
163
164static int get_sequence_length (rtx_insn *insns);
206a5129 165static int sequent_regs_live (void);
166static const char *ptrreg_to_str (int);
167static const char *cond_string (enum rtx_code);
3754d046 168static int avr_num_arg_regs (machine_mode, const_tree);
169static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
a49907f9 170 int, bool);
171static void output_reload_in_const (rtx*, rtx, int*, bool);
df3d6232 172static struct machine_function * avr_init_machine_status (void);
a45076aa 173
174
175/* Prototypes for hook implementors if needed before their implementation. */
176
0dff9558 177static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
a45076aa 178
b1665fa2 179
20c71901 180/* Allocate registers from r25 to r8 for parameters for function calls. */
a28e4651 181#define FIRST_CUM_REG 26
182
b4e6d2e2 183/* Last call saved register */
184#define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
185
4202ef11 186/* Implicit target register of LPM instruction (R0) */
2d86450c 187extern GTY(()) rtx lpm_reg_rtx;
188rtx lpm_reg_rtx;
4202ef11 189
190/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
2d86450c 191extern GTY(()) rtx lpm_addr_reg_rtx;
192rtx lpm_addr_reg_rtx;
4202ef11 193
2d86450c 194/* Temporary register RTX (reg:QI TMP_REGNO) */
195extern GTY(()) rtx tmp_reg_rtx;
196rtx tmp_reg_rtx;
a28e4651 197
2d86450c 198/* Zeroed register RTX (reg:QI ZERO_REGNO) */
199extern GTY(()) rtx zero_reg_rtx;
200rtx zero_reg_rtx;
201
202/* RTXs for all general purpose registers as QImode */
203extern GTY(()) rtx all_regs_rtx[32];
204rtx all_regs_rtx[32];
e511e253 205
0b6cf66f 206/* SREG, the processor status */
207extern GTY(()) rtx sreg_rtx;
208rtx sreg_rtx;
209
210/* RAMP* special function registers */
211extern GTY(()) rtx rampd_rtx;
212extern GTY(()) rtx rampx_rtx;
213extern GTY(()) rtx rampy_rtx;
2d86450c 214extern GTY(()) rtx rampz_rtx;
0b6cf66f 215rtx rampd_rtx;
216rtx rampx_rtx;
217rtx rampy_rtx;
2d86450c 218rtx rampz_rtx;
5bd39e93 219
220/* RTX containing the strings "" and "e", respectively */
221static GTY(()) rtx xstring_empty;
222static GTY(()) rtx xstring_e;
223
b1eb5c83 224/* Current architecture. */
f0aa7fe2 225const avr_arch_t *avr_arch;
e511e253 226
c3f18f18 227/* Section to put switch tables in. */
228static GTY(()) section *progmem_swtable_section;
a28e4651 229
9d734fa8 230/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
0558f5da 231 or to address space __flash* or __memx. Only used as singletons inside
232 avr_asm_select_section, but it must not be local there because of GTY. */
233static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
5bd39e93 234
83921eda 235/* Condition for insns/expanders from avr-dimode.md. */
236bool avr_have_dimode = true;
237
7c2339f8 238/* To track if code will use .bss and/or .data. */
239bool avr_need_clear_bss_p = false;
240bool avr_need_copy_data_p = false;
241
a767736d 242\f
73263209 243/* Transform UP into lowercase and write the result to LO.
244 You must provide enough space for LO. Return LO. */
245
246static char*
247avr_tolower (char *lo, const char *up)
248{
249 char *lo0 = lo;
250
251 for (; *up; up++, lo++)
252 *lo = TOLOWER (*up);
253
254 *lo = '\0';
255
256 return lo0;
257}
258
bf412f98 259
/* Custom function to count the number of set bits in VAL.  */

static inline int
avr_popcount (unsigned int val)
{
  int pop;

  /* Kernighan's trick: each iteration clears the lowest set bit.  */
  for (pop = 0; val != 0; ++pop)
    val &= val - 1;

  return pop;
}
275
276
2f2d376f 277/* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
278 Return true if the least significant N_BYTES bytes of XVAL all have a
279 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
280 of integers which contains an integer N iff bit N of POP_MASK is set. */
0dff9558 281
6be828c1 282bool
283avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
284{
285 int i;
286
3754d046 287 machine_mode mode = GET_MODE (xval);
2f2d376f 288
289 if (VOIDmode == mode)
290 mode = SImode;
291
6be828c1 292 for (i = 0; i < n_bytes; i++)
293 {
2f2d376f 294 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
6be828c1 295 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
296
297 if (0 == (pop_mask & (1 << avr_popcount (val8))))
298 return false;
299 }
300
301 return true;
302}
303
017c5b98 304
305/* Access some RTX as INT_MODE. If X is a CONST_FIXED we can get
306 the bit representation of X by "casting" it to CONST_INT. */
307
308rtx
309avr_to_int_mode (rtx x)
310{
3754d046 311 machine_mode mode = GET_MODE (x);
017c5b98 312
313 return VOIDmode == mode
314 ? x
315 : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
316}
317
318
40114021 319static const pass_data avr_pass_data_recompute_notes =
320{
321 RTL_PASS, // type
322 "", // name (will be patched)
323 OPTGROUP_NONE, // optinfo_flags
324 TV_DF_SCAN, // tv_id
325 0, // properties_required
326 0, // properties_provided
327 0, // properties_destroyed
328 0, // todo_flags_start
329 TODO_df_finish | TODO_df_verify // todo_flags_finish
330};
331
332
333class avr_pass_recompute_notes : public rtl_opt_pass
334{
335public:
336 avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
337 : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
338 {
339 this->name = name;
340 }
341
342 virtual unsigned int execute (function*)
343 {
344 df_note_add_problem ();
345 df_analyze ();
346
347 return 0;
348 }
349}; // avr_pass_recompute_notes
350
351
352static void
353avr_register_passes (void)
354{
355 /* This avr-specific pass (re)computes insn notes, in particular REG_DEAD
356 notes which are used by `avr.c::reg_unused_after' and branch offset
357 computations. These notes must be correct, i.e. there must be no
358 dangling REG_DEAD notes; otherwise wrong code might result, cf. PR64331.
359
360 DF needs (correct) CFG, hence right before free_cfg is the last
361 opportunity to rectify notes. */
362
363 register_pass (new avr_pass_recompute_notes (g, "avr-notes-free-cfg"),
364 PASS_POS_INSERT_BEFORE, "*free_cfg", 1);
365}
366
367
f0aa7fe2 368/* Set `avr_arch' as specified by `-mmcu='.
369 Return true on success. */
370
371static bool
372avr_set_core_architecture (void)
373{
374 /* Search for mcu core architecture. */
375
376 if (!avr_mmcu)
377 avr_mmcu = AVR_MMCU_DEFAULT;
378
379 avr_arch = &avr_arch_types[0];
380
381 for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
382 {
383 if (NULL == mcu->name)
384 {
385 /* Reached the end of `avr_mcu_types'. This should actually never
386 happen as options are provided by device-specs. It could be a
387 typo in a device-specs or calling the compiler proper directly
388 with -mmcu=<device>. */
389
390 error ("unknown core architecture %qs specified with %qs",
391 avr_mmcu, "-mmcu=");
392 avr_inform_core_architectures ();
393 break;
394 }
395 else if (0 == strcmp (mcu->name, avr_mmcu)
396 // Is this a proper architecture ?
397 && NULL == mcu->macro)
398 {
399 avr_arch = &avr_arch_types[mcu->arch_id];
400 if (avr_n_flash < 0)
401 avr_n_flash = mcu->n_flash;
402
403 return true;
404 }
405 }
406
407 return false;
408}
409
410
017c5b98 411/* Implement `TARGET_OPTION_OVERRIDE'. */
412
4c834714 413static void
414avr_option_override (void)
a28e4651 415{
dbf10c97 416 /* Disable -fdelete-null-pointer-checks option for AVR target.
417 This option compiler assumes that dereferencing of a null pointer
418 would halt the program. For AVR this assumption is not true and
419 programs can safely dereference null pointers. Changes made by this
420 option may not work properly for AVR. So disable this option. */
421
6e9e3dbe 422 flag_delete_null_pointer_checks = 0;
423
f9efb148 424 /* caller-save.c looks for call-clobbered hard registers that are assigned
425 to pseudos that cross calls and tries so save-restore them around calls
426 in order to reduce the number of stack slots needed.
427
0dff9558 428 This might lead to situations where reload is no more able to cope
f9efb148 429 with the challenge of AVR's very few address registers and fails to
430 perform the requested spills. */
0dff9558 431
f9efb148 432 if (avr_strict_X)
433 flag_caller_saves = 0;
434
344ae919 435 /* Unwind tables currently require a frame pointer for correctness,
436 see toplev.c:process_options(). */
437
438 if ((flag_unwind_tables
439 || flag_non_call_exceptions
440 || flag_asynchronous_unwind_tables)
441 && !ACCUMULATE_OUTGOING_ARGS)
442 {
443 flag_omit_frame_pointer = 0;
444 }
344ae919 445
13fde1a0 446 if (flag_pic == 1)
447 warning (OPT_fpic, "-fpic is not supported");
448 if (flag_pic == 2)
449 warning (OPT_fPIC, "-fPIC is not supported");
450 if (flag_pie == 1)
451 warning (OPT_fpie, "-fpie is not supported");
452 if (flag_pie == 2)
453 warning (OPT_fPIE, "-fPIE is not supported");
454
f0aa7fe2 455 if (!avr_set_core_architecture())
456 return;
0dff9558 457
458 /* RAM addresses of some SFRs common to all devices in respective arch. */
72851b68 459
460 /* SREG: Status Register containing flags like I (global IRQ) */
f0aa7fe2 461 avr_addr.sreg = 0x3F + avr_arch->sfr_offset;
72851b68 462
463 /* RAMPZ: Address' high part when loading via ELPM */
f0aa7fe2 464 avr_addr.rampz = 0x3B + avr_arch->sfr_offset;
72851b68 465
f0aa7fe2 466 avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
467 avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
468 avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
469 avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;
0b6cf66f 470
72851b68 471 /* SP: Stack Pointer (SP_H:SP_L) */
f0aa7fe2 472 avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
72851b68 473 avr_addr.sp_h = avr_addr.sp_l + 1;
21167a30 474
df3d6232 475 init_machine_status = avr_init_machine_status;
47fe598e 476
477 avr_log_set_avr_log();
40114021 478
479 /* Register some avr-specific pass(es). There is no canonical place for
480 pass registration. This function is convenient. */
481
482 avr_register_passes ();
a28e4651 483}
484
df3d6232 485/* Function to set up the backend function structure. */
486
487static struct machine_function *
488avr_init_machine_status (void)
489{
25a27413 490 return ggc_cleared_alloc<machine_function> ();
df3d6232 491}
492
5bd39e93 493
494/* Implement `INIT_EXPANDERS'. */
495/* The function works like a singleton. */
496
497void
498avr_init_expanders (void)
499{
500 int regno;
501
5bd39e93 502 for (regno = 0; regno < 32; regno ++)
503 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
504
505 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
b4e6d2e2 506 tmp_reg_rtx = all_regs_rtx[AVR_TMP_REGNO];
507 zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];
5bd39e93 508
509 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
510
0b6cf66f 511 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
512 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
513 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
514 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
72851b68 515 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
5bd39e93 516
517 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
518 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
b4e6d2e2 519
520 /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
521 to be present */
522 if (AVR_TINY)
1a96adb9 523 avr_have_dimode = false;
5bd39e93 524}
525
526
0dff9558 527/* Implement `REGNO_REG_CLASS'. */
20c71901 528/* Return register class for register R. */
a28e4651 529
530enum reg_class
206a5129 531avr_regno_reg_class (int r)
a28e4651 532{
7104fbe4 533 static const enum reg_class reg_class_tab[] =
534 {
535 R0_REG,
536 /* r1 - r15 */
537 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
538 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
539 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
540 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
541 /* r16 - r23 */
542 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
543 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
544 /* r24, r25 */
545 ADDW_REGS, ADDW_REGS,
546 /* X: r26, 27 */
547 POINTER_X_REGS, POINTER_X_REGS,
548 /* Y: r28, r29 */
549 POINTER_Y_REGS, POINTER_Y_REGS,
550 /* Z: r30, r31 */
551 POINTER_Z_REGS, POINTER_Z_REGS,
552 /* SP: SPL, SPH */
553 STACK_REG, STACK_REG
554 };
555
a28e4651 556 if (r <= 33)
557 return reg_class_tab[r];
0dff9558 558
a28e4651 559 return ALL_REGS;
560}
561
02d9a2c3 562
017c5b98 563/* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'. */
564
02d9a2c3 565static bool
3754d046 566avr_scalar_mode_supported_p (machine_mode mode)
02d9a2c3 567{
017c5b98 568 if (ALL_FIXED_POINT_MODE_P (mode))
569 return true;
570
02d9a2c3 571 if (PSImode == mode)
572 return true;
573
574 return default_scalar_mode_supported_p (mode);
575}
576
577
0dff9558 578/* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise. */
4202ef11 579
580static bool
590da9f2 581avr_decl_flash_p (tree decl)
4202ef11 582{
5bd39e93 583 if (TREE_CODE (decl) != VAR_DECL
584 || TREE_TYPE (decl) == error_mark_node)
585 {
586 return false;
587 }
4202ef11 588
589 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
590}
591
592
0dff9558 593/* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
5bd39e93 594 address space and FALSE, otherwise. */
0dff9558 595
5bd39e93 596static bool
590da9f2 597avr_decl_memx_p (tree decl)
5bd39e93 598{
599 if (TREE_CODE (decl) != VAR_DECL
600 || TREE_TYPE (decl) == error_mark_node)
601 {
602 return false;
603 }
604
590da9f2 605 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
5bd39e93 606}
607
608
0dff9558 609/* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise. */
4202ef11 610
611bool
590da9f2 612avr_mem_flash_p (rtx x)
4202ef11 613{
614 return (MEM_P (x)
615 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
616}
617
618
0dff9558 619/* Return TRUE if X is a MEM rtx located in the 24-bit flash
5bd39e93 620 address space and FALSE, otherwise. */
621
622bool
590da9f2 623avr_mem_memx_p (rtx x)
5bd39e93 624{
625 return (MEM_P (x)
590da9f2 626 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
5bd39e93 627}
628
629
32969c63 630/* A helper for the subsequent function attribute used to dig for
631 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
632
633static inline int
634avr_lookup_function_attribute1 (const_tree func, const char *name)
635{
636 if (FUNCTION_DECL == TREE_CODE (func))
637 {
638 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
639 {
640 return true;
641 }
0dff9558 642
32969c63 643 func = TREE_TYPE (func);
644 }
645
646 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
647 || TREE_CODE (func) == METHOD_TYPE);
0dff9558 648
32969c63 649 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
650}
651
e3e08e7f 652/* Return nonzero if FUNC is a naked function. */
a28e4651 653
654static int
206a5129 655avr_naked_function_p (tree func)
a28e4651 656{
32969c63 657 return avr_lookup_function_attribute1 (func, "naked");
a28e4651 658}
659
660/* Return nonzero if FUNC is an interrupt function as specified
661 by the "interrupt" attribute. */
662
663static int
ea679361 664avr_interrupt_function_p (tree func)
a28e4651 665{
32969c63 666 return avr_lookup_function_attribute1 (func, "interrupt");
a28e4651 667}
668
1cb39658 669/* Return nonzero if FUNC is a signal function as specified
a28e4651 670 by the "signal" attribute. */
671
672static int
ea679361 673avr_signal_function_p (tree func)
a28e4651 674{
32969c63 675 return avr_lookup_function_attribute1 (func, "signal");
a28e4651 676}
677
b0e2b973 678/* Return nonzero if FUNC is an OS_task function. */
ba8273a8 679
680static int
681avr_OS_task_function_p (tree func)
682{
32969c63 683 return avr_lookup_function_attribute1 (func, "OS_task");
ba8273a8 684}
685
b0e2b973 686/* Return nonzero if FUNC is an OS_main function. */
a6e595be 687
688static int
689avr_OS_main_function_p (tree func)
690{
32969c63 691 return avr_lookup_function_attribute1 (func, "OS_main");
a6e595be 692}
693
a12b9b80 694
ea679361 695/* Implement `TARGET_SET_CURRENT_FUNCTION'. */
696/* Sanity cheching for above function attributes. */
697
698static void
699avr_set_current_function (tree decl)
700{
701 location_t loc;
702 const char *isr;
703
704 if (decl == NULL_TREE
705 || current_function_decl == NULL_TREE
706 || current_function_decl == error_mark_node
23184a0e 707 || ! cfun->machine
ea679361 708 || cfun->machine->attributes_checked_p)
709 return;
710
711 loc = DECL_SOURCE_LOCATION (decl);
712
713 cfun->machine->is_naked = avr_naked_function_p (decl);
714 cfun->machine->is_signal = avr_signal_function_p (decl);
715 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
716 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
717 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
718
719 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
720
721 /* Too much attributes make no sense as they request conflicting features. */
722
723 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
724 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
725 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
726 " exclusive", "OS_task", "OS_main", isr);
727
728 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
729
730 if (cfun->machine->is_naked
731 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
732 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
733 " no effect on %qs function", "OS_task", "OS_main", "naked");
734
735 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
736 {
737 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
738 tree ret = TREE_TYPE (TREE_TYPE (decl));
248c10bf 739 const char *name;
740
741 name = DECL_ASSEMBLER_NAME_SET_P (decl)
a11b2cf9 742 ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
248c10bf 743 : IDENTIFIER_POINTER (DECL_NAME (decl));
0dff9558 744
a11b2cf9 745 /* Skip a leading '*' that might still prefix the assembler name,
746 e.g. in non-LTO runs. */
747
748 name = default_strip_name_encoding (name);
749
ea679361 750 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
751 using this when it switched from SIGNAL and INTERRUPT to ISR. */
752
753 if (cfun->machine->is_interrupt)
754 cfun->machine->is_signal = 0;
755
756 /* Interrupt handlers must be void __vector (void) functions. */
757
758 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
759 error_at (loc, "%qs function cannot have arguments", isr);
760
761 if (TREE_CODE (ret) != VOID_TYPE)
762 error_at (loc, "%qs function cannot return a value", isr);
763
764 /* If the function has the 'signal' or 'interrupt' attribute, ensure
765 that the name of the function is "__vector_NN" so as to catch
766 when the user misspells the vector name. */
767
768 if (!STR_PREFIX_P (name, "__vector"))
769 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
770 name, isr);
771 }
772
0dff9558 773 /* Don't print the above diagnostics more than once. */
774
ea679361 775 cfun->machine->attributes_checked_p = 1;
776}
777
778
a12b9b80 779/* Implement `ACCUMULATE_OUTGOING_ARGS'. */
c529cd37 780
781int
a12b9b80 782avr_accumulate_outgoing_args (void)
783{
784 if (!cfun)
785 return TARGET_ACCUMULATE_OUTGOING_ARGS;
786
787 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
788 what offset is correct. In some cases it is relative to
789 virtual_outgoing_args_rtx and in others it is relative to
790 virtual_stack_vars_rtx. For example code see
791 gcc.c-torture/execute/built-in-setjmp.c
792 gcc.c-torture/execute/builtins/sprintf-chk.c */
0dff9558 793
a12b9b80 794 return (TARGET_ACCUMULATE_OUTGOING_ARGS
795 && !(cfun->calls_setjmp
796 || cfun->has_nonlocal_label));
797}
798
799
800/* Report contribution of accumulated outgoing arguments to stack size. */
801
802static inline int
803avr_outgoing_args_size (void)
804{
805 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
806}
807
808
809/* Implement `STARTING_FRAME_OFFSET'. */
810/* This is the offset from the frame pointer register to the first stack slot
811 that contains a variable living in the frame. */
812
813int
814avr_starting_frame_offset (void)
815{
816 return 1 + avr_outgoing_args_size ();
817}
818
819
f0973368 820/* Return the number of hard registers to push/pop in the prologue/epilogue
821 of the current function, and optionally store these registers in SET. */
822
823static int
206a5129 824avr_regs_to_save (HARD_REG_SET *set)
f0973368 825{
826 int reg, count;
ea679361 827 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
f2323747 828
f0973368 829 if (set)
830 CLEAR_HARD_REG_SET (*set);
831 count = 0;
c3bcd5a9 832
0dff9558 833 /* No need to save any registers if the function never returns or
b0e2b973 834 has the "OS_task" or "OS_main" attribute. */
0dff9558 835
ba8273a8 836 if (TREE_THIS_VOLATILE (current_function_decl)
a6e595be 837 || cfun->machine->is_OS_task
838 || cfun->machine->is_OS_main)
c3bcd5a9 839 return 0;
840
f0973368 841 for (reg = 0; reg < 32; reg++)
842 {
843 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
a12b9b80 844 any global register variables. */
0dff9558 845
f0973368 846 if (fixed_regs[reg])
a12b9b80 847 continue;
f0973368 848
d5bf7b64 849 if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
a12b9b80 850 || (df_regs_ever_live_p (reg)
851 && (int_or_sig_p || !call_used_regs[reg])
852 /* Don't record frame pointer registers here. They are treated
853 indivitually in prologue. */
854 && !(frame_pointer_needed
855 && (reg == REG_Y || reg == (REG_Y+1)))))
856 {
857 if (set)
858 SET_HARD_REG_BIT (*set, reg);
859 count++;
860 }
f0973368 861 }
862 return count;
863}
864
a28e3283 865
866/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
867
868static bool
869avr_allocate_stack_slots_for_args (void)
870{
871 return !cfun->machine->is_naked;
872}
873
874
ebdd0478 875/* Return true if register FROM can be eliminated via register TO. */
876
a45076aa 877static bool
50f39f78 878avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
ebdd0478 879{
1cffa4f0 880 return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
881 || !frame_pointer_needed);
ebdd0478 882}
883
08c6cbd2 884
0dff9558 885/* Implement `TARGET_WARN_FUNC_RETURN'. */
08c6cbd2 886
887static bool
888avr_warn_func_return (tree decl)
889{
890 /* Naked functions are implemented entirely in assembly, including the
891 return sequence, so suppress warnings about this. */
0dff9558 892
08c6cbd2 893 return !avr_naked_function_p (decl);
894}
895
20c71901 896/* Compute offset between arg_pointer and frame_pointer. */
a28e4651 897
898int
9f42c829 899avr_initial_elimination_offset (int from, int to)
a28e4651 900{
9f42c829 901 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
902 return 0;
903 else
1cb39658 904 {
9f42c829 905 int offset = frame_pointer_needed ? 2 : 0;
906 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
0dff9558 907
f0973368 908 offset += avr_regs_to_save (NULL);
9f42c829 909 return (get_frame_size () + avr_outgoing_args_size()
910 + avr_pc_size + 1 + offset);
a28e4651 911 }
a28e4651 912}
913
017c5b98 914
915/* Helper for the function below. */
916
917static void
3754d046 918avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
017c5b98 919{
920 *node = make_node (FIXED_POINT_TYPE);
921 TYPE_SATURATING (*node) = sat_p;
922 TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
923 TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
924 TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
925 TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
926 TYPE_ALIGN (*node) = 8;
927 SET_TYPE_MODE (*node, mode);
928
929 layout_type (*node);
930}
931
932
933/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'. */
934
935static tree
936avr_build_builtin_va_list (void)
937{
938 /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
939 This is more appropriate for the 8-bit machine AVR than 128-bit modes.
940 The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
941 which is auto-generated by genmodes, but the compiler assigns [U]DAmode
942 to the long long accum modes instead of the desired [U]TAmode.
943
944 Fix this now, right after node setup in tree.c:build_common_tree_nodes().
945 This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
946 which built-in defines macros like __ULLACCUM_FBIT__ that are used by
947 libgcc to detect IBIT and FBIT. */
948
949 avr_adjust_type_node (&ta_type_node, TAmode, 0);
950 avr_adjust_type_node (&uta_type_node, UTAmode, 0);
951 avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
952 avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);
953
954 unsigned_long_long_accum_type_node = uta_type_node;
955 long_long_accum_type_node = ta_type_node;
956 sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
957 sat_long_long_accum_type_node = sat_ta_type_node;
958
959 /* Dispatch to the default handler. */
0dff9558 960
017c5b98 961 return std_build_builtin_va_list ();
962}
963
964
965/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
0dff9558 966/* Actual start of frame is virtual_stack_vars_rtx this is offset from
b1665fa2 967 frame pointer by +STARTING_FRAME_OFFSET.
968 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
969 avoids creating add/sub of offset in nonlocal goto and setjmp. */
970
a45076aa 971static rtx
972avr_builtin_setjmp_frame_value (void)
b1665fa2 973{
017c5b98 974 rtx xval = gen_reg_rtx (Pmode);
975 emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
976 gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
977 return xval;
b1665fa2 978}
979
017c5b98 980
0dff9558 981/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
34413b10 982 This is return address of function. */
0dff9558 983
984rtx
36f949a2 985avr_return_addr_rtx (int count, rtx tem)
34413b10 986{
987 rtx r;
0dff9558 988
b0e2b973 989 /* Can only return this function's return address. Others not supported. */
34413b10 990 if (count)
991 return NULL;
992
993 if (AVR_3_BYTE_PC)
994 {
995 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
0dff9558 996 warning (0, "%<builtin_return_address%> contains only 2 bytes"
997 " of address");
34413b10 998 }
999 else
1000 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
1001
1002 r = gen_rtx_PLUS (Pmode, tem, r);
1003 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
1004 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
1005 return r;
1006}
1007
bf522d86 1008/* Return 1 if the function epilogue is just a single "ret". */
1009
1010int
206a5129 1011avr_simple_epilogue (void)
bf522d86 1012{
1013 return (! frame_pointer_needed
a12b9b80 1014 && get_frame_size () == 0
1015 && avr_outgoing_args_size() == 0
1016 && avr_regs_to_save (NULL) == 0
ea679361 1017 && ! cfun->machine->is_interrupt
1018 && ! cfun->machine->is_signal
1019 && ! cfun->machine->is_naked
a12b9b80 1020 && ! TREE_THIS_VOLATILE (current_function_decl));
bf522d86 1021}
1022
20c71901 1023/* This function checks sequence of live registers. */
a28e4651 1024
1025static int
206a5129 1026sequent_regs_live (void)
a28e4651 1027{
1028 int reg;
0dff9558 1029 int live_seq = 0;
1030 int cur_seq = 0;
a28e4651 1031
b4e6d2e2 1032 for (reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
a28e4651 1033 {
3d83581f 1034 if (fixed_regs[reg])
1035 {
1036 /* Don't recognize sequences that contain global register
1037 variables. */
0dff9558 1038
3d83581f 1039 if (live_seq != 0)
1040 return 0;
1041 else
1042 continue;
1043 }
0dff9558 1044
a28e4651 1045 if (!call_used_regs[reg])
0dff9558 1046 {
1047 if (df_regs_ever_live_p (reg))
1048 {
1049 ++live_seq;
1050 ++cur_seq;
1051 }
1052 else
1053 cur_seq = 0;
1054 }
a28e4651 1055 }
1056
1057 if (!frame_pointer_needed)
1058 {
3072d30e 1059 if (df_regs_ever_live_p (REG_Y))
0dff9558 1060 {
1061 ++live_seq;
1062 ++cur_seq;
1063 }
a28e4651 1064 else
0dff9558 1065 cur_seq = 0;
a28e4651 1066
3072d30e 1067 if (df_regs_ever_live_p (REG_Y+1))
0dff9558 1068 {
1069 ++live_seq;
1070 ++cur_seq;
1071 }
a28e4651 1072 else
0dff9558 1073 cur_seq = 0;
a28e4651 1074 }
1075 else
1076 {
1077 cur_seq += 2;
1078 live_seq += 2;
1079 }
1080 return (cur_seq == live_seq) ? live_seq : 0;
1081}
1082
58f62c92 1083/* Obtain the length sequence of insns. */
1084
1085int
375204de 1086get_sequence_length (rtx_insn *insns)
58f62c92 1087{
375204de 1088 rtx_insn *insn;
58f62c92 1089 int length;
0dff9558 1090
58f62c92 1091 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
1092 length += get_attr_length (insn);
0dff9558 1093
58f62c92 1094 return length;
1095}
1096
0dff9558 1097
1098/* Implement `INCOMING_RETURN_ADDR_RTX'. */
e484266f 1099
1100rtx
1101avr_incoming_return_addr_rtx (void)
1102{
1103 /* The return address is at the top of the stack. Note that the push
1104 was via post-decrement, which means the actual address is off by one. */
29c05e22 1105 return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
e484266f 1106}
1107
1108/* Helper for expand_prologue. Emit a push of a byte register. */
1109
1110static void
1111emit_push_byte (unsigned regno, bool frame_related_p)
1112{
375204de 1113 rtx mem, reg;
1114 rtx_insn *insn;
e484266f 1115
1116 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
1117 mem = gen_frame_mem (QImode, mem);
1118 reg = gen_rtx_REG (QImode, regno);
1119
d1f9b275 1120 insn = emit_insn (gen_rtx_SET (mem, reg));
e484266f 1121 if (frame_related_p)
1122 RTX_FRAME_RELATED_P (insn) = 1;
1123
1124 cfun->machine->stack_usage++;
1125}
1126
0b6cf66f 1127
1128/* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
1129 SFR is a MEM representing the memory location of the SFR.
1130 If CLR_P then clear the SFR after the push using zero_reg. */
1131
1132static void
1133emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
1134{
375204de 1135 rtx_insn *insn;
0dff9558 1136
0b6cf66f 1137 gcc_assert (MEM_P (sfr));
1138
1139 /* IN __tmp_reg__, IO(SFR) */
1140 insn = emit_move_insn (tmp_reg_rtx, sfr);
1141 if (frame_related_p)
1142 RTX_FRAME_RELATED_P (insn) = 1;
0dff9558 1143
0b6cf66f 1144 /* PUSH __tmp_reg__ */
b4e6d2e2 1145 emit_push_byte (AVR_TMP_REGNO, frame_related_p);
0b6cf66f 1146
1147 if (clr_p)
1148 {
1149 /* OUT IO(SFR), __zero_reg__ */
1150 insn = emit_move_insn (sfr, const0_rtx);
1151 if (frame_related_p)
1152 RTX_FRAME_RELATED_P (insn) = 1;
1153 }
1154}
1155
/* Helper for avr_expand_prologue:  Save the registers in SET and set up a
   stack frame of SIZE bytes, choosing the cheapest of the available code
   sequences (out-of-line prologue_saves helper vs. inline pushes plus
   frame-pointer or stack-pointer adjustment).  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  /* Largest frame size representable in the stack pointer's mode.  */
  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  /* Whether the out-of-line __prologue_saves__ helper may be used.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main
                   && !AVR_TINY);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET ((frame_pointer_needed
                                  ? frame_pointer_rtx
                                  : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      /* Push every register flagged for saving in SET.  */
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /* Creating a frame can be done by direct manipulation of the
             stack or via the frame pointer. These two methods are:
                 fp = sp
                 fp -= size
                 sp = fp
             or
                 sp -= size
                 fp = sp    (*)
             the optimum method depends on function type, stack and
             frame size.  To avoid a complex logic, both methods are
             tested and shortest is selected.

             There is also the case where SIZE != 0 and no frame pointer is
             needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
             In that case, insn (*) is not needed.
             We use the X register as scratch.  This is safe because X
             is call-clobbered.
             In an interrupt routine, the case of SIZE != 0 together with
             !frame_pointer_needed can only occur if the function is not a
             leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, plus_constant (Pmode, fp,
                                                            -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1423
e484266f 1424
/* Output function prologue.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  /* Frame = local variables plus (accumulated) outgoing arguments.  */
  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (AVR_ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (AVR_TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          /* Note: RAMPZ is only cleared on devices that also have RAMPD
             (clr_p = AVR_HAVE_RAMPD below).  */
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    } /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
1501
0dff9558 1502
1503/* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'. */
df3d6232 1504/* Output summary at end of function prologue. */
a28e4651 1505
17d9b0c3 1506static void
df3d6232 1507avr_asm_function_end_prologue (FILE *file)
a28e4651 1508{
df3d6232 1509 if (cfun->machine->is_naked)
a28e4651 1510 {
c3bcd5a9 1511 fputs ("/* prologue: naked */\n", file);
a28e4651 1512 }
df3d6232 1513 else
a28e4651 1514 {
df3d6232 1515 if (cfun->machine->is_interrupt)
1516 {
1517 fputs ("/* prologue: Interrupt */\n", file);
1518 }
1519 else if (cfun->machine->is_signal)
1520 {
1521 fputs ("/* prologue: Signal */\n", file);
1522 }
a28e4651 1523 else
df3d6232 1524 fputs ("/* prologue: function */\n", file);
a28e4651 1525 }
a12b9b80 1526
1527 if (ACCUMULATE_OUTGOING_ARGS)
1528 fprintf (file, "/* outgoing args size = %d */\n",
1529 avr_outgoing_args_size());
1530
df3d6232 1531 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1532 get_frame_size());
34413b10 1533 fprintf (file, "/* stack size = %d */\n",
1534 cfun->machine->stack_usage);
1535 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1536 usage for offset so that SP + .L__stack_offset = return address. */
1537 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
df3d6232 1538}
f0973368 1539
e067eab2 1540
0dff9558 1541/* Implement `EPILOGUE_USES'. */
c3bcd5a9 1542
df3d6232 1543int
1544avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1545{
0dff9558 1546 if (reload_completed
df3d6232 1547 && cfun->machine
1548 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1549 return 1;
1550 return 0;
a28e4651 1551}
1552
0dff9558 1553/* Helper for avr_expand_epilogue. Emit a pop of a byte register. */
e484266f 1554
1555static void
1556emit_pop_byte (unsigned regno)
1557{
1558 rtx mem, reg;
1559
1560 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1561 mem = gen_frame_mem (QImode, mem);
1562 reg = gen_rtx_REG (QImode, regno);
1563
d1f9b275 1564 emit_insn (gen_rtx_SET (reg, mem));
e484266f 1565}
1566
/* Output RTL epilogue.  SIBCALL_P is true when expanding the epilogue of
   a sibling call, in which case no final return insn is emitted.  */

void
avr_expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  /* Frame = local variables plus (accumulated) outgoing arguments.  */
  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* Whether the out-of-line epilogue_restores helper may be used.  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && !AVR_TINY);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /* Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (AVR_TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (AVR_TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (AVR_ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
1769
0dff9558 1770
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
a28e4651 1778
1f959902 1779
0dff9558 1780/* Implement `TARGET_CANNOT_MODITY_JUMPS_P'. */
1f959902 1781
1782static bool
1783avr_cannot_modify_jumps_p (void)
1784{
1785
1786 /* Naked Functions must not have any instructions after
1787 their epilogue, see PR42240 */
0dff9558 1788
1f959902 1789 if (reload_completed
1790 && cfun->machine
1791 && cfun->machine->is_naked)
1792 {
1793 return true;
1794 }
1795
1796 return false;
1797}
1798
1799
002565f0 1800/* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1801
002565f0 1802static bool
be6d8823 1803avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
002565f0 1804{
be6d8823 1805 /* FIXME: Non-generic addresses are not mode-dependent in themselves.
1806 This hook just serves to hack around PR rtl-optimization/52543 by
1807 claiming that non-generic addresses were mode-dependent so that
1808 lower-subreg.c will skip these addresses. lower-subreg.c sets up fake
1809 RTXes to probe SET and MEM costs and assumes that MEM is always in the
1810 generic address space which is not true. */
1811
1812 return !ADDR_SPACE_GENERIC_P (as);
002565f0 1813}
1814
1815
8b0ecac5 1816/* Helper function for `avr_legitimate_address_p'. */
1817
1818static inline bool
f8a8fc7b 1819avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
f9efb148 1820 RTX_CODE outer_code, bool strict)
8b0ecac5 1821{
1822 return (REG_P (reg)
f8a8fc7b 1823 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1824 as, outer_code, UNKNOWN)
8b0ecac5 1825 || (!strict
1826 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1827}
1828
1829
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  STRICT requires hard
   registers to be valid base registers.  */

static bool
avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  /* Default: plain constant addresses are legitimate.  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* With strict checking, X (r26/r27) is rejected as base for
         accesses wider than 4 bytes.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* Whether the displacement fits LD/ST's offset range.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (AVR_TINY
      && CONSTANT_ADDRESS_P (x))
    {
      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf.  */

      ok = (CONST_INT_P (x)
            && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)));
    }

  /* Optional debug dump of the decision.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
1926
4202ef11 1927
1928/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1929 now only a helper for avr_addr_space_legitimize_address. */
a28e4651 1930/* Attempts to replace X with a valid
1931 memory address for an operand of mode MODE */
1932
8b0ecac5 1933static rtx
3754d046 1934avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
a28e4651 1935{
9f42c829 1936 bool big_offset_p = false;
0dff9558 1937
9f42c829 1938 x = oldx;
0dff9558 1939
9f42c829 1940 if (GET_CODE (oldx) == PLUS
1941 && REG_P (XEXP (oldx, 0)))
1942 {
1943 if (REG_P (XEXP (oldx, 1)))
1944 x = force_reg (GET_MODE (oldx), oldx);
1945 else if (CONST_INT_P (XEXP (oldx, 1)))
1946 {
0dff9558 1947 int offs = INTVAL (XEXP (oldx, 1));
9f42c829 1948 if (frame_pointer_rtx != XEXP (oldx, 0)
1949 && offs > MAX_LD_OFFSET (mode))
1950 {
1951 big_offset_p = true;
1952 x = force_reg (GET_MODE (oldx), oldx);
1953 }
1954 }
1955 }
0dff9558 1956
9f42c829 1957 if (avr_log.legitimize_address)
1958 {
1959 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1960
1961 if (x != oldx)
1962 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1963 }
1964
a28e4651 1965 return x;
1966}
1967
1968
68a79dfc 1969/* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1970/* This will allow register R26/27 to be used where it is no worse than normal
1971 base pointers R28/29 or R30/31. For example, if base offset is greater
1972 than 63 bytes or for R++ or --R addressing. */
1973
1974rtx
3754d046 1975avr_legitimize_reload_address (rtx *px, machine_mode mode,
68a79dfc 1976 int opnum, int type, int addr_type,
1977 int ind_levels ATTRIBUTE_UNUSED,
1978 rtx (*mk_memloc)(rtx,int))
1979{
1f46ee39 1980 rtx x = *px;
0dff9558 1981
68a79dfc 1982 if (avr_log.legitimize_reload_address)
1983 avr_edump ("\n%?:%m %r\n", mode, x);
0dff9558 1984
68a79dfc 1985 if (1 && (GET_CODE (x) == POST_INC
1986 || GET_CODE (x) == PRE_DEC))
1987 {
1988 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1989 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1990 opnum, RELOAD_OTHER);
0dff9558 1991
68a79dfc 1992 if (avr_log.legitimize_reload_address)
1f46ee39 1993 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
68a79dfc 1994 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
0dff9558 1995
68a79dfc 1996 return x;
1997 }
0dff9558 1998
68a79dfc 1999 if (GET_CODE (x) == PLUS
2000 && REG_P (XEXP (x, 0))
2001 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
2002 && CONST_INT_P (XEXP (x, 1))
2003 && INTVAL (XEXP (x, 1)) >= 1)
2004 {
2005 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
0dff9558 2006
68a79dfc 2007 if (fit)
2008 {
2009 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
2010 {
2011 int regno = REGNO (XEXP (x, 0));
2012 rtx mem = mk_memloc (x, regno);
0dff9558 2013
68a79dfc 2014 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
2015 POINTER_REGS, Pmode, VOIDmode, 0, 0,
95dcc8ad 2016 1, (enum reload_type) addr_type);
0dff9558 2017
68a79dfc 2018 if (avr_log.legitimize_reload_address)
1f46ee39 2019 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
68a79dfc 2020 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
0dff9558 2021
68a79dfc 2022 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
2023 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
95dcc8ad 2024 opnum, (enum reload_type) type);
0dff9558 2025
68a79dfc 2026 if (avr_log.legitimize_reload_address)
1f46ee39 2027 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
68a79dfc 2028 BASE_POINTER_REGS, mem, NULL_RTX);
0dff9558 2029
68a79dfc 2030 return x;
2031 }
2032 }
2033 else if (! (frame_pointer_needed
9f42c829 2034 && XEXP (x, 0) == frame_pointer_rtx))
68a79dfc 2035 {
1f46ee39 2036 push_reload (x, NULL_RTX, px, NULL,
68a79dfc 2037 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
95dcc8ad 2038 opnum, (enum reload_type) type);
0dff9558 2039
68a79dfc 2040 if (avr_log.legitimize_reload_address)
1f46ee39 2041 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
68a79dfc 2042 POINTER_REGS, x, NULL_RTX);
0dff9558 2043
68a79dfc 2044 return x;
2045 }
2046 }
0dff9558 2047
68a79dfc 2048 return NULL_RTX;
2049}
2050
2051
be6d8823 2052/* Implement `TARGET_SECONDARY_RELOAD' */
2053
static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      machine_mode mode, secondary_reload_info *sri)
{
  /* A load from a non-generic address space (other than __memx) needs
     a d-class scratch register.  The scratch is provided by the
     respective reload_in<mode> insn communicated via SRI->icode, so no
     secondary register class is requested (NO_REGS below).  */

  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch. */

      switch (mode)
        {
        default:
          gcc_unreachable();

        case QImode: sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode: sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode: sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode: sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode: sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode: sri->icode = CODE_FOR_reload_insi; break;
        case SFmode: sri->icode = CODE_FOR_reload_insf; break;
        case SQmode: sri->icode = CODE_FOR_reload_insq; break;
        case SAmode: sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  return NO_REGS;
}
2094
2095
9ce2d202 2096/* Helper function to print assembler resp. track instruction
02d9a2c3 2097 sequence lengths. Always return "".
0dff9558 2098
9ce2d202 2099 If PLEN == NULL:
2100 Output assembler code from template TPL with operands supplied
2101 by OPERANDS. This is just forwarding to output_asm_insn.
0dff9558 2102
9ce2d202 2103 If PLEN != NULL:
dfd52f2b 2104 If N_WORDS >= 0 Add N_WORDS to *PLEN.
2105 If N_WORDS < 0 Set *PLEN to -N_WORDS.
9ce2d202 2106 Don't output anything.
2107*/
2108
02d9a2c3 2109static const char*
9ce2d202 2110avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2111{
2112 if (NULL == plen)
2113 {
2114 output_asm_insn (tpl, operands);
2115 }
2116 else
2117 {
dfd52f2b 2118 if (n_words < 0)
2119 *plen = -n_words;
2120 else
2121 *plen += n_words;
9ce2d202 2122 }
02d9a2c3 2123
2124 return "";
9ce2d202 2125}
2126
2127
20c71901 2128/* Return a pointer register name as a string. */
a28e4651 2129
0dff9558 2130static const char*
206a5129 2131ptrreg_to_str (int regno)
a28e4651 2132{
2133 switch (regno)
2134 {
2135 case REG_X: return "X";
2136 case REG_Y: return "Y";
2137 case REG_Z: return "Z";
2138 default:
a45076aa 2139 output_operand_lossage ("address operand requires constraint for"
2140 " X, Y, or Z register");
a28e4651 2141 }
2142 return NULL;
2143}
2144
/* Return the condition name as a string.
   Used when constructing conditional jumps.  */
2147
0dff9558 2148static const char*
206a5129 2149cond_string (enum rtx_code code)
a28e4651 2150{
2151 switch (code)
2152 {
2153 case NE:
2154 return "ne";
2155 case EQ:
2156 return "eq";
2157 case GE:
2158 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
0dff9558 2159 return "pl";
a28e4651 2160 else
0dff9558 2161 return "ge";
a28e4651 2162 case LT:
2163 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
0dff9558 2164 return "mi";
a28e4651 2165 else
0dff9558 2166 return "lt";
a28e4651 2167 case GEU:
2168 return "sh";
a28e4651 2169 case LTU:
2170 return "lo";
2171 default:
8ef66241 2172 gcc_unreachable ();
a28e4651 2173 }
02d9a2c3 2174
2175 return "";
a28e4651 2176}
2177
62a6a7ee 2178
2179/* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
20c71901 2180/* Output ADDR to FILE as address. */
a28e4651 2181
62a6a7ee 2182static void
2183avr_print_operand_address (FILE *file, rtx addr)
a28e4651 2184{
2185 switch (GET_CODE (addr))
2186 {
2187 case REG:
2188 fprintf (file, ptrreg_to_str (REGNO (addr)));
2189 break;
2190
2191 case PRE_DEC:
2192 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2193 break;
2194
2195 case POST_INC:
2196 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2197 break;
2198
2199 default:
2200 if (CONSTANT_ADDRESS_P (addr)
0dff9558 2201 && text_segment_operand (addr, VOIDmode))
2202 {
2203 rtx x = addr;
2204 if (GET_CODE (x) == CONST)
2205 x = XEXP (x, 0);
2206 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2207 {
2208 /* Assembler gs() will implant word address. Make offset
2209 a byte offset inside gs() for assembler. This is
2210 needed because the more logical (constant+gs(sym)) is not
2211 accepted by gas. For 128K and smaller devices this is ok.
2212 For large devices it will create a trampoline to offset
a45076aa 2213 from symbol which may not be what the user really wanted. */
0dff9558 2214
2215 fprintf (file, "gs(");
2216 output_addr_const (file, XEXP (x,0));
a45076aa 2217 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2218 2 * INTVAL (XEXP (x, 1)));
0dff9558 2219 if (AVR_3_BYTE_PC)
2220 if (warning (0, "pointer offset from symbol maybe incorrect"))
2221 {
2222 output_addr_const (stderr, addr);
2223 fprintf(stderr,"\n");
2224 }
2225 }
2226 else
2227 {
2228 fprintf (file, "gs(");
2229 output_addr_const (file, addr);
2230 fprintf (file, ")");
2231 }
2232 }
a28e4651 2233 else
0dff9558 2234 output_addr_const (file, addr);
a28e4651 2235 }
2236}
2237
2238
62a6a7ee 2239/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
2240
static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  /* Punctuation codes understood by avr_print_operand: '~' and '!'.  */
  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
2246
2247
2248/* Implement `TARGET_PRINT_OPERAND'. */
384f6361 2249/* Output X as assembler operand to file FILE.
2250 For a description of supported %-codes, see top of avr.md. */
2251
62a6a7ee 2252static void
2253avr_print_operand (FILE *file, rtx x, int code)
a28e4651 2254{
b4e6d2e2 2255 int abcd = 0, ef = 0, ij = 0;
a28e4651 2256
2257 if (code >= 'A' && code <= 'D')
2258 abcd = code - 'A';
b4e6d2e2 2259 else if (code == 'E' || code == 'F')
2260 ef = code - 'E';
2261 else if (code == 'I' || code == 'J')
2262 ij = code - 'I';
a28e4651 2263
3b351734 2264 if (code == '~')
2265 {
4f0e2214 2266 if (!AVR_HAVE_JMP_CALL)
0dff9558 2267 fputc ('r', file);
3b351734 2268 }
90ef7269 2269 else if (code == '!')
2270 {
2271 if (AVR_HAVE_EIJMP_EICALL)
0dff9558 2272 fputc ('e', file);
90ef7269 2273 }
384f6361 2274 else if (code == 't'
2275 || code == 'T')
2276 {
2277 static int t_regno = -1;
2278 static int t_nbits = -1;
2279
2280 if (REG_P (x) && t_regno < 0 && code == 'T')
2281 {
2282 t_regno = REGNO (x);
2283 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2284 }
2285 else if (CONST_INT_P (x) && t_regno >= 0
2286 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2287 {
2288 int bpos = INTVAL (x);
2289
2290 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2291 if (code == 'T')
2292 fprintf (file, ",%d", bpos % 8);
2293
2294 t_regno = -1;
2295 }
2296 else
2297 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2298 }
b4e6d2e2 2299 else if (code == 'E' || code == 'F')
2300 {
2301 rtx op = XEXP(x, 0);
2302 fprintf (file, reg_names[REGNO (op) + ef]);
2303 }
2304 else if (code == 'I' || code == 'J')
2305 {
2306 rtx op = XEXP(XEXP(x, 0), 0);
2307 fprintf (file, reg_names[REGNO (op) + ij]);
2308 }
3b351734 2309 else if (REG_P (x))
a28e4651 2310 {
2311 if (x == zero_reg_rtx)
b4ebb666 2312 fprintf (file, "__zero_reg__");
2313 else if (code == 'r' && REGNO (x) < 32)
2314 fprintf (file, "%d", (int) REGNO (x));
a28e4651 2315 else
b4ebb666 2316 fprintf (file, reg_names[REGNO (x) + abcd]);
a28e4651 2317 }
96871982 2318 else if (CONST_INT_P (x))
2319 {
2320 HOST_WIDE_INT ival = INTVAL (x);
0dff9558 2321
96871982 2322 if ('i' != code)
2323 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2324 else if (low_io_address_operand (x, VOIDmode)
2325 || high_io_address_operand (x, VOIDmode))
2326 {
0b6cf66f 2327 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2328 fprintf (file, "__RAMPZ__");
2329 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2330 fprintf (file, "__RAMPY__");
2331 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2332 fprintf (file, "__RAMPX__");
2333 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2334 fprintf (file, "__RAMPD__");
b4e6d2e2 2335 else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
0b6cf66f 2336 fprintf (file, "__CCP__");
72851b68 2337 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
2338 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
2339 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
2340 else
96871982 2341 {
96871982 2342 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
f0aa7fe2 2343 ival - avr_arch->sfr_offset);
96871982 2344 }
2345 }
2346 else
2347 fatal_insn ("bad address, not an I/O address:", x);
2348 }
2349 else if (MEM_P (x))
a28e4651 2350 {
a45076aa 2351 rtx addr = XEXP (x, 0);
0dff9558 2352
310f64db 2353 if (code == 'm')
0dff9558 2354 {
a45076aa 2355 if (!CONSTANT_P (addr))
644ac9c5 2356 fatal_insn ("bad address, not a constant:", addr);
a45076aa 2357 /* Assembler template with m-code is data - not progmem section */
2358 if (text_segment_operand (addr, VOIDmode))
2359 if (warning (0, "accessing data memory with"
2360 " program memory address"))
2361 {
2362 output_addr_const (stderr, addr);
2363 fprintf(stderr,"\n");
2364 }
2365 output_addr_const (file, addr);
0dff9558 2366 }
644ac9c5 2367 else if (code == 'i')
2368 {
62a6a7ee 2369 avr_print_operand (file, addr, 'i');
644ac9c5 2370 }
3b351734 2371 else if (code == 'o')
0dff9558 2372 {
2373 if (GET_CODE (addr) != PLUS)
2374 fatal_insn ("bad address, not (reg+disp):", addr);
3b351734 2375
0dff9558 2376 avr_print_operand (file, XEXP (addr, 1), 0);
2377 }
b4e6d2e2 2378 else if (code == 'b')
2379 {
2380 if (GET_CODE (addr) != PLUS)
2381 fatal_insn ("bad address, not (reg+disp):", addr);
2382
2383 avr_print_operand_address (file, XEXP (addr, 0));
2384 }
f43bae99 2385 else if (code == 'p' || code == 'r')
2386 {
2387 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2388 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
0dff9558 2389
f43bae99 2390 if (code == 'p')
62a6a7ee 2391 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
f43bae99 2392 else
62a6a7ee 2393 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
f43bae99 2394 }
a28e4651 2395 else if (GET_CODE (addr) == PLUS)
0dff9558 2396 {
2397 avr_print_operand_address (file, XEXP (addr,0));
2398 if (REGNO (XEXP (addr, 0)) == REG_X)
2399 fatal_insn ("internal compiler error. Bad address:"
2400 ,addr);
2401 fputc ('+', file);
2402 avr_print_operand (file, XEXP (addr,1), code);
2403 }
a28e4651 2404 else
0dff9558 2405 avr_print_operand_address (file, addr);
a28e4651 2406 }
96871982 2407 else if (code == 'i')
2408 {
c0d7a1d0 2409 if (GET_CODE (x) == SYMBOL_REF && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
2410 avr_print_operand_address
f0aa7fe2 2411 (file, plus_constant (HImode, x, -avr_arch->sfr_offset));
c0d7a1d0 2412 else
2413 fatal_insn ("bad address, not an I/O address:", x);
96871982 2414 }
310f64db 2415 else if (code == 'x')
2416 {
2417 /* Constant progmem address - like used in jmp or call */
2418 if (0 == text_segment_operand (x, VOIDmode))
a45076aa 2419 if (warning (0, "accessing program memory"
2420 " with data memory address"))
0dff9558 2421 {
2422 output_addr_const (stderr, x);
2423 fprintf(stderr,"\n");
2424 }
310f64db 2425 /* Use normal symbol for direct address no linker trampoline needed */
2426 output_addr_const (file, x);
2427 }
b4ebb666 2428 else if (CONST_FIXED_P (x))
017c5b98 2429 {
2430 HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2431 if (code != 0)
803e7ca1 2432 output_operand_lossage ("Unsupported code '%c' for fixed-point:",
017c5b98 2433 code);
2434 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2435 }
a28e4651 2436 else if (GET_CODE (x) == CONST_DOUBLE)
2437 {
2438 long val;
2439 REAL_VALUE_TYPE rv;
2440 if (GET_MODE (x) != SFmode)
0dff9558 2441 fatal_insn ("internal compiler error. Unknown mode:", x);
a28e4651 2442 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2443 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
7fe1d31c 2444 fprintf (file, "0x%lx", val);
a28e4651 2445 }
5bd39e93 2446 else if (GET_CODE (x) == CONST_STRING)
2447 fputs (XSTR (x, 0), file);
a28e4651 2448 else if (code == 'j')
7fe1d31c 2449 fputs (cond_string (GET_CODE (x)), file);
a28e4651 2450 else if (code == 'k')
7fe1d31c 2451 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
a28e4651 2452 else
62a6a7ee 2453 avr_print_operand_address (file, x);
a28e4651 2454}
2455
0dff9558 2456
2457/* Worker function for `NOTICE_UPDATE_CC'. */
a28e4651 2458/* Update the condition code in the INSN. */
2459
void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  /* Some insns' effect on cc0 depends on their operands; map their
     special CC attribute values to standard CC_* values first.  */

  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands. */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            /* Let the output routine compute the resulting cc.  */
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0. */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here. */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all. */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_VZN:
      /* Insn like INC, DEC, NEG that set Z,N,V. We currently don't make use
         of this combination, cf. also PR61055. */
      CC_STATUS_INIT;
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE. */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state. */
      CC_STATUS_INIT;
      break;
    }
}
2565
a28e4651 2566/* Choose mode for jump insn:
2567 1 - relative jump in range -63 <= x <= 62 ;
2568 2 - relative jump in range -2046 <= x <= 2045 ;
2569 3 - absolute jump (only for ATmega[16]03). */
2570
2571int
375204de 2572avr_jump_mode (rtx x, rtx_insn *insn)
a28e4651 2573{
ef51d1e3 2574 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
0dff9558 2575 ? XEXP (x, 0) : x));
47fc0706 2576 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
a28e4651 2577 int jump_distance = cur_addr - dest_addr;
0dff9558 2578
a28e4651 2579 if (-63 <= jump_distance && jump_distance <= 62)
2580 return 1;
2581 else if (-2046 <= jump_distance && jump_distance <= 2045)
2582 return 2;
4f0e2214 2583 else if (AVR_HAVE_JMP_CALL)
a28e4651 2584 return 3;
0dff9558 2585
a28e4651 2586 return 2;
2587}
2588
0dff9558 2589/* Return an AVR condition jump commands.
e7d17bf3 2590 X is a comparison RTX.
2591 LEN is a number returned by avr_jump_mode function.
0dff9558 2592 If REVERSE nonzero then condition code in X must be reversed. */
a28e4651 2593
const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  /* GT, GTU, LE, LEU have no single branch insn on AVR; synthesize
     them from EQ plus a signed/unsigned inequality branch.  Sequences
     are selected by LEN: 1 = short branch, 2 = RJMP, else JMP.  */

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        /* Overflow flag unusable: test the sign bit only.  */
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* All remaining conditions map directly onto a single branch
         insn; use %j (condition) or %k (reversed condition).  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
2690
0dff9558 2691
2692/* Worker function for `FINAL_PRESCAN_INSN'. */
fe74bc77 2693/* Output insn cost for next insn. */
a28e4651 2694
2695void
375204de 2696avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
0dff9558 2697 int num_operands ATTRIBUTE_UNUSED)
a28e4651 2698{
ae86bb47 2699 if (avr_log.rtx_costs)
a28e4651 2700 {
114786e6 2701 rtx set = single_set (insn);
2702
2703 if (set)
2704 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
7013e87c 2705 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
114786e6 2706 else
2707 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
20d892d1 2708 rtx_cost (PATTERN (insn), INSN, 0,
0dff9558 2709 optimize_insn_for_speed_p()));
a28e4651 2710 }
a28e4651 2711}
2712
37ac04dc 2713/* Return 0 if undefined, 1 if always true or always false. */
a28e4651 2714
2715int
3754d046 2716avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
a28e4651 2717{
2718 unsigned int max = (mode == QImode ? 0xff :
2719 mode == HImode ? 0xffff :
02d9a2c3 2720 mode == PSImode ? 0xffffff :
30435bf8 2721 mode == SImode ? 0xffffffff : 0);
0dff9558 2722 if (max && op && CONST_INT_P (x))
a28e4651 2723 {
8deb3959 2724 if (unsigned_condition (op) != op)
0dff9558 2725 max >>= 1;
a28e4651 2726
2727 if (max != (INTVAL (x) & max)
0dff9558 2728 && INTVAL (x) != 0xff)
2729 return 1;
a28e4651 2730 }
2731 return 0;
2732}
2733
2734
0dff9558 2735/* Worker function for `FUNCTION_ARG_REGNO_P'. */
a28e4651 2736/* Returns nonzero if REGNO is the number of a hard
2737 register in which function arguments are sometimes passed. */
2738
2739int
0dff9558 2740avr_function_arg_regno_p(int r)
a28e4651 2741{
b4e6d2e2 2742 return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25);
a28e4651 2743}
2744
0dff9558 2745
2746/* Worker function for `INIT_CUMULATIVE_ARGS'. */
a28e4651 2747/* Initializing the variable cum for the state at the beginning
2748 of the argument list. */
2749
2750void
0dff9558 2751avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2752 tree fndecl ATTRIBUTE_UNUSED)
a28e4651 2753{
b4e6d2e2 2754 cum->nregs = AVR_TINY ? 6 : 18;
a28e4651 2755 cum->regno = FIRST_CUM_REG;
257d99c3 2756 if (!libname && stdarg_p (fntype))
2757 cum->nregs = 0;
32969c63 2758
2759 /* Assume the calle may be tail called */
0dff9558 2760
32969c63 2761 cfun->machine->sibcall_fails = 0;
a28e4651 2762}
2763
0af74aa0 2764/* Returns the number of registers to allocate for a function argument. */
2765
2766static int
3754d046 2767avr_num_arg_regs (machine_mode mode, const_tree type)
0af74aa0 2768{
2769 int size;
2770
2771 if (mode == BLKmode)
2772 size = int_size_in_bytes (type);
2773 else
2774 size = GET_MODE_SIZE (mode);
2775
b681d971 2776 /* Align all function arguments to start in even-numbered registers.
2777 Odd-sized arguments leave holes above them. */
0af74aa0 2778
b681d971 2779 return (size + 1) & ~1;
0af74aa0 2780}
2781
0dff9558 2782
2783/* Implement `TARGET_FUNCTION_ARG'. */
a28e4651 2784/* Controls whether a function argument is passed
20c71901 2785 in a register, and which register. */
a28e4651 2786
e8509bef 2787static rtx
3754d046 2788avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
0dff9558 2789 const_tree type, bool named ATTRIBUTE_UNUSED)
a28e4651 2790{
39cba157 2791 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
0af74aa0 2792 int bytes = avr_num_arg_regs (mode, type);
a28e4651 2793
2794 if (cum->nregs && bytes <= cum->nregs)
1a83b3ff 2795 return gen_rtx_REG (mode, cum->regno - bytes);
0af74aa0 2796
a28e4651 2797 return NULL_RTX;
2798}
2799
0dff9558 2800
2801/* Implement `TARGET_FUNCTION_ARG_ADVANCE'. */
a28e4651 2802/* Update the summarizer variable CUM to advance past an argument
2803 in the argument list. */
0dff9558 2804
e8509bef 2805static void
3754d046 2806avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
0dff9558 2807 const_tree type, bool named ATTRIBUTE_UNUSED)
a28e4651 2808{
39cba157 2809 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
0af74aa0 2810 int bytes = avr_num_arg_regs (mode, type);
a28e4651 2811
a28e4651 2812 cum->nregs -= bytes;
2813 cum->regno -= bytes;
2814
0dff9558 2815 /* A parameter is being passed in a call-saved register. As the original
32969c63 2816 contents of these regs has to be restored before leaving the function,
2817 a function must not pass arguments in call-saved regs in order to get
0dff9558 2818 tail-called. */
2819
e46ab6d3 2820 if (cum->regno >= 8
2821 && cum->nregs >= 0
32969c63 2822 && !call_used_regs[cum->regno])
2823 {
2824 /* FIXME: We ship info on failing tail-call in struct machine_function.
2825 This uses internals of calls.c:expand_call() and the way args_so_far
0dff9558 2826 is used. targetm.function_ok_for_sibcall() needs to be extended to
2827 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2828 dependent so that such an extension is not wanted. */
2829
32969c63 2830 cfun->machine->sibcall_fails = 1;
2831 }
2832
04f600bb 2833 /* Test if all registers needed by the ABI are actually available. If the
2834 user has fixed a GPR needed to pass an argument, an (implicit) function
363cd51e 2835 call will clobber that fixed register. See PR45099 for an example. */
0dff9558 2836
e46ab6d3 2837 if (cum->regno >= 8
2838 && cum->nregs >= 0)
04f600bb 2839 {
2840 int regno;
2841
2842 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2843 if (fixed_regs[regno])
363cd51e 2844 warning (0, "fixed register %s used to pass parameter to function",
2845 reg_names[regno]);
04f600bb 2846 }
0dff9558 2847
a28e4651 2848 if (cum->nregs <= 0)
2849 {
2850 cum->nregs = 0;
2851 cum->regno = FIRST_CUM_REG;
2852 }
a28e4651 2853}
2854
32969c63 2855/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2856/* Decide whether we can make a sibling call to a function. DECL is the
2857 declaration of the function being targeted by the call and EXP is the
0dff9558 2858 CALL_EXPR representing the call. */
32969c63 2859
2860static bool
2861avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2862{
2863 tree fntype_callee;
2864
2865 /* Tail-calling must fail if callee-saved regs are used to pass
2866 function args. We must not tail-call when `epilogue_restores'
2867 is used. Unfortunately, we cannot tell at this point if that
2868 actually will happen or not, and we cannot step back from
0dff9558 2869 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2870
32969c63 2871 if (cfun->machine->sibcall_fails
2872 || TARGET_CALL_PROLOGUES)
2873 {
2874 return false;
2875 }
0dff9558 2876
32969c63 2877 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2878
2879 if (decl_callee)
2880 {
2881 decl_callee = TREE_TYPE (decl_callee);
2882 }
2883 else
2884 {
2885 decl_callee = fntype_callee;
0dff9558 2886
32969c63 2887 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2888 && METHOD_TYPE != TREE_CODE (decl_callee))
2889 {
2890 decl_callee = TREE_TYPE (decl_callee);
2891 }
2892 }
2893
2894 /* Ensure that caller and callee have compatible epilogues */
0dff9558 2895
ea679361 2896 if (cfun->machine->is_interrupt
2897 || cfun->machine->is_signal
2898 || cfun->machine->is_naked
32969c63 2899 || avr_naked_function_p (decl_callee)
0dff9558 2900 /* FIXME: For OS_task and OS_main, this might be over-conservative. */
32969c63 2901 || (avr_OS_task_function_p (decl_callee)
ea679361 2902 != cfun->machine->is_OS_task)
32969c63 2903 || (avr_OS_main_function_p (decl_callee)
ea679361 2904 != cfun->machine->is_OS_main))
32969c63 2905 {
2906 return false;
2907 }
0dff9558 2908
32969c63 2909 return true;
2910}
2911
a28e4651 2912/***********************************************************************
2913 Functions for outputting various mov's for a various modes
2914************************************************************************/
4202ef11 2915
2916/* Return true if a value of mode MODE is read from flash by
2917 __load_* function from libgcc. */
2918
2919bool
2920avr_load_libgcc_p (rtx op)
2921{
3754d046 2922 machine_mode mode = GET_MODE (op);
4202ef11 2923 int n_bytes = GET_MODE_SIZE (mode);
0dff9558 2924
4202ef11 2925 return (n_bytes > 2
2926 && !AVR_HAVE_LPMX
be6d8823 2927 && avr_mem_flash_p (op));
4202ef11 2928}
2929
5bd39e93 2930/* Return true if a value of mode MODE is read by __xload_* function. */
2931
2932bool
3754d046 2933avr_xload_libgcc_p (machine_mode mode)
5bd39e93 2934{
2935 int n_bytes = GET_MODE_SIZE (mode);
0dff9558 2936
5bd39e93 2937 return (n_bytes > 1
c0d7a1d0 2938 || avr_n_flash > 1);
5bd39e93 2939}
2940
2941
/* FIXME: This is a hack because secondary reloads don't work as expected.
2943
2944 Find an unused d-register to be used as scratch in INSN.
2945 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2946 is a register, skip all possible return values that overlap EXCLUDE.
2947 The policy for the returned register is similar to that of
2948 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2949 of INSN.
2950
2951 Return a QImode d-register or NULL_RTX if nothing found. */
2952
static rtx
avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
{
  int regno;
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  /* Scan the d-registers (r16..r31).  */

  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      /* Skip candidates that overlap EXCLUDE or are fixed.  */

      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed. */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
2994
2995
2996/* Helper function for the next function in the case where only restricted
2997 version of LPM instruction is available. */
2998
static const char*
avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM. */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          /* Move the byte away from R0 unless R0 is the target.  */
          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* Destination overlaps Z: stash the low byte on the stack
             so Z remains intact for the second LPM.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm" CR_TAB
                                "push %3" CR_TAB
                                "adiw %2,1" CR_TAB
                                "%4lpm" CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm" CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1" CR_TAB
                       "%4lpm" CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Undo the Z increment if a later insn still needs ADDR.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      /* Byte 0: LPM result goes through R0 unless R0 is the target.  */
      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm" CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm" CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
3091
3092
/* If PLEN == NULL: Output instructions to load a value from a memory location
3094 OP[1] in AS1 to register OP[0].
3095 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3096 Return "". */
3097
const char*
avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
{
  /* Operand vector handed to avr_asm_len:
       %0 = destination register
       %1 = source address
       %2 = Z (the LPM/ELPM address register)
       %3 = scratch d-register for setting RAMPZ (filled in below)
       %4 = "" for LPM or "e" for ELPM (selects the mnemonic via "%4lpm")
       %5 = __tmp_reg__
       %6 = RAMPZ I/O address  */
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  if (MEM_P (dest))
    {
      /* Flash is read-only from the program's point of view: a store to a
         flash address space cannot be carried out, only diagnosed.  */
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          /* A dead upper register is available: LDI the segment number
             directly.  */
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* Segment 1 can be built without a d-register: CLR + INC.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          /* Otherwise borrow ZH (%2 is Z), restoring it afterwards.  */
          avr_asm_len ("mov %5,%2" CR_TAB
                       "ldi %2,%4" CR_TAB
                       "out %i6,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* From here on use ELPM ("%4lpm" expands to "elpm").  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          if (REGNO (dest) == REG_Z)
            /* Destination overlaps Z: buffer the low byte in __tmp_reg__
               so the second load still sees an intact pointer.  */
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              /* Z was advanced by one; undo that if it is still live.  */
              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          if (REGNO (dest) == REG_Z - 2)
            /* The two upper destination bytes are r30/r31 (= Z): buffer
               byte C in __tmp_reg__ to keep the pointer usable.  */
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      /* Post-increment addressing maps directly onto [E]LPM Rd,Z+.  */
      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
3265
3266
ed2541ea 3267/* Worker function for xload_8 insn. */
5bd39e93 3268
3269const char*
375204de 3270avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
5bd39e93 3271{
ed2541ea 3272 rtx xop[4];
5bd39e93 3273
ed2541ea 3274 xop[0] = op[0];
5bd39e93 3275 xop[1] = op[1];
3276 xop[2] = lpm_addr_reg_rtx;
ed2541ea 3277 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
5bd39e93 3278
4f3c1595 3279 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);
5bd39e93 3280
ee3ce577 3281 avr_asm_len ("sbrc %1,7" CR_TAB
4f3c1595 3282 "ld %3,%a2", xop, plen, 2);
5bd39e93 3283
ed2541ea 3284 if (REGNO (xop[0]) != REGNO (xop[3]))
3285 avr_asm_len ("mov %0,%3", xop, plen, 1);
0dff9558 3286
5bd39e93 3287 return "";
3288}
3289
1a96adb9 3290
002565f0 3291const char*
375204de 3292output_movqi (rtx_insn *insn, rtx operands[], int *plen)
a28e4651 3293{
e511e253 3294 rtx dest = operands[0];
3295 rtx src = operands[1];
0dff9558 3296
590da9f2 3297 if (avr_mem_flash_p (src)
3298 || avr_mem_flash_p (dest))
4202ef11 3299 {
be6d8823 3300 return avr_out_lpm (insn, operands, plen);
4202ef11 3301 }
3302
017c5b98 3303 gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3304
3305 if (REG_P (dest))
a28e4651 3306 {
017c5b98 3307 if (REG_P (src)) /* mov r,r */
be6d8823 3308 {
3309 if (test_hard_reg_class (STACK_REG, dest))
3310 return avr_asm_len ("out %0,%1", operands, plen, -1);
3311 else if (test_hard_reg_class (STACK_REG, src))
3312 return avr_asm_len ("in %0,%1", operands, plen, -1);
0dff9558 3313
be6d8823 3314 return avr_asm_len ("mov %0,%1", operands, plen, -1);
3315 }
e511e253 3316 else if (CONSTANT_P (src))
a49907f9 3317 {
be6d8823 3318 output_reload_in_const (operands, NULL_RTX, plen, false);
a49907f9 3319 return "";
3320 }
002565f0 3321 else if (MEM_P (src))
be6d8823 3322 return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
e511e253 3323 }
002565f0 3324 else if (MEM_P (dest))
e511e253 3325 {
644ac9c5 3326 rtx xop[2];
37ac04dc 3327
644ac9c5 3328 xop[0] = dest;
017c5b98 3329 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
e511e253 3330
be6d8823 3331 return out_movqi_mr_r (insn, xop, plen);
e511e253 3332 }
be6d8823 3333
e511e253 3334 return "";
3335}
3336
3337
/* Output a 16-bit move XOP[1] -> XOP[0] for INSN.  If PLEN is non-NULL,
   only accumulate the instruction count in *PLEN.  Returns "" or aborts
   for an insn shape that cannot be handled.  */

const char *
output_movhi (rtx_insn *insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      /* Accesses touching Flash are handled by the LPM worker.  */
      return avr_out_lpm (insn, xop, plen);
    }

  gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              /* Writing the stack pointer.  */
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              if (AVR_XMEGA)
                /* XMEGA: write both halves directly, no SREG save / CLI
                   sequence is emitted here.  */
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are used.  */

              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)
                /* Otherwise block interrupts while the two halves are
                   inconsistent, restoring SREG in between.  */
                : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                               "cli" CR_TAB
                               "out __SP_H__,%B1" CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              /* Reading the stack pointer; without SPH the high byte
                 is zero.  */
              return !AVR_HAVE_SPH
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      xop[0] = dest;
      /* A zero source is stored through the fixed zero register.  */
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
3417
1a96adb9 3418
b4e6d2e2 3419/* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
1a96adb9 3420
/* Output an 8-bit load OP[0] = (base + displacement) for AVR_TINY.
   The pointer is advanced with the TINY_ADIW subi/sbci pair and only
   wound back when the base register is still needed afterwards.  */

static const char*
avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);      /* the (PLUS base disp) address  */

  avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
               "ld %0,%b1", op, plen, -3);

  /* Restore the base pointer unless the load clobbered it or it is
     dead after this insn.  */
  if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
      && !reg_unused_after (insn, XEXP (x,0)))
    avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);

  return "";
}
3437
/* Output an 8-bit load from memory OP[1] into register OP[0] for INSN.
   If PLEN is non-NULL, only accumulate the instruction count in *PLEN.
   Returns "".  */

static const char*
out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Direct address: IN for I/O space when optimizing, else LDS
         (which is 1 word on AVR_TINY, 2 words otherwise).  */
      int n_words = AVR_TINY ? 1 : 2;
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          /* Displacement beyond LDD's 0..63 range: only Y (the frame
             pointer) is expected here; adjust it around the access.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          /* Wind X back unless the load overwrote it or it dies here.  */
          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  /* Plain register-indirect (possibly with pre/post modification).  */
  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
3502
1a96adb9 3503
b4e6d2e2 3504/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
1a96adb9 3505
b4e6d2e2 3506static const char*
3507avr_out_movhi_r_mr_reg_no_disp_tiny (rtx op[], int *plen)
3508{
3509 rtx dest = op[0];
3510 rtx src = op[1];
3511 rtx base = XEXP (src, 0);
3512
3513 int reg_dest = true_regnum (dest);
3514 int reg_base = true_regnum (base);
3515
3516 if (reg_dest == reg_base) /* R = (R) */
1a96adb9 3517 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3518 "ld %B0,%1" CR_TAB
3519 "mov %A0,__tmp_reg__", op, plen, -3);
b4e6d2e2 3520
1a96adb9 3521 return avr_asm_len ("ld %A0,%1" CR_TAB
3522 TINY_ADIW (%E1, %F1, 1) CR_TAB
3523 "ld %B0,%1" CR_TAB
3524 TINY_SBIW (%E1, %F1, 1), op, plen, -6);
b4e6d2e2 3525}
3526
1a96adb9 3527
b4e6d2e2 3528/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
1a96adb9 3529
b4e6d2e2 3530static const char*
3531avr_out_movhi_r_mr_reg_disp_tiny (rtx op[], int *plen)
3532{
3533 rtx dest = op[0];
3534 rtx src = op[1];
3535 rtx base = XEXP (src, 0);
3536
3537 int reg_dest = true_regnum (dest);
3538 int reg_base = true_regnum (XEXP (base, 0));
3539
3540 if (reg_base == reg_dest)
1a96adb9 3541 {
b4e6d2e2 3542 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
1a96adb9 3543 "ld __tmp_reg__,%b1+" CR_TAB
3544 "ld %B0,%b1" CR_TAB
3545 "mov %A0,__tmp_reg__", op, plen, -5);
3546 }
b4e6d2e2 3547 else
1a96adb9 3548 {
b4e6d2e2 3549 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
1a96adb9 3550 "ld %A0,%b1+" CR_TAB
3551 "ld %B0,%b1" CR_TAB
3552 TINY_SBIW (%I1, %J1, %o1+1), op, plen, -6);
3553 }
3554}
3555
b4e6d2e2 3556
3557/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
1a96adb9 3558
/* Output a 16-bit load with pre-decrement addressing (--R) for AVR_TINY,
   which lacks ADIW and SBIW.  INSN is the move insn; PLEN as usual.  */

static const char*
avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  int mem_volatile_p = 0;
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  mem_volatile_p = MEM_VOLATILE_P (src);

  /* The destination must not clobber the pointer mid-sequence.  */
  if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
    fatal_insn ("incorrect insn:", insn);

  if (!mem_volatile_p)
    /* Non-volatile: read high byte first, letting the two implicit
       pre-decrements walk the pointer down.  */
    return avr_asm_len ("ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -2);

  /* Volatile: step the pointer down by 2 first, then read low byte
     before high byte, leaving the pointer at the decremented address.  */
  return avr_asm_len (TINY_SBIW (%I1, %J1, 2) CR_TAB
                      "ld %A0,%p1+" CR_TAB
                      "ld %B0,%p1" CR_TAB
                      TINY_SBIW (%I1, %J1, 1), op, plen, -6);
}
3583
1a96adb9 3584
/* Output a 16-bit load from memory OP[1] into register OP[0] for INSN.
   If PLEN is non-NULL, only accumulate the instruction count in *PLEN.
   Returns "" or aborts on an unknown address form.  */

static const char*
out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_no_disp_tiny (op, plen);

      if (reg_dest == reg_base) /* R = (R) */
        /* Destination overlaps the pointer: buffer the low byte.  */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1" CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no displacement mode: use post-increment and wind back.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_disp_tiny (op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement beyond LDD's range: only Y is expected here;
             adjust the frame pointer around the access.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y" CR_TAB
                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "ld %B0,X" CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+" CR_TAB
                         "ld %B0,X" CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);

      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile access: step the pointer down by hand so the low byte
         can be read first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "ld %A0,X+" CR_TAB
                       "ld %B0,X" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ld %A0,%p1" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is 1 word on AVR_TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 2 : 4;
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3714
/* Output a 32-bit load through a plain register base for AVR_TINY,
   which lacks ADIW, SBIW and LDD.  Returns the asm template; *L is set
   to its length in instructions.  */

static const char*
avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_dest == reg_base)
    {
      /* Destination overlaps the pointer; read top-down so the pointer
         bytes are overwritten last ("ld r26,-X" is undefined).  */
      return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
                      "ld %D0,%1" CR_TAB
                      "ld %C0,-%1" CR_TAB
                      "ld __tmp_reg__,-%1" CR_TAB
                      TINY_SBIW (%E1, %F1, 1) CR_TAB
                      "ld %A0,%1" CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* The upper destination bytes overlap the pointer: buffer byte C
         in __tmp_reg__.  */
      return *l = 5, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld __tmp_reg__,%1+" CR_TAB
                      "ld %D0,%1" CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, base))
    {
      /* Pointer dies here: no need to wind it back.  */
      return *l = 4, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld %C0,%1+" CR_TAB
                      "ld %D0,%1");
    }
  else
    {
      return *l = 6, ("ld %A0,%1+" CR_TAB
                      "ld %B0,%1+" CR_TAB
                      "ld %C0,%1+" CR_TAB
                      "ld %D0,%1" CR_TAB
                      TINY_SBIW (%E1, %F1, 3));
    }
}
3759
1a96adb9 3760
/* Output a 32-bit load from (base + displacement) for AVR_TINY, which
   lacks ADIW, SBIW and LDD.  Returns the asm template; *L is set to its
   length in instructions.  */

static const char*
avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_dest == reg_base)
    {
      /* Destination overlaps the pointer; read top-down so the pointer
         bytes are overwritten last ("ld r26,-X" is undefined).  */
      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
                      "ld %D0,%b1" CR_TAB
                      "ld %C0,-%b1" CR_TAB
                      "ld __tmp_reg__,-%b1" CR_TAB
                      TINY_SBIW (%I1, %J1, 1) CR_TAB
                      "ld %A0,%b1" CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* Upper destination bytes overlap the pointer: buffer byte C.  */
      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld __tmp_reg__,%b1+" CR_TAB
                      "ld %D0,%b1" CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, XEXP (base, 0)))
    {
      /* Pointer dies here: skip the wind-back.  */
      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld %C0,%b1+" CR_TAB
                      "ld %D0,%b1");
    }
  else
    {
      return *l = 8, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+" CR_TAB
                      "ld %B0,%b1+" CR_TAB
                      "ld %C0,%b1+" CR_TAB
                      "ld %D0,%b1" CR_TAB
                      TINY_SBIW (%I1, %J1, %o1+3));
    }
}
3808
/* Output a 32-bit load from memory OP[1] into register OP[0] for INSN.
   If L is non-NULL, *L is set to the sequence length in instructions.
   Returns the asm template, or aborts on an unknown address form.  */

static const char*
out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X) /* (R26) */
        {
          if (reg_dest == REG_X)
            /* Destination is X itself: read top-down, overwriting the
               pointer bytes last ("ld r26,-X" is undefined).  */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld r29,X" CR_TAB
                          "ld r28,-X" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1" CR_TAB
                          "ld r26,X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Upper destination bytes overlap X: buffer byte C.  */
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X");
          else
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld %B0,X+" CR_TAB
                          "ld %C0,X+" CR_TAB
                          "ld %D0,X" CR_TAB
                          "sbiw r26,3");
        }
      else
        {
          if (reg_dest == reg_base)
            /* Destination overlaps the pointer: read top-down.  */
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement beyond LDD's range: only Y is expected here;
             adjust the frame pointer around the access.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) -- X has no displacement mode, so adjust X
             around the accesses.  */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld r29,X" CR_TAB
                      "ld r28,-X" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1" CR_TAB
                      "ld r26,X" CR_TAB
                      "mov r27,__tmp_reg__");
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB
                    "ld r24,X+" CR_TAB
                    "ld r25,X+" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+" CR_TAB
                  "ld %B0,X+" CR_TAB
                  "ld %C0,X+" CR_TAB
                  "ld %D0,X" CR_TAB
                  "sbiw r26,%o1+3");
        }
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          *l = 4;
          return ("in %A0,%i1" CR_TAB
                  "in %B0,%i1+1" CR_TAB
                  "in %C0,%i1+2" CR_TAB
                  "in %D0,%i1+3");
        }
      else
        {
          /* LDS is 1 word on AVR_TINY, 2 words otherwise.  */
          *l = AVR_TINY ? 4 : 8;
          return ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");
        }
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3989
/* Output a 32-bit store to memory through a plain register base for
   AVR_TINY, which lacks ADIW and SBIW.  OP[0] is the MEM destination,
   OP[1] the source register; *L receives the length in instructions.  */

static const char*
avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* The base pointer is the low word of SRC ("st X+,r26" would be
         undefined), so byte B is parked in __tmp_reg__ first.  */
      if (reg_unused_after (insn, base))
        {
          return *l = 7, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1");
        }
      else
        {
          /* NOTE(review): after the three post-increments the pointer
             sits at base+4, so SBIW 3 leaves it at base+1, not base --
             looks off by one; confirm against the other branches, which
             end with a non-incrementing store.  */
          return *l = 9, ("mov __tmp_reg__, %B1" CR_TAB
                          "st %0,%A1" CR_TAB
                          TINY_ADIW (%E0, %F0, 1) CR_TAB
                          "st %0+,__tmp_reg__" CR_TAB
                          "st %0+,%C1" CR_TAB
                          "st %0+,%D1" CR_TAB
                          TINY_SBIW (%E0, %F0, 3));
        }
    }
  else if (reg_base == reg_src + 2)
    {
      /* The high word of SRC overlaps the pointer: save C and D in the
         scratch registers before the pointer advances over them.  */
      if (reg_unused_after (insn, base))
        return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__");
      else
        return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1" CR_TAB
                        "st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,__zero_reg__" CR_TAB
                        "st %0,__tmp_reg__" CR_TAB
                        "clr __zero_reg__" CR_TAB
                        TINY_SBIW (%E0, %F0, 3));
    }

  /* No overlap: store the four bytes in order, then wind the pointer
     back to its original value.  */
  return *l = 6, ("st %0+,%A1" CR_TAB
                  "st %0+,%B1" CR_TAB
                  "st %0+,%C1" CR_TAB
                  "st %0,%D1" CR_TAB
                  TINY_SBIW (%E0, %F0, 3));
}
4049
4050static const char*
4051avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
4052{
4053 rtx dest = op[0];
4054 rtx src = op[1];
4055 rtx base = XEXP (dest, 0);
4056 int reg_base = REGNO (XEXP (base, 0));
4057 int reg_src =true_regnum (src);
4058
4059 if (reg_base == reg_src)
4060 {
1a96adb9 4061 *l = 11;
4062 return ("mov __tmp_reg__,%A2" CR_TAB
4063 "mov __zero_reg__,%B2" CR_TAB
b4e6d2e2 4064 TINY_ADIW (%I0, %J0, %o0) CR_TAB
1a96adb9 4065 "st %b0+,__tmp_reg__" CR_TAB
4066 "st %b0+,__zero_reg__" CR_TAB
4067 "st %b0+,%C2" CR_TAB
4068 "st %b0,%D2" CR_TAB
4069 "clr __zero_reg__" CR_TAB
4070 TINY_SBIW (%I0, %J0, %o0+3));
4071 }
b4e6d2e2 4072 else if (reg_src == reg_base - 2)
4073 {
1a96adb9 4074 *l = 11;
4075 return ("mov __tmp_reg__,%C2" CR_TAB
4076 "mov __zero_reg__,%D2" CR_TAB
4077 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4078 "st %b0+,%A0" CR_TAB
4079 "st %b0+,%B0" CR_TAB
4080 "st %b0+,__tmp_reg__" CR_TAB
4081 "st %b0,__zero_reg__" CR_TAB
4082 "clr __zero_reg__" CR_TAB
4083 TINY_SBIW (%I0, %J0, %o0+3));
4084 }
b4e6d2e2 4085 *l = 8;
4086 return (TINY_ADIW (%I0, %J0, %o0) CR_TAB
1a96adb9 4087 "st %b0+,%A1" CR_TAB
4088 "st %b0+,%B1" CR_TAB
4089 "st %b0+,%C1" CR_TAB
4090 "st %b0,%D1" CR_TAB
4091 TINY_SBIW (%I0, %J0, %o0+3));
b4e6d2e2 4092}
4093
644ac9c5 4094static const char*
375204de 4095out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
a28e4651 4096{
37ac04dc 4097 rtx dest = op[0];
4098 rtx src = op[1];
4099 rtx base = XEXP (dest, 0);
4100 int reg_base = true_regnum (base);
4101 int reg_src = true_regnum (src);
a28e4651 4102 int tmp;
0dff9558 4103
a28e4651 4104 if (!l)
4105 l = &tmp;
0dff9558 4106
37ac04dc 4107 if (CONSTANT_ADDRESS_P (base))
b4e6d2e2 4108 {
4109 if (io_address_operand (base, SImode))
4110 {
4111 return *l=4,("out %i0, %A1" CR_TAB
4112 "out %i0+1,%B1" CR_TAB
4113 "out %i0+2,%C1" CR_TAB
4114 "out %i0+3,%D1");
4115 }
4116 else
4117 {
4118 *l = AVR_TINY ? 4 : 8;
4119 return ("sts %m0,%A1" CR_TAB
4120 "sts %m0+1,%B1" CR_TAB
4121 "sts %m0+2,%C1" CR_TAB
4122 "sts %m0+3,%D1");
4123 }
4124 }
4125
a28e4651 4126 if (reg_base > 0) /* (r) */
4127 {
b4e6d2e2 4128 if (AVR_TINY)
4129 return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);
4130
a28e4651 4131 if (reg_base == REG_X) /* (R26) */
4132 {
37ac04dc 4133 if (reg_src == REG_X)
a28e4651 4134 {
66ad0834 4135 /* "st X+,r26" is undefined */
37ac04dc 4136 if (reg_unused_after (insn, base))
02a011e9 4137 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
4138 "st X,r26" CR_TAB
4139 "adiw r26,1" CR_TAB
4140 "st X+,__tmp_reg__" CR_TAB
4141 "st X+,r28" CR_TAB
4142 "st X,r29");
a28e4651 4143 else
02a011e9 4144 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
4145 "st X,r26" CR_TAB
4146 "adiw r26,1" CR_TAB
4147 "st X+,__tmp_reg__" CR_TAB
4148 "st X+,r28" CR_TAB
4149 "st X,r29" CR_TAB
4150 "sbiw r26,3");
a28e4651 4151 }
37ac04dc 4152 else if (reg_base == reg_src + 2)
a28e4651 4153 {
37ac04dc 4154 if (reg_unused_after (insn, base))
02a011e9 4155 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
4156 "mov __tmp_reg__,%D1" CR_TAB
4157 "st %0+,%A1" CR_TAB
4158 "st %0+,%B1" CR_TAB
4159 "st %0+,__zero_reg__" CR_TAB
4160 "st %0,__tmp_reg__" CR_TAB
4161 "clr __zero_reg__");
a28e4651 4162 else
02a011e9 4163 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
4164 "mov __tmp_reg__,%D1" CR_TAB
4165 "st %0+,%A1" CR_TAB
4166 "st %0+,%B1" CR_TAB
4167 "st %0+,__zero_reg__" CR_TAB
4168 "st %0,__tmp_reg__" CR_TAB
4169 "clr __zero_reg__" CR_TAB
4170 "sbiw r26,3");
a28e4651 4171 }
02a011e9 4172 return *l=5, ("st %0+,%A1" CR_TAB
4173 "st %0+,%B1" CR_TAB
4174 "st %0+,%C1" CR_TAB
4175 "st %0,%D1" CR_TAB
4176 "sbiw r26,3");
a28e4651 4177 }
4178 else
02a011e9 4179 return *l=4, ("st %0,%A1" CR_TAB
4180 "std %0+1,%B1" CR_TAB
4181 "std %0+2,%C1" CR_TAB
4182 "std %0+3,%D1");
a28e4651 4183 }
37ac04dc 4184 else if (GET_CODE (base) == PLUS) /* (R + i) */
a28e4651 4185 {
37ac04dc 4186 int disp = INTVAL (XEXP (base, 1));
b4e6d2e2 4187
4188 if (AVR_TINY)
4189 return avr_out_movsi_mr_r_reg_disp_tiny (op, l);
4190
66ad0834 4191 reg_base = REGNO (XEXP (base, 0));
37ac04dc 4192 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
a28e4651 4193 {
66ad0834 4194 if (reg_base != REG_Y)
68435912 4195 fatal_insn ("incorrect insn:",insn);
db45d3ed 4196
37ac04dc 4197 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
02a011e9 4198 return *l = 6, ("adiw r28,%o0-60" CR_TAB
4199 "std Y+60,%A1" CR_TAB
4200 "std Y+61,%B1" CR_TAB
4201 "std Y+62,%C1" CR_TAB
4202 "std Y+63,%D1" CR_TAB
4203 "sbiw r28,%o0-60");
4204
4205 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
4206 "sbci r29,hi8(-%o0)" CR_TAB
4207 "st Y,%A1" CR_TAB
4208 "std Y+1,%B1" CR_TAB
4209 "std Y+2,%C1" CR_TAB
4210 "std Y+3,%D1" CR_TAB
4211 "subi r28,lo8(%o0)" CR_TAB
4212 "sbci r29,hi8(%o0)");
a28e4651 4213 }
66ad0834 4214 if (reg_base == REG_X)
4215 {
4216 /* (X + d) = R */
4217 if (reg_src == REG_X)
4218 {
4219 *l = 9;
02a011e9 4220 return ("mov __tmp_reg__,r26" CR_TAB
4221 "mov __zero_reg__,r27" CR_TAB
4222 "adiw r26,%o0" CR_TAB
4223 "st X+,__tmp_reg__" CR_TAB
4224 "st X+,__zero_reg__" CR_TAB
4225 "st X+,r28" CR_TAB
4226 "st X,r29" CR_TAB
4227 "clr __zero_reg__" CR_TAB
4228 "sbiw r26,%o0+3");
66ad0834 4229 }
4230 else if (reg_src == REG_X - 2)
4231 {
4232 *l = 9;
02a011e9 4233 return ("mov __tmp_reg__,r26" CR_TAB
4234 "mov __zero_reg__,r27" CR_TAB
4235 "adiw r26,%o0" CR_TAB
4236 "st X+,r24" CR_TAB
4237 "st X+,r25" CR_TAB
4238 "st X+,__tmp_reg__" CR_TAB
4239 "st X,__zero_reg__" CR_TAB
4240 "clr __zero_reg__" CR_TAB
4241 "sbiw r26,%o0+3");
66ad0834 4242 }
4243 *l = 6;
02a011e9 4244 return ("adiw r26,%o0" CR_TAB
4245 "st X+,%A1" CR_TAB
4246 "st X+,%B1" CR_TAB
4247 "st X+,%C1" CR_TAB
4248 "st X,%D1" CR_TAB
4249 "sbiw r26,%o0+3");
66ad0834 4250 }
02a011e9 4251 return *l=4, ("std %A0,%A1" CR_TAB
4252 "std %B0,%B1" CR_TAB
4253 "std %C0,%C1" CR_TAB
4254 "std %D0,%D1");
a28e4651 4255 }
37ac04dc 4256 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
02a011e9 4257 return *l=4, ("st %0,%D1" CR_TAB
4258 "st %0,%C1" CR_TAB
4259 "st %0,%B1" CR_TAB
4260 "st %0,%A1");
37ac04dc 4261 else if (GET_CODE (base) == POST_INC) /* (R++) */
02a011e9 4262 return *l=4, ("st %0,%A1" CR_TAB
4263 "st %0,%B1" CR_TAB
4264 "st %0,%C1" CR_TAB
4265 "st %0,%D1");
68435912 4266 fatal_insn ("unknown move insn:",insn);
a28e4651 4267 return "";
4268}
4269
37ac04dc 4270const char *
375204de 4271output_movsisf (rtx_insn *insn, rtx operands[], int *l)
a28e4651 4272{
e511e253 4273 int dummy;
4274 rtx dest = operands[0];
4275 rtx src = operands[1];
4276 int *real_l = l;
0dff9558 4277
590da9f2 4278 if (avr_mem_flash_p (src)
4279 || avr_mem_flash_p (dest))
4202ef11 4280 {
4281 return avr_out_lpm (insn, operands, real_l);
4282 }
4283
e511e253 4284 if (!l)
4285 l = &dummy;
0dff9558 4286
017c5b98 4287 gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
4288 if (REG_P (dest))
a28e4651 4289 {
017c5b98 4290 if (REG_P (src)) /* mov r,r */
0af74aa0 4291 {
e511e253 4292 if (true_regnum (dest) > true_regnum (src))
4293 {
0aab73c2 4294 if (AVR_HAVE_MOVW)
e511e253 4295 {
4296 *l = 2;
02a011e9 4297 return ("movw %C0,%C1" CR_TAB
4298 "movw %A0,%A1");
e511e253 4299 }
4300 *l = 4;
02a011e9 4301 return ("mov %D0,%D1" CR_TAB
4302 "mov %C0,%C1" CR_TAB
4303 "mov %B0,%B1" CR_TAB
4304 "mov %A0,%A1");
e511e253 4305 }
0af74aa0 4306 else
e511e253 4307 {
0aab73c2 4308 if (AVR_HAVE_MOVW)
e511e253 4309 {
4310 *l = 2;
02a011e9 4311 return ("movw %A0,%A1" CR_TAB
4312 "movw %C0,%C1");
e511e253 4313 }
4314 *l = 4;
02a011e9 4315 return ("mov %A0,%A1" CR_TAB
4316 "mov %B0,%B1" CR_TAB
4317 "mov %C0,%C1" CR_TAB
4318 "mov %D0,%D1");
e511e253 4319 }
0af74aa0 4320 }
e511e253 4321 else if (CONSTANT_P (src))
0af74aa0 4322 {
644ac9c5 4323 return output_reload_insisf (operands, NULL_RTX, real_l);
4324 }
017c5b98 4325 else if (MEM_P (src))
e511e253 4326 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
4327 }
017c5b98 4328 else if (MEM_P (dest))
e511e253 4329 {
8deb3959 4330 const char *templ;
37ac04dc 4331
9ce2d202 4332 if (src == CONST0_RTX (GET_MODE (dest)))
e511e253 4333 operands[1] = zero_reg_rtx;
37ac04dc 4334
8deb3959 4335 templ = out_movsi_mr_r (insn, operands, real_l);
e511e253 4336
4337 if (!real_l)
8deb3959 4338 output_asm_insn (templ, operands);
37ac04dc 4339
4340 operands[1] = src;
e511e253 4341 return "";
a28e4651 4342 }
68435912 4343 fatal_insn ("invalid insn:", insn);
a28e4651 4344 return "";
4345}
4346
02d9a2c3 4347
/* Handle loads of 24-bit types from memory to register. */

/* AVR_TINY helper: load a 24-bit value from memory SRC = OP[1],
   addressed by a plain pointer register (no displacement), into
   register OP[0].  INSN is the move insn; PLEN as for avr_asm_len
   (NULL: output asm, else accumulate length).  Returns "".  */

static const char*
avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the pointer: read high-to-low, keeping the
         middle byte in __tmp_reg__, so the pointer is clobbered last.  */
      return avr_asm_len (TINY_ADIW (%E1, %F1, 2) CR_TAB
                          "ld %C0,%1" CR_TAB
                          "ld __tmp_reg__,-%1" CR_TAB
                          TINY_SBIW (%E1, %F1, 1) CR_TAB
                          "ld %A0,%1" CR_TAB
                          "mov %B0,__tmp_reg__", op, plen, -8);
    }
  else
    {
      avr_asm_len ("ld %A0,%1+" CR_TAB
                   "ld %B0,%1+" CR_TAB
                   "ld %C0,%1", op, plen, -3);

      /* Undo the two post-increments unless the pointer dies here or
         was overwritten by the load itself (dest at base-2..base).  */
      if (reg_dest != reg_base - 2 &&
          !reg_unused_after (insn, base))
        {
          avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
        }
      return "";
    }
}
4382
4383static const char*
4384avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4385{
4386 rtx dest = op[0];
4387 rtx src = op[1];
4388 rtx base = XEXP (src, 0);
4389 int reg_dest = true_regnum (dest);
4390 int reg_base = true_regnum (base);
4391
4392 reg_base = true_regnum (XEXP (base, 0));
4393 if (reg_base == reg_dest)
4394 {
4395 return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
4396 "ld %C0,%b1" CR_TAB
4397 "ld __tmp_reg__,-%b1" CR_TAB
4398 TINY_SBIW (%I1, %J1, 1) CR_TAB
4399 "ld %A0,%b1" CR_TAB
4400 "mov %B0,__tmp_reg__", op, plen, -8);
4401 }
4402 else
4403 {
4404 avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
4405 "ld %A0,%b1+" CR_TAB
1a96adb9 4406 "ld %B0,%b1+" CR_TAB
b4e6d2e2 4407 "ld %C0,%b1", op, plen, -5);
4408
4409 if (reg_dest != (reg_base - 2)
4410 && !reg_unused_after (insn, XEXP (base, 0)))
4411 avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
1a96adb9 4412
b4e6d2e2 4413 return "";
4414 }
4415}
4416
02d9a2c3 4417static const char*
375204de 4418avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
02d9a2c3 4419{
4420 rtx dest = op[0];
4421 rtx src = op[1];
4422 rtx base = XEXP (src, 0);
4423 int reg_dest = true_regnum (dest);
4424 int reg_base = true_regnum (base);
0dff9558 4425
02d9a2c3 4426 if (reg_base > 0)
4427 {
b4e6d2e2 4428 if (AVR_TINY)
4429 return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);
4430
02d9a2c3 4431 if (reg_base == REG_X) /* (R26) */
4432 {
4433 if (reg_dest == REG_X)
4434 /* "ld r26,-X" is undefined */
4435 return avr_asm_len ("adiw r26,2" CR_TAB
4436 "ld r28,X" CR_TAB
4437 "ld __tmp_reg__,-X" CR_TAB
4438 "sbiw r26,1" CR_TAB
4439 "ld r26,X" CR_TAB
4440 "mov r27,__tmp_reg__", op, plen, -6);
4441 else
4442 {
4443 avr_asm_len ("ld %A0,X+" CR_TAB
4444 "ld %B0,X+" CR_TAB
4445 "ld %C0,X", op, plen, -3);
4446
4447 if (reg_dest != REG_X - 2
4448 && !reg_unused_after (insn, base))
4449 {
4450 avr_asm_len ("sbiw r26,2", op, plen, 1);
4451 }
4452
4453 return "";
4454 }
4455 }
4456 else /* reg_base != REG_X */
4457 {
4458 if (reg_dest == reg_base)
4459 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
4460 "ldd __tmp_reg__,%1+1" CR_TAB
4461 "ld %A0,%1" CR_TAB
4462 "mov %B0,__tmp_reg__", op, plen, -4);
4463 else
4464 return avr_asm_len ("ld %A0,%1" CR_TAB
4465 "ldd %B0,%1+1" CR_TAB
4466 "ldd %C0,%1+2", op, plen, -3);
4467 }
4468 }
4469 else if (GET_CODE (base) == PLUS) /* (R + i) */
4470 {
4471 int disp = INTVAL (XEXP (base, 1));
0dff9558 4472
b4e6d2e2 4473 if (AVR_TINY)
4474 return avr_out_load_psi_reg_disp_tiny (insn, op, plen);
4475
02d9a2c3 4476 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
4477 {
4478 if (REGNO (XEXP (base, 0)) != REG_Y)
4479 fatal_insn ("incorrect insn:",insn);
4480
4481 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
4482 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
4483 "ldd %A0,Y+61" CR_TAB
4484 "ldd %B0,Y+62" CR_TAB
4485 "ldd %C0,Y+63" CR_TAB
4486 "sbiw r28,%o1-61", op, plen, -5);
4487
4488 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
4489 "sbci r29,hi8(-%o1)" CR_TAB
4490 "ld %A0,Y" CR_TAB
4491 "ldd %B0,Y+1" CR_TAB
4492 "ldd %C0,Y+2" CR_TAB
4493 "subi r28,lo8(%o1)" CR_TAB
4494 "sbci r29,hi8(%o1)", op, plen, -7);
4495 }
4496
4497 reg_base = true_regnum (XEXP (base, 0));
4498 if (reg_base == REG_X)
4499 {
4500 /* R = (X + d) */
4501 if (reg_dest == REG_X)
4502 {
4503 /* "ld r26,-X" is undefined */
4504 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
4505 "ld r28,X" CR_TAB
4506 "ld __tmp_reg__,-X" CR_TAB
4507 "sbiw r26,1" CR_TAB
4508 "ld r26,X" CR_TAB
4509 "mov r27,__tmp_reg__", op, plen, -6);
4510 }
0dff9558 4511
27b3429e 4512 avr_asm_len ("adiw r26,%o1" CR_TAB
4513 "ld %A0,X+" CR_TAB
4514 "ld %B0,X+" CR_TAB
4515 "ld %C0,X", op, plen, -4);
02d9a2c3 4516
27b3429e 4517 if (reg_dest != REG_W
4518 && !reg_unused_after (insn, XEXP (base, 0)))
4519 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
02d9a2c3 4520
27b3429e 4521 return "";
02d9a2c3 4522 }
0dff9558 4523
02d9a2c3 4524 if (reg_dest == reg_base)
4525 return avr_asm_len ("ldd %C0,%C1" CR_TAB
4526 "ldd __tmp_reg__,%B1" CR_TAB
4527 "ldd %A0,%A1" CR_TAB
4528 "mov %B0,__tmp_reg__", op, plen, -4);
4529
4530 return avr_asm_len ("ldd %A0,%A1" CR_TAB
4531 "ldd %B0,%B1" CR_TAB
4532 "ldd %C0,%C1", op, plen, -3);
4533 }
4534 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4535 return avr_asm_len ("ld %C0,%1" CR_TAB
4536 "ld %B0,%1" CR_TAB
4537 "ld %A0,%1", op, plen, -3);
4538 else if (GET_CODE (base) == POST_INC) /* (R++) */
4539 return avr_asm_len ("ld %A0,%1" CR_TAB
4540 "ld %B0,%1" CR_TAB
4541 "ld %C0,%1", op, plen, -3);
4542
4543 else if (CONSTANT_ADDRESS_P (base))
b4e6d2e2 4544 {
4545 int n_words = AVR_TINY ? 3 : 6;
4546 return avr_asm_len ("lds %A0,%m1" CR_TAB
4547 "lds %B0,%m1+1" CR_TAB
4548 "lds %C0,%m1+2", op, plen , -n_words);
4549 }
0dff9558 4550
02d9a2c3 4551 fatal_insn ("unknown move insn:",insn);
4552 return "";
4553}
4554
b4e6d2e2 4555
4556static const char*
4557avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4558{
4559 rtx dest = op[0];
4560 rtx src = op[1];
4561 rtx base = XEXP (dest, 0);
4562 int reg_base = true_regnum (base);
4563 int reg_src = true_regnum (src);
4564
4565 if (reg_base == reg_src)
4566 {
4567 avr_asm_len ("st %0,%A1" CR_TAB
4568 "mov __tmp_reg__,%B1" CR_TAB
4569 TINY_ADIW (%E0, %F0, 1) CR_TAB /* st X+, r27 is undefined */
4570 "st %0+,__tmp_reg__" CR_TAB
4571 "st %0,%C1", op, plen, -6);
4572
4573 }
4574 else if (reg_src == reg_base - 2)
4575 {
4576 avr_asm_len ("st %0,%A1" CR_TAB
4577 "mov __tmp_reg__,%C1" CR_TAB
4578 TINY_ADIW (%E0, %F0, 1) CR_TAB
4579 "st %0+,%B1" CR_TAB
4580 "st %0,__tmp_reg__", op, plen, 6);
4581 }
4582 else
4583 {
4584 avr_asm_len ("st %0+,%A1" CR_TAB
4585 "st %0+,%B1" CR_TAB
4586 "st %0,%C1", op, plen, -3);
4587 }
4588
4589 if (!reg_unused_after (insn, base))
4590 avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
4591
4592 return "";
4593}
4594
/* AVR_TINY helper: store the 24-bit register SRC = OP[1] to memory
   OP[0] addressed with base register + displacement.  PLEN as for
   avr_asm_len.  Returns "".  */

static const char*
avr_out_store_psi_reg_disp_tiny (rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    {
      /* Source overlaps the pointer pair: park %A1/%B1 in the fixed
         registers before the pointer is advanced; restore
         __zero_reg__ afterwards.  */
      return avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                          "mov __zero_reg__,%B1" CR_TAB
                          TINY_ADIW (%I0, %J0, %o0) CR_TAB
                          "st %b0+,__tmp_reg__" CR_TAB
                          "st %b0+,__zero_reg__" CR_TAB
                          "st %b0,%C1" CR_TAB
                          "clr __zero_reg__" CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -10);
    }
  else if (reg_src == reg_base - 2)
    {
      /* %C1 is the pointer's low byte: save it before advancing.  */
      return avr_asm_len ("mov __tmp_reg__,%C1" CR_TAB
                          TINY_ADIW (%I0, %J0, %o0) CR_TAB
                          "st %b0+,%A1" CR_TAB
                          "st %b0+,%B1" CR_TAB
                          "st %b0,__tmp_reg__" CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -8);
    }

  return avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                      "st %b0+,%A1" CR_TAB
                      "st %b0+,%B1" CR_TAB
                      "st %b0,%C1" CR_TAB
                      TINY_SBIW (%I0, %J0, %o0+2), op, plen, -7);
}
4631
02d9a2c3 4632/* Handle store of 24-bit type from register or zero to memory. */
4633
4634static const char*
375204de 4635avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
02d9a2c3 4636{
4637 rtx dest = op[0];
4638 rtx src = op[1];
4639 rtx base = XEXP (dest, 0);
4640 int reg_base = true_regnum (base);
0dff9558 4641
02d9a2c3 4642 if (CONSTANT_ADDRESS_P (base))
b4e6d2e2 4643 {
4644 int n_words = AVR_TINY ? 3 : 6;
4645 return avr_asm_len ("sts %m0,%A1" CR_TAB
4646 "sts %m0+1,%B1" CR_TAB
4647 "sts %m0+2,%C1", op, plen, -n_words);
4648 }
0dff9558 4649
02d9a2c3 4650 if (reg_base > 0) /* (r) */
4651 {
b4e6d2e2 4652 if (AVR_TINY)
4653 return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);
4654
02d9a2c3 4655 if (reg_base == REG_X) /* (R26) */
4656 {
4657 gcc_assert (!reg_overlap_mentioned_p (base, src));
0dff9558 4658
02d9a2c3 4659 avr_asm_len ("st %0+,%A1" CR_TAB
4660 "st %0+,%B1" CR_TAB
4661 "st %0,%C1", op, plen, -3);
4662
4663 if (!reg_unused_after (insn, base))
4664 avr_asm_len ("sbiw r26,2", op, plen, 1);
4665
4666 return "";
4667 }
4668 else
4669 return avr_asm_len ("st %0,%A1" CR_TAB
4670 "std %0+1,%B1" CR_TAB
4671 "std %0+2,%C1", op, plen, -3);
4672 }
4673 else if (GET_CODE (base) == PLUS) /* (R + i) */
4674 {
4675 int disp = INTVAL (XEXP (base, 1));
b4e6d2e2 4676
4677 if (AVR_TINY)
4678 return avr_out_store_psi_reg_disp_tiny (op, plen);
4679
02d9a2c3 4680 reg_base = REGNO (XEXP (base, 0));
4681
4682 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4683 {
4684 if (reg_base != REG_Y)
4685 fatal_insn ("incorrect insn:",insn);
4686
4687 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4688 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
4689 "std Y+61,%A1" CR_TAB
4690 "std Y+62,%B1" CR_TAB
4691 "std Y+63,%C1" CR_TAB
467dcd24 4692 "sbiw r28,%o0-61", op, plen, -5);
02d9a2c3 4693
4694 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4695 "sbci r29,hi8(-%o0)" CR_TAB
4696 "st Y,%A1" CR_TAB
4697 "std Y+1,%B1" CR_TAB
4698 "std Y+2,%C1" CR_TAB
4699 "subi r28,lo8(%o0)" CR_TAB
4700 "sbci r29,hi8(%o0)", op, plen, -7);
4701 }
4702 if (reg_base == REG_X)
4703 {
4704 /* (X + d) = R */
4705 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
0dff9558 4706
02d9a2c3 4707 avr_asm_len ("adiw r26,%o0" CR_TAB
4708 "st X+,%A1" CR_TAB
4709 "st X+,%B1" CR_TAB
4710 "st X,%C1", op, plen, -4);
4711
4712 if (!reg_unused_after (insn, XEXP (base, 0)))
4713 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
4714
4715 return "";
4716 }
0dff9558 4717
02d9a2c3 4718 return avr_asm_len ("std %A0,%A1" CR_TAB
4719 "std %B0,%B1" CR_TAB
4720 "std %C0,%C1", op, plen, -3);
4721 }
4722 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4723 return avr_asm_len ("st %0,%C1" CR_TAB
4724 "st %0,%B1" CR_TAB
4725 "st %0,%A1", op, plen, -3);
4726 else if (GET_CODE (base) == POST_INC) /* (R++) */
4727 return avr_asm_len ("st %0,%A1" CR_TAB
4728 "st %0,%B1" CR_TAB
4729 "st %0,%C1", op, plen, -3);
4730
4731 fatal_insn ("unknown move insn:",insn);
4732 return "";
4733}
4734
4735
/* Move around 24-bit stuff. */

/* Output a 24-bit (PSImode) move for INSN with OP[0] = destination and
   OP[1] = source: reg-reg copy, constant load, memory load or memory
   store.  PLEN as for avr_asm_len.  Returns "".  */

const char *
avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];

  /* Flash accesses need an LPM sequence.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, op, plen);
    }

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy order chosen so overlapping ranges are safe.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              avr_asm_len ("mov %C0,%C1", op, plen, -1);

              if (AVR_HAVE_MOVW)
                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
              else
                return avr_asm_len ("mov %B0,%B1" CR_TAB
                                    "mov %A0,%A1", op, plen, 2);
            }
          else
            {
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %A0,%A1", op, plen, -1);
              else
                avr_asm_len ("mov %A0,%A1" CR_TAB
                             "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
            }
        }
      else if (CONSTANT_P (src))
        {
          return avr_out_reload_inpsi (op, NULL_RTX, plen);
        }
      else if (MEM_P (src))
        return avr_out_load_psi (insn, op, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* A zero source is stored from __zero_reg__.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);
  return "";
}
4795
b4e6d2e2 4796static const char*
4797avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
4798{
4799 rtx dest = op[0];
4800 rtx src = op[1];
4801 rtx x = XEXP (dest, 0);
4802
4803 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4804 {
4805 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4806 TINY_ADIW (%I0, %J0, %o0) CR_TAB
4807 "st %b0,__tmp_reg__", op, plen, -4);
4808 }
4809 else
4810 {
4811 avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
4812 "st %b0,%1" , op, plen, -3);
4813 }
4814
4815 if (!reg_unused_after (insn, XEXP (x,0)))
4816 avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);
4817
4818 return "";
4819}
02d9a2c3 4820
644ac9c5 4821static const char*
375204de 4822out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
a28e4651 4823{
37ac04dc 4824 rtx dest = op[0];
4825 rtx src = op[1];
4826 rtx x = XEXP (dest, 0);
0dff9558 4827
e511e253 4828 if (CONSTANT_ADDRESS_P (x))
a28e4651 4829 {
b4e6d2e2 4830 int n_words = AVR_TINY ? 1 : 2;
644ac9c5 4831 return optimize > 0 && io_address_operand (x, QImode)
4832 ? avr_asm_len ("out %i0,%1", op, plen, -1)
b4e6d2e2 4833 : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
e511e253 4834 }
5bd39e93 4835 else if (GET_CODE (x) == PLUS
4836 && REG_P (XEXP (x, 0))
4837 && CONST_INT_P (XEXP (x, 1)))
e511e253 4838 {
5bd39e93 4839 /* memory access by reg+disp */
db45d3ed 4840
5bd39e93 4841 int disp = INTVAL (XEXP (x, 1));
db45d3ed 4842
b4e6d2e2 4843 if (AVR_TINY)
4844 return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);
4845
5bd39e93 4846 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
4847 {
4848 if (REGNO (XEXP (x, 0)) != REG_Y)
4849 fatal_insn ("incorrect insn:",insn);
4850
4851 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
4852 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
4853 "std Y+63,%1" CR_TAB
4854 "sbiw r28,%o0-63", op, plen, -3);
4855
4856 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4857 "sbci r29,hi8(-%o0)" CR_TAB
4858 "st Y,%1" CR_TAB
4859 "subi r28,lo8(%o0)" CR_TAB
4860 "sbci r29,hi8(%o0)", op, plen, -5);
4861 }
e511e253 4862 else if (REGNO (XEXP (x,0)) == REG_X)
5bd39e93 4863 {
4864 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
4865 {
4866 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
4867 "adiw r26,%o0" CR_TAB
4868 "st X,__tmp_reg__", op, plen, -3);
4869 }
4870 else
4871 {
4872 avr_asm_len ("adiw r26,%o0" CR_TAB
4873 "st X,%1", op, plen, -2);
4874 }
0dff9558 4875
5bd39e93 4876 if (!reg_unused_after (insn, XEXP (x,0)))
4877 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
db45d3ed 4878
5bd39e93 4879 return "";
4880 }
0dff9558 4881
578c09ca 4882 return avr_asm_len ("std %0,%1", op, plen, -1);
a28e4651 4883 }
0dff9558 4884
7e7baeb5 4885 return avr_asm_len ("st %0,%1", op, plen, -1);
a28e4651 4886}
4887
0b6cf66f 4888
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  */

static const char*
avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP. */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* AVR_TINY STS is a 1-word instruction.  */
      int n_words = AVR_TINY ? 2 : 4;
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0,%A1" CR_TAB
                       "out %i0+1,%B1", op, plen, -2)

        : avr_asm_len ("sts %m0,%A1" CR_TAB
                       "sts %m0+1,%B1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined. */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26" CR_TAB
                     "adiw r26,1" CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* X now points at the high byte; restore it if still live.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement too large for STD: only Y may be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0" CR_TAB
                       "st X+,__tmp_reg__" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      /* Low-byte-first order requires manual pointer adjustment
         when the access must stay volatile-safe.  */
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "st X+,%A1" CR_TAB
                       "st X,%B1" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1" CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5002
b4e6d2e2 5003static const char*
5004avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
5005{
5006 rtx dest = op[0];
5007 rtx src = op[1];
5008 rtx base = XEXP (dest, 0);
5009 int reg_base = true_regnum (base);
5010 int reg_src = true_regnum (src);
5011 int mem_volatile_p = MEM_VOLATILE_P (dest);
5012
5013 if (reg_base == reg_src)
5014 {
5015 return !mem_volatile_p && reg_unused_after (insn, src)
5016 ? avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
5017 "st %0,%A1" CR_TAB
5018 TINY_ADIW (%E0, %F0, 1) CR_TAB
5019 "st %0,__tmp_reg__", op, plen, -5)
5020 : avr_asm_len ("mov __tmp_reg__,%B1" CR_TAB
5021 TINY_ADIW (%E0, %F0, 1) CR_TAB
5022 "st %0,__tmp_reg__" CR_TAB
5023 TINY_SBIW (%E0, %F0, 1) CR_TAB
5024 "st %0, %A1", op, plen, -7);
5025 }
5026
5027 return !mem_volatile_p && reg_unused_after (insn, base)
5028 ? avr_asm_len ("st %0+,%A1" CR_TAB
5029 "st %0,%B1", op, plen, -2)
5030 : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
5031 "st %0,%B1" CR_TAB
5032 "st -%0,%A1", op, plen, -4);
5033}
5034
/* AVR_TINY helper: store the 16-bit register SRC = OP[1] to memory
   OP[0] addressed with base register + displacement, high byte first.
   PLEN as for avr_asm_len.  Returns "".  */

static const char*
avr_out_movhi_mr_r_reg_disp_tiny (rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  /* When the source overlaps the pointer pair, park both bytes in the
     fixed registers before adjusting the pointer; __zero_reg__ is
     cleared again afterwards.  */
  return reg_src == reg_base
    ? avr_asm_len ("mov __tmp_reg__,%A1" CR_TAB
                   "mov __zero_reg__,%B1" CR_TAB
                   TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                   "st %b0,__zero_reg__" CR_TAB
                   "st -%b0,__tmp_reg__" CR_TAB
                   "clr __zero_reg__" CR_TAB
                   TINY_SBIW (%I0, %J0, %o0), op, plen, -9)

    : avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                   "st %b0,%B1" CR_TAB
                   "st -%b0,%A1" CR_TAB
                   TINY_SBIW (%I0, %J0, %o0), op, plen, -6);
}
5058
5059static const char*
5060avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
5061{
5062 return avr_asm_len (TINY_ADIW (%I0, %J0, 1) CR_TAB
5063 "st %p0,%B1" CR_TAB
5064 "st -%p0,%A1" CR_TAB
5065 TINY_ADIW (%I0, %J0, 2), op, plen, -6);
5066}
0b6cf66f 5067
/* Output a 16-bit store of register OP[1] to memory OP[0] for INSN,
   dispatching on the address form.  On non-xmega the high byte is
   written first; XMEGA is delegated to the low-byte-first helper.
   PLEN as for avr_asm_len.  Returns "".  */

static const char*
out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP. */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* AVR_TINY STS is a 1-word instruction.  */
      int n_words = AVR_TINY ? 2 : 4;
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                       "out %i0,%A1", op, plen, -2)

        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                       "sts %m0,%A1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined. */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1" CR_TAB
                         "st X,__tmp_reg__" CR_TAB
                         "sbiw r26,1" CR_TAB
                         "st X,r26", op, plen, -5);

      /* Post-increment writes the low byte first: only allowed when
         the access is not volatile and X dies here.  */
      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_disp_tiny (op, plen);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement too large for STD: only Y may be adjusted.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1" CR_TAB
                           "std Y+62,%A1" CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1" CR_TAB
                           "st Y,%A1" CR_TAB
                           "subi r28,lo8(%o0)" CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1" CR_TAB
                       "st X,__zero_reg__" CR_TAB
                       "st -X,__tmp_reg__" CR_TAB
                       "clr __zero_reg__" CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "st %0,%B1", op, plen, -2);

      if (AVR_TINY)
        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);

      /* Volatile: keep high-byte-first order by adjusting the pointer
         manually around the stores.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1" CR_TAB
                       "st -X,%A1" CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1" CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5200
20c71901 5201/* Return 1 if frame pointer for current function required. */
a28e4651 5202
a45076aa 5203static bool
ebdd0478 5204avr_frame_pointer_required_p (void)
a28e4651 5205{
18d50ae6 5206 return (cfun->calls_alloca
a12b9b80 5207 || cfun->calls_setjmp
5208 || cfun->has_nonlocal_label
5209 || crtl->args.info.nregs == 0
5210 || get_frame_size () > 0);
a28e4651 5211}
5212
faf8f400 5213/* Returns the condition of compare insn INSN, or UNKNOWN. */
a28e4651 5214
faf8f400 5215static RTX_CODE
375204de 5216compare_condition (rtx_insn *insn)
a28e4651 5217{
375204de 5218 rtx_insn *next = next_real_insn (insn);
cffa155c 5219
5220 if (next && JUMP_P (next))
a28e4651 5221 {
5222 rtx pat = PATTERN (next);
5223 rtx src = SET_SRC (pat);
0dff9558 5224
cffa155c 5225 if (IF_THEN_ELSE == GET_CODE (src))
5226 return GET_CODE (XEXP (src, 0));
a28e4651 5227 }
0dff9558 5228
cffa155c 5229 return UNKNOWN;
faf8f400 5230}
5231
faf8f400 5232
dfd52f2b 5233/* Returns true iff INSN is a tst insn that only tests the sign. */
5234
5235static bool
375204de 5236compare_sign_p (rtx_insn *insn)
faf8f400 5237{
5238 RTX_CODE cond = compare_condition (insn);
5239 return (cond == GE || cond == LT);
5240}
5241
dfd52f2b 5242
5243/* Returns true iff the next insn is a JUMP_INSN with a condition
faf8f400 5244 that needs to be swapped (GT, GTU, LE, LEU). */
5245
dfd52f2b 5246static bool
375204de 5247compare_diff_p (rtx_insn *insn)
faf8f400 5248{
5249 RTX_CODE cond = compare_condition (insn);
a28e4651 5250 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5251}
5252
dfd52f2b 5253/* Returns true iff INSN is a compare insn with the EQ or NE condition. */
a28e4651 5254
dfd52f2b 5255static bool
375204de 5256compare_eq_p (rtx_insn *insn)
a28e4651 5257{
faf8f400 5258 RTX_CODE cond = compare_condition (insn);
a28e4651 5259 return (cond == EQ || cond == NE);
5260}
5261
5262
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
                 Don't output anything.

   Note: XOP[0] and XOP[1] are overwritten while iterating over the
   bytes of the comparison (see the loop below), so callers must not
   rely on them afterwards.  */

const char*
avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against. */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison. */
  machine_mode mode;

  /* Number of bytes to operate on. */
  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder. */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);
    }

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this. */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      /* DEC maps +1 to 0; then OR all bytes so Z is set iff the whole
         value was +1.  Only valid because the register dies here.  */
      if (xval == const1_rtx)
        {
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      /* For -1: AND all bytes, then COM; result is 0 iff all bytes
         were 0xff.  */
      else if (xval == constm1_rtx)
        {
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise. */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate. */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      /* Overwrite the operand slots for use in the templates below.  */
      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63. */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              if (AVR_TINY)
                avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
              else
                avr_asm_len ("sbiw %0,%1", xop, plen, 1);

              /* SBIW handled two bytes at once, skip the high byte.  */
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* For ==/!= against a small negative value, ADIW of the
                 negated value sets Z just as well (register dies).  */
              return AVR_TINY
                ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
                : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy. */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register. */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* SBCI clobbers the register, so only if it dies here.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register. */

      gcc_assert (REG_P (xop[2]));

      /* Reuse the scratch if it already holds this byte value.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
5433
5434
83921eda 5435/* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
5436
5437const char*
375204de 5438avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
83921eda 5439{
5440 rtx xop[3];
5441
5442 xop[0] = gen_rtx_REG (DImode, 18);
5443 xop[1] = op[0];
5444 xop[2] = op[1];
5445
5446 return avr_out_compare (insn, xop, plen);
5447}
5448
20c71901 5449/* Output test instruction for HImode. */
a28e4651 5450
dfd52f2b 5451const char*
375204de 5452avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
a28e4651 5453{
faf8f400 5454 if (compare_sign_p (insn))
a28e4651 5455 {
dfd52f2b 5456 avr_asm_len ("tst %B0", op, plen, -1);
a28e4651 5457 }
dfd52f2b 5458 else if (reg_unused_after (insn, op[0])
5459 && compare_eq_p (insn))
a28e4651 5460 {
20c71901 5461 /* Faster than sbiw if we can clobber the operand. */
dfd52f2b 5462 avr_asm_len ("or %A0,%B0", op, plen, -1);
a28e4651 5463 }
dfd52f2b 5464 else
a28e4651 5465 {
dfd52f2b 5466 avr_out_compare (insn, op, plen);
a28e4651 5467 }
dfd52f2b 5468
5469 return "";
a28e4651 5470}
5471
5472
02d9a2c3 5473/* Output test instruction for PSImode. */
5474
5475const char*
375204de 5476avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
02d9a2c3 5477{
5478 if (compare_sign_p (insn))
5479 {
5480 avr_asm_len ("tst %C0", op, plen, -1);
5481 }
5482 else if (reg_unused_after (insn, op[0])
5483 && compare_eq_p (insn))
5484 {
5485 /* Faster than sbiw if we can clobber the operand. */
5486 avr_asm_len ("or %A0,%B0" CR_TAB
5487 "or %A0,%C0", op, plen, -2);
5488 }
5489 else
5490 {
5491 avr_out_compare (insn, op, plen);
5492 }
5493
5494 return "";
5495}
5496
5497
20c71901 5498/* Output test instruction for SImode. */
a28e4651 5499
dfd52f2b 5500const char*
375204de 5501avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
a28e4651 5502{
faf8f400 5503 if (compare_sign_p (insn))
a28e4651 5504 {
dfd52f2b 5505 avr_asm_len ("tst %D0", op, plen, -1);
a28e4651 5506 }
dfd52f2b 5507 else if (reg_unused_after (insn, op[0])
5508 && compare_eq_p (insn))
a28e4651 5509 {
dfd52f2b 5510 /* Faster than sbiw if we can clobber the operand. */
5511 avr_asm_len ("or %A0,%B0" CR_TAB
5512 "or %A0,%C0" CR_TAB
5513 "or %A0,%D0", op, plen, -3);
5514 }
5515 else
5516 {
5517 avr_out_compare (insn, op, plen);
a28e4651 5518 }
dfd52f2b 5519
5520 return "";
a28e4651 5521}
5522
5523
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
       available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Set *PLEN to the length (in words) of the sequence
       and don't output anything.  */

void
out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
                    int *plen, int t_len)
{
  /* SECOND_LABEL: the count is in a register and may be 0, so jump to
     the loop test first (label "2:") instead of falling into the body.  */
  bool second_label = true;
  /* SAVED_IN_TMP: an LD reg was commandeered as loop counter and its
     original contents parked in __tmp_reg__; restore it at the end.  */
  bool saved_in_tmp = false;
  /* USE_ZERO_REG: __zero_reg__ doubles as the loop counter (see below).  */
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A usable scratch is present only in a PARALLEL pattern with a
         real register in operand 3.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop. */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster. */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again. */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift. */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* Count is a known-positive constant: the loop body always runs
         at least once, no need to test before entering.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Shift count in memory: load it into __tmp_reg__ first.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if decrementing it in place would clobber a
         live value or the register being shifted.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* Zero-reg trick counts down by LSR until the set bit falls out
     (N flag clear while bit still in place -> brpl would mis-serve;
     the brne/brpl choice below pairs with the decrement style).  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
5645
5646
/* 8bit shift left ((char)x << i)

   Return the assembler template for the shift; if LEN != NULL
   additionally store the instruction count (in words) in *LEN.
   Non-constant or large counts fall back to out_shift_with_cnt.  */

const char *
ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= width: result is 0.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* SWAP exchanges nibbles; mask the low nibble away.  Needs
             ANDI, hence an upper (LD) register.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0" CR_TAB
                      "lsl %0" CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* Rotate bit 0 into C, clear, rotate C into bit 7.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
5742
5743
/* 16bit shift left ((short)x << i)

   Return the assembler template for the shift; if LEN != NULL
   additionally store the instruction count (in words) in *LEN.
   Cases not handled here (and non-constant counts) fall back to
   out_shift_with_cnt.  The "break" comments give the word count of
   the generic fallback sequence for that case.  */

const char *
ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* SCRATCH: pattern provides a spare LD register as operand 3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* LDI_OK: target register accepts immediate operands (R16..R31).  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= width: result is 0.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "swap %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3" CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3" CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Implemented as a right shift by 2 of the bytes swapped.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0" CR_TAB
                  "ror %A0" CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          *len = 5;
          return ("lsr %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "ror %A0");

        case 8:
          /* Whole-byte move.  */
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* MUL by 0x20 == shift left by 5; high byte lands in r0.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0" CR_TAB
                      "swap %B0" CR_TAB
                      "lsl %B0" CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x20 multiplier in r1 bit-wise (no LD reg).  */
              *len = 6;
              return ("set" CR_TAB
                      "bld r1,5" CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small inline loop: 6 single-register shifts.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0" CR_TAB
                      "dec %A0" CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* As a right shift by 2 with the result in the high byte.  */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          /* Only bit 0 survives, rotated into bit 7 of the high byte.  */
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      /* Unhandled constant: restore LEN (possibly NULL) for the
         generic code below.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
5999
6000
/* 24-bit shift left

   Return "".  PLEN semantics as in avr_out_compare.  Only whole-byte
   counts and 23 are special-cased; everything else goes through
   out_shift_with_cnt.  */

const char*
avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shift count >= width: result is 0.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Copy order depends on how source/dest overlap.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0" CR_TAB
                                  "mov %B0,%A1" CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* NOTE(review): copies from %A0, not %A1 — appears to rely
               on the insn's operand constraints making them coincide
               unless reg0 + 2 == reg1; confirm against the md pattern.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0" CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 survives, moved into bit 7 of the top byte.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
6062
6063
/* 32bit shift left ((long)x << i)

   Return the assembler template for the shift; if LEN != NULL
   additionally store the instruction count (in words) in *LEN.
   Only whole-byte counts and 31 are special-cased.  */

const char *
ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= width: result is 0.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Copy order depends on how source/dest overlap.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "clr %A0");
            else
              return ("clr %A0" CR_TAB
                      "mov %B0,%A1" CR_TAB
                      "mov %C0,%B1" CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* Source's low word already sits in dest's high word.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0" CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1" CR_TAB
                                "mov %D0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "clr %A0");
          }

        case 24:
          *len = 4;
          return ("mov %D0,%A1" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, moved into bit 7 of the top byte.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      /* Unhandled constant: restore LEN (possibly NULL) for the
         generic code below.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
6152
/* 8bit arithmetic shift right ((signed char)x >> i)

   Return the assembler template for the shift; if LEN != NULL
   additionally store the instruction count (in words) in *LEN.  */

const char *
ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return "asr %0";

        case 2:
          *len = 2;
          return ("asr %0" CR_TAB
                  "asr %0");

        case 3:
          *len = 3;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 4:
          *len = 4;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 5:
          *len = 5;
          return ("asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0" CR_TAB
                  "asr %0");

        case 6:
          /* Save bit 6, sign-fill via SBC, then restore it as bit 0.  */
          *len = 4;
          return ("bst %0,6" CR_TAB
                  "lsl %0" CR_TAB
                  "sbc %0,%0" CR_TAB
                  "bld %0,0");

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Shift sign into C, then SBC replicates it to all bits.
             Counts >= 7 all yield 0 or -1, so they share this code.  */
          *len = 2;
          return ("lsl %0" CR_TAB
                  "sbc %0,%0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
6223
6224
/* 16bit arithmetic shift right ((signed short)x >> i)

   Return the assembler template for the shift; if LEN != NULL
   additionally store the instruction count (in words) in *LEN.
   The "break" comments give the word count of the generic fallback
   sequence for that case.  */

const char *
ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* SCRATCH: pattern provides a spare LD register as operand 3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* LDI_OK: target register accepts immediate operands (R16..R31).  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "lsl __tmp_reg__" CR_TAB
                  "rol %A0" CR_TAB
                  "rol %B0");

        case 7:
          *len = 4;
          return ("lsl %A0" CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0" CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* In-place: SBC sign-fill; otherwise conditional DEC.  */
            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0" CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0" CR_TAB
                                "sbrc %A0,7" CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Signed MULS by 2^(16-count) leaves the result in r1.  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "sbc %B0,%B0" CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0" CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0" CR_TAB
                  "asr %A0");

        case 14:
          *len = 5;
          return ("lsl %B0" CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Counts >= 15 all yield 0 or -1: replicate the sign.  */
          return *len = 3, ("lsl %B0" CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      /* Unhandled constant: restore LEN (possibly NULL) for the
         generic code below.  */
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
6386
6387
/* 24-bit arithmetic shift right

   Return "".  PLEN semantics as in avr_out_compare.  Only whole-byte
   counts and >= 23 are special-cased.  */

const char*
avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Copy order depends on how source/dest overlap; the
             sign-extension of the top byte via SBRC/DEC must read the
             source sign before it might be clobbered.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0" CR_TAB
                                "sbrc %C1,7" CR_TAB
                                "dec %C0" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          /* Sign-extend: 0x00 or (via COM) 0xff into the upper bytes.  */
          return avr_asm_len ("clr %B0" CR_TAB
                              "sbrc %A0,7" CR_TAB
                              "com %B0" CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Counts >= 23 all yield 0 or -1: replicate the sign.  */
          return avr_asm_len ("lsl %C0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
6445
6446
/* 32-bit arithmetic shift right ((signed long)x >> i)

   Return the assembler template for the shift; if LEN != NULL
   additionally store the instruction count (in words) in *LEN.
   Only whole-byte counts and >= 31 are special-cased.  */

const char *
ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Copy order depends on how source/dest overlap.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0" CR_TAB
                      "sbrc %C0,7" CR_TAB
                      "dec %D0");
            else
              return ("clr %D0" CR_TAB
                      "sbrc %D1,7" CR_TAB
                      "dec %D0" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Source's high word already sits in dest's low word.  */
            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0" CR_TAB
                                "sbrc %B0,7" CR_TAB
                                "com %D0" CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0" CR_TAB
                            "sbrc %A0,7" CR_TAB
                            "com %D0" CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Counts >= 31 all yield 0 or -1: replicate the sign.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0" CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      /* Unhandled constant: restore LEN (possibly NULL) for the
         generic code below.  */
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
6543
0dff9558 6544/* 8-bit logic shift right ((unsigned char)x >> i) */
a28e4651 6545
37ac04dc 6546const char *
375204de 6547lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
a28e4651 6548{
6549 if (GET_CODE (operands[2]) == CONST_INT)
6550 {
6551 int k;
1cb39658 6552
a28e4651 6553 if (!len)
6554 len = &k;
0dff9558 6555
a28e4651 6556 switch (INTVAL (operands[2]))
6557 {
1cb39658 6558 default:
5667001b 6559 if (INTVAL (operands[2]) < 8)
6560 break;
6561
1cb39658 6562 *len = 1;
02a011e9 6563 return "clr %0";
1cb39658 6564
a28e4651 6565 case 1:
1cb39658 6566 *len = 1;
02a011e9 6567 return "lsr %0";
1cb39658 6568
a28e4651 6569 case 2:
1cb39658 6570 *len = 2;
02a011e9 6571 return ("lsr %0" CR_TAB
6572 "lsr %0");
a28e4651 6573 case 3:
1cb39658 6574 *len = 3;
02a011e9 6575 return ("lsr %0" CR_TAB
6576 "lsr %0" CR_TAB
6577 "lsr %0");
0dff9558 6578
a28e4651 6579 case 4:
0af74aa0 6580 if (test_hard_reg_class (LD_REGS, operands[0]))
a28e4651 6581 {
6582 *len=2;
02a011e9 6583 return ("swap %0" CR_TAB
6584 "andi %0,0x0f");
a28e4651 6585 }
1cb39658 6586 *len = 4;
02a011e9 6587 return ("lsr %0" CR_TAB
6588 "lsr %0" CR_TAB
6589 "lsr %0" CR_TAB
6590 "lsr %0");
0dff9558 6591
a28e4651 6592 case 5:
0af74aa0 6593 if (test_hard_reg_class (LD_REGS, operands[0]))
a28e4651 6594 {
1cb39658 6595 *len = 3;
02a011e9 6596 return ("swap %0" CR_TAB
6597 "lsr %0" CR_TAB
6598 "andi %0,0x7");
a28e4651 6599 }
1cb39658 6600 *len = 5;
02a011e9 6601 return ("lsr %0" CR_TAB
6602 "lsr %0" CR_TAB
6603 "lsr %0" CR_TAB
6604 "lsr %0" CR_TAB
6605 "lsr %0");
0dff9558 6606
a28e4651 6607 case 6:
0af74aa0 6608 if (test_hard_reg_class (LD_REGS, operands[0]))
a28e4651 6609 {
1cb39658 6610 *len = 4;
02a011e9 6611 return ("swap %0" CR_TAB
6612 "lsr %0" CR_TAB
6613 "lsr %0" CR_TAB
6614 "andi %0,0x3");
a28e4651 6615 }
1cb39658 6616 *len = 6;
02a011e9 6617 return ("lsr %0" CR_TAB
6618 "lsr %0" CR_TAB
6619 "lsr %0" CR_TAB
6620 "lsr %0" CR_TAB
6621 "lsr %0" CR_TAB
6622 "lsr %0");
0dff9558 6623
a28e4651 6624 case 7:
1cb39658 6625 *len = 3;
02a011e9 6626 return ("rol %0" CR_TAB
6627 "clr %0" CR_TAB
6628 "rol %0");
a28e4651 6629 }
6630 }
1cb39658 6631 else if (CONSTANT_P (operands[2]))
68435912 6632 fatal_insn ("internal compiler error. Incorrect shift:", insn);
0dff9558 6633
02a011e9 6634 out_shift_with_cnt ("lsr %0",
6635 insn, operands, len, 1);
a28e4651 6636 return "";
6637}
6638
0dff9558 6639/* 16-bit logic shift right ((unsigned short)x >> i) */
a28e4651 6640
37ac04dc 6641const char *
375204de 6642lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
a28e4651 6643{
6644 if (GET_CODE (operands[2]) == CONST_INT)
6645 {
b681d971 6646 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6647 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
a28e4651 6648 int k;
1cb39658 6649 int *t = len;
b681d971 6650
a28e4651 6651 if (!len)
6652 len = &k;
0dff9558 6653
a28e4651 6654 switch (INTVAL (operands[2]))
6655 {
5667001b 6656 default:
6657 if (INTVAL (operands[2]) < 16)
6658 break;
6659
6660 *len = 2;
02a011e9 6661 return ("clr %B0" CR_TAB
6662 "clr %A0");
5667001b 6663
b681d971 6664 case 4:
6665 if (optimize_size && scratch)
6666 break; /* 5 */
6667 if (ldi_ok)
6668 {
6669 *len = 6;
02a011e9 6670 return ("swap %B0" CR_TAB
6671 "swap %A0" CR_TAB
6672 "andi %A0,0x0f" CR_TAB
6673 "eor %A0,%B0" CR_TAB
6674 "andi %B0,0x0f" CR_TAB
6675 "eor %A0,%B0");
b681d971 6676 }
6677 if (scratch)
6678 {
6679 *len = 7;
02a011e9 6680 return ("swap %B0" CR_TAB
6681 "swap %A0" CR_TAB
6682 "ldi %3,0x0f" CR_TAB
ef51d1e3 6683 "and %A0,%3" CR_TAB
02a011e9 6684 "eor %A0,%B0" CR_TAB
ef51d1e3 6685 "and %B0,%3" CR_TAB
02a011e9 6686 "eor %A0,%B0");
b681d971 6687 }
6688 break; /* optimize_size ? 6 : 8 */
6689
6690 case 5:
6691 if (optimize_size)
6692 break; /* scratch ? 5 : 6 */
6693 if (ldi_ok)
6694 {
6695 *len = 8;
02a011e9 6696 return ("lsr %B0" CR_TAB
6697 "ror %A0" CR_TAB
6698 "swap %B0" CR_TAB
6699 "swap %A0" CR_TAB
6700 "andi %A0,0x0f" CR_TAB
6701 "eor %A0,%B0" CR_TAB
6702 "andi %B0,0x0f" CR_TAB
6703 "eor %A0,%B0");
b681d971 6704 }
6705 if (scratch)
6706 {
6707 *len = 9;
02a011e9 6708 return ("lsr %B0" CR_TAB
6709 "ror %A0" CR_TAB
6710 "swap %B0" CR_TAB
6711 "swap %A0" CR_TAB
6712 "ldi %3,0x0f" CR_TAB
ef51d1e3 6713 "and %A0,%3" CR_TAB
02a011e9 6714 "eor %A0,%B0" CR_TAB
ef51d1e3 6715 "and %B0,%3" CR_TAB
02a011e9 6716 "eor %A0,%B0");
b681d971 6717 }
6718 break; /* 10 */
6719
6720 case 6:
6721 if (optimize_size)
6722 break; /* scratch ? 5 : 6 */
6723 *len = 9;
02a011e9 6724 return ("clr __tmp_reg__" CR_TAB
6725 "lsl %A0" CR_TAB
6726 "rol %B0" CR_TAB
6727 "rol __tmp_reg__" CR_TAB
6728 "lsl %A0" CR_TAB
6729 "rol %B0" CR_TAB
6730 "rol __tmp_reg__" CR_TAB
6731 "mov %A0,%B0" CR_TAB
6732 "mov %B0,__tmp_reg__");
28f5cc4d 6733
6734 case 7:
6735 *len = 5;
02a011e9 6736 return ("lsl %A0" CR_TAB
6737 "mov %A0,%B0" CR_TAB
6738 "rol %A0" CR_TAB
6739 "sbc %B0,%B0" CR_TAB
6740 "neg %B0");
28f5cc4d 6741
a28e4651 6742 case 8:
02a011e9 6743 return *len = 2, ("mov %A0,%B1" CR_TAB
6744 "clr %B0");
28f5cc4d 6745
6746 case 9:
6747 *len = 3;
02a011e9 6748 return ("mov %A0,%B0" CR_TAB
6749 "clr %B0" CR_TAB
6750 "lsr %A0");
28f5cc4d 6751
6752 case 10:
6753 *len = 4;
02a011e9 6754 return ("mov %A0,%B0" CR_TAB
6755 "clr %B0" CR_TAB
6756 "lsr %A0" CR_TAB
6757 "lsr %A0");
28f5cc4d 6758
6759 case 11:
6760 *len = 5;
02a011e9 6761 return ("mov %A0,%B0" CR_TAB
6762 "clr %B0" CR_TAB
6763 "lsr %A0" CR_TAB
6764 "lsr %A0" CR_TAB
6765 "lsr %A0");
28f5cc4d 6766
6767 case 12:
b681d971 6768 if (ldi_ok)
28f5cc4d 6769 {
6770 *len = 4;
02a011e9 6771 return ("mov %A0,%B0" CR_TAB
6772 "clr %B0" CR_TAB
6773 "swap %A0" CR_TAB
6774 "andi %A0,0x0f");
28f5cc4d 6775 }
b681d971 6776 if (scratch)
6777 {
6778 *len = 5;
02a011e9 6779 return ("mov %A0,%B0" CR_TAB
6780 "clr %B0" CR_TAB
6781 "swap %A0" CR_TAB
6782 "ldi %3,0x0f" CR_TAB
ef51d1e3 6783 "and %A0,%3");
b681d971 6784 }
6785 *len = 6;
02a011e9 6786 return ("mov %A0,%B0" CR_TAB
6787 "clr %B0" CR_TAB
6788 "lsr %A0" CR_TAB
6789 "lsr %A0" CR_TAB
6790 "lsr %A0" CR_TAB
6791 "lsr %A0");
28f5cc4d 6792
6793 case 13:
b681d971 6794 if (ldi_ok)
28f5cc4d 6795 {
6796 *len = 5;
02a011e9 6797 return ("mov %A0,%B0" CR_TAB
6798 "clr %B0" CR_TAB
6799 "swap %A0" CR_TAB
6800 "lsr %A0" CR_TAB
6801 "andi %A0,0x07");
28f5cc4d 6802 }
8cc5a1af 6803 if (AVR_HAVE_MUL && scratch)
28f5cc4d 6804 {
6805 *len = 5;
02a011e9 6806 return ("ldi %3,0x08" CR_TAB
6807 "mul %B0,%3" CR_TAB
6808 "mov %A0,r1" CR_TAB
6809 "clr %B0" CR_TAB
6810 "clr __zero_reg__");
28f5cc4d 6811 }
b681d971 6812 if (optimize_size && scratch)
6813 break; /* 5 */
6814 if (scratch)
6815 {
6816 *len = 6;
02a011e9 6817 return ("mov %A0,%B0" CR_TAB
6818 "clr %B0" CR_TAB
6819 "swap %A0" CR_TAB
6820 "lsr %A0" CR_TAB
6821 "ldi %3,0x07" CR_TAB
ef51d1e3 6822 "and %A0,%3");
b681d971 6823 }
8cc5a1af 6824 if (AVR_HAVE_MUL)
b681d971 6825 {
6826 *len = 6;
6827 return ("set" CR_TAB
02a011e9 6828 "bld r1,3" CR_TAB
6829 "mul %B0,r1" CR_TAB
6830 "mov %A0,r1" CR_TAB
6831 "clr %B0" CR_TAB
6832 "clr __zero_reg__");
b681d971 6833 }
6834 *len = 7;
02a011e9 6835 return ("mov %A0,%B0" CR_TAB
6836 "clr %B0" CR_TAB
6837 "lsr %A0" CR_TAB
6838 "lsr %A0" CR_TAB
6839 "lsr %A0" CR_TAB
6840 "lsr %A0" CR_TAB
6841 "lsr %A0");
28f5cc4d 6842
6843 case 14:
8cc5a1af 6844 if (AVR_HAVE_MUL && ldi_ok)
b681d971 6845 {
6846 *len = 5;
02a011e9 6847 return ("ldi %A0,0x04" CR_TAB
6848 "mul %B0,%A0" CR_TAB
6849 "mov %A0,r1" CR_TAB
6850 "clr %B0" CR_TAB
6851 "clr __zero_reg__");
b681d971 6852 }
8cc5a1af 6853 if (AVR_HAVE_MUL && scratch)
28f5cc4d 6854 {
6855 *len = 5;
02a011e9 6856 return ("ldi %3,0x04" CR_TAB
6857 "mul %B0,%3" CR_TAB
6858 "mov %A0,r1" CR_TAB
6859 "clr %B0" CR_TAB
6860 "clr __zero_reg__");
28f5cc4d 6861 }
b681d971 6862 if (optimize_size && ldi_ok)
6863 {
6864 *len = 5;
02a011e9 6865 return ("mov %A0,%B0" CR_TAB
6866 "ldi %B0,6" "\n1:\t"
6867 "lsr %A0" CR_TAB
6868 "dec %B0" CR_TAB
6869 "brne 1b");
b681d971 6870 }
6871 if (optimize_size && scratch)
6872 break; /* 5 */
6873 *len = 6;
02a011e9 6874 return ("clr %A0" CR_TAB
6875 "lsl %B0" CR_TAB
6876 "rol %A0" CR_TAB
6877 "lsl %B0" CR_TAB
6878 "rol %A0" CR_TAB
6879 "clr %B0");
28f5cc4d 6880
8a2a7305 6881 case 15:
1cb39658 6882 *len = 4;
02a011e9 6883 return ("clr %A0" CR_TAB
6884 "lsl %B0" CR_TAB
6885 "rol %A0" CR_TAB
6886 "clr %B0");
a28e4651 6887 }
28f5cc4d 6888 len = t;
a28e4651 6889 }
02a011e9 6890 out_shift_with_cnt ("lsr %B0" CR_TAB
6891 "ror %A0", insn, operands, len, 2);
a28e4651 6892 return "";
6893}
6894
02d9a2c3 6895
6896/* 24-bit logic shift right */
6897
const char*
avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  /* Output a 24-bit logical shift right of OP[0] by OP[2].
     If PLEN is non-NULL, only set *PLEN to the sequence length in words;
     otherwise print the instructions.  */

  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Byte move; order of moves depends on how DEST/SRC overlap.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0", op, plen, 3);
          else
            return avr_asm_len ("clr %C0" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 3);

        case 16:
          /* If DEST == SRC + 2, %A0 already holds %C1.  */
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0" CR_TAB
                              "clr %C0", op, plen, 2);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Only the sign bit remains; materialize it as 0 or 1.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "sbrc %C0,7" CR_TAB
                              "inc %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 5);
        } /* switch */
    }

  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
6948
6949
0dff9558 6950/* 32-bit logic shift right ((unsigned int)x >> i) */
a28e4651 6951
37ac04dc 6952const char *
375204de 6953lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
a28e4651 6954{
6955 if (GET_CODE (operands[2]) == CONST_INT)
6956 {
6957 int k;
1cb39658 6958 int *t = len;
0dff9558 6959
a28e4651 6960 if (!len)
6961 len = &k;
0dff9558 6962
a28e4651 6963 switch (INTVAL (operands[2]))
6964 {
5667001b 6965 default:
6966 if (INTVAL (operands[2]) < 32)
6967 break;
6968
0aab73c2 6969 if (AVR_HAVE_MOVW)
02a011e9 6970 return *len = 3, ("clr %D0" CR_TAB
6971 "clr %C0" CR_TAB
6972 "movw %A0,%C0");
5667001b 6973 *len = 4;
02a011e9 6974 return ("clr %D0" CR_TAB
6975 "clr %C0" CR_TAB
6976 "clr %B0" CR_TAB
6977 "clr %A0");
5667001b 6978
a28e4651 6979 case 8:
6980 {
6981 int reg0 = true_regnum (operands[0]);
6982 int reg1 = true_regnum (operands[1]);
1cb39658 6983 *len = 4;
a28e4651 6984 if (reg0 <= reg1)
02a011e9 6985 return ("mov %A0,%B1" CR_TAB
6986 "mov %B0,%C1" CR_TAB
6987 "mov %C0,%D1" CR_TAB
6988 "clr %D0");
a28e4651 6989 else
02a011e9 6990 return ("clr %D0" CR_TAB
6991 "mov %C0,%D1" CR_TAB
6992 "mov %B0,%C1" CR_TAB
0dff9558 6993 "mov %A0,%B1");
a28e4651 6994 }
0dff9558 6995
a28e4651 6996 case 16:
6997 {
6998 int reg0 = true_regnum (operands[0]);
6999 int reg1 = true_regnum (operands[1]);
ab3a6ef8 7000
7001 if (reg0 == reg1 + 2)
02a011e9 7002 return *len = 2, ("clr %C0" CR_TAB
7003 "clr %D0");
ab3a6ef8 7004 if (AVR_HAVE_MOVW)
02a011e9 7005 return *len = 3, ("movw %A0,%C1" CR_TAB
7006 "clr %C0" CR_TAB
7007 "clr %D0");
a28e4651 7008 else
02a011e9 7009 return *len = 4, ("mov %B0,%D1" CR_TAB
7010 "mov %A0,%C1" CR_TAB
7011 "clr %C0" CR_TAB
7012 "clr %D0");
a28e4651 7013 }
0dff9558 7014
a28e4651 7015 case 24:
02a011e9 7016 return *len = 4, ("mov %A0,%D1" CR_TAB
7017 "clr %B0" CR_TAB
7018 "clr %C0" CR_TAB
7019 "clr %D0");
28f5cc4d 7020
7021 case 31:
7022 *len = 6;
02a011e9 7023 return ("clr %A0" CR_TAB
7024 "sbrc %D0,7" CR_TAB
7025 "inc %A0" CR_TAB
7026 "clr %B0" CR_TAB
7027 "clr %C0" CR_TAB
7028 "clr %D0");
a28e4651 7029 }
28f5cc4d 7030 len = t;
a28e4651 7031 }
02a011e9 7032 out_shift_with_cnt ("lsr %D0" CR_TAB
7033 "ror %C0" CR_TAB
7034 "ror %B0" CR_TAB
7035 "ror %A0", insn, operands, len, 4);
a28e4651 7036 return "";
7037}
7038
6be828c1 7039
b4ebb666 7040/* Output addition of register XOP[0] and compile time constant XOP[2].
7041 CODE == PLUS: perform addition by using ADD instructions or
7042 CODE == MINUS: perform addition by using SUB instructions:
0dff9558 7043
37bcc7b9 7044 XOP[0] = XOP[0] + XOP[2]
0dff9558 7045
b4ebb666 7046 Or perform addition/subtraction with register XOP[2] depending on CODE:
0dff9558 7047
b4ebb666 7048 XOP[0] = XOP[0] +/- XOP[2]
37bcc7b9 7049
b4ebb666 7050 If PLEN == NULL, print assembler instructions to perform the operation;
7051 otherwise, set *PLEN to the length of the instruction sequence (in words)
7052 printed with PLEN == NULL. XOP[3] is an 8-bit scratch register or NULL_RTX.
7053 Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7054
7055 CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7056 CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7057 If CODE_SAT != UNKNOWN then SIGN contains the sign of the summand resp.
7058 the subtrahend in the original insn, provided it is a compile time constant.
7059 In all other cases, SIGN is 0.
7060
fdbf5b19 7061 If OUT_LABEL is true, print the final 0: label which is needed for
7062 saturated addition / subtraction. The only case where OUT_LABEL = false
7063 is useful is for saturated addition / subtraction performed during
7064 fixed-point rounding, cf. `avr_out_round'. */
37bcc7b9 7065
static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)
{
  /* MODE of the operation.  */
  machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  machine_mode imode = int_mode_for_mode (mode);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  if (REG_P (xop[2]))
    {
      /* Register summand: plain byte-wise ADD/ADC resp. SUB/SBC chain.  */
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;

      for (i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

          /* x - x is 0; no saturation can occur for MINUS.  */
          if (MINUS == code)
            return;
        }

      goto saturate;
    }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    {
      *pcc = CC_NONE;
      return;
    }

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  /* 8-bit scratch register or NULL_RTX, cf. the gcc_asserts below.  */
  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_CZN;
                }

              /* Consumed two bytes at once.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Skip zero bytes; once started, carry still must propagate.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          /* +/-1 in the top byte only: a single INC/DEC will do.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          *pcc = CC_CLOBBER;
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

 saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where A is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.


     unsigned
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]


     signed
     operation        | code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  V == 1  |  const, reg  |     s+    |  [1s]
     +  as  a - (-b)  |  sub  |  V == 1  |  const       |     s+    |  [2s]
     -  as  a - b     |  sub  |  V == 1  |  const, reg  |     s-    |  [3s]
     -  as  a + (-b)  |  add  |  V == 1  |  const       |     s-    |  [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.
  */

  /* op[0] = MSB, op[1] = second-highest byte (or NULL for 1-byte modes).  */
  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7" CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2" CR_TAB
                         "sbc %1,%1" CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB
                         "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  /* Branch target of the saturation branches emitted above.  */
  if (out_label)
    avr_asm_len ("0:", op, plen, 0);
}
7512
7513
/* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
   is not a compile-time constant:
eac146f2 7516
b4ebb666 7517 XOP[0] = XOP[0] +/- XOP[2]
7518
7519 This is a helper for the function below. The only insns that need this
7520 are additions/subtraction for pointer modes, i.e. HImode and PSImode. */
7521
7522static const char*
7523avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
eac146f2 7524{
3754d046 7525 machine_mode mode = GET_MODE (xop[0]);
eac146f2 7526
b4ebb666 7527 /* Only pointer modes want to add symbols. */
0dff9558 7528
b4ebb666 7529 gcc_assert (mode == HImode || mode == PSImode);
eac146f2 7530
b4ebb666 7531 *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
7532
7533 avr_asm_len (PLUS == code
7534 ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
7535 : "subi %A0,lo8(%2)" CR_TAB "sbci %B0,hi8(%2)",
7536 xop, plen, -2);
7537
4e07e83e 7538 if (PSImode == mode)
b4ebb666 7539 avr_asm_len (PLUS == code
4e07e83e 7540 ? "sbci %C0,hlo8(-(%2))"
b4ebb666 7541 : "sbci %C0,hlo8(%2)", xop, plen, 1);
7542 return "";
eac146f2 7543}
7544
83921eda 7545
b4ebb666 7546/* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
0dff9558 7547
fdbf5b19 7548 INSN is a single_set insn or an insn pattern with a binary operation as
7549 SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
b4ebb666 7550
7551 XOP are the operands of INSN. In the case of 64-bit operations with
7552 constant XOP[] has just one element: The summand/subtrahend in XOP[0].
7553 The non-saturating insns up to 32 bits may or may not supply a "d" class
7554 scratch as XOP[3].
7555
7556 If PLEN == NULL output the instructions.
7557 If PLEN != NULL set *PLEN to the length of the sequence in words.
017c5b98 7558
b4ebb666 7559 PCC is a pointer to store the instructions' effect on cc0.
7560 PCC may be NULL.
017c5b98 7561
b4ebb666 7562 PLEN and PCC default to NULL.
7563
fdbf5b19 7564 OUT_LABEL defaults to TRUE. For a description, see AVR_OUT_PLUS_1.
7565
b4ebb666 7566 Return "" */
017c5b98 7567
const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
{
  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  /* INSN may be a full insn or just a pattern; extract the SET either way.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  rtx xdest = SET_DEST (xpattern);
  machine_mode mode = GET_MODE (xdest);
  machine_mode imode = int_mode_for_mode (mode);
  int n_bytes = GET_MODE_SIZE (mode);
  /* RTX code of the source: PLUS/MINUS or one of the saturating codes.  */
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  /* Register operand up to 32 bits: emit directly, no constant tricks.  */
  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
      return "";
    }

  if (8 == n_bytes)
    {
      /* 64-bit operations work on the fixed accumulator register pair.  */
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      /* A summand that is neither REG nor a numeric constant is a
	 symbolic address; handle it separately.  */
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

  if (plen)
    {
      /* Length-query mode: report the better of the two variants.  */
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);

  return "";
}
7652
7653
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
   time constant XOP[2]:

      XOP[0] = XOP[0] <op> XOP[2]

   and return "".  If PLEN == NULL, print assembler instructions to perform the
   operation; otherwise, set *PLEN to the length of the instruction sequence
   (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
   register or SCRATCH if no clobber register is needed for the operation.
   INSN is an INSN_P or a pattern of an insn.  */

const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  The T-flag is cached
     across bytes so that consecutive BLD-based bytes need just one
     SET resp. CLT.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.
     Caching it avoids reloading the same immediate with LDI.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          /* x | 0 = x:  nothing to do for this byte.  */
          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* Exactly one bit to set: use T-flag + BLD, reusing a
                 previously set T-flag if possible.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* x | 0xff = 0xff:  copy from a register already holding
                 0xff, or synthesize it as CLR + DEC.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              /* Remember this register as a source of 0xff.  */
              op[3] = op[0];
            }
          else
            {
              /* General case: load the mask into the clobber register
                 (unless it already holds it) and OR.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          /* x & 0xff = x:  nothing to do for this byte.  */
          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Exactly one bit to clear: use T-flag (cleared) + BLD.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          /* x ^ 0 = x:  nothing to do for this byte.  */
          if (0 == pop8)
            continue;
          else if (8 == pop8)
            /* x ^ 0xff is one's complement.  */
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Flipping just the sign bit: SUBI 0x80 toggles bit 7.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
7805
915f904b 7806
/* Output sign extension from XOP[1] to XOP[0] and return "".
   If PLEN == NULL, print assembler instructions to perform the operation;
   otherwise, set *PLEN to the length of the instruction sequence (in words)
   as printed with PLEN == NULL.  */

const char*
avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
{
  // Size in bytes of source resp. destination operand.
  unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
  unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
  // Register holding the most significant byte of the source.
  rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];

  if (plen)
    *plen = 0;

  // Copy destination to source

  if (REGNO (xop[0]) != REGNO (xop[1]))
    {
      // Only 1- and 2-byte sources are handled by the copy below.
      gcc_assert (n_src <= 2);

      if (n_src == 2)
        avr_asm_len (AVR_HAVE_MOVW
                     ? "movw %0,%1"
                     : "mov %B0,%B1", xop, plen, 1);
      if (n_src == 1 || !AVR_HAVE_MOVW)
        avr_asm_len ("mov %A0,%A1", xop, plen, 1);
    }

  // Set Carry to the sign bit MSB.7...
  // The LSL below clobbers its operand, so work on a copy in __tmp_reg__
  // if the MSB register is still needed after this insn (or overlaps
  // the destination).

  if (REGNO (xop[0]) == REGNO (xop[1])
      || !reg_unused_after (insn, r_msb))
    {
      avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
      r_msb = tmp_reg_rtx;
    }

  avr_asm_len ("lsl %0", &r_msb, plen, 1);

  // ...and propagate it to all the new sign bits:
  // SBC r,r yields 0x00 when Carry is clear and 0xff when Carry is set.

  for (unsigned n = n_src; n < n_dest; n++)
    avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);

  return "";
}
7855
7856
915f904b 7857/* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7858 PLEN != NULL: Set *PLEN to the length of that sequence.
7859 Return "". */
7860
7861const char*
7862avr_out_addto_sp (rtx *op, int *plen)
7863{
7864 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
7865 int addend = INTVAL (op[0]);
7866
7867 if (plen)
7868 *plen = 0;
7869
7870 if (addend < 0)
7871 {
7872 if (flag_verbose_asm || flag_print_asm_name)
7873 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
0dff9558 7874
915f904b 7875 while (addend <= -pc_len)
7876 {
7877 addend += pc_len;
7878 avr_asm_len ("rcall .", op, plen, 1);
7879 }
7880
7881 while (addend++ < 0)
7882 avr_asm_len ("push __zero_reg__", op, plen, 1);
7883 }
7884 else if (addend > 0)
7885 {
7886 if (flag_verbose_asm || flag_print_asm_name)
7887 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7888
7889 while (addend-- > 0)
7890 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7891 }
7892
7893 return "";
7894}
7895
7896
c8ec4eb6 7897/* Outputs instructions needed for fixed point type conversion.
7898 This includes converting between any fixed point type, as well
7899 as converting to any integer type. Conversion between integer
7900 types is not supported.
7901
7902 Converting signed fractional types requires a bit shift if converting
7903 to or from any unsigned fractional type because the decimal place is
7904 shifted by 1 bit. When the destination is a signed fractional, the sign
7905 is stored in either the carry or T bit. */
7906
7907const char*
375204de 7908avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
c8ec4eb6 7909{
7910 size_t i;
7911 rtx xop[6];
7912 RTX_CODE shift = UNKNOWN;
7913 bool sign_in_carry = false;
7914 bool msb_in_carry = false;
226d57b0 7915 bool lsb_in_tmp_reg = false;
c8ec4eb6 7916 bool lsb_in_carry = false;
226d57b0 7917 bool frac_rounded = false;
c8ec4eb6 7918 const char *code_ashift = "lsl %0";
7919
0dff9558 7920
c8ec4eb6 7921#define MAY_CLOBBER(RR) \
7922 /* Shorthand used below. */ \
7923 ((sign_bytes \
7924 && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb)) \
226d57b0 7925 || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb)) \
c8ec4eb6 7926 || (reg_unused_after (insn, all_regs_rtx[RR]) \
7927 && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
7928
7929 struct
7930 {
7931 /* bytes : Length of operand in bytes.
7932 ibyte : Length of integral part in bytes.
7933 fbyte, fbit : Length of fractional part in bytes, bits. */
7934
7935 bool sbit;
7936 unsigned fbit, bytes, ibyte, fbyte;
7937 unsigned regno, regno_msb;
7938 } dest, src, *val[2] = { &dest, &src };
7939
7940 if (plen)
7941 *plen = 0;
7942
7943 /* Step 0: Determine information on source and destination operand we
7944 ====== will need in the remainder. */
7945
7946 for (i = 0; i < sizeof (val) / sizeof (*val); i++)
7947 {
3754d046 7948 machine_mode mode;
c8ec4eb6 7949
7950 xop[i] = operands[i];
7951
7952 mode = GET_MODE (xop[i]);
7953
7954 val[i]->bytes = GET_MODE_SIZE (mode);
7955 val[i]->regno = REGNO (xop[i]);
7956 val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
7957
7958 if (SCALAR_INT_MODE_P (mode))
7959 {
7960 val[i]->sbit = intsigned;
7961 val[i]->fbit = 0;
7962 }
7963 else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
7964 {
7965 val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
7966 val[i]->fbit = GET_MODE_FBIT (mode);
7967 }
7968 else
7969 fatal_insn ("unsupported fixed-point conversion", insn);
7970
7971 val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
7972 val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
7973 }
7974
7975 // Byte offset of the decimal point taking into account different place
7976 // of the decimal point in input and output and different register numbers
7977 // of input and output.
7978 int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
7979
7980 // Number of destination bytes that will come from sign / zero extension.
7981 int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
7982
7983 // Number of bytes at the low end to be filled with zeros.
7984 int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
7985
7986 // Do we have a 16-Bit register that is cleared?
7987 rtx clrw = NULL_RTX;
0dff9558 7988
c8ec4eb6 7989 bool sign_extend = src.sbit && sign_bytes;
7990
7991 if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
7992 shift = ASHIFT;
7993 else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
7994 shift = ASHIFTRT;
7995 else if (dest.fbit % 8 == src.fbit % 8)
7996 shift = UNKNOWN;
7997 else
7998 gcc_unreachable();
7999
226d57b0 8000 /* If we need to round the fraction part, we might need to save/round it
1a96adb9 8001 before clobbering any of it in Step 1. Also, we might want to do
226d57b0 8002 the rounding now to make use of LD_REGS. */
8003 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8004 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8005 && !TARGET_FRACT_CONV_TRUNC)
8006 {
8007 bool overlap
1a96adb9 8008 = (src.regno <=
8009 (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
8010 && dest.regno - offset -1 >= dest.regno);
226d57b0 8011 unsigned s0 = dest.regno - offset -1;
8012 bool use_src = true;
8013 unsigned sn;
8014 unsigned copied_msb = src.regno_msb;
8015 bool have_carry = false;
8016
8017 if (src.ibyte > dest.ibyte)
1a96adb9 8018 copied_msb -= src.ibyte - dest.ibyte;
226d57b0 8019
8020 for (sn = s0; sn <= copied_msb; sn++)
1a96adb9 8021 if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
8022 && !reg_unused_after (insn, all_regs_rtx[sn]))
8023 use_src = false;
226d57b0 8024 if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
1a96adb9 8025 {
8026 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8027 &all_regs_rtx[src.regno_msb], plen, 2);
8028 sn = src.regno;
8029 if (sn < s0)
8030 {
8031 if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
8032 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
8033 else
8034 avr_asm_len ("sec" CR_TAB
8035 "cpc %0,__zero_reg__",
8036 &all_regs_rtx[sn], plen, 2);
8037 have_carry = true;
8038 }
8039 while (++sn < s0)
8040 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8041
8042 avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
8043 &all_regs_rtx[s0], plen, 1);
8044 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8045 avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
8046 avr_asm_len ("\n0:", NULL, plen, 0);
8047 frac_rounded = true;
8048 }
226d57b0 8049 else if (use_src && overlap)
1a96adb9 8050 {
8051 avr_asm_len ("clr __tmp_reg__" CR_TAB
8052 "sbrc %1,0" CR_TAB
8053 "dec __tmp_reg__", xop, plen, 1);
8054 sn = src.regno;
8055 if (sn < s0)
8056 {
8057 avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8058 have_carry = true;
8059 }
8060
8061 while (++sn < s0)
8062 avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8063
8064 if (have_carry)
8065 avr_asm_len ("clt" CR_TAB
8066 "bld __tmp_reg__,7" CR_TAB
8067 "adc %0,__tmp_reg__",
8068 &all_regs_rtx[s0], plen, 1);
8069 else
8070 avr_asm_len ("lsr __tmp_reg" CR_TAB
8071 "add %0,__tmp_reg__",
8072 &all_regs_rtx[s0], plen, 2);
8073 for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8074 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8075 frac_rounded = true;
8076 }
226d57b0 8077 else if (overlap)
1a96adb9 8078 {
8079 bool use_src
8080 = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
8081 && (IN_RANGE (s0, dest.regno, dest.regno_msb)
8082 || reg_unused_after (insn, all_regs_rtx[s0])));
8083 xop[2] = all_regs_rtx[s0];
8084 unsigned sn = src.regno;
8085 if (!use_src || sn == s0)
8086 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8087 /* We need to consider to-be-discarded bits
8088 if the value is negative. */
8089 if (sn < s0)
8090 {
8091 avr_asm_len ("tst %0" CR_TAB
8092 "brpl 0f",
8093 &all_regs_rtx[src.regno_msb], plen, 2);
8094 /* Test to-be-discarded bytes for any nozero bits.
8095 ??? Could use OR or SBIW to test two registers at once. */
8096 if (sn < s0)
8097 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8098
8099 while (++sn < s0)
8100 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8101 /* Set bit 0 in __tmp_reg__ if any of the lower bits was set. */
8102 if (use_src)
8103 avr_asm_len ("breq 0f" CR_TAB
8104 "ori %2,1"
8105 "\n0:\t" "mov __tmp_reg__,%2",
8106 xop, plen, 3);
8107 else
8108 avr_asm_len ("breq 0f" CR_TAB
8109 "set" CR_TAB
8110 "bld __tmp_reg__,0\n0:",
8111 xop, plen, 3);
8112 }
8113 lsb_in_tmp_reg = true;
8114 }
226d57b0 8115 }
8116
c8ec4eb6 8117 /* Step 1: Clear bytes at the low end and copy payload bits from source
8118 ====== to destination. */
8119
8120 int step = offset < 0 ? 1 : -1;
8121 unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
8122
226d57b0 8123 // We cleared at least that number of registers.
c8ec4eb6 8124 int clr_n = 0;
8125
8126 for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
8127 {
8128 // Next regno of destination is needed for MOVW
8129 unsigned d1 = d0 + step;
8130
8131 // Current and next regno of source
2b5c3dd1 8132 signed s0 = d0 - offset;
8133 signed s1 = s0 + step;
c8ec4eb6 8134
8135 // Must current resp. next regno be CLRed? This applies to the low
8136 // bytes of the destination that have no associated source bytes.
2b5c3dd1 8137 bool clr0 = s0 < (signed) src.regno;
8138 bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
c8ec4eb6 8139
8140 // First gather what code to emit (if any) and additional step to
8141 // apply if a MOVW is in use. xop[2] is destination rtx and xop[3]
8142 // is the source rtx for the current loop iteration.
8143 const char *code = NULL;
8144 int stepw = 0;
0dff9558 8145
c8ec4eb6 8146 if (clr0)
8147 {
8148 if (AVR_HAVE_MOVW && clr1 && clrw)
8149 {
8150 xop[2] = all_regs_rtx[d0 & ~1];
8151 xop[3] = clrw;
8152 code = "movw %2,%3";
8153 stepw = step;
8154 }
8155 else
8156 {
8157 xop[2] = all_regs_rtx[d0];
8158 code = "clr %2";
8159
8160 if (++clr_n >= 2
8161 && !clrw
8162 && d0 % 2 == (step > 0))
8163 {
8164 clrw = all_regs_rtx[d0 & ~1];
8165 }
8166 }
8167 }
2b5c3dd1 8168 else if (offset && s0 <= (signed) src.regno_msb)
c8ec4eb6 8169 {
8170 int movw = AVR_HAVE_MOVW && offset % 2 == 0
8171 && d0 % 2 == (offset > 0)
8172 && d1 <= dest.regno_msb && d1 >= dest.regno
2b5c3dd1 8173 && s1 <= (signed) src.regno_msb && s1 >= (signed) src.regno;
c8ec4eb6 8174
8175 xop[2] = all_regs_rtx[d0 & ~movw];
8176 xop[3] = all_regs_rtx[s0 & ~movw];
8177 code = movw ? "movw %2,%3" : "mov %2,%3";
8178 stepw = step * movw;
8179 }
8180
8181 if (code)
8182 {
8183 if (sign_extend && shift != ASHIFT && !sign_in_carry
8184 && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
8185 {
8186 /* We are going to override the sign bit. If we sign-extend,
8187 store the sign in the Carry flag. This is not needed if
1a96adb9 8188 the destination will be ASHIFT in the remainder because
c8ec4eb6 8189 the ASHIFT will set Carry without extra instruction. */
8190
8191 avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
8192 sign_in_carry = true;
8193 }
8194
8195 unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
8196
8197 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8198 && src.ibyte > dest.ibyte
8199 && (d0 == src_msb || d0 + stepw == src_msb))
8200 {
8201 /* We are going to override the MSB. If we shift right,
8202 store the MSB in the Carry flag. This is only needed if
8203 we don't sign-extend becaue with sign-extension the MSB
8204 (the sign) will be produced by the sign extension. */
8205
8206 avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
8207 msb_in_carry = true;
8208 }
8209
8210 unsigned src_lsb = dest.regno - offset -1;
8211
8212 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
226d57b0 8213 && !lsb_in_tmp_reg
c8ec4eb6 8214 && (d0 == src_lsb || d0 + stepw == src_lsb))
8215 {
8216 /* We are going to override the new LSB; store it into carry. */
8217
8218 avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
8219 code_ashift = "rol %0";
8220 lsb_in_carry = true;
8221 }
8222
8223 avr_asm_len (code, xop, plen, 1);
8224 d0 += stepw;
8225 }
8226 }
8227
8228 /* Step 2: Shift destination left by 1 bit position. This might be needed
8229 ====== for signed input and unsigned output. */
8230
8231 if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
8232 {
8233 unsigned s0 = dest.regno - offset -1;
8234
226d57b0 8235 /* n1169 4.1.4 says:
8236 "Conversions from a fixed-point to an integer type round toward zero."
8237 Hence, converting a fract type to integer only gives a non-zero result
8238 for -1. */
8239 if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8240 && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
8241 && !TARGET_FRACT_CONV_TRUNC)
8242 {
8243 gcc_assert (s0 == src.regno_msb);
8244 /* Check if the input is -1. We do that by checking if negating
8245 the input causes an integer overflow. */
8246 unsigned sn = src.regno;
8247 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8248 while (sn <= s0)
8249 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8250
8251 /* Overflow goes with set carry. Clear carry otherwise. */
1a96adb9 8252 avr_asm_len ("brvs 0f" CR_TAB
8253 "clc\n0:", NULL, plen, 2);
226d57b0 8254 }
8255 /* Likewise, when converting from accumulator types to integer, we
8256 need to round up negative values. */
8257 else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8258 && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8259 && !TARGET_FRACT_CONV_TRUNC
8260 && !frac_rounded)
8261 {
8262 bool have_carry = false;
8263
8264 xop[2] = all_regs_rtx[s0];
8265 if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
8266 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8267 avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8268 &all_regs_rtx[src.regno_msb], plen, 2);
8269 if (!lsb_in_tmp_reg)
8270 {
8271 unsigned sn = src.regno;
8272 if (sn < s0)
8273 {
8274 avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
8275 plen, 1);
8276 have_carry = true;
8277 }
8278 while (++sn < s0)
8279 avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
8280 lsb_in_tmp_reg = !MAY_CLOBBER (s0);
8281 }
8282 /* Add in C and the rounding value 127. */
8283 /* If the destination msb is a sign byte, and in LD_REGS,
8284 grab it as a temporary. */
8285 if (sign_bytes
8286 && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
8287 dest.regno_msb))
8288 {
8289 xop[3] = all_regs_rtx[dest.regno_msb];
8290 avr_asm_len ("ldi %3,127", xop, plen, 1);
8291 avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
8292 : have_carry ? "adc %2,%3"
8293 : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
8294 : "add %2,%3"),
8295 xop, plen, 1);
8296 }
8297 else
8298 {
8299 /* Fall back to use __zero_reg__ as a temporary. */
8300 avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
8301 if (have_carry)
1a96adb9 8302 avr_asm_len ("clt" CR_TAB
8303 "bld __zero_reg__,7", NULL, plen, 2);
226d57b0 8304 else
8305 avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
1a96adb9 8306 avr_asm_len (have_carry && lsb_in_tmp_reg
8307 ? "adc __tmp_reg__,__zero_reg__"
8308 : have_carry ? "adc %2,__zero_reg__"
8309 : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
8310 : "add %2,__zero_reg__",
226d57b0 8311 xop, plen, 1);
8312 avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
8313 }
1a96adb9 8314
8315 for (d0 = dest.regno + zero_bytes;
226d57b0 8316 d0 <= dest.regno_msb - sign_bytes; d0++)
8317 avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
1a96adb9 8318
8319 avr_asm_len (lsb_in_tmp_reg
8320 ? "\n0:\t" "lsl __tmp_reg__"
8321 : "\n0:\t" "lsl %2",
226d57b0 8322 xop, plen, 1);
8323 }
8324 else if (MAY_CLOBBER (s0))
c8ec4eb6 8325 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8326 else
8327 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8328 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8329
8330 code_ashift = "rol %0";
8331 lsb_in_carry = true;
8332 }
8333
8334 if (shift == ASHIFT)
8335 {
8336 for (d0 = dest.regno + zero_bytes;
8337 d0 <= dest.regno_msb - sign_bytes; d0++)
8338 {
8339 avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
8340 code_ashift = "rol %0";
8341 }
8342
8343 lsb_in_carry = false;
8344 sign_in_carry = true;
8345 }
8346
8347 /* Step 4a: Store MSB in carry if we don't already have it or will produce
8348 ======= it in sign-extension below. */
8349
8350 if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8351 && src.ibyte > dest.ibyte)
8352 {
8353 unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
8354
8355 if (MAY_CLOBBER (s0))
8356 avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
8357 else
8358 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8359 "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8360
8361 msb_in_carry = true;
8362 }
8363
8364 /* Step 3: Sign-extend or zero-extend the destination as needed.
8365 ====== */
8366
8367 if (sign_extend && !sign_in_carry)
8368 {
8369 unsigned s0 = src.regno_msb;
0dff9558 8370
c8ec4eb6 8371 if (MAY_CLOBBER (s0))
8372 avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8373 else
8374 avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8375 "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8376
8377 sign_in_carry = true;
8378 }
8379
8380 gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
8381
8382 unsigned copies = 0;
8383 rtx movw = sign_extend ? NULL_RTX : clrw;
8384
8385 for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
8386 {
8387 if (AVR_HAVE_MOVW && movw
8388 && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
8389 {
8390 xop[2] = all_regs_rtx[d0];
8391 xop[3] = movw;
8392 avr_asm_len ("movw %2,%3", xop, plen, 1);
8393 d0++;
8394 }
8395 else
8396 {
8397 avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
8398 &all_regs_rtx[d0], plen, 1);
8399
8400 if (++copies >= 2 && !movw && d0 % 2 == 1)
8401 movw = all_regs_rtx[d0-1];
8402 }
8403 } /* for */
8404
8405
8406 /* Step 4: Right shift the destination. This might be needed for
8407 ====== conversions from unsigned to signed. */
8408
8409 if (shift == ASHIFTRT)
8410 {
8411 const char *code_ashiftrt = "lsr %0";
8412
8413 if (sign_extend || msb_in_carry)
8414 code_ashiftrt = "ror %0";
8415
8416 if (src.sbit && src.ibyte == dest.ibyte)
8417 code_ashiftrt = "asr %0";
8418
8419 for (d0 = dest.regno_msb - sign_bytes;
8420 d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
8421 {
8422 avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
8423 code_ashiftrt = "ror %0";
8424 }
8425 }
8426
8427#undef MAY_CLOBBER
8428
8429 return "";
8430}
8431
8432
/* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
   XOP[2] is the rounding point, a CONST_INT.  The function prints the
   instruction sequence if PLEN = NULL and computes the length in words
   of the sequence if PLEN != NULL.  Most of this function deals with
   preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.  */

const char*
avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  machine_mode mode = GET_MODE (xop[0]);
  /* Integer mode of the same size, used for the bit-masking step.  */
  machine_mode imode = int_mode_for_mode (mode);
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  /* 1/2 * 2^(-RP), the value added for round-to-nearest.  Held both as
     double_int (needed by const_fixed_from_double_int) and as wide_int
     (used to build the AND mask below).  */
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
                                         GET_MODE_PRECISION (imode));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below.

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  /* If the addition saturated (label "0:" taken), skip the masking.  */
  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower: 2^(-RP-1) ...
  // Rounding point                      ^^^^^^^
  // Added above                         ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  /* -2 * wi_add has ones from bit (-RP) upwards and zeros below.  */
  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);

  xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

  if (plen)
    *plen = len_add + len_and;

  return "";
}
8494
8495
/* Create RTL split patterns for byte sized rotate expressions.  This
   produces a series of move instructions and considers overlap situations.
   Overlapping non-HImode operands need a scratch register.

   operands[0] is the destination, operands[1] the source, operands[2]
   the rotate count in bits (a multiple of 8), operands[3] a scratch
   register or SCRATCH.  Returns true (always succeeds).  */

bool
avr_rotate_bytes (rtx operands[])
{
  int i, j;
  machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* HImode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          /* Classic three-XOR swap of the two bytes.  */
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  */
      struct {
        rtx src, dst;
        int links;    /* Index of the move this one must wait for, or -1.  */
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
        for (i = 0; i < size; i++)
          if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
            for (j = 0; j < size; j++)
              if (j != i && rtx_equal_p (move[j].src, move[i].dst))
                {
                  /* The dst of move i is the src of move j.  */
                  move[i].links = j;
                  break;
                }

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst = scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;
              size=size+1;
            }
        }
      while (blocked != -1);
    }
  return true;
}
8643
017c5b98 8644
/* Worker function for `ADJUST_INSN_LENGTH'.  */
/* Modifies the length assigned to instruction INSN
   LEN is the initially computed length of the insn.

   Returns the (possibly) corrected length.  Most of the avr_out_* /
   *_out workers called below serve double duty: when called from here
   they store the real instruction length through their &len argument;
   when called during asm output they emit the assembler template.  */

int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  /* Insns that are not recognized (or are debug insns) keep the length
     computed from the "length" attribute.  */

  if (!NONDEBUG_INSN_P (insn)
      || -1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands so that the workers can inspect them.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each worker overwrites LEN with the
     exact length of the code it would emit for this insn.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    /* Calls are 2 words with JMP/CALL, 1 word with RJMP/RCALL.  */
    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
8740
674a8f0b 8741/* Return nonzero if register REG dead after INSN. */
a28e4651 8742
8743int
375204de 8744reg_unused_after (rtx_insn *insn, rtx reg)
a28e4651 8745{
e511e253 8746 return (dead_or_set_p (insn, reg)
a28e4651 8747 || (REG_P(reg) && _reg_unused_after (insn, reg)));
8748}
8749
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Walk the insn stream forward until REG's fate is decided.  */
  while ((insn = NEXT_INSN (insn)))
    {
      /* NOTE(review): this inner `set' shadows the outer one and is never
	 used at this scope (the SEQUENCE loop and the tail both declare or
	 assign their own) — looks like dead code, left untouched here.  */
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      /* Skip notes, barriers, labels etc.  */
      if (!INSN_P (insn))
	continue;

      /* A jump: REG may be live at the target, assume it is used.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
	  int i;
	  int retval = 0;

	  for (i = 0; i < seq->len (); i++)
	    {
	      rtx_insn *this_insn = seq->insn (i);
	      rtx set = single_set (this_insn);

	      /* Remember the strongest control transfer in the sequence.  */
	      if (CALL_P (this_insn))
		code = CALL_INSN;
	      else if (JUMP_P (this_insn))
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      /* REG read before being (re)set: it is used.  */
	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    /* Store: REG appears in the address, so it is used.  */
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  /* A set inside the sequence proved REG dead.  */
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* REG passed as argument to the call counts as a use.  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  /* Call-clobbered registers cannot survive the call.  */
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	/* Overwritten (unless it is a store using REG in the address).  */
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  /* End of stream reached without a use.  */
  return 1;
}
8855
5bd39e93 8856
0dff9558 8857/* Implement `TARGET_ASM_INTEGER'. */
58356836 8858/* Target hook for assembling integer objects. The AVR version needs
8859 special handling for references to certain labels. */
a28e4651 8860
58356836 8861static bool
206a5129 8862avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
a28e4651 8863{
58356836 8864 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5a9cc803 8865 && text_segment_operand (x, VOIDmode))
a28e4651 8866 {
90ef7269 8867 fputs ("\t.word\tgs(", asm_out_file);
58356836 8868 output_addr_const (asm_out_file, x);
8869 fputs (")\n", asm_out_file);
0dff9558 8870
5bd39e93 8871 return true;
8872 }
8873 else if (GET_MODE (x) == PSImode)
8874 {
5a9cc803 8875 /* This needs binutils 2.23+, see PR binutils/13503 */
8876
8877 fputs ("\t.byte\tlo8(", asm_out_file);
8878 output_addr_const (asm_out_file, x);
8879 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
0dff9558 8880
5a9cc803 8881 fputs ("\t.byte\thi8(", asm_out_file);
5bd39e93 8882 output_addr_const (asm_out_file, x);
5a9cc803 8883 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
0dff9558 8884
5a9cc803 8885 fputs ("\t.byte\thh8(", asm_out_file);
5bd39e93 8886 output_addr_const (asm_out_file, x);
5a9cc803 8887 fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
0dff9558 8888
58356836 8889 return true;
a28e4651 8890 }
017c5b98 8891 else if (CONST_FIXED_P (x))
8892 {
8893 unsigned n;
8894
8895 /* varasm fails to handle big fixed modes that don't fit in hwi. */
8896
8897 for (n = 0; n < size; n++)
8898 {
8899 rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8900 default_assemble_integer (xn, 1, aligned_p);
8901 }
8902
8903 return true;
8904 }
0dff9558 8905
58356836 8906 return default_assemble_integer (x, size, aligned_p);
a28e4651 8907}
8908
5bd39e93 8909
0dff9558 8910/* Implement `TARGET_CLASS_LIKELY_SPILLED_P'. */
a28e4651 8911/* Return value is nonzero if pseudos that have been
8912 assigned to registers of class CLASS would likely be spilled
8913 because registers of CLASS are needed for spill registers. */
8914
cb3959cc 8915static bool
8916avr_class_likely_spilled_p (reg_class_t c)
a28e4651 8917{
b4e6d2e2 8918 return (c != ALL_REGS &&
8919 (AVR_TINY ? 1 : c != ADDW_REGS));
a28e4651 8920}
8921
a28e4651 8922
0dff9558 8923/* Valid attributes:
8924 progmem - Put data to program memory.
8925 signal - Make a function to be hardware interrupt.
8926 After function prologue interrupts remain disabled.
8927 interrupt - Make a function to be hardware interrupt. Before function
8928 prologue interrupts are enabled by means of SEI.
8929 naked - Don't generate function prologue/epilogue and RET
8930 instruction. */
e3c541f0 8931
e3c541f0 8932/* Handle a "progmem" attribute; arguments as in
8933 struct attribute_spec.handler. */
0dff9558 8934
e3c541f0 8935static tree
206a5129 8936avr_handle_progmem_attribute (tree *node, tree name,
8937 tree args ATTRIBUTE_UNUSED,
8938 int flags ATTRIBUTE_UNUSED,
8939 bool *no_add_attrs)
e3c541f0 8940{
8941 if (DECL_P (*node))
a28e4651 8942 {
68e7ca0a 8943 if (TREE_CODE (*node) == TYPE_DECL)
8944 {
8945 /* This is really a decl attribute, not a type attribute,
8946 but try to handle it for GCC 3.0 backwards compatibility. */
8947
8948 tree type = TREE_TYPE (*node);
8949 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
8950 tree newtype = build_type_attribute_variant (type, attr);
8951
8952 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
8953 TREE_TYPE (*node) = newtype;
8954 *no_add_attrs = true;
8955 }
8956 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
a28e4651 8957 {
b44e24e6 8958 *no_add_attrs = false;
e3c541f0 8959 }
8960 else
8961 {
67a779df 8962 warning (OPT_Wattributes, "%qE attribute ignored",
8963 name);
e3c541f0 8964 *no_add_attrs = true;
a28e4651 8965 }
a28e4651 8966 }
e3c541f0 8967
8968 return NULL_TREE;
a28e4651 8969}
8970
e3c541f0 8971/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
8972 struct attribute_spec.handler. */
206a5129 8973
e3c541f0 8974static tree
206a5129 8975avr_handle_fndecl_attribute (tree *node, tree name,
8976 tree args ATTRIBUTE_UNUSED,
8977 int flags ATTRIBUTE_UNUSED,
8978 bool *no_add_attrs)
e3c541f0 8979{
8980 if (TREE_CODE (*node) != FUNCTION_DECL)
8981 {
67a779df 8982 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8983 name);
e3c541f0 8984 *no_add_attrs = true;
8985 }
8986
8987 return NULL_TREE;
8988}
a28e4651 8989
f86b386b 8990static tree
8991avr_handle_fntype_attribute (tree *node, tree name,
8992 tree args ATTRIBUTE_UNUSED,
8993 int flags ATTRIBUTE_UNUSED,
8994 bool *no_add_attrs)
8995{
8996 if (TREE_CODE (*node) != FUNCTION_TYPE)
8997 {
67a779df 8998 warning (OPT_Wattributes, "%qE attribute only applies to functions",
8999 name);
f86b386b 9000 *no_add_attrs = true;
9001 }
9002
9003 return NULL_TREE;
9004}
9005
/* Handle the "io", "io_low" and "address" attributes; arguments as in
   struct attribute_spec.handler.  Validates that the attribute sits on
   a variable, that its optional argument is an integer constant in the
   valid range for the respective attribute, and that at most one of the
   three attributes supplies an address.  */

static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
			   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  /* Both "io" and "io_low" start with "io".  */
  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
  location_t loc = DECL_SOURCE_LOCATION (*node);

  if (TREE_CODE (*node) != VAR_DECL)
    {
      warning_at (loc, 0, "%qE attribute only applies to variables", name);
      *no_add = true;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so the constant is visible.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add = true;
	}
      else if (io_p
	       && (!tree_fits_shwi_p (arg)
		   /* Pick the range predicate matching the attribute:
		      "io_low" addresses must also work with CBI/SBI etc.  */
		   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
			? low_io_address_operand : io_address_operand)
			 (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
	{
	  warning_at (loc, 0, "%qE attribute address out of range", name);
	  *no_add = true;
	}
      else
	{
	  /* Reject a second address coming from any of the three
	     address-carrying attributes.  */
	  tree attribs = DECL_ATTRIBUTES (*node);
	  const char *names[] = { "io", "io_low", "address", NULL } ;
	  for (const char **p = names; *p; p++)
	    {
	      tree other = lookup_attribute (*p, attribs);
	      if (other && TREE_VALUE (other))
		{
		  warning_at (loc, 0,
			      "both %s and %qE attribute provide address",
			      *p, name);
		  *no_add = true;
		  break;
		}
	    }
	}
    }

  /* An IO register that is not volatile is almost certainly a bug.  */
  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
    warning_at (loc, 0, "%qE attribute on non-volatile variable", name);

  return NULL_TREE;
}
9063
9064rtx
9065avr_eval_addr_attrib (rtx x)
9066{
9067 if (GET_CODE (x) == SYMBOL_REF
9068 && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
9069 {
9070 tree decl = SYMBOL_REF_DECL (x);
9071 tree attr = NULL_TREE;
9072
9073 if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
9074 {
9075 attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
9076 gcc_assert (attr);
9077 }
9078 if (!attr || !TREE_VALUE (attr))
9079 attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
9080 gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
9081 return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
9082 }
9083 return x;
9084}
9085
a45076aa 9086
/* AVR attributes.  */
/* Table of machine attributes; see the "Valid attributes" overview above.
   Columns per struct attribute_spec:
   { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity }  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "io",        0, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { "io_low",    0, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  { "address",   1, 1, false, false, false,  avr_handle_addr_attribute,
    false },
  /* Sentinel terminating the table.  */
  { NULL,        0, 0, false, false, false, NULL, false }
};
9113
4202ef11 9114
9115/* Look if DECL shall be placed in program memory space by
9116 means of attribute `progmem' or some address-space qualifier.
9117 Return non-zero if DECL is data that must end up in Flash and
9118 zero if the data lives in RAM (.bss, .data, .rodata, ...).
0dff9558 9119
5bd39e93 9120 Return 2 if DECL is located in 24-bit flash address-space
4202ef11 9121 Return 1 if DECL is located in 16-bit flash address-space
9122 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
9123 Return 0 otherwise */
a28e4651 9124
9125int
148b2ce0 9126avr_progmem_p (tree decl, tree attributes)
a28e4651 9127{
9128 tree a;
9129
9130 if (TREE_CODE (decl) != VAR_DECL)
9131 return 0;
9132
590da9f2 9133 if (avr_decl_memx_p (decl))
5bd39e93 9134 return 2;
9135
590da9f2 9136 if (avr_decl_flash_p (decl))
4202ef11 9137 return 1;
9138
a28e4651 9139 if (NULL_TREE
148b2ce0 9140 != lookup_attribute ("progmem", attributes))
4202ef11 9141 return -1;
a28e4651 9142
4202ef11 9143 a = decl;
0dff9558 9144
a28e4651 9145 do
9146 a = TREE_TYPE(a);
9147 while (TREE_CODE (a) == ARRAY_TYPE);
9148
faf8f400 9149 if (a == error_mark_node)
9150 return 0;
9151
a28e4651 9152 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4202ef11 9153 return -1;
0dff9558 9154
a28e4651 9155 return 0;
9156}
9157
4202ef11 9158
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.
   Also treats pointers into flash segments beyond the device's flash,
   and __memx pointers on devices without LPM, as offending.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  /* Peel arrays down to the element type.  */
  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
	return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... */

      while (TREE_CODE (target) == ARRAY_TYPE)
	target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.
	 Refuse address spaces outside the device's flash.  */

      as = TYPE_ADDR_SPACE (target);

      if (!ADDR_SPACE_GENERIC_P (as)
	  && (!TYPE_READONLY (target)
	      || avr_addrspace[as].segment >= avr_n_flash
	      /* Also refuse __memx address space if we can't support it.  */
	      || (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)))
	{
	  return as;
	}

      /* Scan pointer's target type.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
9206
9207
9d75589a 9208/* Sanity check NODE so that all pointers targeting non-generic address spaces
590da9f2 9209 go along with CONST qualifier. Writing to these address spaces should
4202ef11 9210 be detected and complained about as early as possible. */
9211
9212static bool
9213avr_pgm_check_var_decl (tree node)
9214{
9215 const char *reason = NULL;
0dff9558 9216
4202ef11 9217 addr_space_t as = ADDR_SPACE_GENERIC;
9218
9219 gcc_assert (as == 0);
0dff9558 9220
4202ef11 9221 if (avr_log.progmem)
9222 avr_edump ("%?: %t\n", node);
0dff9558 9223
4202ef11 9224 switch (TREE_CODE (node))
9225 {
9226 default:
9227 break;
9228
9229 case VAR_DECL:
9230 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9231 reason = "variable";
9232 break;
9233
9234 case PARM_DECL:
9235 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9236 reason = "function parameter";
9237 break;
0dff9558 9238
4202ef11 9239 case FIELD_DECL:
9240 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9241 reason = "structure field";
9242 break;
0dff9558 9243
4202ef11 9244 case FUNCTION_DECL:
9245 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
9246 as)
9247 reason = "return type of function";
9248 break;
9249
9250 case POINTER_TYPE:
9251 if (as = avr_nonconst_pointer_addrspace (node), as)
9252 reason = "pointer";
9253 break;
9254 }
9255
9256 if (reason)
9257 {
c0d7a1d0 9258 if (avr_addrspace[as].segment >= avr_n_flash)
e508bf98 9259 {
9260 if (TYPE_P (node))
f0aa7fe2 9261 error ("%qT uses address space %qs beyond flash of %d KiB",
9262 node, avr_addrspace[as].name, avr_n_flash);
e508bf98 9263 else
f0aa7fe2 9264 error ("%s %q+D uses address space %qs beyond flash of %d KiB",
9265 reason, node, avr_addrspace[as].name, avr_n_flash);
e508bf98 9266 }
4202ef11 9267 else
e508bf98 9268 {
9269 if (TYPE_P (node))
9270 error ("pointer targeting address space %qs must be const in %qT",
9271 avr_addrspace[as].name, node);
9272 else
9273 error ("pointer targeting address space %qs must be const"
9274 " in %s %q+D",
9275 avr_addrspace[as].name, reason, node);
9276 }
4202ef11 9277 }
9278
9279 return reason == NULL;
9280}
9281
9282
/* Implement `TARGET_INSERT_ATTRIBUTES'.  */
/* Add the section attribute if the variable is in progmem.
   Also diagnoses progmem data that is not const or that lives in an
   address space the target device cannot reach.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
	 determination of readonlyness.  */

      do
	node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
	return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      /* Reject flash segments the device does not have.  */

      if (avr_addrspace[as].segment >= avr_n_flash)
	{
	  error ("variable %q+D located in address space %qs beyond flash "
		 "of %d KiB", node, avr_addrspace[as].name, avr_n_flash);
	}
      else if (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)
	{
	  /* __memx needs LPM; without it 3-byte pointers are unusable.  */
	  error ("variable %q+D located in address space %qs"
		 " which is not supported for architecture %qs",
		 node, avr_addrspace[as].name, avr_arch->name);
	}

      /* Read-only placement requires a const object.  */

      if (!TYPE_READONLY (node0)
	  && !TREE_READONLY (node))
	{
	  const char *reason = "__attribute__((progmem))";

	  if (!ADDR_SPACE_GENERIC_P (as))
	    reason = avr_addrspace[as].name;

	  if (avr_log.progmem)
	    avr_edump ("\n%?: %t\n%t\n", node, node0);

	  error ("variable %q+D must be const in order to be put into"
		 " read-only section by means of %qs", node, reason);
	}
    }
}
a28e4651 9337
7c2339f8 9338
9339/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
9340/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
9341/* Track need of __do_clear_bss. */
9342
9343void
a45076aa 9344avr_asm_output_aligned_decl_common (FILE * stream,
c0d7a1d0 9345 tree decl,
a45076aa 9346 const char *name,
9347 unsigned HOST_WIDE_INT size,
7c2339f8 9348 unsigned int align, bool local_p)
9349{
c0d7a1d0 9350 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
9351 rtx symbol;
9352
9353 if (mem != NULL_RTX && MEM_P (mem)
9354 && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
9355 && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
9356 {
9357
9358 if (!local_p)
9359 {
9360 fprintf (stream, "\t.globl\t");
9361 assemble_name (stream, name);
9362 fprintf (stream, "\n");
9363 }
9364 if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
9365 {
9366 assemble_name (stream, name);
9367 fprintf (stream, " = %ld\n",
9368 (long) INTVAL (avr_eval_addr_attrib (symbol)));
9369 }
9370 else if (local_p)
9371 error_at (DECL_SOURCE_LOCATION (decl),
9372 "static IO declaration for %q+D needs an address", decl);
9373 return;
9374 }
9375
5be63f82 9376 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
9377 There is no need to trigger __do_clear_bss code for them. */
9378
9379 if (!STR_PREFIX_P (name, "__gnu_lto"))
9380 avr_need_clear_bss_p = true;
7c2339f8 9381
9382 if (local_p)
21440ca3 9383 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
9384 else
9385 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7c2339f8 9386}
9387
c0d7a1d0 9388void
9389avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
9390 unsigned HOST_WIDE_INT size, int align,
9391 void (*default_func)
9392 (FILE *, tree, const char *,
9393 unsigned HOST_WIDE_INT, int))
9394{
9395 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
9396 rtx symbol;
9397
9398 if (mem != NULL_RTX && MEM_P (mem)
9399 && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
9400 && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
9401 {
9402 if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
9403 error_at (DECL_SOURCE_LOCATION (decl),
9404 "IO definition for %q+D needs an address", decl);
9405 avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
9406 }
9407 else
9408 default_func (file, decl, name, size, align);
9409}
9410
7c2339f8 9411
/* Unnamed section callback for data_section
   to track need of __do_copy_data.
   DATA is the assembler op string to emit for entering the section.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  /* Anything placed in .data must be copied from flash at startup.  */
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
9423
9424
/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.
   DATA is the assembler op string to emit for entering the section.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  /* Anything placed in .bss must be zeroed at startup.  */
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
9436
9437
/* Unnamed section callback for progmem*.data sections.
   DATA is the section name; emit the .section directive for it.  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  /* "a" (allocatable, not writable, not executable) @progbits.  */
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
	   (const char*) data);
}
9446
9447
7c2339f8 9448/* Implement `TARGET_ASM_INIT_SECTIONS'. */
2f14b1f9 9449
9450static void
9451avr_asm_init_sections (void)
9452{
c3f18f18 9453 /* Set up a section for jump tables. Alignment is handled by
9454 ASM_OUTPUT_BEFORE_CASE_LABEL. */
0dff9558 9455
c3f18f18 9456 if (AVR_HAVE_JMP_CALL)
9457 {
9458 progmem_swtable_section
9459 = get_unnamed_section (0, output_section_asm_op,
9460 "\t.section\t.progmem.gcc_sw_table"
9461 ",\"a\",@progbits");
9462 }
9463 else
9464 {
9465 progmem_swtable_section
9466 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
9467 "\t.section\t.progmem.gcc_sw_table"
9468 ",\"ax\",@progbits");
9469 }
7c2339f8 9470
853000f2 9471 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
9472 resp. `avr_need_copy_data_p'. */
0dff9558 9473
853000f2 9474 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7c2339f8 9475 data_section->unnamed.callback = avr_output_data_section_asm_op;
9476 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
9477}
9478
9479
/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */
  {
    int fdata = flag_data_sections;

    /* Temporarily let -ffunction-sections drive the per-function
       data-section decision, then restore the user's setting.  */
    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      /* Pairs of (old prefix, replacement prefix).  */
      static const char* const prefix[] =
	{
	  ".rodata", ".progmem.gcc_sw_table",
	  ".gnu.linkonce.r.", ".gnu.linkonce.t."
	};

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
	{
	  const char * old_prefix = prefix[i];
	  const char * new_prefix = prefix[i+1];
	  const char * name = frodata->named.name;

	  if (STR_PREFIX_P (name, old_prefix))
	    {
	      const char *rname = ACONCAT ((new_prefix,
					    name + strlen (old_prefix), NULL));
	      /* Jump tables are code only when dispatched with IJMP,
		 i.e. on devices without JMP/CALL.  */
	      flags &= ~SECTION_CODE;
	      flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

	      return get_section (rname, flags, frodata->named.decl);
	    }
	}
    }

  /* Fallback: the shared progmem jump-table section.  */
  return progmem_swtable_section;
}
9537
9538
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      /* The address space is encoded in the section flags,
	 see avr_section_type_flags.  */
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      const char *old_prefix = ".rodata";
      const char *new_prefix = avr_addrspace[as].section_name;

      if (STR_PREFIX_P (name, old_prefix))
	{
	  /* Rewrite .rodata* to the address-space-specific prefix.  */
	  const char *sname = ACONCAT ((new_prefix,
					name + strlen (old_prefix), NULL));
	  default_elf_asm_named_section (sname, flags, decl);
	  return;
	}

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  /* Named .data-like / .bss-like sections need startup code, too.  */

  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
			    || STR_PREFIX_P (name, ".rodata")
			    || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
9573
0dff9558 9574
9575/* Implement `TARGET_SECTION_TYPE_FLAGS'. */
9576
8a46ca38 9577static unsigned int
206a5129 9578avr_section_type_flags (tree decl, const char *name, int reloc)
8a46ca38 9579{
9580 unsigned int flags = default_section_type_flags (decl, name, reloc);
9581
53026b2c 9582 if (STR_PREFIX_P (name, ".noinit"))
8a46ca38 9583 {
9584 if (decl && TREE_CODE (decl) == VAR_DECL
9585 && DECL_INITIAL (decl) == NULL_TREE)
9586 flags |= SECTION_BSS; /* @nobits */
9587 else
c3ceba8e 9588 warning (0, "only uninitialized variables can be placed in the "
8a46ca38 9589 ".noinit section");
9590 }
9591
bf412f98 9592 if (decl && DECL_P (decl)
9d734fa8 9593 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
bf412f98 9594 {
9d734fa8 9595 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
9596
9597 /* Attribute progmem puts data in generic address space.
590da9f2 9598 Set section flags as if it was in __flash to get the right
9d734fa8 9599 section prefix in the remainder. */
5bd39e93 9600
9d734fa8 9601 if (ADDR_SPACE_GENERIC_P (as))
590da9f2 9602 as = ADDR_SPACE_FLASH;
5bd39e93 9603
9d734fa8 9604 flags |= as * SECTION_MACH_DEP;
bf412f98 9605 flags &= ~SECTION_WRITE;
4202ef11 9606 flags &= ~SECTION_BSS;
bf412f98 9607 }
0dff9558 9608
8a46ca38 9609 return flags;
9610}
9611
7c2339f8 9612
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && NULL_TREE == DECL_INITIAL (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      warning (OPT_Wuninitialized,
	       "uninitialized variable %q+D put into "
	       "program memory area", decl);
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* Stamp the address space and the io/address attribute information
     onto the SYMBOL_REF of every non-function variable.  */

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      tree type = TREE_TYPE (decl);
      tree attr = DECL_ATTRIBUTES (decl);
      if (type == error_mark_node)
	return;

      addr_space_t as = TYPE_ADDR_SPACE (type);

      /* PSTR strings are in generic space but located in flash:
	 patch address space.  */

      if (-1 == avr_progmem_p (decl, attr))
	as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);

      /* Pick the attribute that supplies the address, preferring
	 io_low/io over address.  */

      tree io_low_attr = lookup_attribute ("io_low", attr);
      tree io_attr = lookup_attribute ("io", attr);
      tree addr_attr;
      /* NOTE(review): when io_low carries the address this assigns
	 io_attr (possibly NULL_TREE), not io_low_attr — looks odd;
	 verify against upstream history before relying on it.  */
      if (io_low_attr
	  && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
	addr_attr = io_attr;
      else if (io_attr
	       && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
	addr_attr = io_attr;
      else
	addr_attr = lookup_attribute ("address", attr);
      /* io_low, or an io address in the low range, allows CBI/SBI etc.  */
      if (io_low_attr
	  || (io_attr && addr_attr
	      && low_io_address_operand
		 (GEN_INT (TREE_INT_CST_LOW
			   (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
      if (io_attr || io_low_attr)
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
      /* If we have an (io) address attribute specification, but the variable
	 is external, treat the address as only a tentative definition
	 to be used to determine if an io port is in the lower range, but
	 don't use the exact value for constant propagation.  */
      if (addr_attr && !DECL_EXTERNAL (decl))
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
    }
}
9683
9684
bf412f98 9685/* Implement `TARGET_ASM_SELECT_SECTION' */
9686
9687static section *
9688avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
9689{
9690 section * sect = default_elf_select_section (decl, reloc, align);
0dff9558 9691
bf412f98 9692 if (decl && DECL_P (decl)
9d734fa8 9693 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
bf412f98 9694 {
9d734fa8 9695 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
0558f5da 9696
9697 /* __progmem__ goes in generic space but shall be allocated to
9698 .progmem.data */
9699
9700 if (ADDR_SPACE_GENERIC_P (as))
9701 as = ADDR_SPACE_FLASH;
0dff9558 9702
bf412f98 9703 if (sect->common.flags & SECTION_NAMED)
9704 {
9705 const char * name = sect->named.name;
9706 const char * old_prefix = ".rodata";
0558f5da 9707 const char * new_prefix = avr_addrspace[as].section_name;
bf412f98 9708
9709 if (STR_PREFIX_P (name, old_prefix))
9710 {
1b6c82cc 9711 const char *sname = ACONCAT ((new_prefix,
9712 name + strlen (old_prefix), NULL));
bf412f98 9713 return get_section (sname, sect->common.flags, sect->named.decl);
9714 }
9715 }
0558f5da 9716
9717 if (!progmem_section[as])
9718 {
9719 progmem_section[as]
9720 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
9721 avr_addrspace[as].section_name);
9722 }
9723
9724 return progmem_section[as];
bf412f98 9725 }
9726
9727 return sect;
9728}
9729
7c2339f8 9730/* Implement `TARGET_ASM_FILE_START'. */
5bd39e93 9731/* Outputs some text at the start of each assembler file. */
a28e4651 9732
92c473b8 9733static void
206a5129 9734avr_file_start (void)
a28e4651 9735{
f0aa7fe2 9736 int sfr_offset = avr_arch->sfr_offset;
5bd39e93 9737
f0aa7fe2 9738 if (avr_arch->asm_only)
9739 error ("architecture %qs supported for assembler only", avr_mmcu);
235d7594 9740
92c473b8 9741 default_file_start ();
9742
72851b68 9743 /* Print I/O addresses of some SFRs used with IN and OUT. */
9744
d32d7e3a 9745 if (AVR_HAVE_SPH)
72851b68 9746 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
8c8193e1 9747
72851b68 9748 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
9749 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
0b6cf66f 9750 if (AVR_HAVE_RAMPZ)
9751 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
9752 if (AVR_HAVE_RAMPY)
9753 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
9754 if (AVR_HAVE_RAMPX)
9755 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
9756 if (AVR_HAVE_RAMPD)
9757 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
b4e6d2e2 9758 if (AVR_XMEGA || AVR_TINY)
0b6cf66f 9759 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
b4e6d2e2 9760 fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
9761 fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
a28e4651 9762}
9763
7c2339f8 9764
9765/* Implement `TARGET_ASM_FILE_END'. */
a28e4651 9766/* Outputs to the stdio stream FILE some
9767 appropriate text to go at the end of an assembler file. */
9768
f6940372 9769static void
206a5129 9770avr_file_end (void)
a28e4651 9771{
7c2339f8 9772 /* Output these only if there is anything in the
0dff9558 9773 .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
7c2339f8 9774 input section(s) - some code size can be saved by not
9775 linking in the initialization code from libgcc if resp.
0dff9558 9776 sections are empty, see PR18145. */
7c2339f8 9777
9778 if (avr_need_copy_data_p)
9779 fputs (".global __do_copy_data\n", asm_out_file);
9780
9781 if (avr_need_clear_bss_p)
9782 fputs (".global __do_clear_bss\n", asm_out_file);
a28e4651 9783}
9784
0dff9558 9785
9786/* Worker function for `ADJUST_REG_ALLOC_ORDER'. */
a28e4651 9787/* Choose the order in which to allocate hard registers for
9788 pseudo-registers local to a basic block.
9789
9790 Store the desired register order in the array `reg_alloc_order'.
9791 Element 0 should be the register to allocate first; element 1, the
9792 next register; and so on. */
9793
void
avr_adjust_reg_alloc_order (void)
{
  unsigned int i;
  /* Default order: prefer the upper call-clobbered registers, then the
     pointer registers Z (30/31), X (26/27) and Y (28/29), then the
     call-saved registers, and the fixed tmp/zero pair (0/1) last.
     Registers 32..35 are the frame/arg pointer pseudo-regs.  */
  static const int order_0[] =
    {
      24, 25,
      18, 19, 20, 21, 22, 23,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  /* AVR_TINY devices only implement r16..r31; put the nonexistent low
     registers at the very end.  */
  static const int tiny_order_0[] = {
    20, 21,
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  static const int order_1[] =
    {
      18, 19, 20, 21, 22, 23, 24, 25,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int tiny_order_1[] = {
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    21, 20, 19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  static const int order_2[] =
    {
      25, 24, 23, 22, 21, 20, 19, 18,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      1, 0,
      32, 33, 34, 35
    };

  /* Select specific register allocation order.
     Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
     so different allocation order should be used.  */
  /* NOTE(review): -morder2 on AVR_TINY falls back to tiny_order_0 — there
     is no tiny_order_2; presumably intentional, confirm.  */

  const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
                      : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
                      : (AVR_TINY ? tiny_order_0 : order_0));

  /* All orders have the same length, so order_0 serves as the bound.  */
  for (i = 0; i < ARRAY_SIZE (order_0); ++i)
    reg_alloc_order[i] = order[i];
}
9861
433a5f02 9862
dfc1e3e4 9863/* Implement `TARGET_REGISTER_MOVE_COST' */
9864
9865static int
3754d046 9866avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
dfc1e3e4 9867 reg_class_t from, reg_class_t to)
9868{
9869 return (from == STACK_REG ? 6
9870 : to == STACK_REG ? 12
9871 : 2);
9872}
9873
9874
9875/* Implement `TARGET_MEMORY_MOVE_COST' */
9876
9877static int
3754d046 9878avr_memory_move_cost (machine_mode mode,
a45076aa 9879 reg_class_t rclass ATTRIBUTE_UNUSED,
dfc1e3e4 9880 bool in ATTRIBUTE_UNUSED)
9881{
9882 return (mode == QImode ? 2
9883 : mode == HImode ? 4
9884 : mode == SImode ? 8
9885 : mode == SFmode ? 8
9886 : 16);
9887}
9888
9889
433a5f02 9890/* Mutually recursive subroutine of avr_rtx_cost for calculating the
9891 cost of an RTX operand given its context. X is the rtx of the
9892 operand, MODE is its mode, and OUTER is the rtx_code of this
9893 operand's parent operator. */
a28e4651 9894
static int
avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
                      int opno, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    case REG:
    case SUBREG:
      /* Register operands are free.  */
      return 0;

    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
      /* A constant operand costs roughly one instruction per byte of
         MODE to load.  */
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  /* Anything else: recurse into the full cost computation (mutually
     recursive with avr_rtx_costs).  */
  total = 0;
  avr_rtx_costs (x, code, outer, opno, &total, speed);
  return total;
}
9921
ae86bb47 9922/* Worker function for AVR backend's rtx_cost function.
9923 X is rtx expression whose cost is to be calculated.
9924 Return true if the complete cost has been computed.
9925 Return false if subexpressions should be scanned.
9926 In either case, *TOTAL contains the cost result. */
433a5f02 9927
/* Cost tables below encode instruction counts measured from the actual
   AVR output patterns; exact constants and case order are load-bearing.  */

static bool
avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
                 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = (enum rtx_code) codearg;
  machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      /* Immediate constants are as cheap as registers. */
      *total = 0;
      return true;

    case MEM:
      /* One instruction per byte to move to/from memory.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        case HImode:
        case PSImode:
        case SImode:
          /* Multi-byte negate: complement each byte plus carry fixups.  */
          *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ABS:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case NOT:
      /* One COM per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ZERO_EXTEND:
      /* One CLR per added byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case SIGN_EXTEND:
      /* Sign extension needs two extra insns to replicate the sign bit.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case PLUS:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL
              && MULT == GET_CODE (XEXP (x, 0))
              && register_operand (XEXP (x, 1), QImode))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 4 : 3);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          *total = COSTS_N_INSNS (1);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          break;

        case HImode:
          if (AVR_HAVE_MUL
              && (MULT == GET_CODE (XEXP (x, 0))
                  || ASHIFT == GET_CODE (XEXP (x, 0)))
              && register_operand (XEXP (x, 1), HImode)
              && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
                  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 5 : 4);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (2);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            /* Fits ADIW/SBIW immediate range.  */
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (2);
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (3);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (2);
          else
            *total = COSTS_N_INSNS (3);
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (4);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (4);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case MINUS:
      if (AVR_HAVE_MUL
          && QImode == mode
          && register_operand (XEXP (x, 0), QImode)
          && MULT == GET_CODE (XEXP (x, 1)))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 4 : 3);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      if (AVR_HAVE_MUL
          && HImode == mode
          && register_operand (XEXP (x, 0), HImode)
          && (MULT == GET_CODE (XEXP (x, 1))
              || ASHIFT == GET_CODE (XEXP (x, 1)))
          && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
              || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 5 : 4);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      /* FALLTHRU */
    case AND:
    case IOR:
      /* Plain MINUS/AND/IOR: one insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case XOR:
      /* No EOR-with-immediate insn exists, so both operands are costed.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case MULT:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL)
            *total = COSTS_N_INSNS (!speed ? 3 : 4);
          else if (!speed)
            /* Library call when there is no MUL hardware.  */
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            {
              rtx op0 = XEXP (x, 0);
              rtx op1 = XEXP (x, 1);
              enum rtx_code code0 = GET_CODE (op0);
              enum rtx_code code1 = GET_CODE (op1);
              bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
              bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;

              /* Widening multiplications are cheaper than full 16x16.  */
              if (ex0
                  && (u8_operand (op1, HImode)
                      || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
              if (ex0
                  && register_operand (op1, HImode))
                {
                  *total = COSTS_N_INSNS (!speed ? 5 : 8);
                  return true;
                }
              else if (ex0 || ex1)
                {
                  *total = COSTS_N_INSNS (!speed ? 3 : 5);
                  return true;
                }
              else if (register_operand (op0, HImode)
                       && (u8_operand (op1, HImode)
                           || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 6 : 9);
                  return true;
                }
              else
                *total = COSTS_N_INSNS (!speed ? 7 : 10);
            }
          else if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case PSImode:
          if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            /* NOTE(review): bare 10 — not wrapped in COSTS_N_INSNS like
               every other cost in this function; confirm intentional.  */
            *total = 10;
          break;

        case SImode:
          if (AVR_HAVE_MUL)
            {
              if (!speed)
                {
                  /* Add some additional costs besides CALL like moves etc. */

                  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
                }
              else
                {
                  /* Just a rough estimate.  Even with -O2 we don't want bulky
                     code expanded inline. */

                  *total = COSTS_N_INSNS (25);
                }
            }
          else
            {
              if (speed)
                *total = COSTS_N_INSNS (300);
              else
                /* Add some additional costs besides CALL like moves etc. */
                *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
            }

          return true;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* Division is always a library call on AVR.  */
      if (!speed)
        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
        *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* For div/mod with const-int divisor we have at least the cost of
         loading the divisor. */
      if (CONST_INT_P (XEXP (x, 1)))
        *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penaly for clobbering and moving around registers */
      *total += COSTS_N_INSNS (2);
      return true;

    case ROTATE:
      /* Only the rotate amounts with dedicated sequences (SWAP, byte
         moves) are given a cost here.  */
      switch (mode)
        {
        case QImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
            *total = COSTS_N_INSNS (1);

          break;

        case HImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
            *total = COSTS_N_INSNS (3);

          break;

        case SImode:
          if (CONST_INT_P (XEXP (x, 1)))
            switch (INTVAL (XEXP (x, 1)))
              {
              case 8:
              case 24:
                *total = COSTS_N_INSNS (5);
                break;
              case 16:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
                break;
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              /* Variable shift amount: loop at runtime.  */
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            {
              /* Shift of an extended value by 2..7 can use MUL by a
                 power of two.  */
              if (const_2_to_7_operand (XEXP (x, 1), HImode)
                  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
                      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
            }

          if (const1_rtx == (XEXP (x, 1))
              && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }

          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 3:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
              case 12:
                *total = COSTS_N_INSNS (5);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              case 5:
                *total = COSTS_N_INSNS (!speed ? 5 : 10);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 24:
                *total = COSTS_N_INSNS (3);
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFTRT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 6)
                *total = COSTS_N_INSNS (4);
              else if (val == 7)
                *total = COSTS_N_INSNS (2);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (2);
                break;
              case 15:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 7:
              case 8:
              case 9:
                *total = COSTS_N_INSNS (4);
                break;
              case 10:
              case 14:
                *total = COSTS_N_INSNS (5);
                break;
              case 11:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 12:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 6:
              case 13:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (3);
                break;
              case 16:
              case 8:
                *total = COSTS_N_INSNS (5);
                break;
              case 23:
                *total = COSTS_N_INSNS (4);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 31:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case LSHIFTRT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
                *total = COSTS_N_INSNS (5);
                break;
              case 3:
              case 12:
              case 13:
              case 14:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 5:
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                                speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case COMPARE:
      /* Comparison cost: one insn per byte, plus extra insns when the
         constant operand is non-zero.  Mode is taken from operand 0
         because a COMPARE itself has VOIDmode.  */
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          *total = COSTS_N_INSNS (1);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          break;

        case HImode:
          *total = COSTS_N_INSNS (2);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (1);
          break;

        case PSImode:
          *total = COSTS_N_INSNS (3);
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (2);
          break;

        case SImode:
          *total = COSTS_N_INSNS (4);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (3);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case TRUNCATE:
      /* Truncating the high part of a widening multiply maps to the
         MULx high-byte result directly.  */
      if (AVR_HAVE_MUL
          && LSHIFTRT == GET_CODE (XEXP (x, 0))
          && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
          && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
        {
          if (QImode == mode || HImode == mode)
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }
        }
      break;

    default:
      break;
    }
  return false;
}
10756
ae86bb47 10757
10758/* Implement `TARGET_RTX_COSTS'. */
10759
10760static bool
10761avr_rtx_costs (rtx x, int codearg, int outer_code,
10762 int opno, int *total, bool speed)
10763{
10764 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
10765 opno, total, speed);
10766
10767 if (avr_log.rtx_costs)
10768 {
10769 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
10770 done, speed ? "speed" : "size", *total, outer_code, x);
10771 }
10772
10773 return done;
10774}
10775
8c3bcbe3 10776
10777/* Implement `TARGET_ADDRESS_COST'. */
a28e4651 10778
ec0457a8 10779static int
3754d046 10780avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
b4ebb666 10781 addr_space_t as ATTRIBUTE_UNUSED,
10782 bool speed ATTRIBUTE_UNUSED)
a28e4651 10783{
8c3bcbe3 10784 int cost = 4;
0dff9558 10785
a28e4651 10786 if (GET_CODE (x) == PLUS
8c3bcbe3 10787 && CONST_INT_P (XEXP (x, 1))
10788 && (REG_P (XEXP (x, 0))
10789 || GET_CODE (XEXP (x, 0)) == SUBREG))
37ac04dc 10790 {
8c3bcbe3 10791 if (INTVAL (XEXP (x, 1)) >= 61)
10792 cost = 18;
37ac04dc 10793 }
8c3bcbe3 10794 else if (CONSTANT_ADDRESS_P (x))
10795 {
10796 if (optimize > 0
10797 && io_address_operand (x, QImode))
10798 cost = 2;
10799 }
10800
10801 if (avr_log.address_cost)
10802 avr_edump ("\n%?: %d = %r\n", cost, x);
0dff9558 10803
8c3bcbe3 10804 return cost;
a28e4651 10805}
10806
164f5b34 10807/* Test for extra memory constraint 'Q'.
10808 It's a memory address based on Y or Z pointer with valid displacement. */
a28e4651 10809
10810int
164f5b34 10811extra_constraint_Q (rtx x)
a28e4651 10812{
ae86bb47 10813 int ok = 0;
0dff9558 10814
164f5b34 10815 if (GET_CODE (XEXP (x,0)) == PLUS
10816 && REG_P (XEXP (XEXP (x,0), 0))
10817 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
10818 && (INTVAL (XEXP (XEXP (x,0), 1))
10819 <= MAX_LD_OFFSET (GET_MODE (x))))
a28e4651 10820 {
164f5b34 10821 rtx xx = XEXP (XEXP (x,0), 0);
10822 int regno = REGNO (xx);
0dff9558 10823
ae86bb47 10824 ok = (/* allocate pseudos */
10825 regno >= FIRST_PSEUDO_REGISTER
10826 /* strictly check */
10827 || regno == REG_Z || regno == REG_Y
10828 /* XXX frame & arg pointer checks */
10829 || xx == frame_pointer_rtx
10830 || xx == arg_pointer_rtx);
0dff9558 10831
ae86bb47 10832 if (avr_log.constraints)
10833 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
10834 ok, reload_completed, reload_in_progress, x);
a28e4651 10835 }
ae86bb47 10836
10837 return ok;
a28e4651 10838}
10839
20c71901 10840/* Convert condition code CONDITION to the valid AVR condition code. */
a28e4651 10841
10842RTX_CODE
206a5129 10843avr_normalize_condition (RTX_CODE condition)
a28e4651 10844{
10845 switch (condition)
10846 {
10847 case GT:
10848 return GE;
10849 case GTU:
10850 return GEU;
10851 case LE:
10852 return LT;
10853 case LEU:
10854 return LTU;
10855 default:
8ef66241 10856 gcc_unreachable ();
a28e4651 10857 }
10858}
10859
cffa155c 10860/* Helper function for `avr_reorg'. */
10861
10862static rtx
375204de 10863avr_compare_pattern (rtx_insn *insn)
cffa155c 10864{
10865 rtx pattern = single_set (insn);
10866
10867 if (pattern
10868 && NONJUMP_INSN_P (insn)
10869 && SET_DEST (pattern) == cc0_rtx
017c5b98 10870 && GET_CODE (SET_SRC (pattern)) == COMPARE)
cffa155c 10871 {
3754d046 10872 machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
10873 machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
017c5b98 10874
10875 /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
10876 They must not be swapped, thus skip them. */
10877
10878 if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
10879 && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
10880 return pattern;
cffa155c 10881 }
10882
10883 return NULL_RTX;
10884}
10885
/* Helper function for `avr_reorg'.  */

/* Expansion of switch/case decision trees leads to code like

       cc0 = compare (Reg, Num)
       if (cc0 == 0)
         goto L1

       cc0 = compare (Reg, Num)
       if (cc0 > 0)
         goto L2

   The second comparison is superfluous and can be deleted.
   The second jump condition can be transformed from a
   "difficult" one to a "simple" one because "cc0 > 0" and
   "cc0 >= 0" will have the same effect here.

   This function relies on the way switch/case is being expanded
   as binary decision tree.  For example code see PR 49903.

   Return TRUE if optimization performed.
   Return FALSE if nothing changed.

   INSN1 is a comparison, i.e. avr_compare_pattern != 0.

   We don't want to do this in text peephole because it is
   tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.

   RTL peephole won't do because peephole2 does not scan across
   basic blocks.  */

static bool
avr_reorg_remove_redundant_compare (rtx_insn *insn1)
{
  rtx comp1, ifelse1, xcond1;
  rtx_insn *branch1;
  rtx comp2, ifelse2, xcond2;
  rtx_insn *branch2, *insn2;
  enum rtx_code code;
  rtx_insn *jump;
  rtx target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both comparisons must be identical and both branches must be
     single-set conditional jumps through cc0.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT: case LTU:
    case GE: case GEU:
      break;

    case LE: case LEU:
    case GT: case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
11057
11058
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
/* Optimize conditional jumps.  Walk all real insns; for each cc0
   comparison, first try to delete a redundant duplicate compare and
   otherwise try to turn a "difficult" branch into a "simple" one by
   swapping or adjusting the compare operands in place.  */

static void
avr_reorg (void)
{
  rtx_insn *insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
        {
          /* Now we work under compare insn with difficult branch.  */

          rtx_insn *next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* Both operands are registers:  swap them and reverse
                 the condition of the following branch.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* Register compared against a constant:  e.g. turn
                 x > VAL into x >= VAL+1 when that is valid for the
                 mode, so the branch becomes a simple one.  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
11132
/* Returns register number for function return value.*/
/* The returned value ends in R25:  e.g. an SImode value occupies
   R22..R25, cf. avr_libcall_value which computes the start register
   from this base.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
11140
0dff9558 11141
/* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */
/* Return true iff REGNO is the base register for function return
   values, cf. avr_ret_register.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return (regno == avr_ret_register ());
}
11149
0dff9558 11150
11151/* Implement `TARGET_LIBCALL_VALUE'. */
f2b32076 11152/* Create an RTX representing the place where a
a28e4651 11153 library function returns a value of mode MODE. */
11154
73475e84 11155static rtx
3754d046 11156avr_libcall_value (machine_mode mode,
73475e84 11157 const_rtx func ATTRIBUTE_UNUSED)
a28e4651 11158{
11159 int offs = GET_MODE_SIZE (mode);
0dff9558 11160
02d9a2c3 11161 if (offs <= 4)
11162 offs = (offs + 1) & ~1;
0dff9558 11163
73475e84 11164 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
a28e4651 11165}
11166
0dff9558 11167
11168/* Implement `TARGET_FUNCTION_VALUE'. */
a28e4651 11169/* Create an RTX representing the place where a
11170 function returns a value of data type VALTYPE. */
11171
73475e84 11172static rtx
1086ba15 11173avr_function_value (const_tree type,
11174 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
11175 bool outgoing ATTRIBUTE_UNUSED)
a28e4651 11176{
1cb39658 11177 unsigned int offs;
73475e84 11178
a28e4651 11179 if (TYPE_MODE (type) != BLKmode)
1086ba15 11180 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
0dff9558 11181
a28e4651 11182 offs = int_size_in_bytes (type);
11183 if (offs < 2)
11184 offs = 2;
11185 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
11186 offs = GET_MODE_SIZE (SImode);
11187 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
11188 offs = GET_MODE_SIZE (DImode);
0dff9558 11189
73475e84 11190 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
a28e4651 11191}
11192
8a2a7305 11193int
8deb3959 11194test_hard_reg_class (enum reg_class rclass, rtx x)
8a2a7305 11195{
11196 int regno = true_regnum (x);
11197 if (regno < 0)
11198 return 0;
cb39cd35 11199
8deb3959 11200 if (TEST_HARD_REG_CLASS (rclass, regno))
cb39cd35 11201 return 1;
11202
11203 return 0;
8a2a7305 11204}
11205
2ebcbfe8 11206
/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx_insn *insn)
{
  /* On devices with the skip bug, never report a skippable 2-word
     insn; also bail out if there is no insn or it is not 2 words.  */
  if (TARGET_SKIP_BUG
      || !insn
      || 2 != get_attr_length (insn))
    {
      return false;
    }

  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
    case CODE_FOR_movuqq_insn:
    case CODE_FOR_movqq_insn:
      {
        rtx set = single_set (insn);
        rtx src = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn:  only moves to or
           from a constant address are 2-word instructions here.  */

        if (MEM_P (dest)
            && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
          {
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          }
        else if (REG_P (dest)
                 && MEM_P (src))
          {
            return CONSTANT_ADDRESS_P (XEXP (src, 0));
          }

        return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      return true;
    }
}
11254
11255
2ebcbfe8 11256int
375204de 11257jump_over_one_insn_p (rtx_insn *insn, rtx dest)
2ebcbfe8 11258{
11259 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
11260 ? XEXP (dest, 0)
11261 : dest);
47fc0706 11262 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
11263 int dest_addr = INSN_ADDRESSES (uid);
8ef28ef2 11264 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
0dff9558 11265
8ef28ef2 11266 return (jump_offset == 1
11267 || (jump_offset == 2
11268 && avr_2word_insn_p (next_active_insn (insn))));
2ebcbfe8 11269}
a7690ba9 11270
0dff9558 11271
11272/* Worker function for `HARD_REGNO_MODE_OK'. */
a7690ba9 11273/* Returns 1 if a value of mode MODE can be stored starting with hard
0af74aa0 11274 register number REGNO. On the enhanced core, anything larger than
11275 1 byte must start in even numbered register for "movw" to work
11276 (this way we don't have to check for odd registers everywhere). */
a7690ba9 11277
11278int
3754d046 11279avr_hard_regno_mode_ok (int regno, machine_mode mode)
a7690ba9 11280{
3d4d979d 11281 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
11282 Disallowing QI et al. in these regs might lead to code like
11283 (set (subreg:QI (reg:HI 28) n) ...)
11284 which will result in wrong code because reload does not
11285 handle SUBREGs of hard regsisters like this.
11286 This could be fixed in reload. However, it appears
11287 that fixing reload is not wanted by reload people. */
0dff9558 11288
3d4d979d 11289 /* Any GENERAL_REGS register can hold 8-bit values. */
0dff9558 11290
3d4d979d 11291 if (GET_MODE_SIZE (mode) == 1)
a7690ba9 11292 return 1;
60d76236 11293
3d4d979d 11294 /* FIXME: Ideally, the following test is not needed.
11295 However, it turned out that it can reduce the number
11296 of spill fails. AVR and it's poor endowment with
11297 address registers is extreme stress test for reload. */
0dff9558 11298
3d4d979d 11299 if (GET_MODE_SIZE (mode) >= 4
11300 && regno >= REG_X)
60d76236 11301 return 0;
11302
3d4d979d 11303 /* All modes larger than 8 bits should start in an even register. */
0dff9558 11304
a7690ba9 11305 return !(regno & 1);
11306}
e511e253 11307
e511e253 11308
f55cb01e 11309/* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
11310
11311int
3754d046 11312avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
f55cb01e 11313{
11314 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
11315 represent valid hard registers like, e.g. HI:29. Returning TRUE
11316 for such registers can lead to performance degradation as mentioned
11317 in PR53595. Thus, report invalid hard registers as FALSE. */
0dff9558 11318
f55cb01e 11319 if (!avr_hard_regno_mode_ok (regno, mode))
11320 return 0;
0dff9558 11321
f55cb01e 11322 /* Return true if any of the following boundaries is crossed:
ed0ab386 11323 17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30. */
0dff9558 11324
ed0ab386 11325 return ((regno <= LAST_CALLEE_SAVED_REG &&
11326 regno + GET_MODE_SIZE (mode) > (LAST_CALLEE_SAVED_REG + 1))
f55cb01e 11327 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
11328 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
11329}
11330
11331
8b0ecac5 11332/* Implement `MODE_CODE_BASE_REG_CLASS'. */
11333
63477dcc 11334enum reg_class
3754d046 11335avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
4202ef11 11336 addr_space_t as, RTX_CODE outer_code,
8b0ecac5 11337 RTX_CODE index_code ATTRIBUTE_UNUSED)
11338{
4202ef11 11339 if (!ADDR_SPACE_GENERIC_P (as))
11340 {
11341 return POINTER_Z_REGS;
11342 }
0dff9558 11343
f9efb148 11344 if (!avr_strict_X)
11345 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
11346
11347 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8b0ecac5 11348}
11349
11350
/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */
/* Return true if hard (or renumbered) register REGNO may serve as a
   base register for an address in space AS inside OUTER_CODE.  */

bool
avr_regno_mode_code_ok_for_base_p (int regno,
                                   machine_mode mode ATTRIBUTE_UNUSED,
                                   addr_space_t as ATTRIBUTE_UNUSED,
                                   RTX_CODE outer_code,
                                   RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  bool ok = false;

  /* Non-generic address spaces can only be accessed through Z.  */
  if (!ADDR_SPACE_GENERIC_P (as))
    {
      if (regno < FIRST_PSEUDO_REGISTER
          && regno == REG_Z)
        {
          return true;
        }

      /* Pseudo:  look at the hard register it has been assigned to.  */
      if (reg_renumber)
        {
          regno = reg_renumber[regno];

          if (regno == REG_Z)
            {
              return true;
            }
        }

      return false;
    }

  /* Generic address space:  X, Y, Z or the argument pointer qualify.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM))
    {
      ok = true;
    }
  else if (reg_renumber)
    {
      regno = reg_renumber[regno];

      if (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM)
        {
          ok = true;
        }
    }

  /* With -mstrict-X, X is not a base register for addresses with
     displacement.  */
  if (avr_strict_X
      && PLUS == outer_code
      && regno == REG_X)
    {
      ok = false;
    }

  return ok;
}
11413
11414
/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
/* Set 32-bit register OP[0] to compile-time constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.
   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   The effect on cc0 is as follows:

   Load 0 to any register except ZERO_REG : NONE
   Load ld register with any value        : NONE
   Anything else:                         : CLOBBER  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  bool set_p = false;
  machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Symbolic constants:  emit lo8/hi8/hlo8/hhi8 of the value,
         through the clobber register unless DEST byte is an ld reg.  */

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
              { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The clobber register already holds this very byte value.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
11644
11645
/* Reload the constant OP[1] into the HI register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
                 by the insns printed.

   Return "".  */

const char*
output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
{
  /* OP[0] is not known to be cleared:  CLEAR_P = false.  */
  output_reload_in_const (op, clobber_reg, plen, false);
  return "";
}
11663
11664
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR R3   CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
b681d971 11725
/* Reload the PSImode constant OP[1] into register OP[0].
   CLOBBER_REG and LEN have the same meaning as for
   output_reload_in_const.  Return "".  */

const char*
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
{
  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
11732
b681d971 11733
/* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'.  */
/* Emit one jump-table element for label number VALUE to STREAM:
   a word-sized gs() reference on devices with JMP/CALL, an RJMP
   otherwise.  */

void
avr_output_addr_vec_elt (FILE *stream, int value)
{
  if (AVR_HAVE_JMP_CALL)
    fprintf (stream, "\t.word gs(.L%d)\n", value);
  else
    fprintf (stream, "\trjmp .L%d\n", value);
}
11744
/* Adjust fixed/call-used registers and the register allocation order
   for AVR_TINY devices; no-op for all other cores.  */

static void
avr_conditional_register_usage(void)
{
  if (AVR_TINY)
    {
      unsigned int i;

      const int tiny_reg_alloc_order[] = {
        24, 25,
        22, 23,
        30, 31,
        26, 27,
        28, 29,
        21, 20, 19, 18,
        16, 17,
        32, 33, 34, 35,
        15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
      };

      /* Set R0-R17 as fixed registers.  Reset R0-R17 in call used register
         list
         - R0-R15 are not available in Tiny Core devices
         - R16 and R17 are fixed registers.  */

      for (i = 0; i <= 17; i++)
        {
          fixed_regs[i] = 1;
          call_used_regs[i] = 1;
        }

      /* Set R18 to R21 as callee saved registers
         - R18, R19, R20 and R21 are the callee saved registers in
           Tiny Core devices  */

      for (i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
        {
          call_used_regs[i] = 0;
        }

      /* Update register allocation order for Tiny Core devices  */

      for (i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
        {
          reg_alloc_order[i] = tiny_reg_alloc_order[i];
        }

      /* Empty these classes:  no ADDW / NO_LD registers on Tiny cores.  */

      CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
      CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
    }
}
0dff9558 11794
/* Implement `TARGET_HARD_REGNO_SCRATCH_OK'.  */
/* Return true if register REGNO is safe to allocate as a scratch
   register (for a define_peephole2) in the current function.  */

static bool
avr_hard_regno_scratch_ok (unsigned int regno)
{
  /* Interrupt functions can only use registers that have already been saved
     by the prologue, even if they would normally be call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (regno))
    return false;

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (regno == REG_Y || regno == REG_Y + 1))
    {
      return false;
    }

  return true;
}
4af90ac0 11821
0dff9558 11822
/* Worker function for `HARD_REGNO_RENAME_OK'.  */
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

int
avr_hard_regno_rename_ok (unsigned int old_reg,
                          unsigned int new_reg)
{
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (new_reg))
    return 0;

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (old_reg == REG_Y || old_reg == REG_Y + 1
          || new_reg == REG_Y || new_reg == REG_Y + 1))
    {
      return 0;
    }

  return 1;
}
11851
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE / LT are reduced to plain bit tests against zero.  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  /* Skipping over a long jump (or the next insn) inverts the sense.  */
  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:
    case CONST:
    case SYMBOL_REF:

      if (low_io_address_operand (operands[1], QImode))
        {
          /* Low I/O range: SBIS/SBIC can test the bit in place.  */
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          /* Other I/O addresses: read into __tmp_reg__ first, then
             use register skip instructions.  */
          gcc_assert (io_address_operand (operands[1], QImode));
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
9aa7484c 11922
/* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  /* Reference __do_global_ctors so the linker keeps the startup code
     that runs the constructor table (presumably supplied by libgcc).  */
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
11931
0dff9558 11932
/* Worker function for `TARGET_ASM_DESTRUCTOR'.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  /* Reference __do_global_dtors so the linker keeps the startup code
     that runs the destructor table (presumably supplied by libgcc).  */
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
11941
0dff9558 11942
11943/* Worker function for `TARGET_RETURN_IN_MEMORY'. */
6644435d 11944
cfd55026 11945static bool
fb80456a 11946avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
cfd55026 11947{
b4e6d2e2 11948 HOST_WIDE_INT size = int_size_in_bytes (type);
1a96adb9 11949 HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
b4e6d2e2 11950
1a96adb9 11951 /* In avr, there are 8 return registers. But, for Tiny Core
11952 (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
11953 Return true if size is unknown or greater than the limit. */
11954
11955 if (size == -1 || size > ret_size_limit)
11956 {
11957 return true;
11958 }
39cc9599 11959 else
1a96adb9 11960 {
11961 return false;
11962 }
cfd55026 11963}
11964
6d8ed506 11965
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  const unsigned int break_even = 7;

  return break_even;
}
11980
4202ef11 11981
/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */

static machine_mode
avr_addr_space_address_mode (addr_space_t as)
{
  /* Address spaces with 3-byte pointers use PSImode, all others HImode.  */
  return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
}
11989
11990
/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */

static machine_mode
avr_addr_space_pointer_mode (addr_space_t as)
{
  /* Pointer mode and address mode coincide for all AVR address spaces.  */
  return avr_addr_space_address_mode (as);
}
11998
11999
/* Helper for following function.  */
/* Return TRUE if REG is acceptable as base register of a flash access.
   In strict mode, only the Z register qualifies.  */

static bool
avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
{
  gcc_assert (REG_P (reg));

  if (strict)
    {
      return REGNO (reg) == REG_Z;
    }

  /* Avoid combine to propagate hard regs: while pseudos can still be
     created, reject hard registers below Z.  */

  if (can_create_pseudo_p()
      && REGNO (reg) < REG_Z)
    {
      return false;
    }

  return true;
}
12022
12023
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
/* Return TRUE if X is a legitimate address for address space AS in MODE.
   Generic space defers to avr_legitimate_address_p; the flash spaces
   accept (Z) and (Z+); __memx accepts a pseudo or a LO_SUM with Z.  */

static bool
avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional debug dump controlled by -mlog=legitimate_address_p.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
12104
12105
/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */

static rtx
avr_addr_space_legitimize_address (rtx x, rtx old_x,
                                   machine_mode mode, addr_space_t as)
{
  if (ADDR_SPACE_GENERIC_P (as))
    return avr_legitimize_address (x, old_x, mode);

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
    }

  /* Non-generic spaces: nothing to improve, keep the old address.  */
  return old_x;
}
12122
12123
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */
/* Convert pointer SRC from the address space of TYPE_FROM to that of
   TYPE_TO.  Casts to/from the 24-bit __memx space need code; all other
   conversions just reuse SRC.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST / PLUS wrappers to find an underlying symbol.  */
      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Conversions between 16-bit spaces: nothing to do.  */

  return src;
}
12191
12192
/* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */

static bool
avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
                         addr_space_t superset ATTRIBUTE_UNUSED)
{
  /* Allow any kind of pointer mess.  */

  return true;
}
12203
12204
/* Implement `TARGET_CONVERT_TO_TYPE'.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnostic for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
     be located in the right memory, like in

        (const __flash*) PSTR ("text")

     Also try to distinguish between explicit casts requested by
     the user and implicit casts like

        void f (const __flash char*);

        void g (const char *p)
        {
          f ((const __flash*) p);
        }

     under the assumption that an explicit casts means that the user
     knows what he is doing, e.g. interface with PSTR or old style
     code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* No warning when converting to __memx or when the spaces match.  */
      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  /* NULL_TREE requests the default conversion.  */
  return NULL_TREE;
}
12263
12264
/* PR63633: The middle-end might come up with hard regs as input operands.

   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   For each element of OPMASK which is a hard register overlapping RMASK,
   replace OP[n] with a newly created pseudo register

   HREG == 0:  Also emit a move insn that copies the contents of that
               hard register into the new pseudo.

   HREG != 0:  Also set HREG[n] to the hard register.  */

static void
avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
{
  for (; opmask; opmask >>= 1, op++)
    {
      rtx reg = *op;

      if (hreg)
        *hreg = NULL_RTX;

      if ((opmask & 1)
          && REG_P (reg)
          && REGNO (reg) < FIRST_PSEUDO_REGISTER
          // This hard-reg overlaps other prohibited hard regs?
          && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
        {
          *op = gen_reg_rtx (GET_MODE (reg));
          if (hreg == NULL)
            emit_move_insn (*op, reg);
          else
            *hreg = reg;
        }

      if (hreg)
        hreg++;
    }
}
12307
12308
/* Convenience wrapper around avr_fix_operands for input operands:
   hard registers in OP overlapping RMASK are copied to fresh pseudos.  */

void
avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
{
  avr_fix_operands (op, NULL, opmask, rmask);
}
12314
12315
/* Helper for avr_emit3_fix_outputs below:  If bit n of MASK is set and
   HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
   Otherwise do nothing for that n.  Return TRUE.  */

static bool
avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
{
  for (; mask; mask >>= 1, op++, hreg++)
    if ((mask & 1)
        && *hreg)
      emit_move_insn (*hreg, *op);

  return true;
}
12330
12331
/* PR63633: The middle-end might come up with hard regs as output operands.

   GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   Emit the insn sequence as generated by GEN() with all elements of OPMASK
   which are hard registers overlapping RMASK replaced by newly created
   pseudo registers.  After the sequence has been emitted, emit insns that
   move the contents of respective pseudos to their hard regs.  */

bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
                       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[n];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */
  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  return avr_move_fixed_operands (op, hreg, opmask);
}
12372
12373
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Copying to flash is not supported.  */
  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only fixed-size copies with a positive byte count are expanded.  */
  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: only __memx uses PSImode pointers.  */
      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_n_flash > 1)
        {
          /* Select the 64 KiB flash segment via RAMPZ.  */
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
     on its own.  Thus, we allocate the pointer registers by hand:
     Z = source address
     X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
     register(s) inside the loop leading to additional move instruction
     to/from stack which could clobber tmp_reg.  Thus, do *not* emit
     load and store as separate insns.  Instead, we perform the copy
     by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* __memx: the load instruction is selected at run time, depending
         on the high byte of the address; pass it in R23.  */

      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
12479
12480
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   If PLEN is non-NULL, only count the instructions into *PLEN instead
   of printing them (the usual avr_asm_len convention).  */

const char*
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  machine_mode loop_mode = GET_MODE (op[1]);
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        /* Plain LPM has no post-increment: bump Z by hand.  */
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
12566
12567
12568\f
/* Helper for __builtin_avr_delay_cycles */
/* Return a volatile BLK-mode scratch MEM, used as memory clobber
   operand so the delay insns are not moved across memory accesses.  */

static rtx
avr_mem_clobber (void)
{
  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (mem) = 1;
  return mem;
}
12578
/* Expand a delay of OPERANDS0 cycles for __builtin_avr_delay_cycles:
   greedily emit the widest applicable delay loop first, then narrower
   ones, and pad the remainder with 2-cycle and 1-cycle NOP insns.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit loop: 6 cycles per iteration, 9 cycles overhead.  */
  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit loop: 5 cycles per iteration, 7 cycles overhead.  */
  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit loop: 4 cycles per iteration, 5 cycles overhead.  */
  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit loop: 3 cycles per iteration.  */
  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Pad the remaining 0...5 cycles with NOPs.  */
  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
12640
384f6361 12641
/* Compute the image of X under F, i.e. perform x --> f(x).

   F encodes a map over the positions { 0 ... 7 } as 8 nibbles: nibble N
   of F is the value the map assigns to position N.  Positions outside
   { 0 ... 7 } map to 0.

   Note: the previous `x < 8` test let negative X reach the shift below,
   which is a negative shift count and thus undefined behavior; guard
   both ends of the range instead.  All current callers pass 0...7.  */

static int
avr_map (unsigned int f, int x)
{
  if (x < 0 || x > 7)
    return 0;

  return (f >> (4 * x)) & 0xf;
}
12649
12650
/* Return some metrics of map A.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };

/* Compute metric MODE (one of the enum values above) of map A by
   scanning its 8 nibbles through avr_map.  */

static unsigned
avr_map_metric (unsigned int a, int mode)
{
  unsigned i, metric = 0;

  for (i = 0; i < 8; i++)
    {
      unsigned ai = avr_map (a, i);

      if (mode == MAP_FIXED_0_7)
        metric += ai == i;
      else if (mode == MAP_NONFIXED_0_7)
        metric += ai < 8 && ai != i;
      else if (mode == MAP_MASK_FIXED_0_7)
        metric |= ((unsigned) (ai == i)) << i;
      else if (mode == MAP_PREIMAGE_0_7)
        metric += ai < 8;
      else if (mode == MAP_MASK_PREIMAGE_F)
        metric |= ((unsigned) (ai == 0xf)) << i;
      else
        gcc_unreachable();
    }

  return metric;
}
12696
12697
/* Return true if IVAL has a 0xf in its hexadecimal representation
   and false, otherwise.  Only nibbles 0..7 are taken into account.
   Used as constraint helper for C0f and Cxf.  */

bool
avr_has_nibble_0xf (rtx ival)
{
  unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
  return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
}
384f6361 12708
384f6361 12709
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G) < cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;

/* Catalogue of candidate operations G -- rotates and shifts together
   with their inverse maps and costs.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
12764
12765
15b84087 12766/* Try to decompose F as F = (F o G^-1) o G as described above.
12767 The result is a struct representing F o G^-1 and G.
12768 If result.cost < 0 then such a decomposition does not exist. */
0dff9558 12769
15b84087 12770static avr_map_op_t
7b5733e8 12771avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
384f6361 12772{
15b84087 12773 int i;
12774 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
12775 avr_map_op_t f_ginv = *g;
7b5733e8 12776 unsigned int ginv = g->ginv;
384f6361 12777
15b84087 12778 f_ginv.cost = -1;
0dff9558 12779
15b84087 12780 /* Step 1: Computing F o G^-1 */
384f6361 12781
15b84087 12782 for (i = 7; i >= 0; i--)
12783 {
12784 int x = avr_map (f, i);
0dff9558 12785
15b84087 12786 if (x <= 7)
12787 {
12788 x = avr_map (ginv, x);
384f6361 12789
15b84087 12790 /* The bit is no element of the image of G: no avail (cost = -1) */
0dff9558 12791
15b84087 12792 if (x > 7)
12793 return f_ginv;
12794 }
0dff9558 12795
7b5733e8 12796 f_ginv.map = (f_ginv.map << 4) + x;
15b84087 12797 }
384f6361 12798
15b84087 12799 /* Step 2: Compute the cost of the operations.
12800 The overall cost of doing an operation prior to the insertion is
12801 the cost of the insertion plus the cost of the operation. */
384f6361 12802
15b84087 12803 /* Step 2a: Compute cost of F o G^-1 */
384f6361 12804
15b84087 12805 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
12806 {
12807 /* The mapping consists only of fixed points and can be folded
12808 to AND/OR logic in the remainder. Reasonable cost is 3. */
384f6361 12809
15b84087 12810 f_ginv.cost = 2 + (val_used_p && !val_const_p);
12811 }
12812 else
12813 {
12814 rtx xop[4];
384f6361 12815
15b84087 12816 /* Get the cost of the insn by calling the output worker with some
12817 fake values. Mimic effect of reloading xop[3]: Unused operands
12818 are mapped to 0 and used operands are reloaded to xop[0]. */
384f6361 12819
15b84087 12820 xop[0] = all_regs_rtx[24];
7b5733e8 12821 xop[1] = gen_int_mode (f_ginv.map, SImode);
15b84087 12822 xop[2] = all_regs_rtx[25];
12823 xop[3] = val_used_p ? xop[0] : const0_rtx;
0dff9558 12824
15b84087 12825 avr_out_insert_bits (xop, &f_ginv.cost);
0dff9558 12826
15b84087 12827 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
12828 }
0dff9558 12829
15b84087 12830 /* Step 2b: Add cost of G */
384f6361 12831
15b84087 12832 f_ginv.cost += g->cost;
384f6361 12833
15b84087 12834 if (avr_log.builtin)
12835 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
12836
12837 return f_ginv;
384f6361 12838}
12839
12840
15b84087 12841/* Insert bits from XOP[1] into XOP[0] according to MAP.
12842 XOP[0] and XOP[1] don't overlap.
12843 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
12844 If FIXP_P = false: Just move the bit if its position in the destination
12845 is different to its source position. */
384f6361 12846
12847static void
7b5733e8 12848avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
384f6361 12849{
15b84087 12850 int bit_dest, b;
384f6361 12851
12852 /* T-flag contains this bit of the source, i.e. of XOP[1] */
12853 int t_bit_src = -1;
12854
384f6361 12855 /* We order the operations according to the requested source bit b. */
0dff9558 12856
15b84087 12857 for (b = 0; b < 8; b++)
12858 for (bit_dest = 0; bit_dest < 8; bit_dest++)
384f6361 12859 {
12860 int bit_src = avr_map (map, bit_dest);
0dff9558 12861
384f6361 12862 if (b != bit_src
15b84087 12863 || bit_src >= 8
12864 /* Same position: No need to copy as requested by FIXP_P. */
12865 || (bit_dest == bit_src && !fixp_p))
384f6361 12866 continue;
12867
12868 if (t_bit_src != bit_src)
12869 {
12870 /* Source bit is not yet in T: Store it to T. */
0dff9558 12871
384f6361 12872 t_bit_src = bit_src;
12873
15b84087 12874 xop[3] = GEN_INT (bit_src);
12875 avr_asm_len ("bst %T1%T3", xop, plen, 1);
384f6361 12876 }
12877
12878 /* Load destination bit with T. */
0dff9558 12879
15b84087 12880 xop[3] = GEN_INT (bit_dest);
12881 avr_asm_len ("bld %T0%T3", xop, plen, 1);
384f6361 12882 }
12883}
12884
12885
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]: Result
   OP[1]: The mapping composed of nibbles.  If nibble no. N is
          0:   Bit N of result is copied from bit OP[2].0
          ...  ...
          7:   Bit N of result is copied from bit OP[2].7
          0xf: Bit N of result is copied from bit OP[3].N
   OP[2]: Bits to be inserted
   OP[3]: Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Reorder for avr_move_bits: XOP[1] = source bits, XOP[2] = target
     value; the map is passed separately.  XOP[3] is a scratch slot.  */
  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* If omitting the fixed points saves more than 3 instructions,
         merge the fixed bits into the result by masking up front and
         only move the non-fixed bits afterwards.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1" CR_TAB
                       "andi %0,%3" CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
12972
12973
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
    /* Expand builtins.def into one consecutive enumerator per built-in
       so an ID can directly index avr_bdesc[] below.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };
12985
/* Description of one AVR built-in function.  */

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;  /* Insn used to expand it, or CODE_FOR_nothing.  */
  int n_args;            /* Number of arguments of the built-in.  */
  tree fndecl;           /* Declaration, filled in by avr_init_builtins.  */
};
12992
12993
/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID] */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
    /* fndecl starts out NULL_TREE; avr_init_builtins stores the
       declarations later.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
13006
13007
13008/* Implement `TARGET_BUILTIN_DECL'. */
13009
13010static tree
13011avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13012{
13013 if (id < AVR_BUILTIN_COUNT)
13014 return avr_bdesc[id].fndecl;
13015
13016 return error_mark_node;
13017}
13018
13019
02d9a2c3 13020static void
13021avr_init_builtin_int24 (void)
13022{
13023 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
13024 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
13025
d0acfa59 13026 lang_hooks.types.register_builtin_type (int24_type, "__int24");
13027 lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
02d9a2c3 13028}
13029
58cf0091 13030
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  Builds the function
   types needed by builtins.def, registers every built-in listed there,
   and records the resulting declarations in avr_bdesc[].  */

static void
avr_init_builtins (void)
{
  /* Function types for the ordinary (non fixed-point) built-ins.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* Type of a const void* in the __memx address space, used by the
     flash-read built-in.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* Helper macros that build the function types needed by the
     fixed-point built-ins (absfx, roundfx, countlsfx, bitsfx, fxbits)
     for each fract / accum flavor FX.  ITYP maps a fixed-point type to
     the integer type of the same precision and signedness.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Short names for the fract (r) and accum (k) type nodes.  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register each built-in from builtins.def under its lower-cased
     "__builtin_avr_" name and remember the decl in avr_bdesc[].  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
13211
c5be380e 13212
/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.  ICODE is the insn code
   to emit, EXP the CALL_EXPR, TARGET a suggestion for the result rtx.
   Returns the rtx holding the result, or NULL_RTX if the generator
   produced no pattern.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n, n_args = call_expr_nargs (exp);
  machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  /* Get a fresh pseudo if TARGET is absent or not acceptable to the
     insn's output operand predicate.  */

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  for (n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      machine_mode opmode = GET_MODE (op);
      machine_mode mode = insn_data[icode].operand[n+1].mode;

      /* Accept an int argument (SImode or a VOIDmode constant) where
         the insn wants HImode by taking its low part.  */

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      if (!insn_data[icode].operand[n+1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
13273
c5be380e 13274
/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  /* Built-ins that need special treatment; all others fall through to
     the vanilla expansion below.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        /* Only diagnose a non-constant map here; the expansion itself
           is done by the vanilla path below.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR: case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR: case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR: case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR: case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK: case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK: case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK: case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK: case AVR_BUILTIN_ROUNDULLK:

      /* Warn about odd rounding.  Rounding points >= FBIT will have
         no effect.  */

      if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
        break;

      int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

      if (rbit >= (int) GET_MODE_FBIT (mode))
        {
          warning (OPT_Wextra, "rounding to %d bits has no effect for "
                   "fixed-point value with %d fractional bits",
                   rbit, GET_MODE_FBIT (mode));

          return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                              EXPAND_NORMAL);
        }
      else if (rbit <= - (int) GET_MODE_IBIT (mode))
        {
          warning (0, "rounding result will always be 0");
          return CONST0_RTX (mode);
        }

      /* The rounding points RP satisfies now:  -IBIT < RP < FBIT.

         TR 18037 only specifies results for RP > 0.  However, the
         remaining cases of -IBIT < RP <= 0 can easily be supported
         without any additional overhead.  */

      break; /* round */
    }

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
13394
15b84087 13395
f3297245 13396/* Helper for `avr_fold_builtin' that folds absfx (FIXED_CST). */
13397
13398static tree
13399avr_fold_absfx (tree tval)
13400{
13401 if (FIXED_CST != TREE_CODE (tval))
13402 return NULL_TREE;
13403
13404 /* Our fixed-points have no padding: Use double_int payload directly. */
13405
13406 FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
13407 unsigned int bits = GET_MODE_BITSIZE (fval.mode);
13408 double_int ival = fval.data.sext (bits);
13409
13410 if (!ival.is_negative())
13411 return tval;
13412
13413 /* ISO/IEC TR 18037, 7.18a.6.2: The absfx functions are saturating. */
13414
13415 fval.data = (ival == double_int::min_value (bits, false).sext (bits))
13416 ? double_int::max_value (bits, false)
13417 : -ival;
13418
13419 return build_fixed (TREE_TYPE (tval), fval);
13420}
13421
13422
/* Implement `TARGET_FOLD_BUILTIN'.  */
/* Fold calls to AVR built-ins at the tree level where possible:
   __builtin_avr_swap becomes a rotate, absfx folds constants, bitsfx /
   fxbits become view-converts, and insert_bits is simplified or
   strength-reduced.  Returns NULL_TREE if no folding applies.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is a rotate-left by 4.  */

        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR: case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK: case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR: case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK: case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR: case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK: case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR: case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK: case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR: case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK: case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR: case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK: case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR: case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK: case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR: case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK: case AVR_BUILTIN_ULLKBITS:

      /* bitsfx / fxbits just reinterpret the same bits.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = wide_int_to_tree (map_type, arg[0]);
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted.  If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |= (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try decomposing MAP to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = wide_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
13616
15b84087 13617\f
1602e4b0 13618
/* Initialize the GCC target structure.  Each hook macro is #undef'ed
   first in case a default definition is already in effect; the AVR
   implementations are the avr_* functions defined above in this file.  */

/* Assembler output hooks.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Calling convention / values.  */

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attributes and sections.  */

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs and machine-dependent passes.  */

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Built-in functions (defined above in this file).  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Named address spaces (__flash, __memx, ...).  */

#undef TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;
c5be380e 13782
1602e4b0 13783\f
c84f2269 13784#include "gt-avr.h"