/* Subroutines used for code generation on the Synopsys DesignWare ARC cpu.
   Copyright (C) 1994-2021 Free Software Foundation, Inc.

   Sources derived from work done by Sankhya Technologies (www.sankhya.com) on
   behalf of Synopsys Inc.

   Position Independent Code support added, Code cleaned up,
   Comments and Support For ARC700 instructions added by
   Saurabh Verma (saurabh.verma@codito.com)
   Ramana Radhakrishnan (ramana.radhakrishnan@codito.com)

   Fixing ABI inconsistencies, optimizations for ARC600 / ARC700 pipelines,
   profiling support added by Joern Rennecke <joern.rennecke@embecosm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "attribs.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "calls.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "explow.h"
#include "expr.h"
#include "langhooks.h"
#include "tm-constrs.h"
#include "reload.h" /* For operands_match_p */
#include "cfgrtl.h"
#include "tree-pass.h"
#include "context.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "alias.h"
#include "opts.h"
#include "hw-doloop.h"

/* Which cpu we're compiling for (ARC600, ARC601, ARC700).  */
static char arc_cpu_name[10] = "";
static const char *arc_cpu_string = arc_cpu_name;

typedef struct GTY (()) _arc_jli_section
{
  const char *name;
  struct _arc_jli_section *next;
} arc_jli_section;

static arc_jli_section *arc_jli_sections = NULL;

/* Track which regs are set fixed/call saved/call used from command line.  */
HARD_REG_SET overrideregs;

/* Maximum size of a loop.  */
#define ARC_MAX_LOOP_LENGTH 4095

/* Check if an rtx fits in the store instruction format.  Loads can
   handle any constant.  */
#define RTX_OK_FOR_OFFSET_P(MODE, X) \
  (GET_CODE (X) == CONST_INT \
   && SMALL_INT_RANGE (INTVAL (X), (GET_MODE_SIZE (MODE) - 1) & (~0x03), \
                       (INTVAL (X) & (GET_MODE_SIZE (MODE) - 1) & 3 \
                        ? 0 \
                        : -(-GET_MODE_SIZE (MODE) | (~0x03)) >> 1)))

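/* Editorial note: SMALL_INT_RANGE itself is defined in arc.h; the second
   and third arguments appear to adjust the basic signed 9-bit load/store
   displacement for the access size and the alignment of the offset.
   Pure arithmetic on the expression above: for a DImode store (size 8)
   the second argument is (8 - 1) & ~0x03 == 4, and a 4-byte-aligned
   offset gets -(-8 | ~0x03) >> 1 == 2 as the third, while a misaligned
   offset gets 0.  */
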
/* Array of valid operand punctuation characters.  */
char arc_punct_chars[256];

/* State used by arc_ccfsm_advance to implement conditional execution.  */
struct GTY (()) arc_ccfsm
{
  int state;
  int cc;
  rtx cond;
  rtx_insn *target_insn;
  int target_label;
};

/* Status of the IRQ_CTRL_AUX register.  */
typedef struct irq_ctrl_saved_t
{
  /* Last register number used by IRQ_CTRL_SAVED aux_reg.  */
  short irq_save_last_reg;
  /* True if BLINK is automatically saved.  */
  bool irq_save_blink;
  /* True if LPCOUNT is automatically saved.  */
  bool irq_save_lpcount;
} irq_ctrl_saved_t;
static irq_ctrl_saved_t irq_ctrl_saved;

#define ARC_AUTOBLINK_IRQ_P(FNTYPE) \
  ((ARC_INTERRUPT_P (FNTYPE) \
    && irq_ctrl_saved.irq_save_blink) \
   || (ARC_FAST_INTERRUPT_P (FNTYPE) \
       && rgf_banked_register_count > 8))

#define ARC_AUTOFP_IRQ_P(FNTYPE) \
  ((ARC_INTERRUPT_P (FNTYPE) \
    && (irq_ctrl_saved.irq_save_last_reg > 26)) \
   || (ARC_FAST_INTERRUPT_P (FNTYPE) \
       && rgf_banked_register_count > 8))

#define ARC_AUTO_IRQ_P(FNTYPE) \
  (ARC_INTERRUPT_P (FNTYPE) && !ARC_FAST_INTERRUPT_P (FNTYPE) \
   && (irq_ctrl_saved.irq_save_blink \
       || (irq_ctrl_saved.irq_save_last_reg >= 0)))

/* Number of registers in second bank for FIRQ support.  */
static int rgf_banked_register_count;

#define arc_ccfsm_current cfun->machine->ccfsm_current

#define ARC_CCFSM_BRANCH_DELETED_P(STATE) \
  ((STATE)->state == 1 || (STATE)->state == 2)

/* Indicate we're conditionalizing insns now.  */
#define ARC_CCFSM_RECORD_BRANCH_DELETED(STATE) \
  ((STATE)->state += 2)

#define ARC_CCFSM_COND_EXEC_P(STATE) \
  ((STATE)->state == 3 || (STATE)->state == 4 || (STATE)->state == 5 \
   || current_insn_predicate)

/* Check if INSN has a 16 bit opcode considering struct arc_ccfsm *STATE.  */
#define CCFSM_ISCOMPACT(INSN,STATE) \
  (ARC_CCFSM_COND_EXEC_P (STATE) \
   ? (get_attr_iscompact (INSN) == ISCOMPACT_TRUE \
      || get_attr_iscompact (INSN) == ISCOMPACT_TRUE_LIMM) \
   : get_attr_iscompact (INSN) != ISCOMPACT_FALSE)

/* Likewise, but also consider that INSN might be in a delay slot of JUMP.  */
#define CCFSM_DBR_ISCOMPACT(INSN,JUMP,STATE) \
  ((ARC_CCFSM_COND_EXEC_P (STATE) \
    || (JUMP_P (JUMP) \
        && INSN_ANNULLED_BRANCH_P (JUMP) \
        && (TARGET_AT_DBR_CONDEXEC || INSN_FROM_TARGET_P (INSN)))) \
   ? (get_attr_iscompact (INSN) == ISCOMPACT_TRUE \
      || get_attr_iscompact (INSN) == ISCOMPACT_TRUE_LIMM) \
   : get_attr_iscompact (INSN) != ISCOMPACT_FALSE)

/* Start enter/leave register range.  */
#define ENTER_LEAVE_START_REG 13

/* End enter/leave register range.  */
#define ENTER_LEAVE_END_REG 26

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
/* When optimizing for speed:
    Let p be the probability that the potentially skipped insns need to
    be executed, pn the cost of a correctly predicted non-taken branch,
    mt the cost of a mis/non-predicted taken branch,
    mn mispredicted non-taken, pt correctly predicted taken ;
    costs expressed in numbers of instructions like the ones considered
    skipping.
    Unfortunately we don't have a measure of predictability - this
    is linked to probability only in that in the no-eviction-scenario
    there is a lower bound 1 - 2 * min (p, 1-p), and a somewhat larger
    value that can be assumed *if* the distribution is perfectly random.
    A predictability of 1 is perfectly plausible no matter what p is,
    because the decision could be dependent on an invocation parameter
    of the program.
    For large p, we want MAX_INSNS_SKIPPED == pn/(1-p) + mt - pn
    For small p, we want MAX_INSNS_SKIPPED == pt

   When optimizing for size:
    We want to skip insns unless we could use 16 opcodes for the
    non-conditionalized insn to balance the branch length or more.
    Performance can be tie-breaker.  */
/* If the potentially-skipped insns are likely to be executed, we'll
   generally save one correctly predicted non-taken branch; we want
   this to be no less than 1/p.  */
#define MAX_INSNS_SKIPPED 3

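/* Editorial worked example for the speed formulas above: taking pn = 1,
   mt = 2 and p = 1/2, the large-p rule gives pn/(1-p) + mt - pn
   = 1/(1/2) + 2 - 1 = 3, matching the value chosen here.  */
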
/* ZOL control registers.  */
#define AUX_LP_START 0x02
#define AUX_LP_END 0x03

/* FPX AUX registers.  */
#define AUX_DPFP_START 0x301

/* ARC600 MULHI register.  */
#define AUX_MULHI 0x12

/* A nop is needed between a 4 byte insn that sets the condition codes and
   a branch that uses them (the same isn't true for an 8 byte insn that sets
   the condition codes).  Set by arc_ccfsm_advance.  Used by
   arc_print_operand.  */

static int get_arc_condition_code (rtx);

static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static tree arc_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree arc_handle_jli_attribute (tree *, tree, tree, int, bool *);
static tree arc_handle_secure_attribute (tree *, tree, tree, int, bool *);
static tree arc_handle_uncached_attribute (tree *, tree, tree, int, bool *);
static tree arc_handle_aux_attribute (tree *, tree, tree, int, bool *);

/* Machine-specific attributes supported by the ARC back end.  */
const struct attribute_spec arc_attribute_table[] =
{
 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
      affects_type_identity, handler, exclude } */
  { "interrupt", 1, 1, true, false, false, true,
    arc_handle_interrupt_attribute, NULL },
  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 21/25 bit addressing range of a normal function
     call.  */
  { "long_call", 0, 0, false, true, true, false, NULL, NULL },
  /* Whereas these functions are always known to reside within the 25 bit
     addressing range of unconditionalized bl.  */
  { "medium_call", 0, 0, false, true, true, false, NULL, NULL },
  /* And these functions are always known to reside within the 21 bit
     addressing range of blcc.  */
  { "short_call", 0, 0, false, true, true, false, NULL, NULL },
  /* Functions which do not have their prologue and epilogue generated
     by the compiler.  */
  { "naked", 0, 0, true, false, false, false, arc_handle_fndecl_attribute,
    NULL },
  /* Function calls made using the jli instruction.  The pointer in the
     JLI table is found later.  */
  { "jli_always", 0, 0, false, true, true, false, NULL, NULL },
  /* Function calls made using the jli instruction.  The pointer in the
     JLI table is given as input parameter.  */
  { "jli_fixed", 1, 1, false, true, true, false, arc_handle_jli_attribute,
    NULL },
  /* Call a function using secure-mode.  */
  { "secure_call", 1, 1, false, true, true, false, arc_handle_secure_attribute,
    NULL },
  /* Bypass caches using .di flag.  */
  { "uncached", 0, 0, false, true, false, false, arc_handle_uncached_attribute,
    NULL },
  { "aux", 0, 1, true, false, false, false, arc_handle_aux_attribute, NULL },
  { NULL, 0, 0, false, false, false, false, NULL, NULL }
};
static int arc_comp_type_attributes (const_tree, const_tree);
static void arc_file_start (void);
static void arc_internal_label (FILE *, const char *, unsigned long);
static void arc_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                                 tree);
static int arc_address_cost (rtx, machine_mode, addr_space_t, bool);
static void arc_encode_section_info (tree decl, rtx rtl, int first);

static void arc_init_builtins (void);
static rtx arc_expand_builtin (tree, rtx, rtx, machine_mode, int);

static int branch_dest (rtx);

static void arc_output_pic_addr_const (FILE *, rtx, int);
static bool arc_function_ok_for_sibcall (tree, tree);
static rtx arc_function_value (const_tree, const_tree, bool);
const char * output_shift (rtx *);
static void arc_reorg (void);
static bool arc_in_small_data_p (const_tree);

static void arc_init_reg_tables (void);
static bool arc_return_in_memory (const_tree, const_tree);
static bool arc_vector_mode_supported_p (machine_mode);

static bool arc_can_use_doloop_p (const widest_int &, const widest_int &,
                                  unsigned int, bool);
static const char *arc_invalid_within_doloop (const rtx_insn *);

static void output_short_suffix (FILE *file);

static bool arc_frame_pointer_required (void);

static bool arc_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT,
                                                unsigned int,
                                                enum by_pieces_operation op,
                                                bool);

/* Globally visible information about currently selected cpu.  */
const arc_cpu_t *arc_selected_cpu;

/* Traditionally, we push saved registers first in the prologue,
   then we allocate the rest of the frame - and reverse in the epilogue.
   This still has its merits for ease of debugging, or saving code size
   or even execution time if the stack frame is so large that some accesses
   can't be encoded anymore with offsets in the instruction code when using
   a different scheme.
   Also, it would be a good starting point if we got instructions to help
   with register save/restore.

   However, often stack frames are small, and the pushing / popping has
   some costs:
   - the stack modification prevents a lot of scheduling.
   - frame allocation / deallocation may need extra instructions.
   - we need to place a memory barrier after frame allocation to prevent
     the delay slot scheduler from rescheduling a frame-related insn and
     messing up dwarf unwinding.  The barrier before deallocation is for
     flushing all pending sp operations.

   Thus, for small frames, we'd like to use a different scheme:
   - The frame is allocated in full with the first prologue instruction,
     and deallocated in full with the last epilogue instruction.
     Thus, the instructions in-between can be freely scheduled.
   - If the function has no outgoing arguments on the stack, we can allocate
     one register save slot at the top of the stack.  This register can then
     be saved simultaneously with frame allocation, and restored with
     frame deallocation.
     This register can be picked depending on scheduling considerations,
     although some thought should go into having some set of registers
     to be potentially lingering after a call, and others to be available
     immediately - i.e. in the absence of interprocedural optimization, we
     can use an ABI-like convention for register allocation to reduce
     stalls after function return.  */

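/* Editorial sketch of the two schemes described above, for a function
   with a 16-byte frame that saves blink (illustrative ARC assembly
   only, not actual compiler output):

     push/pop scheme:            single-allocation scheme:
       push_s  blink               sub   sp,sp,16
       sub     sp,sp,12            st    blink,[sp,12]
       ...                         ...
       add     sp,sp,12            ld    blink,[sp,12]
       pop_s   blink               add   sp,sp,16
       j_s     [blink]             j_s   [blink]

   In the second form the body instructions can be scheduled freely
   against the save and restore of blink.  */
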
/* ARCompact stack frames look like:

            Before call                     After call
  high  +-----------------------+       +-----------------------+
  mem   | reg parm save area    |       | reg parm save area    |
        | only created for      |       | only created for      |
        | variable arg fns      |       | variable arg fns      |
    AP  +-----------------------+       +-----------------------+
        | return addr register  |       | return addr register  |
        | (if required)         |       | (if required)         |
        +-----------------------+       +-----------------------+
        |                       |       |                       |
        | reg save area         |       | reg save area         |
        |                       |       |                       |
        +-----------------------+       +-----------------------+
        | frame pointer         |       | frame pointer         |
        | (if required)         |       | (if required)         |
    FP  +-----------------------+       +-----------------------+
        |                       |       |                       |
        | local/temp variables  |       | local/temp variables  |
        |                       |       |                       |
        +-----------------------+       +-----------------------+
        |                       |       |                       |
        | arguments on stack    |       | arguments on stack    |
        |                       |       |                       |
    SP  +-----------------------+       +-----------------------+
                                        | reg parm save area    |
                                        | only created for      |
                                        | variable arg fns      |
                                    AP  +-----------------------+
                                        | return addr register  |
                                        | (if required)         |
                                        +-----------------------+
                                        |                       |
                                        | reg save area         |
                                        |                       |
                                        +-----------------------+
                                        | frame pointer         |
                                        | (if required)         |
                                    FP  +-----------------------+
                                        |                       |
                                        | local/temp variables  |
                                        |                       |
                                        +-----------------------+
                                        |                       |
                                        | arguments on stack    |
  low                                   |                       |
  mem                               SP  +-----------------------+

Notes:
1) The "reg parm save area" does not exist for non variable argument fns.
   The "reg parm save area" can be eliminated completely if we created our
   own va-arc.h, but that has tradeoffs as well (so it's not done).  */

/* Structure to be filled in by arc_compute_frame_size with register
   save masks, and offsets for the current function.  */
struct GTY (()) arc_frame_info
{
  unsigned int total_size;      /* # bytes that the entire frame takes up.  */
  unsigned int extra_size;      /* # bytes of extra stuff.  */
  unsigned int pretend_size;    /* # bytes we push and pretend caller did.  */
  unsigned int args_size;       /* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;        /* # bytes needed to store regs.  */
  unsigned int var_size;        /* # bytes that variables take up.  */
  uint64_t gmask;               /* Mask of saved gp registers.  */
  bool initialized;             /* TRUE if frame size already calculated.  */
  short millicode_start_reg;
  short millicode_end_reg;
  bool save_return_addr;
};

/* GMASK bit length -1.  */
#define GMASK_LEN 63

/* Defining data structures for per-function information.  */

typedef struct GTY (()) machine_function
{
  unsigned int fn_type;
  struct arc_frame_info frame_info;
  /* To keep track of unalignment caused by short insns.  */
  int unalign;
  struct arc_ccfsm ccfsm_current;
  /* Map from uid to ccfsm state during branch shortening.  */
  rtx ccfsm_current_insn;
  char arc_reorg_started;
  char prescan_initialized;
} machine_function;


/* Given a symbol RTX (const (symb <+ const_int>)), return its
   alignment.  */

static int
get_symbol_alignment (rtx x)
{
  tree decl = NULL_TREE;
  int align = 0;

  switch (GET_CODE (x))
    {
    case SYMBOL_REF:
      decl = SYMBOL_REF_DECL (x);
      break;
    case CONST:
      return get_symbol_alignment (XEXP (x, 0));
    case PLUS:
      gcc_assert (CONST_INT_P (XEXP (x, 1)));
      return get_symbol_alignment (XEXP (x, 0));
    default:
      return 0;
    }

  if (decl)
    align = DECL_ALIGN (decl);
  align = align / BITS_PER_UNIT;
  return align;
}

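/* Editorial example: for a global "int foo;" aligned to 4 bytes, an
   operand of the form (const (plus (symbol_ref "foo") (const_int 2)))
   recurses through CONST and PLUS down to the SYMBOL_REF and yields 4;
   note that the constant offset does not affect the result.  */
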
/* Return true if x is ok to be used as a small data address.  */

static bool
legitimate_small_data_address_p (rtx x, machine_mode mode)
{
  switch (GET_CODE (x))
    {
    case CONST:
      return legitimate_small_data_address_p (XEXP (x, 0), mode);
    case SYMBOL_REF:
      return SYMBOL_REF_SMALL_P (x);
    case PLUS:
      {
        bool p0 = (GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
          && SYMBOL_REF_SMALL_P (XEXP (x, 0));

        /* If no constant then we cannot do small data.  */
        if (!CONST_INT_P (XEXP (x, 1)))
          return false;

        /* Small data relocs work with scaled addresses; check if the
           immediate fits the requirements.  */
        switch (GET_MODE_SIZE (mode))
          {
          case 1:
            return p0;
          case 2:
            return p0 && ((INTVAL (XEXP (x, 1)) & 0x1) == 0);
          case 4:
          case 8:
            return p0 && ((INTVAL (XEXP (x, 1)) & 0x3) == 0);
          default:
            return false;
          }
      }
    default:
      return false;
    }
}

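/* Editorial example: with a small-data symbol "s", (plus (symbol_ref
   "s") (const_int 2)) is accepted for HImode (2 & 0x1 == 0) but
   rejected for SImode, where the immediate must be 4-byte aligned
   (2 & 0x3 != 0).  */
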
/* TRUE if op is a scaled address.  */
static bool
legitimate_scaled_address_p (machine_mode mode, rtx op, bool strict)
{
  if (GET_CODE (op) != PLUS)
    return false;

  if (GET_CODE (XEXP (op, 0)) != MULT)
    return false;

  /* Check multiplication operands.  */
  if (!RTX_OK_FOR_INDEX_P (XEXP (XEXP (op, 0), 0), strict))
    return false;

  if (!CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    return false;

  switch (GET_MODE_SIZE (mode))
    {
    case 2:
      if (INTVAL (XEXP (XEXP (op, 0), 1)) != 2)
        return false;
      break;
    case 8:
      if (!TARGET_LL64)
        return false;
      /* Fall through.  */
    case 4:
      if (INTVAL (XEXP (XEXP (op, 0), 1)) != 4)
        return false;
      /* Fall through.  */
    default:
      return false;
    }

  /* Check the base.  */
  if (RTX_OK_FOR_BASE_P (XEXP (op, 1), (strict)))
    return true;

  if (flag_pic)
    {
      if (CONST_INT_P (XEXP (op, 1)))
        return true;
      return false;
    }

  /* Scaled addresses for sdata are handled in other places.  */
  if (legitimate_small_data_address_p (op, mode))
    return false;

  if (CONSTANT_P (XEXP (op, 1)))
    return true;

  return false;
}

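/* Editorial note on the switch above: as written, the size-4 case (and
   the size-8 case falling into it) continues into the default label and
   returns false, so only 2-byte accesses ever reach the base-register
   check below; this matches the upstream source at this revision,
   including the explicit fall-through comments.  */
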
/* Check for constructions like REG + OFFS, where OFFS can be a
   register, an immediate or a long immediate.  */

static bool
legitimate_offset_address_p (machine_mode mode, rtx x, bool index, bool strict)
{
  if (GET_CODE (x) != PLUS)
    return false;

  if (!RTX_OK_FOR_BASE_P (XEXP (x, 0), (strict)))
    return false;

  /* Check for: [Rx + small offset] or [Rx + Ry].  */
  if (((index && RTX_OK_FOR_INDEX_P (XEXP (x, 1), (strict))
        && GET_MODE_SIZE ((mode)) <= 4)
       || RTX_OK_FOR_OFFSET_P (mode, XEXP (x, 1))))
    return true;

  /* Check for [Rx + symbol].  */
  if (!flag_pic
      && (GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
      /* Avoid this type of address for double or larger modes.  */
      && (GET_MODE_SIZE (mode) <= 4)
      /* Avoid small data which ends in something like GP +
         symb@sda.  */
      && (!SYMBOL_REF_SMALL_P (XEXP (x, 1))))
    return true;

  return false;
}

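/* Editorial example: with INDEX true, [Rb + Ri] is accepted for accesses
   of at most 4 bytes; for larger modes only [Rb + constant] offsets that
   satisfy RTX_OK_FOR_OFFSET_P pass, and [Rb + symbol] is additionally
   rejected for small-data symbols, which are addressed GP-relative
   elsewhere.  */
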
/* Implements target hook vector_mode_supported_p.  */

static bool
arc_vector_mode_supported_p (machine_mode mode)
{
  switch (mode)
    {
    case E_V2HImode:
      return TARGET_PLUS_DMPY;
    case E_V4HImode:
    case E_V2SImode:
      return TARGET_PLUS_QMACW;
    case E_V4SImode:
    case E_V8HImode:
      return TARGET_SIMD_SET;

    default:
      return false;
    }
}

/* Implements target hook TARGET_VECTORIZE_PREFERRED_SIMD_MODE.  */

static machine_mode
arc_preferred_simd_mode (scalar_mode mode)
{
  switch (mode)
    {
    case E_HImode:
      return TARGET_PLUS_QMACW ? V4HImode : V2HImode;
    case E_SImode:
      return V2SImode;

    default:
      return word_mode;
    }
}

00c072ae 628/* Implements target hook
e021fb86 629 TARGET_VECTORIZE_AUTOVECTORIZE_VECTOR_MODES. */
00c072ae 630
bcc7e346 631static unsigned int
e021fb86 632arc_autovectorize_vector_modes (vector_modes *modes, bool)
00c072ae 633{
86e36728
RS
634 if (TARGET_PLUS_QMACW)
635 {
e021fb86
RS
636 modes->quick_push (V4HImode);
637 modes->quick_push (V2HImode);
86e36728 638 }
bcc7e346 639 return 0;
00c072ae 640}
526b7aee 641
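/* Editorial note: the wider V4HImode is pushed before V2HImode, so the
   vectorizer tries the wider mode first; returning 0 requests the
   default mode-selection behavior (no cost-comparison flags).  */
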
/* Implements target hook TARGET_SCHED_ISSUE_RATE.  */
static int
arc_sched_issue_rate (void)
{
  switch (arc_tune)
    {
    case TUNE_ARCHS4X:
    case TUNE_ARCHS4XD:
      return 3;
    default:
      break;
    }
  return 1;
}

/* TARGET_PRESERVE_RELOAD_P is still awaiting patch re-evaluation / review.  */
static bool arc_preserve_reload_p (rtx in) ATTRIBUTE_UNUSED;
static rtx arc_delegitimize_address (rtx);
static bool arc_can_follow_jump (const rtx_insn *follower,
                                 const rtx_insn *followee);

static rtx frame_insn (rtx);
static void arc_function_arg_advance (cumulative_args_t,
                                      const function_arg_info &);
static rtx arc_legitimize_address_0 (rtx, rtx, machine_mode mode);

/* Initialize the GCC target structure.  */
#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arc_comp_type_attributes
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START arc_file_start
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arc_attribute_table
#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arc_internal_label
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arc_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arc_address_cost

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO arc_encode_section_info

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM arc_cannot_force_const_mem

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arc_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arc_expand_builtin

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL arc_builtin_decl

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arc_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arc_function_ok_for_sibcall

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG arc_reorg

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P arc_in_small_data_p

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE \
  default_promote_function_mode_always_promote

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY arc_return_in_memory
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE arc_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES arc_arg_partial_bytes

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE arc_function_value

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY arc_sched_adjust_priority

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE arc_sched_issue_rate

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P arc_vector_mode_supported_p

#undef TARGET_VECTORIZE_PREFERRED_SIMD_MODE
#define TARGET_VECTORIZE_PREFERRED_SIMD_MODE arc_preferred_simd_mode

#undef TARGET_VECTORIZE_AUTOVECTORIZE_VECTOR_MODES
#define TARGET_VECTORIZE_AUTOVECTORIZE_VECTOR_MODES arc_autovectorize_vector_modes

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P arc_can_use_doloop_p

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP arc_invalid_within_doloop

#undef TARGET_PRESERVE_RELOAD_P
#define TARGET_PRESERVE_RELOAD_P arc_preserve_reload_p

#undef TARGET_CAN_FOLLOW_JUMP
#define TARGET_CAN_FOLLOW_JUMP arc_can_follow_jump

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS arc_delegitimize_address

#undef TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
#define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
  arc_use_by_pieces_infrastructure_p

/* Usually, we will be able to scale anchor offsets.
   When this fails, we want LEGITIMIZE_ADDRESS to kick in.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET (-1024)
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET (1020)

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD arc_secondary_reload

#define TARGET_OPTION_OVERRIDE arc_override_options

#define TARGET_CONDITIONAL_REGISTER_USAGE arc_conditional_register_usage

#define TARGET_TRAMPOLINE_INIT arc_initialize_trampoline

#define TARGET_CAN_ELIMINATE arc_can_eliminate

#define TARGET_FRAME_POINTER_REQUIRED arc_frame_pointer_required

#define TARGET_FUNCTION_ARG arc_function_arg

#define TARGET_FUNCTION_ARG_ADVANCE arc_function_arg_advance

#define TARGET_LEGITIMATE_CONSTANT_P arc_legitimate_constant_p

#define TARGET_LEGITIMATE_ADDRESS_P arc_legitimate_address_p

#define TARGET_MODE_DEPENDENT_ADDRESS_P arc_mode_dependent_address_p

#define TARGET_LEGITIMIZE_ADDRESS arc_legitimize_address

#undef TARGET_NO_SPECULATION_IN_DELAY_SLOTS_P
#define TARGET_NO_SPECULATION_IN_DELAY_SLOTS_P \
  arc_no_speculation_in_delay_slots_p

#undef TARGET_LRA_P
#define TARGET_LRA_P arc_lra_p
#define TARGET_REGISTER_PRIORITY arc_register_priority
/* Stores with scaled offsets have different displacement ranges.  */
#define TARGET_DIFFERENT_ADDR_DISPLACEMENT_P hook_bool_void_true
#define TARGET_SPILL_CLASS arc_spill_class

#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS arc_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN arc_warn_func_return

#include "target-def.h"

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS
#endif

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN arc_dwarf_register_span

#undef TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS arc_hard_regno_nregs
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK arc_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P arc_modes_tieable_p

/* Try to keep the (mov:DF _, reg) as early as possible so
   that the d<add/sub/mul>h-lr insns appear together and can
   use the peephole2 pattern.  */

static int
arc_sched_adjust_priority (rtx_insn *insn, int priority)
{
  rtx set = single_set (insn);
  if (set
      && GET_MODE (SET_SRC(set)) == DFmode
      && GET_CODE (SET_SRC(set)) == REG)
    {
      /* Incrementing priority by 20 (empirically derived).  */
      return priority + 20;
    }

  return priority;
}

/* For ARC base register + offset addressing, the validity of the
   address is mode-dependent for most of the offset range, as the
   offset can be scaled by the access size.
   We don't expose these as mode-dependent addresses in the
   mode_dependent_address_p target hook, because that would disable
   lots of optimizations, and most uses of these addresses are for 32
   or 64 bit accesses anyway, which are fine.
   However, that leaves some addresses for 8 / 16 bit values not
   properly reloaded by the generic code, which is why we have to
   schedule secondary reloads for these.  */

static reg_class_t
arc_secondary_reload (bool in_p,
                      rtx x,
                      reg_class_t cl,
                      machine_mode mode,
                      secondary_reload_info *sri)
{
  enum rtx_code code = GET_CODE (x);

  if (cl == DOUBLE_REGS)
    return GENERAL_REGS;

  /* If we have a subreg (reg), where reg is a pseudo (that will end in
     a memory location), then we may need a scratch register to handle
     the fp/sp+largeoffset address.  */
  if (code == SUBREG)
    {
      rtx addr = NULL_RTX;
      x = SUBREG_REG (x);

      if (REG_P (x))
        {
          int regno = REGNO (x);
          if (regno >= FIRST_PSEUDO_REGISTER)
            regno = reg_renumber[regno];

          if (regno != -1)
            return NO_REGS;

          /* It is a pseudo that ends in a stack location.  This
             procedure only works with the old reload step.  */
          if (!lra_in_progress && reg_equiv_mem (REGNO (x)))
            {
              /* Get the equivalent address and check the range of the
                 offset.  */
              rtx mem = reg_equiv_mem (REGNO (x));
              addr = find_replacement (&XEXP (mem, 0));
            }
        }
      else
        {
          gcc_assert (MEM_P (x));
          addr = XEXP (x, 0);
          addr = simplify_rtx (addr);
        }
      if (addr && GET_CODE (addr) == PLUS
          && CONST_INT_P (XEXP (addr, 1))
          && (!RTX_OK_FOR_OFFSET_P (mode, XEXP (addr, 1))))
        {
          switch (mode)
            {
            case E_QImode:
              sri->icode =
                in_p ? CODE_FOR_reload_qi_load : CODE_FOR_reload_qi_store;
              break;
            case E_HImode:
              sri->icode =
                in_p ? CODE_FOR_reload_hi_load : CODE_FOR_reload_hi_store;
              break;
            default:
              break;
            }
        }
    }
  return NO_REGS;
}

/* Convert reloads using offsets that are too large to use indirect
   addressing.  */

void
arc_secondary_reload_conv (rtx reg, rtx mem, rtx scratch, bool store_p)
{
  rtx addr;

  gcc_assert (GET_CODE (mem) == MEM);
  addr = XEXP (mem, 0);

  /* Large offset: use a move.  FIXME: ld ops accept limms as
     offsets.  Hence, the following move insn is not required.  */
  emit_move_insn (scratch, addr);
  mem = replace_equiv_address_nv (mem, scratch);

  /* Now create the move.  */
  if (store_p)
    emit_insn (gen_rtx_SET (mem, reg));
  else
    emit_insn (gen_rtx_SET (reg, mem));

  return;
}

static unsigned arc_ifcvt (void);

namespace {

const pass_data pass_data_arc_ifcvt =
{
  RTL_PASS,
  "arc_ifcvt",                          /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish                        /* todo_flags_finish */
};

class pass_arc_ifcvt : public rtl_opt_pass
{
 public:
  pass_arc_ifcvt (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_arc_ifcvt, ctxt)
    {}

  /* opt_pass methods: */
  opt_pass * clone ()
    {
      return new pass_arc_ifcvt (m_ctxt);
    }
  virtual unsigned int execute (function *)
    {
      return arc_ifcvt ();
    }
  virtual bool gate (function *)
    {
      return (optimize > 1 && !TARGET_NO_COND_EXEC);
    }
};

} // anon namespace

rtl_opt_pass *
make_pass_arc_ifcvt (gcc::context *ctxt)
{
  return new pass_arc_ifcvt (ctxt);
}

static unsigned arc_predicate_delay_insns (void);

namespace {

const pass_data pass_data_arc_predicate_delay_insns =
{
  RTL_PASS,
  "arc_predicate_delay_insns",          /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish                        /* todo_flags_finish */
};

class pass_arc_predicate_delay_insns : public rtl_opt_pass
{
 public:
  pass_arc_predicate_delay_insns(gcc::context *ctxt)
    : rtl_opt_pass(pass_data_arc_predicate_delay_insns, ctxt)
    {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return arc_predicate_delay_insns ();
    }
  virtual bool gate (function *)
    {
      return flag_delayed_branch;
    }
};

} // anon namespace

rtl_opt_pass *
make_pass_arc_predicate_delay_insns (gcc::context *ctxt)
{
  return new pass_arc_predicate_delay_insns (ctxt);
}

/* Called by OVERRIDE_OPTIONS to initialize various things.  */

static void
arc_init (void)
{
  if (TARGET_V2)
    {
      /* If we have the multiplier, then use it.  */
      if (TARGET_MPYW || TARGET_MULTI)
        arc_multcost = COSTS_N_INSNS (1);
    }
  /* Note: arc_multcost is only used in rtx_cost if speed is true.  */
  if (arc_multcost < 0)
    switch (arc_tune)
      {
      case ARC_TUNE_ARC700_4_2_STD:
        /* latency 7;
           max throughput (1 multiply + 4 other insns) / 5 cycles.  */
        arc_multcost = COSTS_N_INSNS (4);
        if (TARGET_NOMPY_SET)
          arc_multcost = COSTS_N_INSNS (30);
        break;
      case ARC_TUNE_ARC700_4_2_XMAC:
        /* latency 5;
           max throughput (1 multiply + 2 other insns) / 3 cycles.  */
        arc_multcost = COSTS_N_INSNS (3);
        if (TARGET_NOMPY_SET)
          arc_multcost = COSTS_N_INSNS (30);
        break;
      case ARC_TUNE_ARC600:
        if (TARGET_MUL64_SET)
          {
            arc_multcost = COSTS_N_INSNS (4);
            break;
          }
        /* Fall through.  */
      default:
        arc_multcost = COSTS_N_INSNS (30);
        break;
      }

  /* MPY instructions valid only for ARC700 or ARCv2.  */
  if (TARGET_NOMPY_SET && TARGET_ARC600_FAMILY)
    error ("%<-mno-mpy%> supported only for ARC700 or ARCv2");

  if (!TARGET_DPFP && TARGET_DPFP_DISABLE_LRSR)
    error ("%<-mno-dpfp-lrsr%> supported only with %<-mdpfp%>");

  /* FPX-1.  No fast and compact together.  */
  if ((TARGET_DPFP_FAST_SET && TARGET_DPFP_COMPACT_SET)
      || (TARGET_SPFP_FAST_SET && TARGET_SPFP_COMPACT_SET))
    error ("FPX fast and compact options cannot be specified together");

  /* FPX-2.  No fast-spfp for arc600 or arc601.  */
  if (TARGET_SPFP_FAST_SET && TARGET_ARC600_FAMILY)
    error ("%<-mspfp_fast%> not available on ARC600 or ARC601");

  /* FPX-4.  No FPX extensions mixed with FPU extensions.  */
  if ((TARGET_DPFP_FAST_SET || TARGET_DPFP_COMPACT_SET || TARGET_SPFP)
      && TARGET_HARD_FLOAT)
    error ("no FPX/FPU mixing allowed");

  /* Warn for unimplemented PIC in pre-ARC700 cores, and disable flag_pic.  */
  if (flag_pic && TARGET_ARC600_FAMILY)
    {
      warning (0, "PIC is not supported for %qs",
               arc_cpu_string);
      flag_pic = 0;
    }

  arc_init_reg_tables ();

  /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
  arc_punct_chars['#'] = 1;
  arc_punct_chars['*'] = 1;
  arc_punct_chars['?'] = 1;
  arc_punct_chars['!'] = 1;
  arc_punct_chars['^'] = 1;
  arc_punct_chars['&'] = 1;
  arc_punct_chars['+'] = 1;
  arc_punct_chars['_'] = 1;
}

/* Parse -mirq-ctrl-saved=RegisterRange, blink, lp_count.  The
   register range is specified as two registers separated by a dash.
   It always starts with r0, and its upper limit is the fp register.
   blink and lp_count registers are optional.  */

static void
irq_range (const char *cstr)
{
  int i, first, last, blink, lpcount, xreg;
  char *str, *dash, *comma;

  i = strlen (cstr);
  str = (char *) alloca (i + 1);
  memcpy (str, cstr, i + 1);
  blink = -1;
  lpcount = -1;

  dash = strchr (str, '-');
  if (!dash)
    {
      warning (OPT_mirq_ctrl_saved_, "missing dash");
      return;
    }
  *dash = '\0';

  comma = strchr (dash + 1, ',');
  if (comma)
    *comma = '\0';

  first = decode_reg_name (str);
  if (first != 0)
    {
      warning (OPT_mirq_ctrl_saved_, "first register must be R0");
      return;
    }

  /* At this moment we do not have the register names initialized
     accordingly.  */
  if (!strcmp (dash + 1, "ilink"))
    last = 29;
  else
    last = decode_reg_name (dash + 1);

  if (last < 0)
    {
      warning (OPT_mirq_ctrl_saved_, "unknown register name: %s", dash + 1);
      return;
    }

  if (!(last & 0x01))
    {
      warning (OPT_mirq_ctrl_saved_,
               "last register name %s must be an odd register", dash + 1);
      return;
    }

  *dash = '-';

  if (first > last)
    {
      warning (OPT_mirq_ctrl_saved_,
               "%s-%s is an empty range", str, dash + 1);
      return;
    }

  while (comma)
    {
      *comma = ',';
      str = comma + 1;

      comma = strchr (str, ',');
      if (comma)
        *comma = '\0';

      xreg = decode_reg_name (str);
      switch (xreg)
        {
        case 31:
          blink = 31;
          break;

        case 60:
          lpcount = 60;
          break;

        default:
          warning (OPT_mirq_ctrl_saved_,
                   "unknown register name: %s", str);
          return;
        }
    }

  irq_ctrl_saved.irq_save_last_reg = last;
  irq_ctrl_saved.irq_save_blink = (blink == 31) || (last == 31);
  irq_ctrl_saved.irq_save_lpcount = (lpcount == 60);
}

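/* Editorial example: -mirq-ctrl-saved=r0-r13,blink parses "r0"-"r13" as
   the dash-separated range and "blink" (register 31) as the optional
   extra, giving irq_save_last_reg == 13 and irq_save_blink == true.  */
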
/* Parse -mrgf-banked-regs=NUM option string.  Valid values for NUM are 4,
   8, 16, or 32.  */

static void
parse_mrgf_banked_regs_option (const char *arg)
{
  long int val;
  char *end_ptr;

  errno = 0;
  val = strtol (arg, &end_ptr, 10);
  if (errno != 0 || *arg == '\0' || *end_ptr != '\0'
      || (val != 0 && val != 4 && val != 8 && val != 16 && val != 32))
    {
      error ("invalid number in %<-mrgf-banked-regs=%s%> "
             "valid values are 0, 4, 8, 16, or 32", arg);
      return;
    }
  rgf_banked_register_count = (int) val;
}

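/* Editorial example: -mrgf-banked-regs=16 sets rgf_banked_register_count
   to 16; with a count above 8, the ARC_AUTOBLINK_IRQ_P and
   ARC_AUTOFP_IRQ_P macros near the top of this file treat blink and fp
   as automatically saved for fast interrupts.  */
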
/* Check ARC options, generate derived target attributes.  */

static void
arc_override_options (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *vopt
    = (vec<cl_deferred_option> *) arc_deferred_options;

  if (arc_cpu == PROCESSOR_NONE)
    arc_cpu = TARGET_CPU_DEFAULT;

  /* Set the default cpu options.  */
  arc_selected_cpu = &arc_cpu_types[(int) arc_cpu];

  /* Set the architectures.  */
  switch (arc_selected_cpu->arch_info->arch_id)
    {
    case BASE_ARCH_em:
      arc_cpu_string = "EM";
      break;
    case BASE_ARCH_hs:
      arc_cpu_string = "HS";
      break;
    case BASE_ARCH_700:
      if (arc_selected_cpu->processor == PROCESSOR_nps400)
        arc_cpu_string = "NPS400";
      else
        arc_cpu_string = "ARC700";
      break;
    case BASE_ARCH_6xx:
      arc_cpu_string = "ARC600";
      break;
    default:
      gcc_unreachable ();
    }

  irq_ctrl_saved.irq_save_last_reg = -1;
  irq_ctrl_saved.irq_save_blink = false;
  irq_ctrl_saved.irq_save_lpcount = false;

  rgf_banked_register_count = 0;

  /* Handle the deferred options.  */
  if (vopt)
    FOR_EACH_VEC_ELT (*vopt, i, opt)
      {
        switch (opt->opt_index)
          {
          case OPT_mirq_ctrl_saved_:
            if (TARGET_V2)
              irq_range (opt->arg);
            else
              warning (OPT_mirq_ctrl_saved_,
                       "option %<-mirq-ctrl-saved%> valid only "
                       "for ARC v2 processors");
            break;

          case OPT_mrgf_banked_regs_:
            if (TARGET_V2)
              parse_mrgf_banked_regs_option (opt->arg);
            else
              warning (OPT_mrgf_banked_regs_,
                       "option %<-mrgf-banked-regs%> valid only for "
                       "ARC v2 processors");
            break;

          default:
            gcc_unreachable();
          }
      }

  CLEAR_HARD_REG_SET (overrideregs);
  if (common_deferred_options)
    {
      vec<cl_deferred_option> v =
        *((vec<cl_deferred_option> *) common_deferred_options);
      int reg, nregs, j;

      FOR_EACH_VEC_ELT (v, i, opt)
        {
          switch (opt->opt_index)
            {
            case OPT_ffixed_:
            case OPT_fcall_used_:
            case OPT_fcall_saved_:
              if ((reg = decode_reg_name_and_count (opt->arg, &nregs)) >= 0)
                for (j = reg; j < reg + nregs; j++)
                  SET_HARD_REG_BIT (overrideregs, j);
              break;
            default:
              break;
            }
        }
    }

  /* Check options against architecture options.  Throw an error if
     option is not allowed.  Additionally, check options against the
     default architecture/cpu flags and throw a warning if we find a
     mismatch.  */
  /* TRANSLATORS: the DOC/DOC0/DOC1 are strings which shouldn't be
     translated.  They are like keywords which one can relate with the
     architectural choices taken for an ARC CPU implementation.  */
#define ARC_OPTX(NAME, CODE, VAR, VAL, DOC0, DOC1) \
  do { \
    if ((!(arc_selected_cpu->arch_info->flags & CODE)) \
        && (VAR == VAL)) \
      error ("option %<%s=%s%> is not available for %qs CPU", \
             DOC0, DOC1, arc_selected_cpu->name); \
    if ((arc_selected_cpu->arch_info->dflags & CODE) \
        && (VAR != DEFAULT_##VAR) \
        && (VAR != VAL)) \
      warning (0, "option %qs is ignored, the default value %qs" \
               " is considered for %qs CPU", DOC0, DOC1, \
               arc_selected_cpu->name); \
  } while (0);
#define ARC_OPT(NAME, CODE, MASK, DOC) \
  do { \
    if ((!(arc_selected_cpu->arch_info->flags & CODE)) \
        && (target_flags & MASK)) \
      error ("option %qs is not available for %qs CPU", \
             DOC, arc_selected_cpu->name); \
    if ((arc_selected_cpu->arch_info->dflags & CODE) \
        && (target_flags_explicit & MASK) \
        && (!(target_flags & MASK))) \
      warning (0, "unset option %qs is ignored, it is always" \
               " enabled for %qs CPU", DOC, \
               arc_selected_cpu->name); \
  } while (0);

#include "arc-options.def"

#undef ARC_OPTX
#undef ARC_OPT

  /* Set cpu flags according to architecture/selected cpu.  The cpu
     specific flags are set in arc-common.c.  The architecture forces
     the default hardware configurations in, regardless of what the
     command line options say.  The CPU optional hw options can be
     turned on or off.  */
#define ARC_OPT(NAME, CODE, MASK, DOC) \
  do { \
    if ((arc_selected_cpu->flags & CODE) \
        && ((target_flags_explicit & MASK) == 0)) \
      target_flags |= MASK; \
    if (arc_selected_cpu->arch_info->dflags & CODE) \
      target_flags |= MASK; \
  } while (0);
#define ARC_OPTX(NAME, CODE, VAR, VAL, DOC0, DOC1) \
  do { \
    if ((arc_selected_cpu->flags & CODE) \
        && (VAR == DEFAULT_##VAR)) \
      VAR = VAL; \
    if (arc_selected_cpu->arch_info->dflags & CODE) \
      VAR = VAL; \
  } while (0);

#include "arc-options.def"

#undef ARC_OPTX
#undef ARC_OPT

  /* Set extras.  */
  switch (arc_selected_cpu->extra)
    {
    case HAS_LPCOUNT_16:
      arc_lpcwidth = 16;
      break;
    default:
      break;
    }

  /* Set Tune option.  */
  if (arc_tune == ARC_TUNE_NONE)
    arc_tune = (enum arc_tune_attr) arc_selected_cpu->tune;

  if (arc_size_opt_level == 3)
    optimize_size = 1;

  if (TARGET_V2 && optimize_size && (ATTRIBUTE_PCS == 2))
    TARGET_CODE_DENSITY_FRAME = 1;

  if (flag_pic)
    target_flags |= MASK_NO_SDATA_SET;

  if (flag_no_common == 255)
    flag_no_common = !TARGET_NO_SDATA_SET;

  /* Check for small data option.  */
  if (!global_options_set.x_g_switch_value && !TARGET_NO_SDATA_SET)
    g_switch_value = TARGET_LL64 ? 8 : 4;

  /* A7 has an issue with delay slots.  */
  if (TARGET_ARC700 && (arc_tune != ARC_TUNE_ARC7XX))
    flag_delayed_branch = 0;

  /* Millicode thunks don't work with long calls.  */
  if (TARGET_LONG_CALLS_SET)
    target_flags &= ~MASK_MILLICODE_THUNK_SET;

  /* Enable unaligned access for all HS cpus.  */
  if (!global_options_set.x_unaligned_access && TARGET_HS)
    unaligned_access = 1;

  /* These need to be done at start up.  It's convenient to do them here.  */
  arc_init ();
}

/* The condition codes of the ARC, and the inverse function.  */
/* For short branches, the "c" / "nc" names are not defined in the ARC
   Programmers manual, so we have to use "lo" / "hs" instead.  */
static const char *arc_condition_codes[] =
{
  "al", 0, "eq", "ne", "p", "n", "lo", "hs", "v", "nv",
  "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
};

enum arc_cc_code_index
{
  ARC_CC_AL, ARC_CC_EQ = ARC_CC_AL+2, ARC_CC_NE, ARC_CC_P, ARC_CC_N,
  ARC_CC_C, ARC_CC_NC, ARC_CC_V, ARC_CC_NV,
  ARC_CC_GT, ARC_CC_LE, ARC_CC_GE, ARC_CC_LT, ARC_CC_HI, ARC_CC_LS, ARC_CC_PNZ,
  ARC_CC_LO = ARC_CC_C, ARC_CC_HS = ARC_CC_NC
};

#define ARC_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)

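/* Editorial example: inverse codes sit in adjacent pairs in
   arc_cc_code_index, so XOR-ing with 1 flips a code to its inverse,
   e.g. ARC_INVERSE_CONDITION_CODE (ARC_CC_EQ) == ARC_CC_NE and
   ARC_INVERSE_CONDITION_CODE (ARC_CC_GT) == ARC_CC_LE.  */
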
/* Returns the index of the ARC condition code string in
   `arc_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

static int
get_arc_condition_code (rtx comparison)
{
  switch (GET_MODE (XEXP (comparison, 0)))
    {
    case E_CCmode:
    case E_SImode: /* For BRcc.  */
      switch (GET_CODE (comparison))
        {
        case EQ : return ARC_CC_EQ;
        case NE : return ARC_CC_NE;
        case GT : return ARC_CC_GT;
        case LE : return ARC_CC_LE;
        case GE : return ARC_CC_GE;
        case LT : return ARC_CC_LT;
        case GTU : return ARC_CC_HI;
        case LEU : return ARC_CC_LS;
        case LTU : return ARC_CC_LO;
        case GEU : return ARC_CC_HS;
        default : gcc_unreachable ();
        }
    case E_CC_ZNmode:
      switch (GET_CODE (comparison))
        {
        case EQ : return ARC_CC_EQ;
        case NE : return ARC_CC_NE;
        case GE: return ARC_CC_P;
        case LT: return ARC_CC_N;
        case GT : return ARC_CC_PNZ;
        default : gcc_unreachable ();
        }
    case E_CC_Zmode:
      switch (GET_CODE (comparison))
        {
        case EQ : return ARC_CC_EQ;
        case NE : return ARC_CC_NE;
        default : gcc_unreachable ();
        }
    case E_CC_Cmode:
      switch (GET_CODE (comparison))
        {
        case LTU : return ARC_CC_C;
        case GEU : return ARC_CC_NC;
        default : gcc_unreachable ();
        }
    case E_CC_FP_GTmode:
      if (TARGET_ARGONAUT_SET && TARGET_SPFP)
        switch (GET_CODE (comparison))
          {
          case GT : return ARC_CC_N;
          case UNLE: return ARC_CC_P;
          default : gcc_unreachable ();
          }
      else
        switch (GET_CODE (comparison))
          {
          case GT : return ARC_CC_HI;
          case UNLE : return ARC_CC_LS;
          default : gcc_unreachable ();
          }
    case E_CC_FP_GEmode:
      /* Same for FPX and non-FPX.  */
      switch (GET_CODE (comparison))
        {
        case GE : return ARC_CC_HS;
        case UNLT : return ARC_CC_LO;
        default : gcc_unreachable ();
        }
    case E_CC_FP_UNEQmode:
      switch (GET_CODE (comparison))
        {
        case UNEQ : return ARC_CC_EQ;
        case LTGT : return ARC_CC_NE;
        default : gcc_unreachable ();
        }
    case E_CC_FP_ORDmode:
      switch (GET_CODE (comparison))
        {
        case UNORDERED : return ARC_CC_C;
        case ORDERED : return ARC_CC_NC;
        default : gcc_unreachable ();
        }
    case E_CC_FPXmode:
      switch (GET_CODE (comparison))
        {
        case EQ : return ARC_CC_EQ;
        case NE : return ARC_CC_NE;
        case UNORDERED : return ARC_CC_C;
        case ORDERED : return ARC_CC_NC;
        case LTGT : return ARC_CC_HI;
        case UNEQ : return ARC_CC_LS;
        default : gcc_unreachable ();
        }
    case E_CC_FPUmode:
    case E_CC_FPUEmode:
      switch (GET_CODE (comparison))
        {
        case EQ : return ARC_CC_EQ;
        case NE : return ARC_CC_NE;
        case GT : return ARC_CC_GT;
        case GE : return ARC_CC_GE;
        case LT : return ARC_CC_C;
        case LE : return ARC_CC_LS;
        case UNORDERED : return ARC_CC_V;
        case ORDERED : return ARC_CC_NV;
        case UNGT : return ARC_CC_HI;
        case UNGE : return ARC_CC_HS;
        case UNLT : return ARC_CC_LT;
        case UNLE : return ARC_CC_LE;
        /* UNEQ and LTGT do not have representation.  */
        case LTGT : /* Fall through.  */
        case UNEQ : /* Fall through.  */
        default : gcc_unreachable ();
        }
    case E_CC_FPU_UNEQmode:
      switch (GET_CODE (comparison))
        {
        case LTGT : return ARC_CC_NE;
        case UNEQ : return ARC_CC_EQ;
        default : gcc_unreachable ();
        }
    default : gcc_unreachable ();
    }
  /*NOTREACHED*/
  return (42);
}

/* Return true if COMPARISON has a short form that can accommodate OFFSET.  */

bool
arc_short_comparison_p (rtx comparison, int offset)
{
  gcc_assert (ARC_CC_NC == ARC_CC_HS);
  gcc_assert (ARC_CC_C == ARC_CC_LO);
  switch (get_arc_condition_code (comparison))
    {
    case ARC_CC_EQ: case ARC_CC_NE:
      return offset >= -512 && offset <= 506;
    case ARC_CC_GT: case ARC_CC_LE: case ARC_CC_GE: case ARC_CC_LT:
    case ARC_CC_HI: case ARC_CC_LS: case ARC_CC_LO: case ARC_CC_HS:
      return offset >= -64 && offset <= 58;
    default:
      return false;
    }
}

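/* Editorial note (assumption): the -512..506 window appears to track
   the s10 displacement of the short beq_s/bne_s forms and -64..58 the
   s7 displacement of the other bcc_s forms, with the upper bounds
   shaved by a few bytes as slack during branch shortening.  */
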
/* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
   return the mode to be used for the comparison.  */

machine_mode
arc_select_cc_mode (enum rtx_code op, rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx x1;

  /* For an operation that sets the condition codes as a side-effect, the
     C and V flags are not set as for cmp, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead.)  */
  /* ??? We could use "pnz" for greater than zero, however, we could then
     get into trouble because the comparison could not be reversed.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && y == const0_rtx
      && (op == EQ || op == NE
          || ((op == LT || op == GE) && GET_MODE_SIZE (GET_MODE (x)) <= 4)))
    return CC_ZNmode;

  /* add.f for if (a+b) */
  if (mode == SImode
      && GET_CODE (y) == NEG
      && (op == EQ || op == NE))
    return CC_ZNmode;

  /* Check if this is a test suitable for bxor.f.  */
  if (mode == SImode && (op == EQ || op == NE) && CONST_INT_P (y)
      && ((INTVAL (y) - 1) & INTVAL (y)) == 0
      && INTVAL (y))
    return CC_Zmode;

  /* Check if this is a test suitable for add / bmsk.f.  */
  if (mode == SImode && (op == EQ || op == NE) && CONST_INT_P (y)
      && GET_CODE (x) == AND && CONST_INT_P ((x1 = XEXP (x, 1)))
      && ((INTVAL (x1) + 1) & INTVAL (x1)) == 0
      && (~INTVAL (x1) | INTVAL (y)) < 0
      && (~INTVAL (x1) | INTVAL (y)) > -0x800)
    return CC_Zmode;
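  /* Editorial worked example for the add/bmsk.f test above: for
     "(a & 0xff) == 0x12", x1 == 0xff is a low-bit mask since
     (0xff + 1) & 0xff == 0, and ~0xff | 0x12 == -238, which lies in
     (-0x800, 0), so the comparison can use the Z flag alone.  */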
1675
1676 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
1677 && GET_CODE (x) == PLUS
1678 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
1679 return CC_Cmode;
1680
1681 if (TARGET_ARGONAUT_SET
1682 && ((mode == SFmode && TARGET_SPFP) || (mode == DFmode && TARGET_DPFP)))
1683 switch (op)
1684 {
1685 case EQ: case NE: case UNEQ: case LTGT: case ORDERED: case UNORDERED:
1686 return CC_FPXmode;
1687 case LT: case UNGE: case GT: case UNLE:
1688 return CC_FP_GTmode;
1689 case LE: case UNGT: case GE: case UNLT:
1690 return CC_FP_GEmode;
1691 default: gcc_unreachable ();
1692 }
8f3304d0
CZ
1693 else if (TARGET_HARD_FLOAT
1694 && ((mode == SFmode && TARGET_FP_SP_BASE)
1695 || (mode == DFmode && TARGET_FP_DP_BASE)))
526b7aee
SV
1696 switch (op)
1697 {
8f3304d0
CZ
1698 case EQ:
1699 case NE:
1700 case UNORDERED:
1701 case ORDERED:
1702 case UNLT:
1703 case UNLE:
1704 case UNGT:
1705 case UNGE:
fbf8314b
VG
1706 return CC_FPUmode;
1707
8f3304d0
CZ
1708 case LT:
1709 case LE:
1710 case GT:
1711 case GE:
fbf8314b 1712 return CC_FPUEmode;
8f3304d0
CZ
1713
1714 case LTGT:
1715 case UNEQ:
1716 return CC_FPU_UNEQmode;
526b7aee 1717
8f3304d0
CZ
1718 default:
1719 gcc_unreachable ();
1720 }
1721 else if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_OPTFPE)
1722 {
1723 switch (op)
1724 {
1725 case EQ: case NE: return CC_Zmode;
1726 case LT: case UNGE:
1727 case GT: case UNLE: return CC_FP_GTmode;
1728 case LE: case UNGT:
1729 case GE: case UNLT: return CC_FP_GEmode;
1730 case UNEQ: case LTGT: return CC_FP_UNEQmode;
1731 case ORDERED: case UNORDERED: return CC_FP_ORDmode;
1732 default: gcc_unreachable ();
1733 }
1734 }
526b7aee
SV
1735 return CCmode;
1736}
1737
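/* Illustrative sketch (not part of the original file): a few sample
   inputs and the CC mode the function above selects for them, assuming
   the base integer ISA with no FPX/FPU/OPTFPE options enabled:

     arc_select_cc_mode (NE, reg:SI, const0_rtx)    -> CC_ZNmode
     arc_select_cc_mode (EQ, reg:SI, const_int 8)   -> CC_Zmode (bxor.f,
                                                       8 is a power of two)
     arc_select_cc_mode (GTU, reg:SI, reg:SI)       -> CCmode (fallback)  */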
/* Vectors to keep interesting information about registers where it can easily
   be got.  We used to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of arc_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one arc_mode_class mode.  */

enum arc_mode_class {
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE,
  V_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Mode for 8-byte DF values only.  */
#define DF_MODES (1 << DF_MODE)

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Modes for 128-bit vectors.  */
#define V_MODES (1 << (int) V_MODE)

/* Value is 1 if register/mode pair is acceptable on arc.  */

static unsigned int arc_hard_regno_modes[] = {
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
  D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,

  /* ??? Leave these as S_MODES for now.  */
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  DF_MODES, 0, DF_MODES, 0, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES, S_MODES,

  V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES,
  V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES,
  V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES,
  V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES,

  V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES,
  V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES,
  V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES,
  V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES, V_MODES,

  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES
};

static unsigned int arc_mode_class [NUM_MACHINE_MODES];

enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];

enum reg_class
arc_preferred_reload_class (rtx, enum reg_class cl)
{
  return cl;
}

/* Initialize the arc_mode_class array.  */

static void
arc_init_reg_tables (void)
{
  int i;

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      machine_mode m = (machine_mode) i;

      switch (GET_MODE_CLASS (m))
        {
        case MODE_INT:
        case MODE_PARTIAL_INT:
        case MODE_COMPLEX_INT:
          if (GET_MODE_SIZE (m) <= 4)
            arc_mode_class[i] = 1 << (int) S_MODE;
          else if (GET_MODE_SIZE (m) == 8)
            arc_mode_class[i] = 1 << (int) D_MODE;
          else if (GET_MODE_SIZE (m) == 16)
            arc_mode_class[i] = 1 << (int) T_MODE;
          else if (GET_MODE_SIZE (m) == 32)
            arc_mode_class[i] = 1 << (int) O_MODE;
          else
            arc_mode_class[i] = 0;
          break;
        case MODE_FLOAT:
        case MODE_COMPLEX_FLOAT:
          if (GET_MODE_SIZE (m) <= 4)
            arc_mode_class[i] = 1 << (int) SF_MODE;
          else if (GET_MODE_SIZE (m) == 8)
            arc_mode_class[i] = 1 << (int) DF_MODE;
          else if (GET_MODE_SIZE (m) == 16)
            arc_mode_class[i] = 1 << (int) TF_MODE;
          else if (GET_MODE_SIZE (m) == 32)
            arc_mode_class[i] = 1 << (int) OF_MODE;
          else
            arc_mode_class[i] = 0;
          break;
        case MODE_VECTOR_INT:
          if (GET_MODE_SIZE (m) == 4)
            arc_mode_class[i] = (1 << (int) S_MODE);
          else if (GET_MODE_SIZE (m) == 8)
            arc_mode_class[i] = (1 << (int) D_MODE);
          else
            arc_mode_class[i] = (1 << (int) V_MODE);
          break;
        case MODE_CC:
        default:
          /* mode_class hasn't been initialized yet for EXTRA_CC_MODES, so
             we must explicitly check for them here.  */
          if (i == (int) CCmode || i == (int) CC_ZNmode || i == (int) CC_Zmode
              || i == (int) CC_Cmode
              || i == CC_FP_GTmode || i == CC_FP_GEmode || i == CC_FP_ORDmode
              || i == CC_FPUmode || i == CC_FPUEmode || i == CC_FPU_UNEQmode)
            arc_mode_class[i] = 1 << (int) C_MODE;
          else
            arc_mode_class[i] = 0;
          break;
        }
    }
}

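/* Illustrative sketch (not part of the original file): after the loop
   above runs, SImode (4-byte MODE_INT) lands in S_MODE and DFmode
   (8-byte MODE_FLOAT) in DF_MODE, so e.g.

     arc_mode_class[(int) SImode] == 1 << (int) S_MODE
     arc_mode_class[(int) DFmode] == 1 << (int) DF_MODE

   which is what arc_hard_regno_mode_ok tests against
   arc_hard_regno_modes[] further down.  */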
/* Core registers 56..59 are used for multiply extension options.
   The dsp option uses r56 and r57, these are then named acc1 and acc2.
   acc1 is the highpart, and acc2 the lowpart, so which register gets which
   number depends on endianness.
   The mul64 multiplier options use r57 for mlo, r58 for mmid and r59 for mhi.
   Because mlo / mhi form a 64 bit value, we use different gcc internal
   register numbers to make them form a register pair as the gcc internals
   know it.  mmid gets number 57, if still available, and mlo / mhi get
   number 58 and 59, depending on endianness.  We use DBX_REGISTER_NUMBER
   to map this back.  */
char rname56[5] = "r56";
char rname57[5] = "r57";
char rname58[5] = "r58";
char rname59[5] = "r59";
char rname29[7] = "ilink1";
char rname30[7] = "ilink2";

static void
arc_conditional_register_usage (void)
{
  int regno;
  int i;
  int fix_start = 60, fix_end = 55;

  if (TARGET_V2)
    {
      /* For ARCv2 the core register set is changed.  */
      strcpy (rname29, "ilink");
      strcpy (rname30, "r30");

      if (!TEST_HARD_REG_BIT (overrideregs, R30_REG))
        {
          /* No user interference.  Set the r30 to be used by the
             compiler.  */
          call_used_regs[R30_REG] = 1;
          fixed_regs[R30_REG] = 0;

          arc_regno_reg_class[R30_REG] = GENERAL_REGS;
        }
    }

  if (TARGET_MUL64_SET)
    {
      fix_start = R57_REG;
      fix_end = R59_REG;

      /* We don't provide a name for mmed.  In rtl / assembly resource lists,
         you are supposed to refer to it as mlo & mhi, e.g.
         (zero_extract:SI (reg:DI 58) (const_int 32) (16)) .
         In an actual asm instruction, you of course use mmed.
         The point of avoiding having a separate register for mmed is that
         this way, we don't have to carry clobbers of that reg around in every
         instruction that modifies mlo and/or mhi.  */
      strcpy (rname57, "");
      strcpy (rname58, "mlo");
      strcpy (rname59, "mhi");
    }

  /* The nature of arc_tp_regno is actually something more like a global
     register, however globalize_reg requires a declaration.
     We use EPILOGUE_USES to compensate so that sets from
     __builtin_set_frame_pointer are not deleted.  */
  if (arc_tp_regno != -1)
    fixed_regs[arc_tp_regno] = call_used_regs[arc_tp_regno] = 1;

  if (TARGET_MULMAC_32BY16_SET)
    {
      fix_start = MUL32x16_REG;
      fix_end = fix_end > R57_REG ? fix_end : R57_REG;
      strcpy (rname56, TARGET_BIG_ENDIAN ? "acc1" : "acc2");
      strcpy (rname57, TARGET_BIG_ENDIAN ? "acc2" : "acc1");
    }
  for (regno = fix_start; regno <= fix_end; regno++)
    {
      if (!fixed_regs[regno])
        warning (0, "multiply option implies r%d is fixed", regno);
      fixed_regs [regno] = call_used_regs[regno] = 1;
    }

  /* Reduced configuration: don't use r4-r9, r16-r25.  */
  if (TARGET_RF16)
    {
      for (i = R4_REG; i <= R9_REG; i++)
        fixed_regs[i] = call_used_regs[i] = 1;
      for (i = R16_REG; i <= R25_REG; i++)
        fixed_regs[i] = call_used_regs[i] = 1;
    }

  /* ARCHS has 64-bit data-path which makes use of the even-odd paired
     registers.  */
  if (TARGET_HS)
    for (regno = R1_REG; regno < R32_REG; regno += 2)
      arc_hard_regno_modes[regno] = S_MODES;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (i < ILINK1_REG)
      {
        if ((i <= R3_REG) || ((i >= R12_REG) && (i <= R15_REG)))
          arc_regno_reg_class[i] = ARCOMPACT16_REGS;
        else
          arc_regno_reg_class[i] = GENERAL_REGS;
      }
    else if (i < LP_COUNT)
      arc_regno_reg_class[i] = GENERAL_REGS;
    else
      arc_regno_reg_class[i] = NO_REGS;

  /* Handle Special Registers.  */
  arc_regno_reg_class[CC_REG] = NO_REGS;      /* CC_REG: must be NO_REGS.  */
  arc_regno_reg_class[FRAME_POINTER_REGNUM] = GENERAL_REGS;
  arc_regno_reg_class[ARG_POINTER_REGNUM] = GENERAL_REGS;

  if (TARGET_DPFP)
    for (i = R40_REG; i < R44_REG; ++i)
      {
        arc_regno_reg_class[i] = DOUBLE_REGS;
        if (!TARGET_ARGONAUT_SET)
          CLEAR_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], i);
      }
  else
    {
      /* Disable all DOUBLE_REGISTER settings, if not generating DPFP
         code.  */
      arc_regno_reg_class[R40_REG] = ALL_REGS;
      arc_regno_reg_class[R41_REG] = ALL_REGS;
      arc_regno_reg_class[R42_REG] = ALL_REGS;
      arc_regno_reg_class[R43_REG] = ALL_REGS;

      fixed_regs[R40_REG] = 1;
      fixed_regs[R41_REG] = 1;
      fixed_regs[R42_REG] = 1;
      fixed_regs[R43_REG] = 1;

      arc_hard_regno_modes[R40_REG] = 0;
      arc_hard_regno_modes[R42_REG] = 0;
    }

  if (TARGET_SIMD_SET)
    {
      gcc_assert (ARC_FIRST_SIMD_VR_REG == 64);
      gcc_assert (ARC_LAST_SIMD_VR_REG == 127);

      for (i = ARC_FIRST_SIMD_VR_REG; i <= ARC_LAST_SIMD_VR_REG; i++)
        arc_regno_reg_class [i] = SIMD_VR_REGS;

      gcc_assert (ARC_FIRST_SIMD_DMA_CONFIG_REG == 128);
      gcc_assert (ARC_FIRST_SIMD_DMA_CONFIG_IN_REG == 128);
      gcc_assert (ARC_FIRST_SIMD_DMA_CONFIG_OUT_REG == 136);
      gcc_assert (ARC_LAST_SIMD_DMA_CONFIG_REG == 143);

      for (i = ARC_FIRST_SIMD_DMA_CONFIG_REG;
           i <= ARC_LAST_SIMD_DMA_CONFIG_REG; i++)
        arc_regno_reg_class [i] = SIMD_DMA_CONFIG_REGS;
    }

  /* pc : r63 */
  arc_regno_reg_class[PCL_REG] = NO_REGS;

  /* ARCv2 accumulator.  */
  if ((TARGET_V2
       && (TARGET_FP_DP_FUSED || TARGET_FP_SP_FUSED))
      || TARGET_PLUS_DMPY)
    {
      arc_regno_reg_class[ACCL_REGNO] = GENERAL_REGS;
      arc_regno_reg_class[ACCH_REGNO] = GENERAL_REGS;

      /* Allow the compiler to freely use them.  */
      if (!TEST_HARD_REG_BIT (overrideregs, ACCL_REGNO))
        fixed_regs[ACCL_REGNO] = 0;
      if (!TEST_HARD_REG_BIT (overrideregs, ACCH_REGNO))
        fixed_regs[ACCH_REGNO] = 0;

      if (!fixed_regs[ACCH_REGNO] && !fixed_regs[ACCL_REGNO])
        arc_hard_regno_modes[ACC_REG_FIRST] = D_MODES;
    }
}

/* Implement TARGET_HARD_REGNO_NREGS.  */

static unsigned int
arc_hard_regno_nregs (unsigned int regno, machine_mode mode)
{
  if (GET_MODE_SIZE (mode) == 16
      && regno >= ARC_FIRST_SIMD_VR_REG
      && regno <= ARC_LAST_SIMD_VR_REG)
    return 1;

  return CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD);
}

/* Implement TARGET_HARD_REGNO_MODE_OK.  */

static bool
arc_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
{
  return (arc_hard_regno_modes[regno] & arc_mode_class[mode]) != 0;
}

/* Implement TARGET_MODES_TIEABLE_P.  Tie QI/HI/SI modes together.  */

static bool
arc_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
  return (GET_MODE_CLASS (mode1) == MODE_INT
          && GET_MODE_CLASS (mode2) == MODE_INT
          && GET_MODE_SIZE (mode1) <= UNITS_PER_WORD
          && GET_MODE_SIZE (mode2) <= UNITS_PER_WORD);
}

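/* Illustrative sketch (not part of the original file): combining the two
   tables in their default state, DImode maps to D_MODE, and
   arc_hard_regno_modes[] gives r0 T_MODES (a superset of D_MODES), so
   arc_hard_regno_mode_ok (0, DImode) holds, while r26 only carries
   S_MODES and the same query on it fails.  Target options tweaked in
   arc_conditional_register_usage can change these answers.  */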
/* Handle an "interrupt" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
arc_handle_interrupt_attribute (tree *, tree name, tree args, int,
                                bool *no_add_attrs)
{
  gcc_assert (args);

  tree value = TREE_VALUE (args);

  if (TREE_CODE (value) != STRING_CST)
    {
      warning (OPT_Wattributes,
               "argument of %qE attribute is not a string constant",
               name);
      *no_add_attrs = true;
    }
  else if (!TARGET_V2
           && strcmp (TREE_STRING_POINTER (value), "ilink1")
           && strcmp (TREE_STRING_POINTER (value), "ilink2"))
    {
      warning (OPT_Wattributes,
               "argument of %qE attribute is not \"ilink1\" or \"ilink2\"",
               name);
      *no_add_attrs = true;
    }
  else if (TARGET_V2
           && strcmp (TREE_STRING_POINTER (value), "ilink")
           && strcmp (TREE_STRING_POINTER (value), "firq"))
    {
      warning (OPT_Wattributes,
               "argument of %qE attribute is not \"ilink\" or \"firq\"",
               name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

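/* Illustrative sketch (not part of the original file): the user-level
   spelling this handler validates.  "timer0_isr" and "old_isr" are
   made-up names.

     void __attribute__ ((interrupt ("ilink"))) timer0_isr (void);  // ARCv2
     void __attribute__ ((interrupt ("ilink1"))) old_isr (void);    // ARCv1

   Any other string argument draws one of the -Wattributes warnings
   above and the attribute is dropped.  */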
static tree
arc_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Return the type (normal, naked, interrupt flavor) of function FUN.
   The result is cached in FUN->machine->fn_type.  */

static unsigned int
arc_compute_function_type (struct function *fun)
{
  tree attr, decl = fun->decl;
  unsigned int fn_type = fun->machine->fn_type;

  if (fn_type != ARC_FUNCTION_UNKNOWN)
    return fn_type;

  /* Check if it is a naked function.  */
  if (lookup_attribute ("naked", DECL_ATTRIBUTES (decl)) != NULL_TREE)
    fn_type |= ARC_FUNCTION_NAKED;
  else
    fn_type |= ARC_FUNCTION_NORMAL;

  /* Now see if this is an interrupt handler.  */
  attr = lookup_attribute ("interrupt", DECL_ATTRIBUTES (decl));
  if (attr != NULL_TREE)
    {
      tree value, args = TREE_VALUE (attr);

      gcc_assert (list_length (args) == 1);
      value = TREE_VALUE (args);
      gcc_assert (TREE_CODE (value) == STRING_CST);

      if (!strcmp (TREE_STRING_POINTER (value), "ilink1")
          || !strcmp (TREE_STRING_POINTER (value), "ilink"))
        fn_type |= ARC_FUNCTION_ILINK1;
      else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
        fn_type |= ARC_FUNCTION_ILINK2;
      else if (!strcmp (TREE_STRING_POINTER (value), "firq"))
        fn_type |= ARC_FUNCTION_FIRQ;
      else
        gcc_unreachable ();
    }

  return fun->machine->fn_type = fn_type;
}

/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS'.  */

static bool
arc_allocate_stack_slots_for_args (void)
{
  /* Naked functions should not allocate stack slots for arguments.  */
  unsigned int fn_type = arc_compute_function_type (cfun);

  return !ARC_NAKED_P (fn_type);
}

/* Implement `TARGET_WARN_FUNC_RETURN'.  */

static bool
arc_warn_func_return (tree decl)
{
  struct function *func = DECL_STRUCT_FUNCTION (decl);
  unsigned int fn_type = arc_compute_function_type (func);

  return !ARC_NAKED_P (fn_type);
}

/* Return zero if TYPE1 and TYPE2 are incompatible, one if they are compatible,
   and two if they are nearly compatible (which causes a warning to be
   generated).  */

static int
arc_comp_type_attributes (const_tree type1,
                          const_tree type2)
{
  int l1, l2, m1, m2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  m1 = lookup_attribute ("medium_call", TYPE_ATTRIBUTES (type1)) != NULL;
  m2 = lookup_attribute ("medium_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | m1 | m2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same one.  */
      if ((l1 != l2) || (m1 != m2) || (s1 != s2))
        return 0;

      /* Disallow mixed attributes.  */
      if (l1 + m1 + s1 > 1)
        return 0;
    }

  return 1;
}

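/* Illustrative sketch (not part of the original file): under the checks
   above, these two declarations of a hypothetical function "f" have
   incompatible types (result 0), because the long_call/short_call
   attributes do not match between the two:

     void __attribute__ ((long_call))  f (void);
     void __attribute__ ((short_call)) f (void);  */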
/* Misc. utilities.  */

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for the cc reg in the proper mode.  */

rtx
gen_compare_reg (rtx comparison, machine_mode omode)
{
  enum rtx_code code = GET_CODE (comparison);
  rtx x = XEXP (comparison, 0);
  rtx y = XEXP (comparison, 1);
  rtx tmp, cc_reg;
  machine_mode mode, cmode;

  cmode = GET_MODE (x);
  if (cmode == VOIDmode)
    cmode = GET_MODE (y);
  gcc_assert (cmode == SImode || cmode == SFmode || cmode == DFmode);
  if (cmode == SImode)
    {
      if (!register_operand (x, SImode))
        {
          if (register_operand (y, SImode))
            {
              tmp = x;
              x = y;
              y = tmp;
              code = swap_condition (code);
            }
          else
            x = copy_to_mode_reg (SImode, x);
        }
      if (GET_CODE (y) == SYMBOL_REF && flag_pic)
        y = copy_to_mode_reg (SImode, y);
    }
  else
    {
      x = force_reg (cmode, x);
      y = force_reg (cmode, y);
    }
  mode = SELECT_CC_MODE (code, x, y);

  cc_reg = gen_rtx_REG (mode, CC_REG);

  /* ??? FIXME (x-y)==0, as done by both cmpsfpx_raw and
     cmpdfpx_raw, is not a correct comparison for floats:
     http://www.cygnus-software.com/papers/comparingfloats/comparingfloats.htm
  */
  if (TARGET_ARGONAUT_SET
      && ((cmode == SFmode && TARGET_SPFP) || (cmode == DFmode && TARGET_DPFP)))
    {
      switch (code)
        {
        case NE: case EQ: case LT: case UNGE: case LE: case UNGT:
        case UNEQ: case LTGT: case ORDERED: case UNORDERED:
          break;
        case GT: case UNLE: case GE: case UNLT:
          code = swap_condition (code);
          tmp = x;
          x = y;
          y = tmp;
          break;
        default:
          gcc_unreachable ();
        }
      if (cmode == SFmode)
        {
          emit_insn (gen_cmpsfpx_raw (x, y));
        }
      else /* DFmode */
        {
          /* Accepts Dx regs directly by insns.  */
          emit_insn (gen_cmpdfpx_raw (x, y));
        }

      if (mode != CC_FPXmode)
        emit_insn (gen_rtx_SET (cc_reg,
                                gen_rtx_COMPARE (mode,
                                                 gen_rtx_REG (CC_FPXmode, 61),
                                                 const0_rtx)));
    }
  else if (TARGET_FPX_QUARK && (cmode == SFmode))
    {
      switch (code)
        {
        case NE: case EQ: case GT: case UNLE: case GE: case UNLT:
        case UNEQ: case LTGT: case ORDERED: case UNORDERED:
          break;
        case LT: case UNGE: case LE: case UNGT:
          code = swap_condition (code);
          tmp = x;
          x = y;
          y = tmp;
          break;
        default:
          gcc_unreachable ();
        }

      emit_insn (gen_cmp_quark (cc_reg,
                                gen_rtx_COMPARE (mode, x, y)));
    }
  else if (TARGET_HARD_FLOAT
           && ((cmode == SFmode && TARGET_FP_SP_BASE)
               || (cmode == DFmode && TARGET_FP_DP_BASE)))
    emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (mode, x, y)));
  else if (GET_MODE_CLASS (cmode) == MODE_FLOAT && TARGET_OPTFPE)
    {
      rtx op0 = gen_rtx_REG (cmode, 0);
      rtx op1 = gen_rtx_REG (cmode, GET_MODE_SIZE (cmode) / UNITS_PER_WORD);
      bool swap = false;

      switch (code)
        {
        case NE: case EQ: case GT: case UNLE: case GE: case UNLT:
        case UNEQ: case LTGT: case ORDERED: case UNORDERED:
          break;
        case LT: case UNGE: case LE: case UNGT:
          code = swap_condition (code);
          swap = true;
          break;
        default:
          gcc_unreachable ();
        }
      if (currently_expanding_to_rtl)
        {
          if (swap)
            {
              tmp = x;
              x = y;
              y = tmp;
            }
          emit_move_insn (op0, x);
          emit_move_insn (op1, y);
        }
      else
        {
          gcc_assert (rtx_equal_p (op0, x));
          gcc_assert (rtx_equal_p (op1, y));
          if (swap)
            {
              op0 = y;
              op1 = x;
            }
        }
      emit_insn (gen_cmp_float (cc_reg, gen_rtx_COMPARE (mode, op0, op1)));
    }
  else
    emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (mode, x, y)));
  return gen_rtx_fmt_ee (code, omode, cc_reg, const0_rtx);
}

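/* Illustrative sketch (not part of the original file): for a plain
   SImode "a > b" with no FPX/FPU/OPTFPE arm involved, the fallback
   branch above emits

     (set (reg:CC CC_REG) (compare:CC (reg:SI a) (reg:SI b)))

   and the caller receives (gt:OMODE (reg:CC CC_REG) (const_int 0)) to
   feed into a conditional branch or store-flag pattern.  */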
/* Return true if VALUE, a const_double, will fit in a limm (4 byte number).
   We assume the value can be either signed or unsigned.  */

bool
arc_double_limm_p (rtx value)
{
  HOST_WIDE_INT low, high;

  gcc_assert (GET_CODE (value) == CONST_DOUBLE);

  if (TARGET_DPFP)
    return true;

  low = CONST_DOUBLE_LOW (value);
  high = CONST_DOUBLE_HIGH (value);

  if (low & 0x80000000)
    {
      return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
              || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
                   == - (unsigned HOST_WIDE_INT) 0x80000000)
                  && high == -1));
    }
  else
    {
      return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
    }
}

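/* Illustrative sketch (not part of the original file), for !TARGET_DPFP:
   high == 0 with low == 0x7fffffff fits (positive under either
   signedness); high == 0 with low == 0x80000000 fits as an unsigned
   limm; high == -1 with low sign-extended from bit 31 fits as a
   negative limm; high == 1 never fits, since the value then needs more
   than 32 bits.  */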
/* Do any needed setup for a variadic function.  For the ARC, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument (which is given
   by ARG), and we rely on this fact.  */

static void
arc_setup_incoming_varargs (cumulative_args_t args_so_far,
                            const function_arg_info &arg,
                            int *pretend_size, int no_rtl)
{
  int first_anon_arg;
  CUMULATIVE_ARGS next_cum;

  /* We must treat `__builtin_va_alist' as an anonymous arg.  */

  next_cum = *get_cumulative_args (args_so_far);
  arc_function_arg_advance (pack_cumulative_args (&next_cum), arg);
  first_anon_arg = next_cum;

  if (FUNCTION_ARG_REGNO_P (first_anon_arg))
    {
      /* First anonymous (unnamed) argument is in a reg.  */

      /* Note that first_reg_offset < MAX_ARC_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;

      if (!no_rtl)
        {
          rtx regblock
            = gen_rtx_MEM (BLKmode, plus_constant (Pmode, arg_pointer_rtx,
                                                   FIRST_PARM_OFFSET (0)));
          move_block_from_reg (first_reg_offset, regblock,
                               MAX_ARC_PARM_REGS - first_reg_offset);
        }

      *pretend_size
        = ((MAX_ARC_PARM_REGS - first_reg_offset) * UNITS_PER_WORD);
    }
}

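/* Illustrative sketch (not part of the original file), assuming
   MAX_ARC_PARM_REGS is 8: for "int f (int a, ...)" the named 'a' is
   passed in r0, so first_anon_arg is 1; r1..r7 are then dumped into the
   pretend-args block and *pretend_size becomes 7 * UNITS_PER_WORD,
   letting va_arg walk all anonymous arguments in memory.  */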
/* Cost functions.  */

/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.  */

static int
arc_address_cost (rtx addr, machine_mode, addr_space_t, bool speed)
{
  switch (GET_CODE (addr))
    {
    case REG :
      return speed || satisfies_constraint_Rcq (addr) ? 0 : 1;
    case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
    case PRE_MODIFY: case POST_MODIFY:
      return !speed;

    case LABEL_REF :
    case SYMBOL_REF :
    case CONST :
      if (TARGET_NPS_CMEM && cmem_address (addr, SImode))
        return 0;
      /* Most likely needs a LIMM.  */
      return COSTS_N_INSNS (1);

    case PLUS :
      {
        register rtx plus0 = XEXP (addr, 0);
        register rtx plus1 = XEXP (addr, 1);

        if (GET_CODE (plus0) != REG
            && (GET_CODE (plus0) != MULT
                || !CONST_INT_P (XEXP (plus0, 1))
                || (INTVAL (XEXP (plus0, 1)) != 2
                    && INTVAL (XEXP (plus0, 1)) != 4)))
          break;

        switch (GET_CODE (plus1))
          {
          case CONST_INT :
            return (!RTX_OK_FOR_OFFSET_P (SImode, plus1)
                    ? COSTS_N_INSNS (1)
                    : speed
                    ? 0
                    : (satisfies_constraint_Rcq (plus0)
                       && satisfies_constraint_O (plus1))
                    ? 0
                    : 1);
          case REG:
            return (speed < 1 ? 0
                    : (satisfies_constraint_Rcq (plus0)
                       && satisfies_constraint_Rcq (plus1))
                    ? 0 : 1);
          case CONST :
          case SYMBOL_REF :
          case LABEL_REF :
            return COSTS_N_INSNS (1);
          default:
            break;
          }
        break;
      }
    default:
      break;
    }

  return 4;
}

/* Emit instruction X with the frame related bit set.  */

static rtx
frame_insn (rtx x)
{
  x = emit_insn (x);
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}

/* Emit a frame insn to move SRC to DST.  */

static rtx
frame_move (rtx dst, rtx src)
{
  rtx tmp = gen_rtx_SET (dst, src);
  RTX_FRAME_RELATED_P (tmp) = 1;
  return frame_insn (tmp);
}

/* Like frame_move, but add a REG_INC note for REG if ADDR contains an
   auto increment address, or is zero.  */

static rtx
frame_move_inc (rtx dst, rtx src, rtx reg, rtx addr)
{
  rtx insn = frame_move (dst, src);

  if (!addr
      || GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
    add_reg_note (insn, REG_INC, reg);
  return insn;
}

/* Emit a frame insn which adjusts a frame address register REG by OFFSET.  */

static rtx
frame_add (rtx reg, HOST_WIDE_INT offset)
{
  gcc_assert ((offset & 0x3) == 0);
  if (!offset)
    return NULL_RTX;
  return frame_move (reg, plus_constant (Pmode, reg, offset));
}

/* Emit a frame insn which adjusts stack pointer by OFFSET.  */

static rtx
frame_stack_add (HOST_WIDE_INT offset)
{
  return frame_add (stack_pointer_rtx, offset);
}

/* Helper function to wrap FRAME_POINTER_NEEDED.  We do this as
   FRAME_POINTER_NEEDED will not be true until the IRA (Integrated
   Register Allocator) pass, while we want to get the frame size
   correct earlier than the IRA pass.

   When a function uses eh_return we must ensure that the fp register
   is saved and then restored so that the unwinder can restore the
   correct value for the frame we are going to jump to.

   To do this we force all frames that call eh_return to require a
   frame pointer (see arc_frame_pointer_required), this
   will ensure that the previous frame pointer is stored on entry to
   the function, and will then be reloaded at function exit.

   As the frame pointer is handled as a special case in our prologue
   and epilogue code it must not be saved and restored using the
   MUST_SAVE_REGISTER mechanism otherwise we run into issues where GCC
   believes that the function is not using a frame pointer and that
   the value in the fp register is the frame pointer, while the
   prologue and epilogue are busy saving and restoring the fp
   register.

   During compilation of a function the frame size is evaluated
   multiple times; it is not until the reload pass is complete that the
   frame size is considered fixed (it is at this point that space for
   all spills has been allocated).  However the frame_pointer_needed
   variable is not set true until the register allocation pass, as a
   result in the early stages the frame size does not include space
   for the frame pointer to be spilled.

   The problem that this causes is that the rtl generated for
   EH_RETURN_HANDLER_RTX uses the details of the frame size to compute
   the offset from the frame pointer at which the return address
   lives.  However, in early passes GCC has not yet realised we need a
   frame pointer, and so has not included space for the frame pointer
   in the frame size, and so gets the offset of the return address
   wrong.  This should not be an issue as in later passes GCC has
   realised that the frame pointer needs to be spilled, and has
   increased the frame size.  However, the rtl for the
   EH_RETURN_HANDLER_RTX is not regenerated to use the newer, larger
   offset, and the wrong smaller offset is used.  */

static bool
arc_frame_pointer_needed (void)
{
  return (frame_pointer_needed || crtl->calls_eh_return);
}

/* Tell prologue and epilogue if register REGNO should be saved /
   restored.  SPECIAL_P is true when the register may need a special
   ld/st sequence.  The return address and stack pointer are treated
   separately.  Don't consider them here.  */

static bool
arc_must_save_register (int regno, struct function *func, bool special_p)
{
  unsigned int fn_type = arc_compute_function_type (func);
  bool irq_auto_save_p = ((irq_ctrl_saved.irq_save_last_reg >= regno)
                          && ARC_AUTO_IRQ_P (fn_type));
  bool firq_auto_save_p = ARC_FAST_INTERRUPT_P (fn_type);

  switch (rgf_banked_register_count)
    {
    case 4:
      firq_auto_save_p &= (regno < 4);
      break;
    case 8:
      firq_auto_save_p &= ((regno < 4) || ((regno > 11) && (regno < 16)));
      break;
    case 16:
      firq_auto_save_p &= ((regno < 4) || ((regno > 9) && (regno < 16))
                           || ((regno > 25) && (regno < 29))
                           || ((regno > 29) && (regno < 32)));
      break;
    case 32:
      firq_auto_save_p &= (regno != 29) && (regno < 32);
      break;
    default:
      firq_auto_save_p = false;
      break;
    }

  switch (regno)
    {
    case ILINK1_REG:
    case RETURN_ADDR_REGNUM:
    case STACK_POINTER_REGNUM:
      /* The stack pointer and the return address are handled
         separately.  */
      return false;

    case R30_REG:
      /* r30 is either used as ilink2 by ARCv1 or as a free register
         by ARCv2.  */
      if (!TARGET_V2)
        return false;
      break;

    case R40_REG:
    case R41_REG:
    case R42_REG:
    case R43_REG:
    case R44_REG:
      /* If those ones are used by the FPX machinery, we handle them
         separately.  */
      if (TARGET_DPFP && !special_p)
        return false;
      /* FALLTHRU.  */

    case R32_REG:
    case R33_REG:
    case R34_REG:
    case R35_REG:
    case R36_REG:
    case R37_REG:
    case R38_REG:
    case R39_REG:
    case R45_REG:
    case R46_REG:
    case R47_REG:
    case R48_REG:
    case R49_REG:
    case R50_REG:
    case R51_REG:
    case R52_REG:
    case R53_REG:
    case R54_REG:
    case R55_REG:
    case R56_REG:
    case R57_REG:
      /* The Extension Registers.  */
      if (ARC_INTERRUPT_P (fn_type)
          && (df_regs_ever_live_p (RETURN_ADDR_REGNUM)
              || df_regs_ever_live_p (regno))
          /* Not all extension registers are available, choose the
             real ones.  */
          && !fixed_regs[regno])
        return true;
      return false;

    case R58_REG:
    case R59_REG:
      /* ARC600 specifies those ones as mlo/mhi registers, otherwise
         just handle them like any other extension register.  */
      if (ARC_INTERRUPT_P (fn_type)
          && (df_regs_ever_live_p (RETURN_ADDR_REGNUM)
              || df_regs_ever_live_p (regno))
          /* Not all extension registers are available, choose the
             real ones.  */
          && ((!fixed_regs[regno] && !special_p)
              || (TARGET_MUL64_SET && special_p)))
        return true;
      return false;

    case 61:
    case 62:
    case 63:
      /* Fixed/control register, nothing to do.  LP_COUNT is
         different.  */
      return false;

    case HARD_FRAME_POINTER_REGNUM:
      /* If we need FP reg as a frame pointer then don't save it as a
         regular reg.  */
      if (arc_frame_pointer_needed ())
        return false;
      break;

    default:
      break;
    }

  if (((df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
       /* In an interrupt save everything.  */
       || (ARC_INTERRUPT_P (fn_type)
           && (df_regs_ever_live_p (RETURN_ADDR_REGNUM)
               || df_regs_ever_live_p (regno))))
      /* Do not emit code for auto saved regs.  */
      && !irq_auto_save_p
      && !firq_auto_save_p)
    return true;
  return false;
}

/* Return true if the return address must be saved in the current function,
   otherwise return false.  */

static bool
arc_must_save_return_addr (struct function *func)
{
  if (func->machine->frame_info.save_return_addr)
    return true;

  return false;
}

/* Return non-zero if there are registers to be saved or loaded using
   millicode thunks.  We can only use consecutive sequences starting
   with r13, and not going beyond r25.
   GMASK is a bitmask of registers to save.  This function sets
   FRAME->millicode_start_reg .. FRAME->millicode_end_reg to the range
   of registers to be saved / restored with a millicode call.  */

static int
arc_compute_millicode_save_restore_regs (uint64_t gmask,
                                         struct arc_frame_info *frame)
{
  int regno;

  int start_reg = 13, end_reg = 25;

  for (regno = start_reg; regno <= end_reg && (gmask & (1ULL << regno));)
    regno++;
  end_reg = regno - 1;
  /* There is no point in using millicode thunks if we don't save/restore
     at least three registers.  For non-leaf functions we also have the
     blink restore.  */
  if (regno - start_reg >= 3 - (crtl->is_leaf == 0))
    {
      frame->millicode_start_reg = 13;
      frame->millicode_end_reg = regno - 1;
      return 1;
    }
  return 0;
}

/* Return the bytes needed to compute the frame pointer from the
   current stack pointer.  */

static unsigned int
arc_compute_frame_size (void)
{
  int regno;
  unsigned int total_size, var_size, args_size, pretend_size, extra_size;
  unsigned int reg_size;
  uint64_t gmask;
  struct arc_frame_info *frame_info;
  int size;
  unsigned int extra_plus_reg_size;
  unsigned int extra_plus_reg_size_aligned;
  unsigned int fn_type = arc_compute_function_type (cfun);

  /* The answer might already be known.  */
  if (cfun->machine->frame_info.initialized)
    return cfun->machine->frame_info.total_size;

  frame_info = &cfun->machine->frame_info;
  size = ARC_STACK_ALIGN (get_frame_size ());

  /* 1) Size of locals and temporaries.  */
  var_size = size;

  /* 2) Size of outgoing arguments.  */
  args_size = crtl->outgoing_args_size;

  /* 3) Calculate space needed for saved registers.
     ??? We ignore the extension registers for now.  */

  /* See if this is an interrupt handler.  Call used registers must be saved
     for them too.  */

  reg_size = 0;
  gmask = 0;

  /* The last 4 regs are special, avoid them.  */
  for (regno = 0; regno <= (GMASK_LEN - 4); regno++)
    {
      if (arc_must_save_register (regno, cfun, false))
        {
          reg_size += UNITS_PER_WORD;
          gmask |= 1ULL << regno;
        }
    }

  /* In a frame that calls __builtin_eh_return two data registers are
     used to pass values back to the exception handler.

     Ensure that these registers are spilled to the stack so that the
     exception throw code can find them, and update the saved values.
     The handling code will then consume these reloaded values to
     handle the exception.  */
  if (crtl->calls_eh_return)
    for (regno = 0; EH_RETURN_DATA_REGNO (regno) != INVALID_REGNUM; regno++)
      {
        reg_size += UNITS_PER_WORD;
        gmask |= 1ULL << regno;
      }

  /* Check if we need to save the return address.  */
  frame_info->save_return_addr = (!crtl->is_leaf
                                  || df_regs_ever_live_p (RETURN_ADDR_REGNUM)
                                  || crtl->calls_eh_return);

  /* Saving blink reg for millicode thunk calls.  */
  if (TARGET_MILLICODE_THUNK_SET
      && !ARC_INTERRUPT_P (fn_type)
      && !crtl->calls_eh_return)
    {
      if (arc_compute_millicode_save_restore_regs (gmask, frame_info))
        frame_info->save_return_addr = true;
    }

  /* Save lp_count, lp_start and lp_end.  */
  if (arc_lpcwidth != 0 && arc_must_save_register (LP_COUNT, cfun, true))
    reg_size += UNITS_PER_WORD * 3;

  /* Check for the special R40-R44 regs used by FPX extension.  */
  if (arc_must_save_register (TARGET_BIG_ENDIAN ? R41_REG : R40_REG,
                              cfun, TARGET_DPFP))
    reg_size += UNITS_PER_WORD * 2;
  if (arc_must_save_register (TARGET_BIG_ENDIAN ? R43_REG : R42_REG,
                              cfun, TARGET_DPFP))
    reg_size += UNITS_PER_WORD * 2;

  /* Check for special MLO/MHI case used by ARC600's MUL64
     extension.  */
  if (arc_must_save_register (R58_REG, cfun, TARGET_MUL64_SET))
    reg_size += UNITS_PER_WORD * 2;

  /* 4) Calculate extra size made up of the blink + fp size.  */
  extra_size = 0;
  if (arc_must_save_return_addr (cfun))
    extra_size = 4;
  /* Add FP size only when it is not autosaved.  */
  if (arc_frame_pointer_needed ()
      && !ARC_AUTOFP_IRQ_P (fn_type))
    extra_size += 4;

  /* 5) Space for variable arguments passed in registers.  */
  pretend_size = crtl->args.pretend_args_size;

  /* Ensure everything before the locals is aligned appropriately.  */
  extra_plus_reg_size = extra_size + reg_size;
  extra_plus_reg_size_aligned = ARC_STACK_ALIGN (extra_plus_reg_size);
  reg_size = extra_plus_reg_size_aligned - extra_size;

  /* Compute total frame size.  */
  total_size = var_size + args_size + extra_size + pretend_size + reg_size;

  /* It used to be the case that the alignment was forced at this
     point.  However, that is dangerous, calculations based on
     total_size would be wrong.  Given that this has never cropped up
     as an issue I've changed this to an assert for now.  */
  gcc_assert (total_size == ARC_STACK_ALIGN (total_size));

  /* Save computed information.  */
  frame_info->total_size = total_size;
  frame_info->extra_size = extra_size;
  frame_info->pretend_size = pretend_size;
  frame_info->var_size = var_size;
  frame_info->args_size = args_size;
  frame_info->reg_size = reg_size;
  frame_info->gmask = gmask;
  frame_info->initialized = reload_completed;

  /* Ok, we're done.  */
  return total_size;
}

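/* Illustrative sketch (not part of the original file), assuming a
   4-byte stack alignment so ARC_STACK_ALIGN leaves word multiples
   unchanged: a function with 8 bytes of locals, no outgoing args or
   variadic traffic, that saves r13/r14 and blink gets var_size = 8,
   reg_size = 8, extra_size = 4 (blink), hence total_size = 20, with
   bits 13 and 14 set in gmask.  Numbers are a sketch, not taken from a
   real compilation.  */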
/* Build dwarf information when the context is saved via AUX_IRQ_CTRL
   mechanism.  */

static void
arc_dwarf_emit_irq_save_regs (void)
{
  rtx tmp, par, insn, reg;
  int i, offset, j;

  par = gen_rtx_SEQUENCE (VOIDmode,
                          rtvec_alloc (irq_ctrl_saved.irq_save_last_reg + 1
                                       + irq_ctrl_saved.irq_save_blink
                                       + irq_ctrl_saved.irq_save_lpcount
                                       + 1));

  /* Build the stack adjustment note for unwind info.  */
  j = 0;
  offset = UNITS_PER_WORD * (irq_ctrl_saved.irq_save_last_reg + 1
                             + irq_ctrl_saved.irq_save_blink
                             + irq_ctrl_saved.irq_save_lpcount);
  tmp = plus_constant (Pmode, stack_pointer_rtx, -1 * offset);
  tmp = gen_rtx_SET (stack_pointer_rtx, tmp);
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (par, 0, j++) = tmp;

  offset -= UNITS_PER_WORD;

  /* 1st goes LP_COUNT.  */
  if (irq_ctrl_saved.irq_save_lpcount)
    {
      reg = gen_rtx_REG (SImode, 60);
      tmp = plus_constant (Pmode, stack_pointer_rtx, offset);
      tmp = gen_frame_mem (SImode, tmp);
      tmp = gen_rtx_SET (tmp, reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (par, 0, j++) = tmp;
      offset -= UNITS_PER_WORD;
    }

  /* 2nd goes BLINK.  */
  if (irq_ctrl_saved.irq_save_blink)
    {
      reg = gen_rtx_REG (SImode, 31);
      tmp = plus_constant (Pmode, stack_pointer_rtx, offset);
      tmp = gen_frame_mem (SImode, tmp);
      tmp = gen_rtx_SET (tmp, reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (par, 0, j++) = tmp;
      offset -= UNITS_PER_WORD;
    }

  /* Build the parallel of the remaining registers recorded as saved
     for unwind.  */
  for (i = irq_ctrl_saved.irq_save_last_reg; i >= 0; i--)
    {
      reg = gen_rtx_REG (SImode, i);
      tmp = plus_constant (Pmode, stack_pointer_rtx, offset);
      tmp = gen_frame_mem (SImode, tmp);
      tmp = gen_rtx_SET (tmp, reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (par, 0, j++) = tmp;
      offset -= UNITS_PER_WORD;
    }

  /* Dummy insn used to anchor the dwarf info.  */
  insn = emit_insn (gen_stack_irq_dwarf ());
  add_reg_note (insn, REG_FRAME_RELATED_EXPR, par);
  RTX_FRAME_RELATED_P (insn) = 1;
}

/* Helper for prologue: emit frame store with pre_modify or pre_dec to
   save register REG on stack.  An initial offset OFFSET can be passed
   to the function.  */

static int
frame_save_reg (rtx reg, HOST_WIDE_INT offset)
{
  rtx addr;

  if (offset)
    {
      rtx tmp = plus_constant (Pmode, stack_pointer_rtx,
                               offset - GET_MODE_SIZE (GET_MODE (reg)));
      addr = gen_frame_mem (GET_MODE (reg),
                            gen_rtx_PRE_MODIFY (Pmode,
                                                stack_pointer_rtx,
                                                tmp));
    }
  else
    addr = gen_frame_mem (GET_MODE (reg), gen_rtx_PRE_DEC (Pmode,
                                                           stack_pointer_rtx));
  frame_move_inc (addr, reg, stack_pointer_rtx, 0);

  return GET_MODE_SIZE (GET_MODE (reg)) - offset;
}

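/* Illustrative sketch (not part of the original file): with OFFSET == 0
   the pre_dec form corresponds to roughly "st.a rN,[sp,-4]" for an
   SImode REG; a nonzero (negative) OFFSET folds the pending sp
   adjustment into the pre_modify, e.g. OFFSET == -16 gives
   "st.a rN,[sp,-20]" and the function reports 4 - (-16) = 20 bytes
   allocated.  */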
/* Helper used when saving AUX regs during ISR.  */

static int
push_reg (rtx reg)
{
  rtx stkslot = gen_rtx_MEM (GET_MODE (reg),
                             gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
  rtx insn = emit_move_insn (stkslot, reg);
  RTX_FRAME_RELATED_P (insn) = 1;
  add_reg_note (insn, REG_CFA_ADJUST_CFA,
                gen_rtx_SET (stack_pointer_rtx,
                             plus_constant (Pmode, stack_pointer_rtx,
                                            -GET_MODE_SIZE (GET_MODE (reg)))));
  return GET_MODE_SIZE (GET_MODE (reg));
}

/* Helper for epilogue: emit frame load with post_modify or post_inc
   to restore register REG from stack.  The initial offset is passed
   via OFFSET.  */

static int
frame_restore_reg (rtx reg, HOST_WIDE_INT offset)
{
  rtx addr, insn;

  if (offset)
    {
      rtx tmp = plus_constant (Pmode, stack_pointer_rtx,
                               offset + GET_MODE_SIZE (GET_MODE (reg)));
      addr = gen_frame_mem (GET_MODE (reg),
                            gen_rtx_POST_MODIFY (Pmode,
                                                 stack_pointer_rtx,
                                                 tmp));
    }
  else
    addr = gen_frame_mem (GET_MODE (reg), gen_rtx_POST_INC (Pmode,
                                                            stack_pointer_rtx));
  insn = frame_move_inc (reg, addr, stack_pointer_rtx, 0);
  add_reg_note (insn, REG_CFA_RESTORE, reg);

  if (reg == hard_frame_pointer_rtx)
    add_reg_note (insn, REG_CFA_DEF_CFA,
                  plus_constant (Pmode, stack_pointer_rtx,
                                 GET_MODE_SIZE (GET_MODE (reg)) + offset));
  else
    add_reg_note (insn, REG_CFA_ADJUST_CFA,
                  gen_rtx_SET (stack_pointer_rtx,
                               plus_constant (Pmode, stack_pointer_rtx,
                                              GET_MODE_SIZE (GET_MODE (reg))
                                              + offset)));

  return GET_MODE_SIZE (GET_MODE (reg)) + offset;
}

/* Helper used when restoring AUX regs during ISR.  */

static int
pop_reg (rtx reg)
{
  rtx stkslot = gen_rtx_MEM (GET_MODE (reg),
                             gen_rtx_POST_INC (Pmode, stack_pointer_rtx));
  rtx insn = emit_move_insn (reg, stkslot);
  RTX_FRAME_RELATED_P (insn) = 1;
  add_reg_note (insn, REG_CFA_ADJUST_CFA,
                gen_rtx_SET (stack_pointer_rtx,
                             plus_constant (Pmode, stack_pointer_rtx,
                                            GET_MODE_SIZE (GET_MODE (reg)))));
  return GET_MODE_SIZE (GET_MODE (reg));
}

/* Check if we have a continuous range to be saved/restored with the
   help of enter/leave instructions.  A valid register range starts
   from $r13 and goes up to (and including) $r26.  */

static bool
arc_enter_leave_p (uint64_t gmask)
{
  int regno;
  unsigned int rmask = 0;

  if (!gmask)
    return false;

  for (regno = ENTER_LEAVE_START_REG;
       regno <= ENTER_LEAVE_END_REG && (gmask & (1ULL << regno)); regno++)
    rmask |= 1ULL << regno;

  if (rmask ^ gmask)
    return false;

  return true;
}

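/* Illustrative sketch (not part of the original file): a gmask covering
   r13-r16 (bits 13..16, i.e. 0x1e000) is a valid enter/leave range,
   while r13, r14 plus r16 (0x16000) is rejected because of the hole at
   r15: the scan stops there, rmask becomes 0x6000, and rmask ^ gmask is
   nonzero.  */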
/* ARC's prologue, save any needed call-saved regs (and call-used if
   this is an interrupt handler) for ARCompact ISA, using ST/STD
   instructions.  */

static int
arc_save_callee_saves (uint64_t gmask,
                       bool save_blink,
                       bool save_fp,
                       HOST_WIDE_INT offset,
                       bool emit_move)
{
  rtx reg;
  int frame_allocated = 0;
  int i;

  /* The home-grown ABI says link register is saved first.  */
  if (save_blink)
    {
      reg = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
      frame_allocated += frame_save_reg (reg, offset);
      offset = 0;
    }

  /* N.B. FRAME_POINTER_MASK and RETURN_ADDR_MASK are cleared in gmask.  */
  if (gmask)
    for (i = GMASK_LEN; i >= 0; i--)
      {
        machine_mode save_mode = SImode;

        if (TARGET_LL64
            && ((i - 1) % 2 == 0)
            && ((gmask & (1ULL << i)) != 0)
            && ((gmask & (1ULL << (i - 1))) != 0))
          {
            save_mode = DImode;
            --i;
          }
        else if ((gmask & (1ULL << i)) == 0)
          continue;

        reg = gen_rtx_REG (save_mode, i);
        frame_allocated += frame_save_reg (reg, offset);
        offset = 0;
      }

  /* Save frame pointer if needed.  First save the FP on stack, if not
     autosaved.  Unfortunately, I cannot add it to gmask and use the
     above loop to save fp because our ABI states fp goes after all
     registers are saved.  */
  if (save_fp)
    {
      frame_allocated += frame_save_reg (hard_frame_pointer_rtx, offset);
      offset = 0;
    }

  /* Emit mov fp,sp.  */
  if (emit_move)
    frame_move (hard_frame_pointer_rtx, stack_pointer_rtx);

  return frame_allocated;
}

/* ARC's epilogue, restore any required call-saved regs (and call-used
   if it is for an interrupt handler) using LD/LDD instructions.  */

static int
arc_restore_callee_saves (uint64_t gmask,
                          bool restore_blink,
                          bool restore_fp,
                          HOST_WIDE_INT offset,
                          HOST_WIDE_INT allocated)
{
  rtx reg;
  int frame_deallocated = 0;
  HOST_WIDE_INT offs = cfun->machine->frame_info.reg_size;
  unsigned int fn_type = arc_compute_function_type (cfun);
  bool early_blink_restore;
  int i;

  /* Emit mov fp,sp.  */
  if (arc_frame_pointer_needed () && offset)
    {
      frame_move (stack_pointer_rtx, hard_frame_pointer_rtx);
      frame_deallocated += offset;
      offset = 0;
    }

  if (restore_fp)
    {
      /* Any offset is taken care of by the previous if-statement.  */
      gcc_assert (offset == 0);
      frame_deallocated += frame_restore_reg (hard_frame_pointer_rtx, 0);
    }

  if (offset)
    {
      /* No $fp involved, we need to do an add to set the $sp to the
         location of the first register.  */
      frame_stack_add (offset);
      frame_deallocated += offset;
      offset = 0;
    }

  /* When we do not optimize for size and we aren't in an interrupt,
     restore blink first.  */
  early_blink_restore = restore_blink && !optimize_size && offs
    && !ARC_INTERRUPT_P (fn_type);
  if (early_blink_restore)
    {
      rtx addr = plus_constant (Pmode, stack_pointer_rtx, offs);
      reg = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
      rtx insn = frame_move_inc (reg, gen_frame_mem (Pmode, addr),
                                 stack_pointer_rtx, NULL_RTX);
      add_reg_note (insn, REG_CFA_RESTORE, reg);
      restore_blink = false;
    }

  /* N.B. FRAME_POINTER_MASK and RETURN_ADDR_MASK are cleared in gmask.  */
  if (gmask)
    for (i = 0; i <= GMASK_LEN; i++)
      {
        machine_mode restore_mode = SImode;

        if (TARGET_LL64
            && ((i % 2) == 0)
            && ((gmask & (1ULL << i)) != 0)
            && ((gmask & (1ULL << (i + 1))) != 0))
          restore_mode = DImode;
        else if ((gmask & (1ULL << i)) == 0)
          continue;

        reg = gen_rtx_REG (restore_mode, i);
        offs = 0;
        switch (restore_mode)
          {
          case E_DImode:
            if ((GMASK_LEN - __builtin_clzll (gmask)) == (i + 1)
                && early_blink_restore)
              offs = 4;
            break;
          case E_SImode:
            if ((GMASK_LEN - __builtin_clzll (gmask)) == i
                && early_blink_restore)
              offs = 4;
            break;
          default:
            offs = 0;
          }
        frame_deallocated += frame_restore_reg (reg, offs);
        offset = 0;

        if (restore_mode == DImode)
          i++;
      }

  if (restore_blink)
    {
      reg = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
      frame_deallocated += frame_restore_reg (reg, allocated
                                              - frame_deallocated
                                              /* Consider as well the
                                                 current restored
                                                 register size.  */
                                              - UNITS_PER_WORD);
    }

  return frame_deallocated;
}

/* ARC prologue, save the registers using the enter instruction.  The
   enter instruction can also save $blink (SAVE_BLINK) and the $fp
   (SAVE_FP) register.  */

static int
arc_save_callee_enter (uint64_t gmask,
                       bool save_blink,
                       bool save_fp,
                       HOST_WIDE_INT offset)
{
  int start_reg = ENTER_LEAVE_START_REG;
  int end_reg = ENTER_LEAVE_END_REG;
  int regno, indx, off, nregs;
  rtx insn, reg, mem;
  int frame_allocated = 0;

  for (regno = start_reg; regno <= end_reg && (gmask & (1ULL << regno));)
    regno++;

  end_reg = regno - 1;
  nregs = end_reg - start_reg + 1;
  nregs += save_blink ? 1 : 0;
  nregs += save_fp ? 1 : 0;

  if (offset)
    frame_stack_add (offset);

  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs + (save_fp ? 1 : 0)
                                                  + 1));
  indx = 0;

  reg = gen_rtx_SET (stack_pointer_rtx,
                     plus_constant (Pmode,
                                    stack_pointer_rtx,
                                    -nregs * UNITS_PER_WORD));
  RTX_FRAME_RELATED_P (reg) = 1;
  XVECEXP (insn, 0, indx++) = reg;
  off = nregs * UNITS_PER_WORD;

  if (save_blink)
    {
      reg = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
      mem = gen_frame_mem (Pmode, plus_constant (Pmode,
                                                 stack_pointer_rtx,
                                                 off));
      XVECEXP (insn, 0, indx) = gen_rtx_SET (mem, reg);
      RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx++)) = 1;
      off -= UNITS_PER_WORD;
      save_blink = false;
    }

  for (regno = start_reg;
       regno <= end_reg;
       regno++, indx++, off -= UNITS_PER_WORD)
    {
      reg = gen_rtx_REG (SImode, regno);
      mem = gen_frame_mem (SImode, plus_constant (Pmode,
                                                  stack_pointer_rtx,
                                                  off));
      XVECEXP (insn, 0, indx) = gen_rtx_SET (mem, reg);
      RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx)) = 1;
      gmask = gmask & ~(1ULL << regno);
    }

  if (save_fp)
    {
      mem = gen_frame_mem (Pmode, plus_constant (Pmode,
                                                 stack_pointer_rtx,
                                                 off));
      XVECEXP (insn, 0, indx) = gen_rtx_SET (mem, hard_frame_pointer_rtx);
      RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx++)) = 1;
      off -= UNITS_PER_WORD;

      XVECEXP (insn, 0, indx) = gen_rtx_SET (hard_frame_pointer_rtx,
                                             stack_pointer_rtx);
      RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx++)) = 1;
      save_fp = false;
    }

  gcc_assert (off == 0);
  insn = frame_insn (insn);

  add_reg_note (insn, REG_INC, stack_pointer_rtx);

  frame_allocated = nregs * UNITS_PER_WORD;

  /* offset is a negative number, make sure we add it.  */
  return frame_allocated - offset;
}

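/* Illustrative sketch (not part of the original file): for a gmask
   covering r13-r15 with SAVE_BLINK and SAVE_FP set, the parallel built
   above describes what the hardware instruction does in one go,
   roughly "enter_s [r13-r15,fp,blink]" in the emitted assembly,
   adjusting sp by 5 words and recording each store for dwarf.  */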
3394/* ARC epilogue, restore the registers using leave instruction. An
3395 initial offset is passed in OFFSET. Besides restoring an register
3396 range, leave can also restore $blink (RESTORE_BLINK), or $fp
3397 (RESTORE_FP), and can automatic return (RETURN_P). */
3398
3399static int
ce9dbf20 3400arc_restore_callee_leave (uint64_t gmask,
90b48013
CZ
3401 bool restore_blink,
3402 bool restore_fp,
3403 bool return_p,
3404 HOST_WIDE_INT offset)
3405{
3406 int start_reg = ENTER_LEAVE_START_REG;
3407 int end_reg = ENTER_LEAVE_END_REG;
3408 int regno, indx, off, nregs;
3409 rtx insn, reg, mem;
3410 int frame_allocated = 0;
3411
ce9dbf20 3412 for (regno = start_reg; regno <= end_reg && (gmask & (1ULL << regno));)
90b48013
CZ
3413 regno++;
3414
3415 end_reg = regno - 1;
3416 nregs = end_reg - start_reg + 1;
3417 nregs += restore_blink ? 1 : 0;
3418 nregs += restore_fp ? 1 : 0;
3419
3420 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs + 1
3421 + (return_p ? 1 : 0)));
3422 indx = 0;
3423
3424 if (return_p)
3425 XVECEXP (insn, 0, indx++) = ret_rtx;
3426
3427 if (restore_fp)
3428 {
3429 /* I cannot emit set (sp, fp) here as cselib expects a single sp
3430 set and not two. Thus, use the offset, and change sp adjust
3431 value. */
3432 frame_allocated += offset;
3433 }
3434
3435 if (offset && !restore_fp)
3436 {
3437 /* This add is only emmited when we do not restore fp with leave
3438 instruction. */
3439 frame_stack_add (offset);
3440 frame_allocated += offset;
3441 offset = 0;
3442 }
3443
3444 reg = gen_rtx_SET (stack_pointer_rtx,
3445 plus_constant (Pmode,
3446 stack_pointer_rtx,
3447 offset + nregs * UNITS_PER_WORD));
3448 RTX_FRAME_RELATED_P (reg) = 1;
3449 XVECEXP (insn, 0, indx++) = reg;
3450 off = nregs * UNITS_PER_WORD;
3451
3452 if (restore_blink)
3453 {
3454 reg = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
3455 mem = gen_frame_mem (Pmode, plus_constant (Pmode,
3456 stack_pointer_rtx,
3457 off));
3458 XVECEXP (insn, 0, indx) = gen_rtx_SET (reg, mem);
3459 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx++)) = 1;
3460 off -= UNITS_PER_WORD;
3461 }
3462
3463 for (regno = start_reg;
3464 regno <= end_reg;
3465 regno++, indx++, off -= UNITS_PER_WORD)
3466 {
3467 reg = gen_rtx_REG (SImode, regno);
3468 mem = gen_frame_mem (SImode, plus_constant (Pmode,
3469 stack_pointer_rtx,
3470 off));
3471 XVECEXP (insn, 0, indx) = gen_rtx_SET (reg, mem);
3472 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx)) = 1;
ce9dbf20 3473 gmask = gmask & ~(1ULL << regno);
3474 }
3475
3476 if (restore_fp)
3477 {
3478 mem = gen_frame_mem (Pmode, plus_constant (Pmode,
3479 stack_pointer_rtx,
3480 off));
47d8cb23 3481 XVECEXP (insn, 0, indx) = gen_rtx_SET (hard_frame_pointer_rtx, mem);
3482 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx++)) = 1;
3483 off -= UNITS_PER_WORD;
3484 }
3485
3486 gcc_assert (off == 0);
3487 if (return_p)
3488 {
3489 insn = emit_jump_insn (insn);
3490 RTX_FRAME_RELATED_P (insn) = 1;
3491 }
3492 else
3493 insn = frame_insn (insn);
3494
3495 add_reg_note (insn, REG_INC, stack_pointer_rtx);
3496
3497 /* Dwarf related info. */
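  /* (Illustrative note, not from the sources: the REG_CFA_* notes
     attached below are what the dwarf2cfi machinery lowers to CFI
     directives in the output, e.g. a ".cfi_def_cfa" rule for the new
     CFA and one ".cfi_restore" per restored register.)  */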
3498 if (restore_fp)
3499 {
47d8cb23 3500 add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
3501 add_reg_note (insn, REG_CFA_DEF_CFA,
3502 plus_constant (Pmode, stack_pointer_rtx,
3503 offset + nregs * UNITS_PER_WORD));
3504 }
3505 else
3506 {
3507 add_reg_note (insn, REG_CFA_ADJUST_CFA,
3508 gen_rtx_SET (stack_pointer_rtx,
3509 plus_constant (Pmode, stack_pointer_rtx,
3510 nregs * UNITS_PER_WORD)));
3511 }
3512 if (restore_blink)
3513 add_reg_note (insn, REG_CFA_RESTORE,
3514 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM));
3515 for (regno = start_reg; regno <= end_reg; regno++)
3516 add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (SImode, regno));
3517
3518 frame_allocated += nregs * UNITS_PER_WORD;
3519
3520 return frame_allocated;
3521}
3522
3523/* Millicode thunks implementation:
3524   Generates calls to millicode thunks for registers in the range r13 to r25.
3525   Present limitations:
3526   - Only one range is supported.  The remaining regs will use ordinary
3527     st and ld instructions for stores and loads.  Hence a gmask asking
3528     to store r13-r14 and r16-r25 will only generate calls to store and
3529     load r13 to r14, while store and load insns will be generated for
3530     r16 to r25 in the prologue and epilogue respectively.
3531
3532   - At present the library only supports register ranges starting from r13.
3533*/
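
/* An illustrative sketch (not from the sources) of how the
   range-detection loop in arc_save_callee_milli below splits a
   non-contiguous GMASK.  For a mask covering r13-r14 and r16-r25:

     uint64_t gmask = (3ULL << 13) | (0x3ffULL << 16);
     int regno = 13;
     while (regno <= 25 && (gmask & (1ULL << regno)))
       regno++;                  -- the loop stops at regno == 15
     int end_reg = regno - 1;    -- so the millicode range is r13..r14

   Only r13-r14 then go through the millicode call; r16-r25 are saved
   and restored with the individual st/ld insns that follow it.  */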
3534
3535static int
ce9dbf20 3536arc_save_callee_milli (uint64_t gmask,
3537 bool save_blink,
3538 bool save_fp,
3539 HOST_WIDE_INT offset,
3540 HOST_WIDE_INT reg_size)
3541{
3542 int start_reg = 13;
3543 int end_reg = 25;
3544 int regno, indx, off, nregs;
3545 rtx insn, reg, mem;
3546 int frame_allocated = 0;
3547
ce9dbf20 3548 for (regno = start_reg; regno <= end_reg && (gmask & (1ULL << regno));)
3549 regno++;
3550
3551 end_reg = regno - 1;
3552 nregs = end_reg - start_reg + 1;
3553 gcc_assert (end_reg > 14);
3554
3555
3556  /* Allocate space on the stack for the registers, and also take
3557     into account the initial offset.  The registers will be saved
3558     using offsets.  N.B. OFFSET is a negative number.  */
3559 if (save_blink)
3560 {
3561 reg = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
3562 frame_allocated += frame_save_reg (reg, offset);
3563 offset = 0;
3564 }
3565
3566 if (reg_size || offset)
3567 {
3568 frame_stack_add (offset - reg_size);
3569 frame_allocated += nregs * UNITS_PER_WORD - offset;
3570 offset = 0;
3571 }
3572
3573  /* Start generating the millicode call.  */
3574 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs + 1));
3575 indx = 0;
3576
3577  /* This is a call; we clobber blink.  */
3578 XVECEXP (insn, 0, nregs) =
3579 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM));
3580
3581 for (regno = start_reg, indx = 0, off = 0;
3582 regno <= end_reg;
3583 regno++, indx++, off += UNITS_PER_WORD)
3584 {
3585 reg = gen_rtx_REG (SImode, regno);
3586 mem = gen_frame_mem (SImode, plus_constant (Pmode,
3587 stack_pointer_rtx,
3588 off));
3589 XVECEXP (insn, 0, indx) = gen_rtx_SET (mem, reg);
3590 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx)) = 1;
ce9dbf20 3591 gmask = gmask & ~(1ULL << regno);
3592 }
3593 insn = frame_insn (insn);
3594
3595 /* Add DWARF info. */
3596 for (regno = start_reg, off = 0;
3597 regno <= end_reg;
3598 regno++, off += UNITS_PER_WORD)
3599 {
3600 reg = gen_rtx_REG (SImode, regno);
3601 mem = gen_rtx_MEM (SImode, plus_constant (Pmode,
3602 stack_pointer_rtx, off));
3603 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (mem, reg));
3604
3605 }
3606
3607  /* In the case of a millicode thunk, we need to restore the
3608     clobbered blink register.  */
3609 if (arc_must_save_return_addr (cfun))
3610 {
3611 emit_insn (gen_rtx_SET (gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM),
3612 gen_rtx_MEM (Pmode,
3613 plus_constant (Pmode,
3614 stack_pointer_rtx,
3615 reg_size))));
3616 }
3617
3618 /* Save remaining registers using st instructions. */
ce9dbf20 3619 for (regno = 0; regno <= GMASK_LEN; regno++)
90b48013 3620 {
ce9dbf20 3621 if ((gmask & (1ULL << regno)) == 0)
3622 continue;
3623
3624 reg = gen_rtx_REG (SImode, regno);
3625 mem = gen_frame_mem (SImode, plus_constant (Pmode,
3626 stack_pointer_rtx,
3627 off));
3628 frame_move_inc (mem, reg, stack_pointer_rtx, 0);
3629 frame_allocated += UNITS_PER_WORD;
3630 off += UNITS_PER_WORD;
3631 }
3632
3633  /* Save the frame pointer if needed.  First save the FP on the
3634     stack, if not autosaved.  Unfortunately, I cannot add it to gmask
3635     and use the above loop to save fp because our ABI states fp goes
3636     after all registers are saved.  */
3637 if (save_fp)
47d8cb23 3638 frame_allocated += frame_save_reg (hard_frame_pointer_rtx, offset);
3639
3640 /* Emit mov fp,sp. */
3641 if (arc_frame_pointer_needed ())
47d8cb23 3642 frame_move (hard_frame_pointer_rtx, stack_pointer_rtx);
3643
3644 return frame_allocated;
3645}
3646
3647/* Like the previous function, but restores.  */
3648
3649static int
ce9dbf20 3650arc_restore_callee_milli (uint64_t gmask,
3651 bool restore_blink,
3652 bool restore_fp,
3653 bool return_p,
3654 HOST_WIDE_INT offset)
3655{
3656 int start_reg = 13;
3657 int end_reg = 25;
3658 int regno, indx, off, nregs;
3659 rtx insn, reg, mem;
3660 int frame_allocated = 0;
3661
ce9dbf20 3662 for (regno = start_reg; regno <= end_reg && (gmask & (1ULL << regno));)
3663 regno++;
3664
3665 end_reg = regno - 1;
3666 nregs = end_reg - start_reg + 1;
3667 gcc_assert (end_reg > 14);
3668
3669  /* Emit mov sp,fp.  */
3670 if (arc_frame_pointer_needed () && offset)
3671 {
47d8cb23 3672 frame_move (stack_pointer_rtx, hard_frame_pointer_rtx);
3673 frame_allocated = offset;
3674 offset = 0;
3675 }
3676
3677 if (restore_fp)
47d8cb23 3678 frame_allocated += frame_restore_reg (hard_frame_pointer_rtx, 0);
3679
3680 if (offset)
3681 {
3682      /* No fp involved; hence we need to adjust the sp via an
3683	 add.  */
3684 frame_stack_add (offset);
3685 frame_allocated += offset;
3686 offset = 0;
3687 }
3688
3689  /* Start generating the millicode call.  */
3690 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc ((return_p ? 1 : 0)
3691 + nregs + 1));
3692 indx = 0;
3693
3694 if (return_p)
3695 {
3696      /* Sibling call: the blink is restored with the help of the
3697	 value held in r12.  */
3698 reg = gen_rtx_REG (Pmode, 12);
3699 XVECEXP (insn, 0, indx++) = ret_rtx;
3700 XVECEXP (insn, 0, indx++) =
3701 gen_rtx_SET (stack_pointer_rtx,
3702 gen_rtx_PLUS (Pmode, stack_pointer_rtx, reg));
3703 frame_allocated += UNITS_PER_WORD;
3704 }
3705 else
3706 {
3707      /* This is a call; we clobber blink.  */
3708 XVECEXP (insn, 0, nregs) =
3709 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM));
3710 }
3711
3712 for (regno = start_reg, off = 0;
3713 regno <= end_reg;
3714 regno++, indx++, off += UNITS_PER_WORD)
3715 {
3716 reg = gen_rtx_REG (SImode, regno);
3717 mem = gen_frame_mem (SImode, plus_constant (Pmode,
3718 stack_pointer_rtx,
3719 off));
3720 XVECEXP (insn, 0, indx) = gen_rtx_SET (reg, mem);
3721 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, indx)) = 1;
ce9dbf20 3722 gmask = gmask & ~(1ULL << regno);
3723 }
3724
3725 /* Restore remaining registers using LD instructions. */
ce9dbf20 3726 for (regno = 0; regno <= GMASK_LEN; regno++)
90b48013 3727 {
ce9dbf20 3728 if ((gmask & (1ULL << regno)) == 0)
3729 continue;
3730
3731 reg = gen_rtx_REG (SImode, regno);
3732 mem = gen_frame_mem (SImode, plus_constant (Pmode,
3733 stack_pointer_rtx,
3734 off));
3735 rtx tmp = frame_move_inc (reg, mem, stack_pointer_rtx, 0);
3736 add_reg_note (tmp, REG_CFA_RESTORE, reg);
3737 off += UNITS_PER_WORD;
3738 }
3739
3740 /* Emit millicode call. */
3741 if (return_p)
3742 {
3743 reg = gen_rtx_REG (Pmode, 12);
3744 frame_insn (gen_rtx_SET (reg, GEN_INT (off)));
3745 frame_allocated += off;
3746 insn = emit_jump_insn (insn);
3747 RTX_FRAME_RELATED_P (insn) = 1;
3748 }
3749 else
3750 insn = frame_insn (insn);
3751
3752 /* Add DWARF info. */
544a4843 3753 for (regno = start_reg; regno <= end_reg; regno++)
3754 {
3755 reg = gen_rtx_REG (SImode, regno);
3756 add_reg_note (insn, REG_CFA_RESTORE, reg);
3757
3758 }
3759
3760 if (restore_blink && !return_p)
3761 {
3762 reg = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
3763 mem = gen_frame_mem (Pmode, plus_constant (Pmode, stack_pointer_rtx,
3764 off));
3765 insn = frame_insn (gen_rtx_SET (reg, mem));
3766 add_reg_note (insn, REG_CFA_RESTORE, reg);
3767 }
3768
3769 return frame_allocated;
3770}
3771
3772/* Set up the stack and frame pointer (if desired) for the function. */
3773
3774void
3775arc_expand_prologue (void)
3776{
6fe5e235 3777 int size;
ce9dbf20 3778 uint64_t gmask = cfun->machine->frame_info.gmask;
90b48013 3779 struct arc_frame_info *frame = &cfun->machine->frame_info;
526b7aee 3780 unsigned int frame_size_to_allocate;
526b7aee 3781 int first_offset = 0;
1825c61e 3782 unsigned int fn_type = arc_compute_function_type (cfun);
3783 bool save_blink = false;
3784 bool save_fp = false;
82cd9a96 3785 bool emit_move = false;
3786
3787  /* Naked functions don't have a prologue.  */
3788 if (ARC_NAKED_P (fn_type))
3789 {
3790 if (flag_stack_usage_info)
3791 current_function_static_stack_size = 0;
3792 return;
3793 }
526b7aee 3794
3795 /* Compute total frame size. */
3796 size = arc_compute_frame_size ();
3797
3798 if (flag_stack_usage_info)
3799 current_function_static_stack_size = size;
3800
3801 /* Keep track of frame size to be allocated. */
3802 frame_size_to_allocate = size;
3803
3804 /* These cases shouldn't happen. Catch them now. */
3805 gcc_assert (!(size == 0 && gmask));
3806
3807 /* Allocate space for register arguments if this is a variadic function. */
3808 if (frame->pretend_size != 0)
3809 first_offset = -frame->pretend_size;
526b7aee 3810
3811  /* An IRQ using the automatic save mechanism will save the
3812     registers before anything we do.  */
3813 if (ARC_AUTO_IRQ_P (fn_type)
3814 && !ARC_FAST_INTERRUPT_P (fn_type))
41453183 3815 {
90b48013 3816 frame_stack_add (first_offset);
526b7aee 3817 first_offset = 0;
90b48013 3818 arc_dwarf_emit_irq_save_regs ();
3819 }
3820
3821 save_blink = arc_must_save_return_addr (cfun)
3822 && !ARC_AUTOBLINK_IRQ_P (fn_type);
3823 save_fp = arc_frame_pointer_needed () && !ARC_AUTOFP_IRQ_P (fn_type)
3824 && !ARC_INTERRUPT_P (fn_type);
3825 emit_move = arc_frame_pointer_needed () && !ARC_INTERRUPT_P (fn_type);
3826
3827 /* Use enter/leave only for non-interrupt functions. */
3828 if (TARGET_CODE_DENSITY
3829 && TARGET_CODE_DENSITY_FRAME
3830 && !ARC_AUTOFP_IRQ_P (fn_type)
3831 && !ARC_AUTOBLINK_IRQ_P (fn_type)
3832 && !ARC_INTERRUPT_P (fn_type)
3833 && arc_enter_leave_p (gmask))
3834 frame_size_to_allocate -= arc_save_callee_enter (gmask, save_blink,
3835 save_fp,
3836 first_offset);
3837 else if (frame->millicode_end_reg > 14)
3838 frame_size_to_allocate -= arc_save_callee_milli (gmask, save_blink,
3839 save_fp,
3840 first_offset,
3841 frame->reg_size);
3842 else
3843 frame_size_to_allocate -= arc_save_callee_saves (gmask, save_blink, save_fp,
3844 first_offset, emit_move);
3845
3846 /* Check if we need to save the ZOL machinery. */
3847 if (arc_lpcwidth != 0 && arc_must_save_register (LP_COUNT, cfun, true))
3848 {
3849 rtx reg0 = gen_rtx_REG (SImode, R0_REG);
3850 emit_insn (gen_rtx_SET (reg0,
3851 gen_rtx_UNSPEC_VOLATILE
3852 (Pmode, gen_rtvec (1, GEN_INT (AUX_LP_START)),
3853 VUNSPEC_ARC_LR)));
3854 frame_size_to_allocate -= push_reg (reg0);
3855 emit_insn (gen_rtx_SET (reg0,
3856 gen_rtx_UNSPEC_VOLATILE
3857 (Pmode, gen_rtvec (1, GEN_INT (AUX_LP_END)),
3858 VUNSPEC_ARC_LR)));
3859 frame_size_to_allocate -= push_reg (reg0);
3860 emit_move_insn (reg0, gen_rtx_REG (SImode, LP_COUNT));
3861 frame_size_to_allocate -= push_reg (reg0);
3862 }
3863
3864 /* Save AUX regs used by FPX machinery. */
3865 if (arc_must_save_register (TARGET_BIG_ENDIAN ? R41_REG : R40_REG,
3866 cfun, TARGET_DPFP))
3867 {
3868 rtx reg0 = gen_rtx_REG (SImode, R0_REG);
3869 int i;
3870
3871 for (i = 0; i < 4; i++)
3872 {
3873 emit_insn (gen_rtx_SET (reg0,
3874 gen_rtx_UNSPEC_VOLATILE
3875 (Pmode, gen_rtvec (1, GEN_INT (AUX_DPFP_START
3876 + i)),
3877 VUNSPEC_ARC_LR)));
3878 frame_size_to_allocate -= push_reg (reg0);
3879 }
3880 }
3881
3882  /* Save ARC600's MUL64 registers.  */
3883 if (arc_must_save_register (R58_REG, cfun, true))
3884 frame_size_to_allocate -= arc_save_callee_saves (3ULL << 58,
3885 false, false, 0, false);
3886
3887 if (arc_frame_pointer_needed () && ARC_INTERRUPT_P (fn_type))
3888 {
3889 /* Just save fp at the end of the saving context. */
3890 frame_size_to_allocate -=
3891 arc_save_callee_saves (0, false, !ARC_AUTOFP_IRQ_P (fn_type), 0, true);
3892 }
526b7aee 3893
3894 /* Allocate the stack frame. */
3895 if (frame_size_to_allocate > 0)
3896 frame_stack_add ((HOST_WIDE_INT) 0 - frame_size_to_allocate);
3897
3898 /* Emit a blockage to avoid delay slot scheduling. */
3899 emit_insn (gen_blockage ());
3900}
3901
3902/* Return the register number of the register holding the return address
3903 for a function of type TYPE. */
3904
3905static int
3906arc_return_address_register (unsigned int fn_type)
3907{
3908 int regno = 0;
3909
3910 if (ARC_INTERRUPT_P (fn_type))
3911 {
3912 if ((fn_type & (ARC_FUNCTION_ILINK1 | ARC_FUNCTION_FIRQ)) != 0)
3913 regno = ILINK1_REG;
3914 else if ((fn_type & ARC_FUNCTION_ILINK2) != 0)
3915 regno = ILINK2_REG;
3916 else
3917 gcc_unreachable ();
3918 }
3919 else if (ARC_NORMAL_P (fn_type) || ARC_NAKED_P (fn_type))
3920 regno = RETURN_ADDR_REGNUM;
3921
3922 gcc_assert (regno != 0);
3923 return regno;
3924}
3925
3926/* Do any necessary cleanup after a function to restore stack, frame,
3927 and regs. */
3928
3929void
3930arc_expand_epilogue (int sibcall_p)
3931{
6fe5e235 3932 int size;
1825c61e 3933 unsigned int fn_type = arc_compute_function_type (cfun);
3934 unsigned int size_to_deallocate;
3935 int restored;
3936 int can_trust_sp_p = !cfun->calls_alloca;
3937 int first_offset;
3938 bool restore_fp = arc_frame_pointer_needed () && !ARC_AUTOFP_IRQ_P (fn_type);
3939 bool restore_blink = arc_must_save_return_addr (cfun)
3940 && !ARC_AUTOBLINK_IRQ_P (fn_type);
ce9dbf20 3941 uint64_t gmask = cfun->machine->frame_info.gmask;
3942 bool return_p = !sibcall_p && fn_type == ARC_FUNCTION_NORMAL
3943 && !cfun->machine->frame_info.pretend_size;
3944 struct arc_frame_info *frame = &cfun->machine->frame_info;
3945
3946  /* Naked functions don't have an epilogue.  */
3947 if (ARC_NAKED_P (fn_type))
3948 return;
3949
90b48013 3950 size = arc_compute_frame_size ();
5719867d 3951 size_to_deallocate = size;
526b7aee 3952
3953 first_offset = size - (frame->pretend_size + frame->reg_size
3954 + frame->extra_size);
526b7aee 3955
5719867d 3956 if (!can_trust_sp_p)
41453183 3957 gcc_assert (arc_frame_pointer_needed ());
526b7aee 3958
3959 /* Emit a blockage to avoid/flush all pending sp operations. */
3960 if (size)
3961 emit_insn (gen_blockage ());
3962
3963 if (ARC_INTERRUPT_P (fn_type) && restore_fp)
3964 {
3965 /* We need to restore FP before any SP operation in an
3966 interrupt. */
3967 size_to_deallocate -= arc_restore_callee_saves (0, false,
3968 restore_fp,
3969 first_offset,
3970 size_to_deallocate);
3971 restore_fp = false;
3972 first_offset = 0;
3973 }
3974
3975  /* Restore ARC600's MUL64 registers.  */
3976 if (arc_must_save_register (R58_REG, cfun, true))
3977 {
3978 rtx insn;
3979 rtx reg0 = gen_rtx_REG (SImode, R0_REG);
3980 rtx reg1 = gen_rtx_REG (SImode, R1_REG);
3981 size_to_deallocate -= pop_reg (reg0);
3982 size_to_deallocate -= pop_reg (reg1);
3983
3984 insn = emit_insn (gen_mulu64 (reg0, const1_rtx));
3985 add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (SImode, R58_REG));
3986 RTX_FRAME_RELATED_P (insn) = 1;
3987 emit_insn (gen_arc600_stall ());
3988 insn = emit_insn (gen_rtx_UNSPEC_VOLATILE
3989 (VOIDmode, gen_rtvec (2, reg1, GEN_INT (AUX_MULHI)),
3990 VUNSPEC_ARC_SR));
3991 add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (SImode, R59_REG));
3992 RTX_FRAME_RELATED_P (insn) = 1;
3993 }
3994
3995 /* Restore AUX-regs used by FPX machinery. */
3996 if (arc_must_save_register (TARGET_BIG_ENDIAN ? R41_REG : R40_REG,
3997 cfun, TARGET_DPFP))
3998 {
3999 rtx reg0 = gen_rtx_REG (SImode, R0_REG);
4000 int i;
4001
4002 for (i = 0; i < 4; i++)
4003 {
4004 size_to_deallocate -= pop_reg (reg0);
4005 emit_insn (gen_rtx_UNSPEC_VOLATILE
4006 (VOIDmode, gen_rtvec (2, reg0, GEN_INT (AUX_DPFP_START
4007 + i)),
4008 VUNSPEC_ARC_SR));
4009 }
4010 }
4011
4012 /* Check if we need to restore the ZOL machinery. */
4013  if (arc_lpcwidth != 0 && arc_must_save_register (LP_COUNT, cfun, true))
4014 {
4015 rtx reg0 = gen_rtx_REG (SImode, R0_REG);
4016
4017 size_to_deallocate -= pop_reg (reg0);
4018 emit_move_insn (gen_rtx_REG (SImode, LP_COUNT), reg0);
4019
4020 size_to_deallocate -= pop_reg (reg0);
4021 emit_insn (gen_rtx_UNSPEC_VOLATILE
4022 (VOIDmode, gen_rtvec (2, reg0, GEN_INT (AUX_LP_END)),
4023 VUNSPEC_ARC_SR));
4024
4025 size_to_deallocate -= pop_reg (reg0);
4026 emit_insn (gen_rtx_UNSPEC_VOLATILE
4027 (VOIDmode, gen_rtvec (2, reg0, GEN_INT (AUX_LP_START)),
4028 VUNSPEC_ARC_SR));
4029 }
4030
4031 if (TARGET_CODE_DENSITY
4032 && TARGET_CODE_DENSITY_FRAME
4033 && !ARC_AUTOFP_IRQ_P (fn_type)
4034 && !ARC_AUTOBLINK_IRQ_P (fn_type)
4035 && !ARC_INTERRUPT_P (fn_type)
4036 && arc_enter_leave_p (gmask))
4037 {
4038 /* Using leave instruction. */
4039 size_to_deallocate -= arc_restore_callee_leave (gmask, restore_blink,
4040 restore_fp,
4041 return_p,
4042 first_offset);
4043 if (return_p)
67a96300 4044 {
4045 gcc_assert (size_to_deallocate == 0);
4046 return;
67a96300 4047 }
5719867d 4048 }
90b48013 4049 else if (frame->millicode_end_reg > 14)
5719867d 4050 {
4051 /* Using millicode calls. */
4052 size_to_deallocate -= arc_restore_callee_milli (gmask, restore_blink,
4053 restore_fp,
4054 return_p,
4055 first_offset);
4056 if (return_p)
4057 {
4058 gcc_assert (size_to_deallocate == 0);
4059 return;
4060 }
5719867d 4061 }
4062 else
4063 size_to_deallocate -= arc_restore_callee_saves (gmask, restore_blink,
4064 restore_fp,
4065 first_offset,
4066 size_to_deallocate);
526b7aee 4067
4068 /* Keep track of how much of the stack pointer we've restored. It
4069 makes the following a lot more readable. */
5719867d 4070 restored = size - size_to_deallocate;
526b7aee 4071
4072 if (size > restored)
4073 frame_stack_add (size - restored);
67a96300 4074
4075 /* For frames that use __builtin_eh_return, the register defined by
4076 EH_RETURN_STACKADJ_RTX is set to 0 for all standard return paths.
4077 On eh_return paths however, the register is set to the value that
4078 should be added to the stack pointer in order to restore the
4079 correct stack pointer for the exception handling frame.
4080
4081     For ARC we are going to use r2 for EH_RETURN_STACKADJ_RTX; add
4082     this onto the stack pointer for eh_return frames.  */
4083 if (crtl->calls_eh_return)
4084 emit_insn (gen_add2_insn (stack_pointer_rtx,
4085 EH_RETURN_STACKADJ_RTX));
4086
5719867d 4087 /* Emit the return instruction. */
4088 if (ARC_INTERRUPT_P (fn_type))
4089 {
4090 rtx ra = gen_rtx_REG (Pmode, arc_return_address_register (fn_type));
4091
4092 if (TARGET_V2)
4093 emit_jump_insn (gen_rtie ());
4094 else if (TARGET_ARC700)
4095 emit_jump_insn (gen_rtie ());
4096 else
4097 emit_jump_insn (gen_arc600_rtie (ra));
4098 }
4099 else if (sibcall_p == FALSE)
5719867d 4100 emit_jump_insn (gen_simple_return ());
4101}
4102
4103/* Helper for {push/pop}_multi_operand: check if rtx OP is a suitable
4104   construct to match either the enter or the leave instruction.  Which
4105   one is selected by the PUSH_P argument.  */
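
/* A minimal sketch (assumed shape, mirroring how the enter/leave
   PARALLELs are built by the save/restore routines above) of an OP
   accepted with PUSH_P == false, for a leave that returns and
   restores blink, r13-r14 and fp; <frame> and <o0>..<o3> stand for
   the usual descending word offsets:

     (parallel [(return)
                (set (reg sp) (plus (reg sp) (const_int <frame>)))
                (set (reg blink) (mem (plus (reg sp) (const_int <o0>))))
                (set (reg r13)   (mem (plus (reg sp) (const_int <o1>))))
                (set (reg r14)   (mem (plus (reg sp) (const_int <o2>))))
                (set (reg fp)    (mem (plus (reg sp) (const_int <o3>))))])

   For PUSH_P == true the sets go the other way around (mem <- reg)
   and there is no leading (return).  */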
4106
4107bool
4108arc_check_multi (rtx op, bool push_p)
4109{
4110 HOST_WIDE_INT len = XVECLEN (op, 0);
4111 unsigned int regno, i, start;
4112 unsigned int memp = push_p ? 0 : 1;
4113 rtx elt;
4114
4115 if (len <= 1)
4116 return false;
4117
4118 start = 1;
4119 elt = XVECEXP (op, 0, 0);
4120 if (!push_p && GET_CODE (elt) == RETURN)
4121 start = 2;
4122
4123 for (i = start, regno = ENTER_LEAVE_START_REG; i < len; i++, regno++)
4124 {
4125 rtx elt = XVECEXP (op, 0, i);
4126 rtx reg, mem, addr;
4127
4128 if (GET_CODE (elt) != SET)
4129 return false;
4130 mem = XEXP (elt, memp);
4131 reg = XEXP (elt, 1 - memp);
4132
4133 if (!REG_P (reg)
4134 || !MEM_P (mem))
4135 return false;
4136
4137 /* Check for blink. */
4138 if (REGNO (reg) == RETURN_ADDR_REGNUM
4139 && i == start)
4140 regno = 12;
47d8cb23 4141 else if (REGNO (reg) == HARD_FRAME_POINTER_REGNUM)
4142 ++i;
4143 else if (REGNO (reg) != regno)
4144 return false;
4145
4146 addr = XEXP (mem, 0);
4147 if (GET_CODE (addr) == PLUS)
4148 {
4149 if (!rtx_equal_p (stack_pointer_rtx, XEXP (addr, 0))
4150 || !CONST_INT_P (XEXP (addr, 1)))
4151 return false;
4152 }
4153 else
4154 {
4155 if (!rtx_equal_p (stack_pointer_rtx, addr))
4156 return false;
4157 }
4158 }
4159 return true;
4160}
4161
4162/* Return rtx for the location of the return address on the stack,
4163 suitable for use in __builtin_eh_return. The new return address
4164 will be written to this location in order to redirect the return to
4165 the exception handler. Our ABI says the blink is pushed first on
4166   the stack, followed by an unknown number of register saves, and finally
4167 by fp. Hence we cannot use the EH_RETURN_ADDRESS macro as the
4168 stack is not finalized. */
526b7aee 4169
4170void
4171arc_eh_return_address_location (rtx source)
4172{
4173 rtx mem;
4174 int offset;
4175 struct arc_frame_info *afi;
4176
4177 arc_compute_frame_size ();
4178 afi = &cfun->machine->frame_info;
4179
4180 gcc_assert (crtl->calls_eh_return);
4181 gcc_assert (afi->save_return_addr);
4182 gcc_assert (afi->extra_size >= 4);
4183
4184 /* The '-4' removes the size of the return address, which is
4185 included in the 'extra_size' field. */
4186 offset = afi->reg_size + afi->extra_size - 4;
4187 mem = gen_frame_mem (Pmode,
47d8cb23 4188 plus_constant (Pmode, hard_frame_pointer_rtx, offset));
4189
4190  /* The following should not be needed, and is really a hack.  The
4191 issue being worked around here is that the DSE (Dead Store
4192 Elimination) pass will remove this write to the stack as it sees
4193 a single store and no corresponding read. The read however
4194 occurs in the epilogue code, which is not added into the function
4195 rtl until a later pass. So, at the time of DSE, the decision to
4196 remove this store seems perfectly sensible. Marking the memory
4197 address as volatile obviously has the effect of preventing DSE
4198 from removing the store. */
4199 MEM_VOLATILE_P (mem) = true;
4200 emit_move_insn (mem, source);
4201}
4202
4203/* PIC */
4204
4205/* Helper to generate unspec constant. */
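/* For instance (illustrative, assuming Pmode is SImode here),
   arc_unspec_offset (sym, UNSPEC_TLS_GD) yields the rtx
   (const:SI (unspec:SI [sym] UNSPEC_TLS_GD)).  */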
4206
4207static rtx
4208arc_unspec_offset (rtx loc, int unspec)
4209{
4210 return gen_rtx_CONST (Pmode, gen_rtx_UNSPEC (Pmode, gen_rtvec (1, loc),
4211 unspec));
4212}
4213
4214/* !TARGET_BARREL_SHIFTER support. */
4215/* Emit a shift insn to set OP0 to OP1 shifted by OP2; CODE specifies what
4216 kind of shift. */
4217
4218void
4219emit_shift (enum rtx_code code, rtx op0, rtx op1, rtx op2)
4220{
4221 rtx shift = gen_rtx_fmt_ee (code, SImode, op1, op2);
4222 rtx pat
4223 = ((shift4_operator (shift, SImode) ? gen_shift_si3 : gen_shift_si3_loop)
4224 (op0, op1, op2, shift));
4225 emit_insn (pat);
4226}
4227
4228/* Output the assembler code for doing a shift.
4229 We go to a bit of trouble to generate efficient code as the ARC601 only has
4230 single bit shifts. This is taken from the h8300 port. We only have one
4231 mode of shifting and can't access individual bytes like the h8300 can, so
4232 this is greatly simplified (at the expense of not generating hyper-
4233 efficient code).
4234
4235 This function is not used if the variable shift insns are present. */
4236
4237/* FIXME: This probably can be done using a define_split in arc.md.
4238 Alternately, generate rtx rather than output instructions. */
4239
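/* A worked example (illustrative, not from the sources) of the add3
   strategy used below for short constant left shifts: with the
   scratch register %4 zeroed, "add3 %0,%4,%1" computes %1 << 3 and
   "add2 %0,%4,%1" computes %1 << 2, so a shift left by 8 comes out as

       mov  %4,0
       add3 %0,%4,%1   ; %0 = %1 << 3
       add3 %0,%4,%0   ; %0 = %1 << 6
       add2 %0,%4,%0   ; %0 = %1 << 8  */
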
4240const char *
4241output_shift (rtx *operands)
4242{
4243 /* static int loopend_lab;*/
4244 rtx shift = operands[3];
ef4bddc2 4245 machine_mode mode = GET_MODE (shift);
4246 enum rtx_code code = GET_CODE (shift);
4247 const char *shift_one;
4248
4249 gcc_assert (mode == SImode);
4250
4251 switch (code)
4252 {
4253 case ASHIFT: shift_one = "add %0,%1,%1"; break;
4254 case ASHIFTRT: shift_one = "asr %0,%1"; break;
4255 case LSHIFTRT: shift_one = "lsr %0,%1"; break;
4256 default: gcc_unreachable ();
4257 }
4258
4259 if (GET_CODE (operands[2]) != CONST_INT)
4260 {
4261 output_asm_insn ("and.f lp_count,%2, 0x1f", operands);
4262 goto shiftloop;
4263 }
4264 else
4265 {
4266 int n;
4267
4268 n = INTVAL (operands[2]);
4269
4270 /* Only consider the lower 5 bits of the shift count. */
4271 n = n & 0x1f;
4272
4273 /* First see if we can do them inline. */
4274 /* ??? We could get better scheduling & shorter code (using short insns)
4275 by using splitters. Alas, that'd be even more verbose. */
4276 if (code == ASHIFT && n <= 9 && n > 2
4277 && dest_reg_operand (operands[4], SImode))
4278 {
4279 output_asm_insn ("mov %4,0\n\tadd3 %0,%4,%1", operands);
4280	  for (n -= 3; n >= 3; n -= 3)
4281 output_asm_insn ("add3 %0,%4,%0", operands);
4282 if (n == 2)
4283 output_asm_insn ("add2 %0,%4,%0", operands);
4284 else if (n)
4285 output_asm_insn ("add %0,%0,%0", operands);
4286 }
4287 else if (n <= 4)
4288 {
4289 while (--n >= 0)
4290 {
4291 output_asm_insn (shift_one, operands);
4292 operands[1] = operands[0];
4293 }
4294 }
4295 /* See if we can use a rotate/and. */
4296 else if (n == BITS_PER_WORD - 1)
4297 {
4298 switch (code)
4299 {
4300 case ASHIFT :
4301 output_asm_insn ("and %0,%1,1\n\tror %0,%0", operands);
4302 break;
4303 case ASHIFTRT :
4304 /* The ARC doesn't have a rol insn. Use something else. */
4305 output_asm_insn ("add.f 0,%1,%1\n\tsbc %0,%0,%0", operands);
4306 break;
4307 case LSHIFTRT :
4308 /* The ARC doesn't have a rol insn. Use something else. */
4309 output_asm_insn ("add.f 0,%1,%1\n\trlc %0,0", operands);
4310 break;
4311 default:
4312 break;
4313 }
4314 }
4315 else if (n == BITS_PER_WORD - 2 && dest_reg_operand (operands[4], SImode))
4316 {
4317 switch (code)
4318 {
4319 case ASHIFT :
4320 output_asm_insn ("and %0,%1,3\n\tror %0,%0\n\tror %0,%0", operands);
4321 break;
4322 case ASHIFTRT :
4323#if 1 /* Need some scheduling comparisons. */
4324 output_asm_insn ("add.f %4,%1,%1\n\tsbc %0,%0,%0\n\t"
4325 "add.f 0,%4,%4\n\trlc %0,%0", operands);
4326#else
4327 output_asm_insn ("add.f %4,%1,%1\n\tbxor %0,%4,31\n\t"
4328 "sbc.f %0,%0,%4\n\trlc %0,%0", operands);
4329#endif
4330 break;
4331 case LSHIFTRT :
4332#if 1
4333 output_asm_insn ("add.f %4,%1,%1\n\trlc %0,0\n\t"
4334 "add.f 0,%4,%4\n\trlc %0,%0", operands);
4335#else
4336 output_asm_insn ("add.f %0,%1,%1\n\trlc.f %0,0\n\t"
4337 "and %0,%0,1\n\trlc %0,%0", operands);
4338#endif
4339 break;
4340 default:
4341 break;
4342 }
4343 }
4344 else if (n == BITS_PER_WORD - 3 && code == ASHIFT)
4345 output_asm_insn ("and %0,%1,7\n\tror %0,%0\n\tror %0,%0\n\tror %0,%0",
4346 operands);
4347 /* Must loop. */
4348 else
4349 {
4350 operands[2] = GEN_INT (n);
4351 output_asm_insn ("mov.f lp_count, %2", operands);
4352
4353 shiftloop:
4354 {
4355 output_asm_insn ("lpnz\t2f", operands);
4356 output_asm_insn (shift_one, operands);
4357 output_asm_insn ("nop", operands);
4358 fprintf (asm_out_file, "2:\t%s end single insn loop\n",
4359 ASM_COMMENT_START);
4360 }
4361 }
4362 }
4363
4364 return "";
4365}
4366\f
4367/* Nested function support. */
4368
4369/* Output assembler code for a block containing the constant parts of
4370 a trampoline, leaving space for variable parts. A trampoline looks
4371 like this:
4372
4373 ld_s r12,[pcl,8]
4374 ld r11,[pcl,12]
4375 j_s [r12]
4376 .word function's address
4377 .word static chain value
4378
4379*/
4380
4381static void
efcc2e30 4382arc_asm_trampoline_template (FILE *f)
526b7aee 4383{
4384 asm_fprintf (f, "\tld_s\t%s,[pcl,8]\n", ARC_TEMP_SCRATCH_REG);
4385 asm_fprintf (f, "\tld\t%s,[pcl,12]\n", reg_names[STATIC_CHAIN_REGNUM]);
4386 asm_fprintf (f, "\tj_s\t[%s]\n", ARC_TEMP_SCRATCH_REG);
4387 assemble_aligned_integer (UNITS_PER_WORD, const0_rtx);
4388 assemble_aligned_integer (UNITS_PER_WORD, const0_rtx);
4389}
4390
4391/* Emit RTL insns to initialize the variable parts of a trampoline.
4392 FNADDR is an RTX for the address of the function's pure code. CXT
4393 is an RTX for the static chain value for the function.
4394
4395 The fastest trampoline to execute for trampolines within +-8KB of CTX
4396 would be:
efcc2e30 4397
526b7aee
SV
4398 add2 r11,pcl,s12
4399 j [limm] 0x20200f80 limm
4400
4401 and that would also be faster to write to the stack by computing
4402 the offset from CTX to TRAMP at compile time. However, it would
4403 really be better to get rid of the high cost of cache invalidation
4404 when generating trampolines, which requires that the code part of
4405 trampolines stays constant, and additionally either making sure
4406 that no executable code but trampolines is on the stack, no icache
4407 entries linger for the area of the stack from when before the stack
4408 was allocated, and allocating trampolines in trampoline-only cache
4409 lines or allocate trampolines fram a special pool of pre-allocated
4410 trampolines. */
4411
4412static void
4413arc_initialize_trampoline (rtx tramp, tree fndecl, rtx cxt)
4414{
4415 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4416
4417 emit_block_move (tramp, assemble_trampoline_template (),
4418 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
4419 emit_move_insn (adjust_address (tramp, SImode, 8), fnaddr);
4420 emit_move_insn (adjust_address (tramp, SImode, 12), cxt);
4421 maybe_emit_call_builtin___clear_cache (XEXP (tramp, 0),
4422 plus_constant (Pmode,
4423 XEXP (tramp, 0),
4424 TRAMPOLINE_SIZE));
4425}
4426
4427/* Add the given function declaration to emit code in JLI section. */
4428
4429static void
4430arc_add_jli_section (rtx pat)
4431{
4432 const char *name;
4433 tree attrs;
4434 arc_jli_section *sec = arc_jli_sections, *new_section;
4435 tree decl = SYMBOL_REF_DECL (pat);
4436
4437 if (!pat)
4438 return;
4439
4440 if (decl)
4441 {
4442      /* For fixed locations do not generate the JLI table entry.  It
4443	 should be provided by the user in an asm file.  */
4444 attrs = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4445 if (lookup_attribute ("jli_fixed", attrs))
4446 return;
4447 }
4448
4449 name = XSTR (pat, 0);
4450
4451 /* Don't insert the same symbol twice. */
4452 while (sec != NULL)
4453 {
4454      if (strcmp (name, sec->name) == 0)
4455 return;
4456 sec = sec->next;
4457 }
4458
4459 /* New name, insert it. */
4460 new_section = (arc_jli_section *) xmalloc (sizeof (arc_jli_section));
4461 gcc_assert (new_section != NULL);
4462 new_section->name = name;
4463 new_section->next = arc_jli_sections;
4464 arc_jli_sections = new_section;
4465}
4466
4467/* This is set briefly to 1 when we output a ".as" address modifier, and then
4468 reset when we output the scaled address. */
4469static int output_scaled = 0;
4470
4471/* Set when we force sdata output. */
4472static int output_sdata = 0;
4473
4474/* Print operand X (an rtx) in assembler syntax to file FILE.
4475 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
4476 For `%' followed by punctuation, CODE is the punctuation and X is null. */
4477/* In final.c:output_asm_insn:
4478 'l' : label
4479 'a' : address
4480 'c' : constant address if CONSTANT_ADDRESS_P
4481 'n' : negative
4482 Here:
4483 'Z': log2(x+1)-1
4484 'z': log2
4485 'M': log2(~x)
4486    'p': bit position of lsb
4487 's': size of bit field
4488 '#': condbranch delay slot suffix
4489 '*': jump delay slot suffix
4490 '?' : nonjump-insn suffix for conditional execution or short instruction
4491 '!' : jump / call suffix for conditional execution or short instruction
4492    '`': fold constant inside unary operator, re-recognize, and emit.
4493 'd'
4494 'D'
4495 'R': Second word
6b55f8c9 4496 'S': JLI instruction
7778a1ad 4497    'j': used by the mov instruction to properly emit JLI-related labels.
4498 'B': Branch comparison operand - suppress sda reference
4499 'H': Most significant word
4500 'L': Least significant word
4501 'A': ASCII decimal representation of floating point value
4502 'U': Load/store update or scaling indicator
4503 'V': cache bypass indicator for volatile
4504 'P'
4505 'F'
4506 '^'
4507 'O': Operator
4508 'o': original symbol - no @ prepending. */
4509
4510void
4511arc_print_operand (FILE *file, rtx x, int code)
4512{
4513 switch (code)
4514 {
4515 case 'Z':
4516 if (GET_CODE (x) == CONST_INT)
4517	fprintf (file, "%d", exact_log2 (INTVAL (x) + 1) - 1);
4518 else
4519 output_operand_lossage ("invalid operand to %%Z code");
4520
4521 return;
4522
4523 case 'z':
4524 if (GET_CODE (x) == CONST_INT)
03301dcc 4525	fprintf (file, "%d", exact_log2 (INTVAL (x) & 0xffffffff));
4526 else
4527 output_operand_lossage ("invalid operand to %%z code");
4528
4529 return;
4530
4531 case 'c':
4532 if (GET_CODE (x) == CONST_INT)
dc56917d 4533 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) );
4534 else
4535 output_operand_lossage ("invalid operands to %%c code");
4536
4537 return;
4538
4539 case 'M':
4540 if (GET_CODE (x) == CONST_INT)
4541	fprintf (file, "%d", exact_log2 (~INTVAL (x)));
4542 else
4543 output_operand_lossage ("invalid operand to %%M code");
4544
4545 return;
4546
4547 case 'p':
4548 if (GET_CODE (x) == CONST_INT)
4549 fprintf (file, "%d", exact_log2 (INTVAL (x) & -INTVAL (x)));
4550 else
4551 output_operand_lossage ("invalid operand to %%p code");
4552 return;
4553
4554 case 's':
4555 if (GET_CODE (x) == CONST_INT)
4556 {
4557 HOST_WIDE_INT i = INTVAL (x);
4558 HOST_WIDE_INT s = exact_log2 (i & -i);
4559 fprintf (file, "%d", exact_log2 (((0xffffffffUL & i) >> s) + 1));
4560 }
4561 else
4562 output_operand_lossage ("invalid operand to %%s code");
4563 return;
4564
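      /* A worked example (illustrative) for the two codes above: with
	 x == 0x30, %p prints 4, since 0x30 & -0x30 is 0x10 and
	 exact_log2 (0x10) == 4; %s prints 2, since the field shifted
	 down by 4 is 0x3 and exact_log2 (0x3 + 1) == 2 -- i.e. a
	 2-bit field at bit position 4.  */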
4565 case '#' :
4566 /* Conditional branches depending on condition codes.
4567 Note that this is only for branches that were known to depend on
4568 condition codes before delay slot scheduling;
4569 out-of-range brcc / bbit expansions should use '*'.
4570 This distinction is important because of the different
4571 allowable delay slot insns and the output of the delay suffix
4572 for TARGET_AT_DBR_COND_EXEC. */
4573 case '*' :
4574 /* Unconditional branches / branches not depending on condition codes.
4575 This could also be a CALL_INSN.
4576 Output the appropriate delay slot suffix. */
84034c69 4577 if (final_sequence && final_sequence->len () != 1)
526b7aee 4578 {
4579 rtx_insn *jump = final_sequence->insn (0);
4580 rtx_insn *delay = final_sequence->insn (1);
4581
4582 /* For TARGET_PAD_RETURN we might have grabbed the delay insn. */
4654c0cf 4583 if (delay->deleted ())
4584 return;
4585 if (JUMP_P (jump) && INSN_ANNULLED_BRANCH_P (jump))
4586 fputs (INSN_FROM_TARGET_P (delay) ? ".d"
4587 : TARGET_AT_DBR_CONDEXEC && code == '#' ? ".d"
4588 : get_attr_type (jump) == TYPE_RETURN && code == '#' ? ""
4589 : ".nd",
4590 file);
4591 else
4592 fputs (".d", file);
4593 }
4594 return;
4595 case '?' : /* with leading "." */
4596 case '!' : /* without leading "." */
4597 /* This insn can be conditionally executed. See if the ccfsm machinery
4598 says it should be conditionalized.
4599 If it shouldn't, we'll check the compact attribute if this insn
4600 has a short variant, which may be used depending on code size and
4601 alignment considerations. */
4602 if (current_insn_predicate)
4603 arc_ccfsm_current.cc
4604 = get_arc_condition_code (current_insn_predicate);
4605 if (ARC_CCFSM_COND_EXEC_P (&arc_ccfsm_current))
4606 {
4607 /* Is this insn in a delay slot sequence? */
4608 if (!final_sequence || XVECLEN (final_sequence, 0) < 2
4609 || current_insn_predicate
4610 || CALL_P (final_sequence->insn (0))
4611 || simplejump_p (final_sequence->insn (0)))
4612 {
4613 /* This insn isn't in a delay slot sequence, or conditionalized
4614 independently of its position in a delay slot. */
4615 fprintf (file, "%s%s",
4616 code == '?' ? "." : "",
4617 arc_condition_codes[arc_ccfsm_current.cc]);
4618 /* If this is a jump, there are still short variants. However,
4619 only beq_s / bne_s have the same offset range as b_s,
4620 and the only short conditional returns are jeq_s and jne_s. */
4621 if (code == '!'
4622 && (arc_ccfsm_current.cc == ARC_CC_EQ
4623 || arc_ccfsm_current.cc == ARC_CC_NE
4624 || 0 /* FIXME: check if branch in 7 bit range. */))
4625 output_short_suffix (file);
4626 }
4627 else if (code == '!') /* Jump with delay slot. */
4628 fputs (arc_condition_codes[arc_ccfsm_current.cc], file);
4629 else /* An Instruction in a delay slot of a jump or call. */
4630 {
4631 rtx jump = XVECEXP (final_sequence, 0, 0);
4632 rtx insn = XVECEXP (final_sequence, 0, 1);
4633
4634 /* If the insn is annulled and is from the target path, we need
4635 to inverse the condition test. */
4636 if (JUMP_P (jump) && INSN_ANNULLED_BRANCH_P (jump))
4637 {
4638 if (INSN_FROM_TARGET_P (insn))
4639 fprintf (file, "%s%s",
4640 code == '?' ? "." : "",
4641 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current.cc)]);
4642 else
4643 fprintf (file, "%s%s",
4644 code == '?' ? "." : "",
4645 arc_condition_codes[arc_ccfsm_current.cc]);
4646 if (arc_ccfsm_current.state == 5)
4647 arc_ccfsm_current.state = 0;
4648 }
4649 else
4650 /* This insn is executed for either path, so don't
4651 conditionalize it at all. */
4652 output_short_suffix (file);
4653
4654 }
4655 }
4656 else
4657 output_short_suffix (file);
4658 return;
4659    case '`':
4660 /* FIXME: fold constant inside unary operator, re-recognize, and emit. */
4661 gcc_unreachable ();
4662 case 'd' :
4663 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
4664 return;
4665 case 'D' :
4666 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
4667 (get_arc_condition_code (x))],
4668 file);
4669 return;
4670 case 'R' :
4671 /* Write second word of DImode or DFmode reference,
4672 register or memory. */
4673 if (GET_CODE (x) == REG)
4674 fputs (reg_names[REGNO (x)+1], file);
4675 else if (GET_CODE (x) == MEM)
4676 {
4677 fputc ('[', file);
4678
4679 /* Handle possible auto-increment. For PRE_INC / PRE_DEC /
4680 PRE_MODIFY, we will have handled the first word already;
4681 For POST_INC / POST_DEC / POST_MODIFY, the access to the
4682 first word will be done later. In either case, the access
4683 to the first word will do the modify, and we only have
4684 to add an offset of four here. */
4685 if (GET_CODE (XEXP (x, 0)) == PRE_INC
4686 || GET_CODE (XEXP (x, 0)) == PRE_DEC
4687 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY
4688 || GET_CODE (XEXP (x, 0)) == POST_INC
4689 || GET_CODE (XEXP (x, 0)) == POST_DEC
4690 || GET_CODE (XEXP (x, 0)) == POST_MODIFY)
4691 output_address (VOIDmode,
4692 plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
4693 else if (output_scaled)
4694 {
4695 rtx addr = XEXP (x, 0);
4696 int size = GET_MODE_SIZE (GET_MODE (x));
4697
4698 output_address (VOIDmode,
4699 plus_constant (Pmode, XEXP (addr, 0),
4700 ((INTVAL (XEXP (addr, 1)) + 4)
4701 >> (size == 2 ? 1 : 2))));
4702 output_scaled = 0;
4703 }
4704 else
4705 output_address (VOIDmode,
4706 plus_constant (Pmode, XEXP (x, 0), 4));
4707 fputc (']', file);
4708 }
4709 else
4710 output_operand_lossage ("invalid operand to %%R code");
4711 return;
7778a1ad 4712 case 'j':
526b7aee 4713 case 'S' :
4714 if (GET_CODE (x) == SYMBOL_REF
4715 && arc_is_jli_call_p (x))
4716 {
4717 if (SYMBOL_REF_DECL (x))
4718 {
4719 tree attrs = (TREE_TYPE (SYMBOL_REF_DECL (x)) != error_mark_node
4720 ? TYPE_ATTRIBUTES (TREE_TYPE (SYMBOL_REF_DECL (x)))
4721 : NULL_TREE);
4722 if (lookup_attribute ("jli_fixed", attrs))
4723 {
4724 /* No special treatment for jli_fixed functions. */
4725 if (code == 'j')
4726 break;
dc56917d 4727 fprintf (file, HOST_WIDE_INT_PRINT_DEC "\t; @",
4728 TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attrs))));
4729 assemble_name (file, XSTR (x, 0));
4730 return;
4731 }
4732 }
4733 fprintf (file, "@__jli.");
4734 assemble_name (file, XSTR (x, 0));
4735 if (code == 'j')
4736 arc_add_jli_section (x);
4737 return;
4738 }
4739 if (GET_CODE (x) == SYMBOL_REF
4740 && arc_is_secure_call_p (x))
4741 {
4742 /* No special treatment for secure functions. */
4743	  if (code == 'j')
4744 break;
4745 tree attrs = (TREE_TYPE (SYMBOL_REF_DECL (x)) != error_mark_node
4746 ? TYPE_ATTRIBUTES (TREE_TYPE (SYMBOL_REF_DECL (x)))
4747 : NULL_TREE);
dc56917d 4748 fprintf (file, HOST_WIDE_INT_PRINT_DEC "\t; @",
4749 TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attrs))));
4750 assemble_name (file, XSTR (x, 0));
4751 return;
4752 }
4753 break;
4754 case 'B' /* Branch or other LIMM ref - must not use sda references. */ :
4755 if (CONSTANT_P (x))
4756 {
4757 output_addr_const (file, x);
4758 return;
4759 }
4760 break;
4761 case 'H' :
4762 case 'L' :
4763 if (GET_CODE (x) == REG)
4764 {
4765 /* L = least significant word, H = most significant word. */
4766 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
4767 fputs (reg_names[REGNO (x)], file);
4768 else
4769 fputs (reg_names[REGNO (x)+1], file);
4770 }
4771 else if (GET_CODE (x) == CONST_INT
4772 || GET_CODE (x) == CONST_DOUBLE)
4773 {
8ad9df62 4774 rtx first, second, word;
4775
4776 split_double (x, &first, &second);
4777
4778	  if ((WORDS_BIG_ENDIAN) == 0)
8ad9df62 4779 word = (code == 'L' ? first : second);
526b7aee 4780 else
8ad9df62 4781 word = (code == 'L' ? second : first);
526b7aee 4782
4783 fprintf (file, "0x%08" PRIx32, ((uint32_t) INTVAL (word)));
4784 }
4785 else
4786 output_operand_lossage ("invalid operand to %%H/%%L code");
4787 return;
4788 case 'A' :
4789 {
4790 char str[30];
4791
4792 gcc_assert (GET_CODE (x) == CONST_DOUBLE
4793 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);
4794
4795 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
4796 fprintf (file, "%s", str);
4797 return;
4798 }
4799 case 'U' :
4800 /* Output a load/store with update indicator if appropriate. */
4801 if (GET_CODE (x) == MEM)
4802 {
4803 rtx addr = XEXP (x, 0);
4804 switch (GET_CODE (addr))
4805 {
4806 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
4807 fputs (".a", file); break;
4808 case POST_INC: case POST_DEC: case POST_MODIFY:
4809 fputs (".ab", file); break;
4810 case PLUS:
4811 /* Are we using a scaled index? */
4812 if (GET_CODE (XEXP (addr, 0)) == MULT)
4813 fputs (".as", file);
4814 /* Can we use a scaled offset? */
4815 else if (CONST_INT_P (XEXP (addr, 1))
4816 && GET_MODE_SIZE (GET_MODE (x)) > 1
4817 && (!(INTVAL (XEXP (addr, 1))
4818 & (GET_MODE_SIZE (GET_MODE (x)) - 1) & 3))
4819 /* Does it make a difference? */
4820 && !SMALL_INT_RANGE(INTVAL (XEXP (addr, 1)),
4821 GET_MODE_SIZE (GET_MODE (x)) - 2, 0))
4822 {
4823 fputs (".as", file);
4824 output_scaled = 1;
4825 }
4826 break;
4827 case SYMBOL_REF:
4828 case CONST:
02ae0e08 4829 if (legitimate_small_data_address_p (addr, GET_MODE (x))
e0be3321 4830 && GET_MODE_SIZE (GET_MODE (x)) > 1)
b6fb7933 4831 {
4832 int align = get_symbol_alignment (addr);
4833 int mask = 0;
4834 switch (GET_MODE (x))
4835 {
4836 case E_HImode:
4837 mask = 1;
4838 break;
4839 default:
4840 mask = 3;
4841 break;
4842 }
4843 if (align && ((align & mask) == 0))
4844 fputs (".as", file);
4845 }
4846 break;
4847 case REG:
4848 break;
4849 default:
4850 gcc_assert (CONSTANT_P (addr)); break;
4851 }
4852 }
4853 else
4854 output_operand_lossage ("invalid operand to %%U code");
4855 return;
4856 case 'V' :
4857 /* Output cache bypass indicator for a load/store insn. Volatile memory
4858 refs are defined to use the cache bypass mechanism. */
4859 if (GET_CODE (x) == MEM)
4860 {
4861 if ((MEM_VOLATILE_P (x) && !TARGET_VOLATILE_CACHE_SET)
4862 || arc_is_uncached_mem_p (x))
4863 fputs (".di", file);
4864 }
4865 else
4866 output_operand_lossage ("invalid operand to %%V code");
4867 return;
4868 /* plt code. */
4869 case 'P':
4870 case 0 :
4871 /* Do nothing special. */
4872 break;
4873 case 'F':
4874 fputs (reg_names[REGNO (x)]+1, file);
4875 return;
4876 case '^':
4877 /* This punctuation character is needed because label references are
4878 printed in the output template using %l. This is a front end
4879 character, and when we want to emit a '@' before it, we have to use
4880 this '^'. */
4881
4882      fputc ('@', file);
4883 return;
4884 case 'O':
4885 /* Output an operator. */
4886 switch (GET_CODE (x))
4887 {
4888 case PLUS: fputs ("add", file); return;
4889 case SS_PLUS: fputs ("adds", file); return;
4890 case AND: fputs ("and", file); return;
4891 case IOR: fputs ("or", file); return;
4892 case XOR: fputs ("xor", file); return;
4893 case MINUS: fputs ("sub", file); return;
4894 case SS_MINUS: fputs ("subs", file); return;
4895 case ASHIFT: fputs ("asl", file); return;
4896 case ASHIFTRT: fputs ("asr", file); return;
4897 case LSHIFTRT: fputs ("lsr", file); return;
4898 case ROTATERT: fputs ("ror", file); return;
4899 case MULT: fputs ("mpy", file); return;
4900 case ABS: fputs ("abs", file); return; /* Unconditional. */
4901 case NEG: fputs ("neg", file); return;
4902 case SS_NEG: fputs ("negs", file); return;
4903 case NOT: fputs ("not", file); return; /* Unconditional. */
4904 case ZERO_EXTEND:
4905 fputs ("ext", file); /* bmsk allows predication. */
4906 goto size_suffix;
4907 case SIGN_EXTEND: /* Unconditional. */
4908 fputs ("sex", file);
4909 size_suffix:
4910 switch (GET_MODE (XEXP (x, 0)))
4911 {
4912 case E_QImode: fputs ("b", file); return;
4913 case E_HImode: fputs ("w", file); return;
4914 default: break;
4915 }
4916 break;
4917 case SS_TRUNCATE:
4918 if (GET_MODE (x) != HImode)
4919 break;
4920 fputs ("sat16", file);
4921 default: break;
4922 }
4923 output_operand_lossage ("invalid operand to %%O code"); return;
4924 case 'o':
4925 if (GET_CODE (x) == SYMBOL_REF)
4926 {
4927 assemble_name (file, XSTR (x, 0));
4928 return;
4929 }
4930 break;
4931 case '&':
16493b57 4932 if (TARGET_ANNOTATE_ALIGN)
4933 fprintf (file, "; unalign: %d", cfun->machine->unalign);
4934 return;
4935 case '+':
4936 if (TARGET_V2)
4937 fputs ("m", file);
4938 else
4939 fputs ("h", file);
4940 return;
4941 case '_':
4942 if (TARGET_V2)
4943 fputs ("h", file);
4944 else
4945 fputs ("w", file);
4946 return;
4947 default :
4948 /* Unknown flag. */
4949 output_operand_lossage ("invalid operand output code");
4950 }
4951
4952 switch (GET_CODE (x))
4953 {
4954 case REG :
4955 fputs (reg_names[REGNO (x)], file);
4956 break;
4957 case MEM :
4958 {
4959 rtx addr = XEXP (x, 0);
4960 int size = GET_MODE_SIZE (GET_MODE (x));
4961
02ae0e08 4962 if (legitimate_small_data_address_p (addr, GET_MODE (x)))
4963 output_sdata = 1;
4964
4965 fputc ('[', file);
4966
4967 switch (GET_CODE (addr))
4968 {
4969 case PRE_INC: case POST_INC:
4970 output_address (VOIDmode,
4971 plus_constant (Pmode, XEXP (addr, 0), size)); break;
526b7aee 4972 case PRE_DEC: case POST_DEC:
4973 output_address (VOIDmode,
4974 plus_constant (Pmode, XEXP (addr, 0), -size));
4975 break;
4976 case PRE_MODIFY: case POST_MODIFY:
cc8ca59e 4977 output_address (VOIDmode, XEXP (addr, 1)); break;
4978 case PLUS:
4979 if (output_scaled)
4980 {
4981 output_address (VOIDmode,
4982 plus_constant (Pmode, XEXP (addr, 0),
4983 (INTVAL (XEXP (addr, 1))
4984 >> (size == 2 ? 1 : 2))));
4985 output_scaled = 0;
4986 }
4987 else
cc8ca59e 4988 output_address (VOIDmode, addr);
4989 break;
4990 default:
4991 if (flag_pic && CONSTANT_ADDRESS_P (addr))
4992 arc_output_pic_addr_const (file, addr, code);
4993 else
cc8ca59e 4994 output_address (VOIDmode, addr);
4995 break;
4996 }
4997 fputc (']', file);
4998 break;
4999 }
5000 case CONST_DOUBLE :
5001 /* We handle SFmode constants here as output_addr_const doesn't. */
5002 if (GET_MODE (x) == SFmode)
5003 {
5004 long l;
5005
34a72c33 5006 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
5007 fprintf (file, "0x%08lx", l);
5008 break;
5009 }
5010 /* FALLTHRU */
5011 /* Let output_addr_const deal with it. */
526b7aee 5012 default :
5013 if (flag_pic
5014 || (GET_CODE (x) == CONST
5015 && GET_CODE (XEXP (x, 0)) == UNSPEC
5016 && (XINT (XEXP (x, 0), 1) == UNSPEC_TLS_OFF
5017 || XINT (XEXP (x, 0), 1) == UNSPEC_TLS_GD))
5018 || (GET_CODE (x) == CONST
5019 && GET_CODE (XEXP (x, 0)) == PLUS
5020 && GET_CODE (XEXP (XEXP (x, 0), 0)) == UNSPEC
5021 && (XINT (XEXP (XEXP (x, 0), 0), 1) == UNSPEC_TLS_OFF
5022 || XINT (XEXP (XEXP (x, 0), 0), 1) == UNSPEC_TLS_GD)))
5023 arc_output_pic_addr_const (file, x, code);
5024 else
e0be3321 5025 output_addr_const (file, x);
5026 break;
5027 }
5028}
5029
5030/* Print a memory address as an operand to reference that memory location. */
5031
5032void
5033arc_print_operand_address (FILE *file , rtx addr)
5034{
5035 register rtx base, index = 0;
5036
5037 switch (GET_CODE (addr))
5038 {
5039 case REG :
5040 fputs (reg_names[REGNO (addr)], file);
5041 break;
5042 case SYMBOL_REF:
5043 if (output_sdata)
5044 fputs ("gp,", file);
526b7aee 5045 output_addr_const (file, addr);
5046 if (output_sdata)
5047 fputs ("@sda", file);
5048 output_sdata = 0;
5049 break;
5050 case PLUS :
5051 if (GET_CODE (XEXP (addr, 0)) == MULT)
5052 index = XEXP (XEXP (addr, 0), 0), base = XEXP (addr, 1);
5053 else if (CONST_INT_P (XEXP (addr, 0)))
5054 index = XEXP (addr, 0), base = XEXP (addr, 1);
5055 else
5056 base = XEXP (addr, 0), index = XEXP (addr, 1);
5057
5058 gcc_assert (OBJECT_P (base));
5059 arc_print_operand_address (file, base);
5060 if (CONSTANT_P (base) && CONST_INT_P (index))
5061 fputc ('+', file);
5062 else
5063 fputc (',', file);
5064 gcc_assert (OBJECT_P (index));
5065 arc_print_operand_address (file, index);
5066 break;
5067 case CONST:
5068 {
5069 rtx c = XEXP (addr, 0);
5070
5071 if ((GET_CODE (c) == UNSPEC
5072 && (XINT (c, 1) == UNSPEC_TLS_OFF
5073 || XINT (c, 1) == UNSPEC_TLS_IE))
5074 || (GET_CODE (c) == PLUS
5075 && GET_CODE (XEXP (c, 0)) == UNSPEC
5076 && (XINT (XEXP (c, 0), 1) == UNSPEC_TLS_OFF
5077 || XINT (XEXP (c, 0), 1) == ARC_UNSPEC_GOTOFFPC)))
5078 {
5079 arc_output_pic_addr_const (file, c, 0);
5080 break;
5081 }
5082 gcc_assert (GET_CODE (c) == PLUS);
5083 gcc_assert (GET_CODE (XEXP (c, 0)) == SYMBOL_REF);
5084 gcc_assert (GET_CODE (XEXP (c, 1)) == CONST_INT);
5085
cc8ca59e 5086 output_address (VOIDmode, XEXP (addr, 0));
5087
5088 break;
5089 }
5090 case PRE_INC :
5091 case PRE_DEC :
5092 /* We shouldn't get here as we've lost the mode of the memory object
5093       (which says how much to inc/dec by).  */
5094 gcc_unreachable ();
5095 break;
5096 default :
5097 if (flag_pic)
5098 arc_output_pic_addr_const (file, addr, 0);
5099 else
5100 output_addr_const (file, addr);
5101 break;
5102 }
5103}
5104
5105/* Conditional execution support.
5106
5107 This is based on the ARM port but for now is much simpler.
5108
5109 A finite state machine takes care of noticing whether or not instructions
5110 can be conditionally executed, and thus decrease execution time and code
5111 size by deleting branch instructions. The fsm is controlled by
5112 arc_ccfsm_advance (called by arc_final_prescan_insn), and controls the
5113 actions of PRINT_OPERAND. The patterns in the .md file for the branch
5114 insns also have a hand in this. */
5115/* The way we leave dealing with non-annulled or annul-false delay slot
5116   insns to the consumer is awkward.  */
5117
5118/* The state of the fsm controlling condition codes are:
5119 0: normal, do nothing special
5120 1: don't output this insn
5121 2: don't output this insn
5122 3: make insns conditional
5123 4: make insns conditional
5125   5: make insn conditional (only for outputting annulled delay slot insns)
5125
5126 special value for cfun->machine->uid_ccfsm_state:
5127 6: return with but one insn before it since function start / call
5128
5129 State transitions (state->state by whom, under what condition):
5130 0 -> 1 arc_ccfsm_advance, if insn is a conditional branch skipping over
5131 some instructions.
5132 0 -> 2 arc_ccfsm_advance, if insn is a conditional branch followed
5133 by zero or more non-jump insns and an unconditional branch with
5134 the same target label as the condbranch.
5135 1 -> 3 branch patterns, after having not output the conditional branch
5136 2 -> 4 branch patterns, after having not output the conditional branch
5137   0 -> 5 branch patterns, for annulled delay slot insn.
5138 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL, if the `target' label is reached
5139 (the target label has CODE_LABEL_NUMBER equal to
5140 arc_ccfsm_target_label).
5141 4 -> 0 arc_ccfsm_advance, if `target' unconditional branch is reached
5142 3 -> 1 arc_ccfsm_advance, finding an 'else' jump skipping over some insns.
5143 5 -> 0 when outputting the delay slot insn
5144
5145 If the jump clobbers the conditions then we use states 2 and 4.
5146
5147 A similar thing can be done with conditional return insns.
5148
5149 We also handle separating branches from sets of the condition code.
5150 This is done here because knowledge of the ccfsm state is required,
5151 we may not be outputting the branch. */
5152
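/* An illustrative example (assumed code, not from the sources) of the
   0 -> 1 -> 3 path above.  For a conditional branch skipping a single
   insn,

       bne @.L1
       mov r0,1
   .L1:

   the branch is not output, and the skipped insn is instead predicated
   on the inverse condition via the '?' / '!' operand codes, giving

       mov.eq r0,1  */
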
5153/* arc_final_prescan_insn calls arc_ccfsm_advance to adjust arc_ccfsm_current,
5154 before letting final output INSN. */
5155
5156static void
b3458f61 5157arc_ccfsm_advance (rtx_insn *insn, struct arc_ccfsm *state)
5158{
5159 /* BODY will hold the body of INSN. */
5160 register rtx body;
5161
5162  /* This will be 1 if trying to repeat the trick (i.e. do the `else' part of
5163 an if/then/else), and things need to be reversed. */
5164 int reverse = 0;
5165
5166 /* If we start with a return insn, we only succeed if we find another one. */
5167 int seeking_return = 0;
5168
5169 /* START_INSN will hold the insn from where we start looking. This is the
5170 first insn after the following code_label if REVERSE is true. */
b3458f61 5171 rtx_insn *start_insn = insn;
5172
5173 /* Type of the jump_insn. Brcc insns don't affect ccfsm changes,
5174     since they don't rely on a cmp preceding them.  */
5175 enum attr_type jump_insn_type;
5176
5177 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
5178     We can't do this in macro FINAL_PRESCAN_INSN because it's called from
5179 final_scan_insn which has `optimize' as a local. */
5180 if (optimize < 2 || TARGET_NO_COND_EXEC)
5181 return;
5182
5183 /* Ignore notes and labels. */
5184 if (!INSN_P (insn))
5185 return;
5186 body = PATTERN (insn);
  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (state->state == 4)
    {
      if (insn == state->target_insn)
        {
          state->target_insn = NULL;
          state->state = 0;
        }
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  Or in other words "we've done the
     `then' part, see if we can do the `else' part."  */
  if (state->state == 3)
    {
      if (simplejump_p (insn))
        {
          start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == BARRIER)
            {
              /* ??? Isn't this always a barrier?  */
              start_insn = next_nonnote_insn (start_insn);
            }
          if (GET_CODE (start_insn) == CODE_LABEL
              && CODE_LABEL_NUMBER (start_insn) == state->target_label
              && LABEL_NUSES (start_insn) == 1)
            reverse = TRUE;
          else
            return;
        }
      else if (GET_CODE (body) == SIMPLE_RETURN)
        {
          start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == BARRIER)
            start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == CODE_LABEL
              && CODE_LABEL_NUMBER (start_insn) == state->target_label
              && LABEL_NUSES (start_insn) == 1)
            {
              reverse = TRUE;
              seeking_return = 1;
            }
          else
            return;
        }
      else
        return;
    }

  if (GET_CODE (insn) != JUMP_INSN
      || GET_CODE (PATTERN (insn)) == ADDR_VEC
      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
    return;

  /* We can't predicate BRCC or loop ends.
     Also, when generating PIC code, and considering a medium range call,
     we can't predicate the call.  */
  jump_insn_type = get_attr_type (insn);
  if (jump_insn_type == TYPE_BRCC
      || jump_insn_type == TYPE_BRCC_NO_DELAY_SLOT
      || jump_insn_type == TYPE_LOOP_END
      || (jump_insn_type == TYPE_CALL && !get_attr_predicable (insn)))
    return;

  /* This jump might be paralleled with a clobber of the condition codes;
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
          && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped = 0, fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      /* Nonzero if next insn must be the target label.  */
      int next_must_be_target_label_p;
      rtx_insn *this_insn = start_insn;
      rtx label = 0;

      /* Register the insn jumped to.  */
      if (reverse)
        {
          if (!seeking_return)
            label = XEXP (SET_SRC (body), 0);
        }
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
        label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
        {
          label = XEXP (XEXP (SET_SRC (body), 2), 0);
          then_not_else = FALSE;
        }
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == SIMPLE_RETURN)
        seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == SIMPLE_RETURN)
        {
          seeking_return = 1;
          then_not_else = FALSE;
        }
      else
        gcc_unreachable ();

      /* If this is a non-annulled branch with a delay slot, there is
         no need to conditionalize the delay slot.  */
      if (GET_CODE (PATTERN (NEXT_INSN (PREV_INSN (insn)))) == SEQUENCE
          && state->state == 0 && !INSN_ANNULLED_BRANCH_P (insn))
        {
          this_insn = NEXT_INSN (this_insn);
        }
      /* See how many insns this branch skips, and what kind of insns.  If all
         insns are okay, and the label or unconditional branch to the same
         label is not too far away, succeed.  */
      for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
           !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
           insns_skipped++)
        {
          rtx scanbody;

          this_insn = next_nonnote_insn (this_insn);
          if (!this_insn)
            break;

          if (next_must_be_target_label_p)
            {
              if (GET_CODE (this_insn) == BARRIER)
                continue;
              if (GET_CODE (this_insn) == CODE_LABEL
                  && this_insn == label)
                {
                  state->state = 1;
                  succeed = TRUE;
                }
              else
                fail = TRUE;
              break;
            }

          switch (GET_CODE (this_insn))
            {
            case CODE_LABEL:
              /* Succeed if it is the target label, otherwise fail since
                 control falls in from somewhere else.  */
              if (this_insn == label)
                {
                  state->state = 1;
                  succeed = TRUE;
                }
              else
                fail = TRUE;
              break;

            case BARRIER:
              /* Succeed if the following insn is the target label.
                 Otherwise fail.
                 If return insns are used then the last insn in a function
                 will be a barrier.  */
              next_must_be_target_label_p = TRUE;
              break;

            case CALL_INSN:
              /* Can handle a call insn if there are no insns after it.
                 IE: The next "insn" is the target label.  We don't have to
                 worry about delay slots as such insns are SEQUENCEs inside
                 INSNs.  ??? It is possible to handle such insns though.  */
              if (get_attr_cond (this_insn) == COND_CANUSE)
                next_must_be_target_label_p = TRUE;
              else
                fail = TRUE;
              break;

            case JUMP_INSN:
              scanbody = PATTERN (this_insn);

              /* If this is an unconditional branch to the same label, succeed.
                 If it is to another label, do nothing.  If it is conditional,
                 fail.  */
              /* ??? Probably, the test for the SET and the PC are
                 unnecessary.  */

              if (GET_CODE (scanbody) == SET
                  && GET_CODE (SET_DEST (scanbody)) == PC)
                {
                  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
                      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
                    {
                      state->state = 2;
                      succeed = TRUE;
                    }
                  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
                    fail = TRUE;
                  else if (get_attr_cond (this_insn) != COND_CANUSE)
                    fail = TRUE;
                }
              else if (GET_CODE (scanbody) == SIMPLE_RETURN
                       && seeking_return)
                {
                  state->state = 2;
                  succeed = TRUE;
                }
              else if (GET_CODE (scanbody) == PARALLEL)
                {
                  if (get_attr_cond (this_insn) != COND_CANUSE)
                    fail = TRUE;
                }
              break;

            case INSN:
              scanbody = PATTERN (this_insn);

              /* We can only do this with insns that can use the condition
                 codes (and don't set them).  */
              if (GET_CODE (scanbody) == SET
                  || GET_CODE (scanbody) == PARALLEL)
                {
                  if (get_attr_cond (this_insn) != COND_CANUSE)
                    fail = TRUE;
                }
              /* We can't handle other insns like sequences.  */
              else
                fail = TRUE;
              break;

            default:
              break;
            }
        }

      if (succeed)
        {
          if ((!seeking_return) && (state->state == 1 || reverse))
            state->target_label = CODE_LABEL_NUMBER (label);
          else if (seeking_return || state->state == 2)
            {
              while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
                {
                  this_insn = next_nonnote_insn (this_insn);

                  gcc_assert (!this_insn ||
                              (GET_CODE (this_insn) != BARRIER
                               && GET_CODE (this_insn) != CODE_LABEL));
                }
              if (!this_insn)
                {
                  /* Oh dear!  We ran off the end, give up.  */
                  extract_insn_cached (insn);
                  state->state = 0;
                  state->target_insn = NULL;
                  return;
                }
              state->target_insn = this_insn;
            }
          else
            gcc_unreachable ();

          /* If REVERSE is true, the condition code we record needs to be
             inverted from what it was.  */
          if (!reverse)
            {
              state->cond = XEXP (SET_SRC (body), 0);
              state->cc = get_arc_condition_code (XEXP (SET_SRC (body), 0));
            }

          if (reverse || then_not_else)
            state->cc = ARC_INVERSE_CONDITION_CODE (state->cc);
        }

      /* Restore recog_operand.  Getting the attributes of other insns can
         destroy this array, but final.c assumes that it remains intact
         across this call; since the insn has been recognized already we
         call insn_extract direct.  */
      extract_insn_cached (insn);
    }
}

/* Record that we are currently outputting label NUM with prefix PREFIX.
   If it's the label we're looking for, reset the ccfsm machinery.

   Called from ASM_OUTPUT_INTERNAL_LABEL.  */

static void
arc_ccfsm_at_label (const char *prefix, int num, struct arc_ccfsm *state)
{
  if (state->state == 3 && state->target_label == num
      && !strcmp (prefix, "L"))
    {
      state->state = 0;
      state->target_insn = NULL;
    }
}

/* We are considering a conditional branch with the condition COND.
   Check if we want to conditionalize a delay slot insn, and if so modify
   the ccfsm state accordingly.
   REVERSE says the branch will branch when the condition is false.  */

void
arc_ccfsm_record_condition (rtx cond, bool reverse, rtx_insn *jump,
                            struct arc_ccfsm *state)
{
  rtx_insn *seq_insn = NEXT_INSN (PREV_INSN (jump));
  if (!state)
    state = &arc_ccfsm_current;

  gcc_assert (state->state == 0);
  if (seq_insn != jump)
    {
      rtx insn = XVECEXP (PATTERN (seq_insn), 0, 1);

      if (!as_a<rtx_insn *> (insn)->deleted ()
          && INSN_ANNULLED_BRANCH_P (jump)
          && (TARGET_AT_DBR_CONDEXEC || INSN_FROM_TARGET_P (insn)))
        {
          state->cond = cond;
          state->cc = get_arc_condition_code (cond);
          if (!reverse)
            arc_ccfsm_current.cc
              = ARC_INVERSE_CONDITION_CODE (state->cc);
          rtx pat = PATTERN (insn);
          if (GET_CODE (pat) == COND_EXEC)
            gcc_assert ((INSN_FROM_TARGET_P (insn)
                         ? ARC_INVERSE_CONDITION_CODE (state->cc) : state->cc)
                        == get_arc_condition_code (XEXP (pat, 0)));
          else
            state->state = 5;
        }
    }
}

/* Update *STATE as we would when we emit INSN.  */

static void
arc_ccfsm_post_advance (rtx_insn *insn, struct arc_ccfsm *state)
{
  enum attr_type type;

  if (LABEL_P (insn))
    arc_ccfsm_at_label ("L", CODE_LABEL_NUMBER (insn), state);
  else if (JUMP_P (insn)
           && GET_CODE (PATTERN (insn)) != ADDR_VEC
           && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
           && ((type = get_attr_type (insn)) == TYPE_BRANCH
               || ((type == TYPE_UNCOND_BRANCH
                    || type == TYPE_RETURN)
                   && ARC_CCFSM_BRANCH_DELETED_P (state))))
    {
      if (ARC_CCFSM_BRANCH_DELETED_P (state))
        ARC_CCFSM_RECORD_BRANCH_DELETED (state);
      else
        {
          rtx src = SET_SRC (PATTERN (insn));
          arc_ccfsm_record_condition (XEXP (src, 0), XEXP (src, 1) == pc_rtx,
                                      insn, state);
        }
    }
  else if (arc_ccfsm_current.state == 5)
    arc_ccfsm_current.state = 0;
}

/* Return true if the current insn, which is a conditional branch, is to be
   deleted.  */

bool
arc_ccfsm_branch_deleted_p (void)
{
  return ARC_CCFSM_BRANCH_DELETED_P (&arc_ccfsm_current);
}

/* Record that a branch isn't output because subsequent insns can be
   conditionalized.  */

void
arc_ccfsm_record_branch_deleted (void)
{
  ARC_CCFSM_RECORD_BRANCH_DELETED (&arc_ccfsm_current);
}

/* During insn output, indicate if the current insn is predicated.  */

bool
arc_ccfsm_cond_exec_p (void)
{
  return (cfun->machine->prescan_initialized
          && ARC_CCFSM_COND_EXEC_P (&arc_ccfsm_current));
}
5576
526b7aee
SV
5577/* When deciding if an insn should be output short, we want to know something
5578 about the following insns:
5579 - if another insn follows which we know we can output as a short insn
5580 before an alignment-sensitive point, we can output this insn short:
5581 the decision about the eventual alignment can be postponed.
5582 - if a to-be-aligned label comes next, we should output this insn such
5583 as to get / preserve 4-byte alignment.
5584 - if a likely branch without delay slot insn, or a call with an immediately
5585 following short insn comes next, we should out output this insn such as to
5586 get / preserve 2 mod 4 unalignment.
5587 - do the same for a not completely unlikely branch with a short insn
5588 following before any other branch / label.
5589 - in order to decide if we are actually looking at a branch, we need to
5590 call arc_ccfsm_advance.
5591 - in order to decide if we are looking at a short insn, we should know
5592 if it is conditionalized. To a first order of approximation this is
5593 the case if the state from arc_ccfsm_advance from before this insn
5594 indicates the insn is conditionalized. However, a further refinement
5595 could be to not conditionalize an insn if the destination register(s)
5596 is/are dead in the non-executed case. */
5597/* Return non-zero if INSN should be output as a short insn. UNALIGN is
5598 zero if the current insn is aligned to a 4-byte-boundary, two otherwise.
5599 If CHECK_ATTR is greater than 0, check the iscompact attribute first. */
5600
b51addd6 5601static int
b3458f61 5602arc_verify_short (rtx_insn *insn, int, int check_attr)
526b7aee
SV
5603{
5604 enum attr_iscompact iscompact;
526b7aee
SV
5605
5606 if (check_attr > 0)
5607 {
5608 iscompact = get_attr_iscompact (insn);
5609 if (iscompact == ISCOMPACT_FALSE)
5610 return 0;
5611 }
526b7aee
SV
5612
5613 return (get_attr_length (insn) & 2) != 0;
5614}

/* When outputting an instruction (alternative) that can potentially be short,
   output the short suffix if the insn is in fact short, and update
   cfun->machine->unalign accordingly.  */

static void
output_short_suffix (FILE *file)
{
  rtx_insn *insn = current_output_insn;
  if (!insn)
    return;

  if (arc_verify_short (insn, cfun->machine->unalign, 1))
    {
      fprintf (file, "_s");
      cfun->machine->unalign ^= 2;
    }
  /* Restore recog_operand.  */
  extract_insn_cached (insn);
}
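
/* Illustrative sketch (not part of the port): how the `unalign' field
   tracks the PC offset modulo 4.  A 2-byte (short) insn flips it; a 4-byte
   insn leaves it unchanged.  The insn lengths below are invented.  */
#if 0
#include <stdio.h>

int
main (void)
{
  static const int insn_len[] = { 4, 2, 4, 2, 2 };
  int unalign = 0;  /* 0: 4-byte aligned; 2: 2 mod 4.  */
  unsigned i;

  for (i = 0; i < sizeof insn_len / sizeof insn_len[0]; i++)
    {
      if (insn_len[i] == 2)
        unalign ^= 2;  /* Same toggle as in output_short_suffix.  */
      printf ("after insn %u (len %d): unalign = %d\n",
              i, insn_len[i], unalign);
    }
  return 0;
}
#endif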

/* Implement FINAL_PRESCAN_INSN.  */

void
arc_final_prescan_insn (rtx_insn *insn, rtx *opvec ATTRIBUTE_UNUSED,
                        int noperands ATTRIBUTE_UNUSED)
{
  if (TARGET_DUMPISIZE)
    fprintf (asm_out_file, "\n; at %04x\n", INSN_ADDRESSES (INSN_UID (insn)));

  if (!cfun->machine->prescan_initialized)
    {
      /* Clear lingering state from branch shortening.  */
      memset (&arc_ccfsm_current, 0, sizeof arc_ccfsm_current);
      cfun->machine->prescan_initialized = 1;
    }
  arc_ccfsm_advance (insn, &arc_ccfsm_current);
}

/* Given FROM and TO register numbers, say whether this elimination is allowed.
   Frame pointer elimination is automatically handled.

   All eliminations are permissible.  If we need a frame
   pointer, we must eliminate ARG_POINTER_REGNUM into
   FRAME_POINTER_REGNUM and not into STACK_POINTER_REGNUM.  */

static bool
arc_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  return ((to == HARD_FRAME_POINTER_REGNUM) || (to == STACK_POINTER_REGNUM));
}

/* Define the offset between two registers, one to be eliminated, and
   the other its replacement, at the start of a routine.  */

int
arc_initial_elimination_offset (int from, int to)
{
  if (!cfun->machine->frame_info.initialized)
    arc_compute_frame_size ();

  if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return (cfun->machine->frame_info.extra_size
              + cfun->machine->frame_info.reg_size);
    }

  if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return (cfun->machine->frame_info.total_size
              - cfun->machine->frame_info.pretend_size);
    }

  if ((from == FRAME_POINTER_REGNUM) && (to == STACK_POINTER_REGNUM))
    {
      return (cfun->machine->frame_info.total_size
              - (cfun->machine->frame_info.pretend_size
                 + cfun->machine->frame_info.extra_size
                 + cfun->machine->frame_info.reg_size));
    }
  if ((from == FRAME_POINTER_REGNUM) && (to == HARD_FRAME_POINTER_REGNUM))
    return 0;

  gcc_unreachable ();
}
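
/* Illustrative sketch (not part of the port): the three offsets above for a
   hypothetical frame with pretend = 0, extra = 4, reg save area = 8 and
   total = 28 bytes.  All numbers are invented for exposition.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int pretend = 0, extra = 4, reg = 8, total = 28;

  /* ARG_POINTER -> HARD_FRAME_POINTER: skip the extra area and the
     saved registers.  */
  printf ("ap -> hfp: %d\n", extra + reg);                     /* 12 */
  /* ARG_POINTER -> STACK_POINTER: the whole frame minus pretend args.  */
  printf ("ap -> sp : %d\n", total - pretend);                 /* 28 */
  /* FRAME_POINTER -> STACK_POINTER: what remains below the saves.  */
  printf ("fp -> sp : %d\n", total - (pretend + extra + reg)); /* 16 */
  return 0;
}
#endif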

static bool
arc_frame_pointer_required (void)
{
  return cfun->calls_alloca || crtl->calls_eh_return;
}


/* Return the destination address of a branch.  */

static int
branch_dest (rtx branch)
{
  rtx pat = PATTERN (branch);
  rtx dest = (GET_CODE (pat) == PARALLEL
              ? SET_SRC (XVECEXP (pat, 0, 0)) : SET_SRC (pat));
  int dest_uid;

  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, XEXP (dest, 1) == pc_rtx ? 2 : 1);

  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);

  return INSN_ADDRESSES (dest_uid);
}


/* Implement TARGET_ENCODE_SECTION_INFO hook.  */

static void
arc_encode_section_info (tree decl, rtx rtl, int first)
{
  /* For sdata, SYMBOL_FLAG_LOCAL and SYMBOL_FLAG_FUNCTION.
     This clears machine specific flags, so has to come first.  */
  default_encode_section_info (decl, rtl, first);

  /* Check if it is a function, and whether it has the
     [long/medium/short]_call attribute specified.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx symbol = XEXP (rtl, 0);
      int flags = SYMBOL_REF_FLAGS (symbol);

      tree attr = (TREE_TYPE (decl) != error_mark_node
                   ? TYPE_ATTRIBUTES (TREE_TYPE (decl)) : NULL_TREE);
      tree long_call_attr = lookup_attribute ("long_call", attr);
      tree medium_call_attr = lookup_attribute ("medium_call", attr);
      tree short_call_attr = lookup_attribute ("short_call", attr);

      if (long_call_attr != NULL_TREE)
        flags |= SYMBOL_FLAG_LONG_CALL;
      else if (medium_call_attr != NULL_TREE)
        flags |= SYMBOL_FLAG_MEDIUM_CALL;
      else if (short_call_attr != NULL_TREE)
        flags |= SYMBOL_FLAG_SHORT_CALL;

      SYMBOL_REF_FLAGS (symbol) = flags;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      rtx symbol = XEXP (rtl, 0);

      tree attr = (TREE_TYPE (decl) != error_mark_node
                   ? DECL_ATTRIBUTES (decl) : NULL_TREE);

      tree sec_attr = lookup_attribute ("section", attr);
      if (sec_attr)
        {
          const char *sec_name
            = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (sec_attr)));
          if (strcmp (sec_name, ".cmem") == 0
              || strcmp (sec_name, ".cmem_shared") == 0
              || strcmp (sec_name, ".cmem_private") == 0)
            SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_CMEM;
        }
    }
}

/* This is how to output a definition of an internal numbered label where
   PREFIX is the class of label and NUM is the number within the class.  */

static void
arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
{
  if (cfun)
    arc_ccfsm_at_label (prefix, labelno, &arc_ccfsm_current);
  default_internal_label (stream, prefix, labelno);
}

/* Set the cpu type and print out other fancy things,
   at the top of the file.  */

static void
arc_file_start (void)
{
  default_file_start ();
  fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);

  /* Set some wanted build attributes.  */
  asm_fprintf (asm_out_file, "\t.arc_attribute Tag_ARC_PCS_config, %d\n",
               ATTRIBUTE_PCS);
  asm_fprintf (asm_out_file, "\t.arc_attribute Tag_ARC_ABI_rf16, %d\n",
               TARGET_RF16 ? 1 : 0);
  asm_fprintf (asm_out_file, "\t.arc_attribute Tag_ARC_ABI_pic, %d\n",
               flag_pic ? 2 : 0);
  asm_fprintf (asm_out_file, "\t.arc_attribute Tag_ARC_ABI_tls, %d\n",
               (arc_tp_regno != -1) ? 1 : 0);
  asm_fprintf (asm_out_file, "\t.arc_attribute Tag_ARC_ABI_sda, %d\n",
               TARGET_NO_SDATA_SET ? 0 : 2);
  asm_fprintf (asm_out_file, "\t.arc_attribute Tag_ARC_ABI_exceptions, %d\n",
               TARGET_OPTFPE ? 1 : 0);
  if (TARGET_V2)
    asm_fprintf (asm_out_file, "\t.arc_attribute Tag_ARC_CPU_variation, %d\n",
                 (arc_tune < ARC_TUNE_CORE_3) ? 2 :
                 (arc_tune == ARC_TUNE_CORE_3 ? 3 : 4));
}

/* Implement `TARGET_ASM_FILE_END'.  */
/* Output JLI related text to the assembler output file.  */

void
arc_file_end (void)
{
  arc_jli_section *sec = arc_jli_sections;

  while (sec != NULL)
    {
      fprintf (asm_out_file, "\n");
      fprintf (asm_out_file, "# JLI entry for function ");
      assemble_name (asm_out_file, sec->name);
      fprintf (asm_out_file, "\n\t.section .jlitab, \"axG\", @progbits, "
               ".jlitab.");
      assemble_name (asm_out_file, sec->name);
      fprintf (asm_out_file, ", comdat\n");

      fprintf (asm_out_file, "\t.align\t4\n");
      fprintf (asm_out_file, "__jli.");
      assemble_name (asm_out_file, sec->name);
      fprintf (asm_out_file, ":\n\t.weak __jli.");
      assemble_name (asm_out_file, sec->name);
      fprintf (asm_out_file, "\n\tb\t@");
      assemble_name (asm_out_file, sec->name);
      fprintf (asm_out_file, "\n");
      sec = sec->next;
    }
  file_end_indicate_exec_stack ();
}
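
/* Illustrative example (shape derived from the fprintf calls above) of the
   assembly emitted for a function `foo' that has a JLI entry:

   # JLI entry for function foo
           .section .jlitab, "axG", @progbits, .jlitab.foo, comdat
           .align  4
   __jli.foo:
           .weak __jli.foo
           b       @foo
*/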

/* Cost functions.  */

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
arc_rtx_costs (rtx x, machine_mode mode, int outer_code,
               int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  int code = GET_CODE (x);

  switch (code)
    {
      /* Small integers are as cheap as registers.  */
    case CONST_INT:
      {
        bool nolimm = false; /* Can we do without long immediate?  */

        if (UNSIGNED_INT6 (INTVAL (x)))
          nolimm = true;
        else
          {
            switch (outer_code)
              {
              case AND: /* bclr, bmsk, ext[bw] */
                if (satisfies_constraint_Ccp (x) /* bclr */
                    || satisfies_constraint_C1p (x) /* bmsk */)
                  nolimm = true;
                break;
              case IOR: /* bset */
                if (satisfies_constraint_C0p (x)) /* bset */
                  nolimm = true;
                break;
              case XOR:
                if (satisfies_constraint_C0p (x)) /* bxor */
                  nolimm = true;
                break;
              case SET:
                if (UNSIGNED_INT8 (INTVAL (x)))
                  nolimm = true;
                if (satisfies_constraint_Chi (x))
                  nolimm = true;
                if (satisfies_constraint_Clo (x))
                  nolimm = true;
                break;
              case MULT:
                if (TARGET_MUL64_SET)
                  if (SIGNED_INT12 (INTVAL (x)))
                    nolimm = true;
                break;
              default:
                break;
              }
          }
        if (nolimm)
          {
            *total = 0;
            return true;
          }
      }
      /* FALLTHRU */

      /* 4 byte values can be fetched as immediate constants -
         let's give that the cost of an extra insn.  */
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = speed ? COSTS_N_INSNS (1) : COSTS_N_INSNS (4);
      return true;

    case CONST_DOUBLE:
      {
        rtx first, second;

        if (TARGET_DPFP)
          {
            *total = COSTS_N_INSNS (1);
            return true;
          }
        split_double (x, &first, &second);
        *total = COSTS_N_INSNS (!SMALL_INT (INTVAL (first))
                                + !SMALL_INT (INTVAL (second)));
        return true;
      }

      /* Encourage synth_mult to find a synthetic multiply when reasonable.
         If we need more than 12 insns to do a multiply, then go out-of-line,
         since the call overhead will be < 10% of the cost of the multiply.  */
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_BARREL_SHIFTER)
        {
          if (CONSTANT_P (XEXP (x, 0)))
            {
              *total += rtx_cost (XEXP (x, 1), mode, (enum rtx_code) code,
                                  0, speed);
              return true;
            }
          *total = COSTS_N_INSNS (1);
        }
      else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        *total = COSTS_N_INSNS (16);
      else
        {
          *total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
          /* ??? want_to_gcse_p can throw negative shift counts at us,
             and then panics when it gets a negative cost as result.
             Seen for gcc.c-torture/compile/20020710-1.c -Os .  */
          if (*total < 0)
            *total = 0;
        }
      return false;

    case DIV:
    case UDIV:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          && (TARGET_FP_SP_SQRT || TARGET_FP_DP_SQRT))
        *total = COSTS_N_INSNS (1);
      else if (GET_MODE_CLASS (mode) == MODE_INT
               && TARGET_DIVREM)
        *total = COSTS_N_INSNS (1);
      else if (speed)
        *total = COSTS_N_INSNS (30);
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      if ((TARGET_DPFP && GET_MODE (x) == DFmode))
        *total = COSTS_N_INSNS (1);
      else if (speed)
        *total = arc_multcost;
      /* We do not want synth_mult sequences when optimizing
         for size.  */
      else if (TARGET_ANY_MPY)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (2);
      return false;

    case PLUS:
      if (outer_code == MEM && CONST_INT_P (XEXP (x, 1))
          && RTX_OK_FOR_OFFSET_P (mode, XEXP (x, 1)))
        {
          *total = 0;
          return true;
        }

      if ((GET_CODE (XEXP (x, 0)) == ASHIFT
           && _1_2_3_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
          || (GET_CODE (XEXP (x, 0)) == MULT
              && _2_4_8_operand (XEXP (XEXP (x, 0), 1), VOIDmode)))
        {
          if (CONSTANT_P (XEXP (x, 1)) && !speed)
            *total += COSTS_N_INSNS (4);
          *total += rtx_cost (XEXP (XEXP (x, 0), 0), mode, PLUS, 1, speed);
          return true;
        }
      return false;
    case MINUS:
      if ((GET_CODE (XEXP (x, 1)) == ASHIFT
           && _1_2_3_operand (XEXP (XEXP (x, 1), 1), VOIDmode))
          || (GET_CODE (XEXP (x, 1)) == MULT
              && _2_4_8_operand (XEXP (XEXP (x, 1), 1), VOIDmode)))
        {
          if (CONSTANT_P (XEXP (x, 0)) && !speed)
            *total += COSTS_N_INSNS (4);
          *total += rtx_cost (XEXP (XEXP (x, 1), 0), mode, PLUS, 1, speed);
          return true;
        }
      return false;

    case COMPARE:
      {
        rtx op0 = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (GET_CODE (op0) == ZERO_EXTRACT && op1 == const0_rtx
            && XEXP (op0, 1) == const1_rtx)
          {
            /* btst / bbit0 / bbit1:
               Small integers and registers are free; everything else can
               be put in a register.  */
            mode = GET_MODE (XEXP (op0, 0));
            *total = (rtx_cost (XEXP (op0, 0), mode, SET, 1, speed)
                      + rtx_cost (XEXP (op0, 2), mode, SET, 1, speed));
            return true;
          }
        if (GET_CODE (op0) == AND && op1 == const0_rtx
            && satisfies_constraint_C1p (XEXP (op0, 1)))
          {
            /* bmsk.f */
            *total = rtx_cost (XEXP (op0, 0), VOIDmode, SET, 1, speed);
            return true;
          }
        /* add.f */
        if (GET_CODE (op1) == NEG)
          {
            /* op0 might be constant, the inside of op1 is rather
               unlikely to be so.  So swapping the operands might lower
               the cost.  */
            mode = GET_MODE (op0);
            *total = (rtx_cost (op0, mode, PLUS, 1, speed)
                      + rtx_cost (XEXP (op1, 0), mode, PLUS, 0, speed));
          }
        return false;
      }
    case EQ: case NE:
      if (outer_code == IF_THEN_ELSE
          && GET_CODE (XEXP (x, 0)) == ZERO_EXTRACT
          && XEXP (x, 1) == const0_rtx
          && XEXP (XEXP (x, 0), 1) == const1_rtx)
        {
          /* btst / bbit0 / bbit1:
             Small integers and registers are free; everything else can
             be put in a register.  */
          rtx op0 = XEXP (x, 0);

          mode = GET_MODE (XEXP (op0, 0));
          *total = (rtx_cost (XEXP (op0, 0), mode, SET, 1, speed)
                    + rtx_cost (XEXP (op0, 2), mode, SET, 1, speed));
          return true;
        }
      /* Fall through.  */
      /* scc_insn expands into two insns.  */
    case GTU: case GEU: case LEU:
      if (mode == SImode)
        *total += COSTS_N_INSNS (1);
      return false;
    case LTU: /* might use adc.  */
      if (mode == SImode)
        *total += COSTS_N_INSNS (1) - 1;
      return false;
    default:
      return false;
    }
}
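
/* Illustrative sketch (not part of the port): how the shift costs above
   behave without a barrel shifter, where a shift by a constant N costs
   roughly N single-bit shift insns.  COSTS_N_INSNS is modeled the way
   GCC defines it, N * 4; the macro name here is local to the sketch.  */
#if 0
#include <stdio.h>

#define MY_COSTS_N_INSNS(N) ((N) * 4)

int
main (void)
{
  int n;
  for (n = 1; n <= 4; n++)
    printf ("x << %d: cost %d (with barrel shifter: %d)\n",
            n, MY_COSTS_N_INSNS (n), MY_COSTS_N_INSNS (1));
  return 0;
}
#endif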

/* Return true if ADDR is a valid pic address.
   A valid pic address on arc should look like
   const (unspec (SYMBOL_REF/LABEL) (ARC_UNSPEC_GOTOFF/ARC_UNSPEC_GOT))  */

bool
arc_legitimate_pic_addr_p (rtx addr)
{
  if (GET_CODE (addr) != CONST)
    return false;

  addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 1)) != CONST_INT)
        return false;
      addr = XEXP (addr, 0);
    }

  if (GET_CODE (addr) != UNSPEC
      || XVECLEN (addr, 0) != 1)
    return false;

  /* Must be one of @GOT, @GOTOFF, @GOTOFFPC, @tlsgd, @tlsie.  */
  if (XINT (addr, 1) != ARC_UNSPEC_GOT
      && XINT (addr, 1) != ARC_UNSPEC_GOTOFF
      && XINT (addr, 1) != ARC_UNSPEC_GOTOFFPC
      && XINT (addr, 1) != UNSPEC_TLS_GD
      && XINT (addr, 1) != UNSPEC_TLS_IE)
    return false;

  if (GET_CODE (XVECEXP (addr, 0, 0)) != SYMBOL_REF
      && GET_CODE (XVECEXP (addr, 0, 0)) != LABEL_REF)
    return false;

  return true;
}


/* Return true if OP contains a symbol reference.  */

static bool
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return true;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
            if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
              return true;
        }

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
        return true;
    }

  return false;
}

/* Return true if OP contains a SYMBOL_REF that is not wrapped in an unspec.
   If SKIP_LOCAL is true, skip symbols that bind locally.
   This is used further down in this file, and, without SKIP_LOCAL,
   in the addsi3 / subsi3 expanders when generating PIC code.  */

bool
arc_raw_symbolic_reference_mentioned_p (rtx op, bool skip_local)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == UNSPEC)
    return false;

  if (GET_CODE (op) == SYMBOL_REF)
    {
      if (SYMBOL_REF_TLS_MODEL (op))
        return true;
      if (!flag_pic)
        return false;
      tree decl = SYMBOL_REF_DECL (op);
      return !skip_local || !decl || !default_binds_local_p (decl);
    }

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
            if (arc_raw_symbolic_reference_mentioned_p (XVECEXP (op, i, j),
                                                        skip_local))
              return true;
        }

      else if (fmt[i] == 'e'
               && arc_raw_symbolic_reference_mentioned_p (XEXP (op, i),
                                                          skip_local))
        return true;
    }

  return false;
}

/* The __tls_get_addr symbol.  */
static GTY(()) rtx arc_tls_symbol;

/* Emit a call to __tls_get_addr.  TI is the argument to this function.
   Return an RTX for the return value location.  */

static rtx
arc_call_tls_get_addr (rtx ti)
{
  rtx arg = gen_rtx_REG (Pmode, R0_REG);
  rtx ret = gen_rtx_REG (Pmode, R0_REG);
  rtx fn;
  rtx_insn *insn;

  if (!arc_tls_symbol)
    arc_tls_symbol = init_one_libfunc ("__tls_get_addr");

  emit_move_insn (arg, ti);
  fn = gen_rtx_MEM (SImode, arc_tls_symbol);
  insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
  RTL_CONST_CALL_P (insn) = 1;
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);

  return ret;
}

#define DTPOFF_ZERO_SYM ".tdata"

/* Return a legitimized address for ADDR,
   which is a SYMBOL_REF with tls_model MODEL.  */

static rtx
arc_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx tmp;

  if (!flag_pic && model == TLS_MODEL_LOCAL_DYNAMIC)
    model = TLS_MODEL_LOCAL_EXEC;

  /* The TP pointer needs to be set.  */
  gcc_assert (arc_tp_regno != -1);

  switch (model)
    {
    case TLS_MODEL_GLOBAL_DYNAMIC:
      tmp = gen_reg_rtx (Pmode);
      emit_move_insn (tmp, arc_unspec_offset (addr, UNSPEC_TLS_GD));
      return arc_call_tls_get_addr (tmp);

    case TLS_MODEL_LOCAL_DYNAMIC:
      rtx base;
      tree decl;
      const char *base_name;

      decl = SYMBOL_REF_DECL (addr);
      base_name = DTPOFF_ZERO_SYM;
      if (decl && bss_initializer_p (decl))
        base_name = ".tbss";

      base = gen_rtx_SYMBOL_REF (Pmode, base_name);
      tmp = gen_reg_rtx (Pmode);
      emit_move_insn (tmp, arc_unspec_offset (base, UNSPEC_TLS_GD));
      base = arc_call_tls_get_addr (tmp);
      return gen_rtx_PLUS (Pmode, force_reg (Pmode, base),
                           arc_unspec_offset (addr, UNSPEC_TLS_OFF));

    case TLS_MODEL_INITIAL_EXEC:
      addr = arc_unspec_offset (addr, UNSPEC_TLS_IE);
      addr = copy_to_mode_reg (Pmode, gen_const_mem (Pmode, addr));
      return gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, arc_tp_regno), addr);

    case TLS_MODEL_LOCAL_EXEC:
      addr = arc_unspec_offset (addr, UNSPEC_TLS_OFF);
      return gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, arc_tp_regno), addr);

    default:
      gcc_unreachable ();
    }
}
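
/* Illustrative usage (user-level C, not part of the port): code that
   exercises the TLS models handled above.  Which model is chosen depends on
   -fpic and -ftls-model; a shared library typically uses global-dynamic for
   the extern variable, while non-PIC code falls back to the exec models.
   The variable names are invented.  */
#if 0
extern __thread int remote_counter;
static __thread int local_counter;

int
bump (void)
{
  local_counter++;              /* local-dynamic / local-exec.  */
  return ++remote_counter;      /* global-dynamic / initial-exec.  */
}
#endif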

/* Return true if SYMBOL_REF X binds locally.  */

static bool
arc_symbol_binds_local_p (const_rtx x)
{
  return (SYMBOL_REF_DECL (x)
          ? targetm.binds_local_p (SYMBOL_REF_DECL (x))
          : SYMBOL_REF_LOCAL_P (x));
}

/* Legitimize a pic address reference in ADDR.  The return value is
   the legitimated address.  */

static rtx
arc_legitimize_pic_address (rtx addr)
{
  if (!flag_pic)
    return addr;

  switch (GET_CODE (addr))
    {
    case UNSPEC:
      /* Can be one of our GOT or GOTOFFPC unspecs.  This situation
         happens when an address is not a legitimate constant and we
         need to resolve it via force_reg in
         prepare_move_operands.  */
      switch (XINT (addr, 1))
        {
        case ARC_UNSPEC_GOT:
        case ARC_UNSPEC_GOTOFFPC:
          /* Recover the symbol ref.  */
          addr = XVECEXP (addr, 0, 0);
          break;
        default:
          return addr;
        }
      /* Fall through.  */
    case SYMBOL_REF:
      /* TLS symbols are handled in a different place.  */
      if (SYMBOL_REF_TLS_MODEL (addr))
        return addr;

      /* This symbol must be referenced via a load from the Global
         Offset Table (@GOTPC).  */
      if (!arc_symbol_binds_local_p (addr))
        return gen_const_mem (Pmode, arc_unspec_offset (addr, ARC_UNSPEC_GOT));

      /* Local symbol: use @pcl to access it.  */
      /* Fall through.  */
    case LABEL_REF:
      return arc_unspec_offset (addr, ARC_UNSPEC_GOTOFFPC);

    default:
      break;
    }

  return addr;
}

/* Output address constant X to FILE, taking PIC into account.  */

static void
arc_output_pic_addr_const (FILE *file, rtx x, int code)
{
  char buf[256];

 restart:
  switch (GET_CODE (x))
    {
    case PC:
      if (flag_pic)
        putc ('.', file);
      else
        gcc_unreachable ();
      break;

    case SYMBOL_REF:
      output_addr_const (file, x);

      /* Local functions do not get references through the PLT.  */
      if (code == 'P' && ! SYMBOL_REF_LOCAL_P (x))
        fputs ("@plt", file);
      break;

    case LABEL_REF:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (x, 0)));
      assemble_name (file, buf);
      break;

    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
      assemble_name (file, buf);
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST:
      arc_output_pic_addr_const (file, XEXP (x, 0), code);
      break;

    case CONST_DOUBLE:
      if (GET_MODE (x) == VOIDmode)
        {
          /* We can use %d if the number is one word and positive.  */
          if (CONST_DOUBLE_HIGH (x))
            fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
                     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
          else if (CONST_DOUBLE_LOW (x) < 0)
            fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
          else
            fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
        }
      else
        /* We can't handle floating point constants;
           PRINT_OPERAND must handle them.  */
        output_operand_lossage ("floating constant misused");
      break;

    case PLUS:
      /* FIXME: Not needed here.  */
      /* Some assemblers need integer constants to appear last (eg masm).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
        {
          arc_output_pic_addr_const (file, XEXP (x, 1), code);
          fprintf (file, "+");
          arc_output_pic_addr_const (file, XEXP (x, 0), code);
        }
      else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          arc_output_pic_addr_const (file, XEXP (x, 0), code);
          if (INTVAL (XEXP (x, 1)) >= 0)
            fprintf (file, "+");
          arc_output_pic_addr_const (file, XEXP (x, 1), code);
        }
      else
        gcc_unreachable ();
      break;

    case MINUS:
      /* Avoid outputting things like x-x or x+5-x,
         since some assemblers can't handle that.  */
      x = simplify_subtraction (x);
      if (GET_CODE (x) != MINUS)
        goto restart;

      arc_output_pic_addr_const (file, XEXP (x, 0), code);
      fprintf (file, "-");
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < 0)
        {
          fprintf (file, "(");
          arc_output_pic_addr_const (file, XEXP (x, 1), code);
          fprintf (file, ")");
        }
      else
        arc_output_pic_addr_const (file, XEXP (x, 1), code);
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
      arc_output_pic_addr_const (file, XEXP (x, 0), code);
      break;


    case UNSPEC:
      const char *suffix;
      bool pcrel; pcrel = false;
      rtx base; base = NULL;
      gcc_assert (XVECLEN (x, 0) >= 1);
      switch (XINT (x, 1))
        {
        case ARC_UNSPEC_GOT:
          suffix = "@gotpc", pcrel = true;
          break;
        case ARC_UNSPEC_GOTOFF:
          suffix = "@gotoff";
          break;
        case ARC_UNSPEC_GOTOFFPC:
          suffix = "@pcl", pcrel = true;
          break;
        case ARC_UNSPEC_PLT:
          suffix = "@plt";
          break;
        case UNSPEC_TLS_GD:
          suffix = "@tlsgd", pcrel = true;
          break;
        case UNSPEC_TLS_IE:
          suffix = "@tlsie", pcrel = true;
          break;
        case UNSPEC_TLS_OFF:
          if (XVECLEN (x, 0) == 2)
            base = XVECEXP (x, 0, 1);
          if (SYMBOL_REF_TLS_MODEL (XVECEXP (x, 0, 0)) == TLS_MODEL_LOCAL_EXEC
              || (!flag_pic && !base))
            suffix = "@tpoff";
          else
            suffix = "@dtpoff";
          break;
        default:
          suffix = "@invalid";
          output_operand_lossage ("invalid UNSPEC as operand: %d", XINT (x, 1));
          break;
        }
      if (pcrel)
        fputs ("pcl,", file);
      arc_output_pic_addr_const (file, XVECEXP (x, 0, 0), code);
      fputs (suffix, file);
      if (base)
        arc_output_pic_addr_const (file, base, code);
      break;

    default:
      output_operand_lossage ("invalid expression as operand");
    }
}

/* The function returns the number of words, at the beginning of an
   argument, that must be put in registers.  The returned value must be
   zero for arguments that are passed entirely in registers or that
   are entirely pushed on the stack.

   On some machines, certain arguments must be passed partially in
   registers and partially in memory.  On these machines, typically
   the first N words of arguments are passed in registers, and the
   rest on the stack.  If a multi-word argument (a `double' or a
   structure) crosses that boundary, its first few words must be
   passed in registers and the rest must be pushed.  This function
   tells the compiler when this occurs, and how many of the words
   should go in registers.

   `FUNCTION_ARG' for these arguments should return the first register
   to be used by the caller for this argument; likewise
   `FUNCTION_INCOMING_ARG', for the called function.

   The function is used to implement macro FUNCTION_ARG_PARTIAL_NREGS.  */

/* If REGNO is the least arg reg available then what is the total number of arg
   regs available.  */
#define GPR_REST_ARG_REGS(REGNO) \
  ((REGNO) <= MAX_ARC_PARM_REGS ? MAX_ARC_PARM_REGS - (REGNO) : 0)

/* Since arc parm regs are contiguous.  */
#define ARC_NEXT_ARG_REG(REGNO) ((REGNO) + 1)

/* Implement TARGET_ARG_PARTIAL_BYTES.  */

static int
arc_arg_partial_bytes (cumulative_args_t cum_v, const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = arg.promoted_size_in_bytes ();
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  int arg_num = *cum;
  int ret;

  arg_num = ROUND_ADVANCE_CUM (arg_num, arg.mode, arg.type);
  ret = GPR_REST_ARG_REGS (arg_num);

  /* ICEd at function.c:2361, and ret is copied to data->partial.  */
  ret = (ret >= words ? 0 : ret * UNITS_PER_WORD);

  return ret;
}
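
/* Illustrative sketch (not part of the port): the partial-bytes logic above
   for a hypothetical 8-register ABI with 4-byte words.  An 8-byte argument
   arriving when only one arg reg is left gets 4 bytes in the register and
   4 on the stack.  All numbers are invented.  */
#if 0
#include <stdio.h>

#define MAX_PARM_REGS 8
#define REST_ARG_REGS(REGNO) \
  ((REGNO) <= MAX_PARM_REGS ? MAX_PARM_REGS - (REGNO) : 0)

int
main (void)
{
  int arg_num = 7;      /* First free arg reg.  */
  int words = 2;        /* An 8-byte argument.  */
  int rest = REST_ARG_REGS (arg_num);
  int partial = rest >= words ? 0 : rest * 4;

  printf ("partial bytes in registers: %d\n", partial);  /* 4 */
  return 0;
}
#endif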

/* Implement TARGET_FUNCTION_ARG.  On the ARC the first MAX_ARC_PARM_REGS
   args are normally in registers and the rest are pushed.  */

static rtx
arc_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int arg_num = *cum;
  rtx ret;
  const char *debstr ATTRIBUTE_UNUSED;

  arg_num = ROUND_ADVANCE_CUM (arg_num, arg.mode, arg.type);
  /* Return a marker for use in the call instruction.  */
  if (arg.end_marker_p ())
    {
      ret = const0_rtx;
      debstr = "<0>";
    }
  else if (GPR_REST_ARG_REGS (arg_num) > 0)
    {
      ret = gen_rtx_REG (arg.mode, arg_num);
      debstr = reg_names [arg_num];
    }
  else
    {
      ret = NULL_RTX;
      debstr = "memory";
    }
  return ret;
}

/* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
/* For the ARC: the cum set here is passed on to function_arg where we
   look at its value and say which reg to use.  Strategy: advance the
   regnumber here till we run out of arg regs, then set *cum to last
   reg.  In function_arg, since *cum > last arg reg we would return 0
   and thus the arg will end up on the stack.  For straddling args of
   course function_arg_partial_nregs will come into play.  */

static void
arc_function_arg_advance (cumulative_args_t cum_v,
                          const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = arg.promoted_size_in_bytes ();
  int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  int i;

  if (words)
    *cum = ROUND_ADVANCE_CUM (*cum, arg.mode, arg.type);
  for (i = 0; i < words; i++)
    *cum = ARC_NEXT_ARG_REG (*cum);
}

/* Define how to find the value returned by a function.
   VALTYPE is the data type of the value (as a tree).
   If the precise function being called is known, FN_DECL_OR_TYPE is its
   FUNCTION_DECL; otherwise, FN_DECL_OR_TYPE is its type.  */

static rtx
arc_function_value (const_tree valtype,
                    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
                    bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (valtype);
  int unsignedp ATTRIBUTE_UNUSED;

  unsignedp = TYPE_UNSIGNED (valtype);
  if (INTEGRAL_TYPE_P (valtype) || TREE_CODE (valtype) == OFFSET_TYPE)
    PROMOTE_MODE (mode, unsignedp, valtype);
  return gen_rtx_REG (mode, 0);
}

/* Returns the return address that is used by builtin_return_address.  */

rtx
arc_return_addr_rtx (int count, ATTRIBUTE_UNUSED rtx frame)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
}

/* Determine if a given RTX is a valid constant.  We already know this
   satisfies CONSTANT_P.  */

bool
arc_legitimate_constant_p (machine_mode mode, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      if (flag_pic)
        {
          if (arc_legitimate_pic_addr_p (x))
            return true;
        }
      return arc_legitimate_constant_p (mode, XEXP (x, 0));

    case SYMBOL_REF:
      if (SYMBOL_REF_TLS_MODEL (x))
        return false;
      /* Fall through.  */
    case LABEL_REF:
      if (flag_pic)
        return false;
      /* Fall through.  */
    case CONST_INT:
    case CONST_DOUBLE:
      return true;

    case NEG:
      return arc_legitimate_constant_p (mode, XEXP (x, 0));

    case PLUS:
    case MINUS:
      {
        bool t1 = arc_legitimate_constant_p (mode, XEXP (x, 0));
        bool t2 = arc_legitimate_constant_p (mode, XEXP (x, 1));

        return (t1 && t2);
      }

    case CONST_VECTOR:
      switch (mode)
        {
        case E_V2HImode:
          return TARGET_PLUS_DMPY;
        case E_V2SImode:
        case E_V4HImode:
          return TARGET_PLUS_QMACW;
        default:
          return false;
        }

    case UNSPEC:
      switch (XINT (x, 1))
        {
        case UNSPEC_TLS_GD:
        case UNSPEC_TLS_OFF:
        case UNSPEC_TLS_IE:
          return true;
        default:
          /* Any other unspecs ending up here are PIC-related, hence the
             constant PIC address checking above returned false.  */
          return false;
        }
      /* Fall through.  */

    default:
      fatal_insn ("unrecognized supposed constant", x);
    }

  gcc_unreachable ();
}

static bool
arc_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  if (RTX_OK_FOR_BASE_P (x, strict))
    return true;
  if (legitimate_offset_address_p (mode, x, TARGET_INDEXED_LOADS, strict))
    return true;
  if (legitimate_scaled_address_p (mode, x, strict))
    return true;
  if (legitimate_small_data_address_p (x, mode))
    return true;
  if (GET_CODE (x) == CONST_INT && LARGE_INT (INTVAL (x)))
    return true;

  /* When we compile for size avoid const (@sym + offset)
     addresses.  */
  if (!flag_pic && optimize_size && !reload_completed
      && (GET_CODE (x) == CONST)
      && (GET_CODE (XEXP (x, 0)) == PLUS)
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)
      && SYMBOL_REF_TLS_MODEL (XEXP (XEXP (x, 0), 0)) == 0
      && !SYMBOL_REF_FUNCTION_P (XEXP (XEXP (x, 0), 0)))
    {
      rtx addend = XEXP (XEXP (x, 0), 1);
      gcc_assert (CONST_INT_P (addend));
      HOST_WIDE_INT offset = INTVAL (addend);

      /* Allow addresses having a large offset to pass.  Anyhow they
         will end in a limm.  */
      return !(offset > -1024 && offset < 1020);
    }

  if ((GET_MODE_SIZE (mode) != 16) && CONSTANT_P (x))
    {
      return arc_legitimate_constant_p (mode, x);
    }
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == PRE_INC
       || GET_CODE (x) == POST_DEC || GET_CODE (x) == POST_INC)
      && RTX_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return true;
  /* We're restricted here by the `st' insn.  */
  if ((GET_CODE (x) == PRE_MODIFY || GET_CODE (x) == POST_MODIFY)
      && GET_CODE (XEXP ((x), 1)) == PLUS
      && rtx_equal_p (XEXP ((x), 0), XEXP (XEXP (x, 1), 0))
      && legitimate_offset_address_p (QImode, XEXP (x, 1),
                                      TARGET_AUTO_MODIFY_REG, strict))
    return true;
  return false;
}
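
/* Illustrative summary (a restatement of the checks above, not exhaustive)
   of the RTL address shapes accepted:

     (reg)                                 base register
     (plus (reg) (const_int))              offset addressing
     (plus (reg) (reg))                    indexed, if TARGET_INDEXED_LOADS
     scaled-index forms                    via legitimate_scaled_address_p
     small-data symbol references          via legitimate_small_data_address_p
     (pre_inc/pre_dec/post_inc/post_dec (reg))
     (pre_modify/post_modify (reg) (plus (reg) ...))  limited by the `st' insn
     a 32-bit constant                     taken as a limm address.  */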

/* Return true iff ADDR (a legitimate address expression)
   has an effect that depends on the machine mode it is used for.  */

static bool
arc_mode_dependent_address_p (const_rtx addr, addr_space_t)
{
  /* SYMBOL_REF is not mode dependent: it is either a small data reference,
     which is valid for loads and stores, or a limm offset, which is valid for
     loads.  Scaled indices are scaled by the access mode.  */
  if (GET_CODE (addr) == PLUS
      && GET_CODE (XEXP ((addr), 0)) == MULT)
    return true;
  return false;
}

/* Determine if it's legal to put X into the constant pool.  */

static bool
arc_cannot_force_const_mem (machine_mode mode, rtx x)
{
  return !arc_legitimate_constant_p (mode, x);
}

/* IDs for all the ARC builtins.  */

enum arc_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, MASK)	\
    ARC_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    ARC_BUILTIN_COUNT
  };

struct GTY(()) arc_builtin_description
{
  enum insn_code icode;
  int n_args;
  tree fndecl;
};

static GTY(()) struct arc_builtin_description
arc_bdesc[ARC_BUILTIN_COUNT] =
{
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, MASK)		\
  { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
};

/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO.  Return LO.  */

static char*
arc_tolower (char *lo, const char *up)
{
  char *lo0 = lo;

  for (; *up; up++, lo++)
    *lo = TOLOWER (*up);

  *lo = '\0';

  return lo0;
}

/* Implement `TARGET_BUILTIN_DECL'.  */

static tree
arc_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (id < ARC_BUILTIN_COUNT)
    return arc_bdesc[id].fndecl;

  return error_mark_node;
}

static void
arc_init_builtins (void)
{
  tree V4HI_type_node;
  tree V2SI_type_node;
  tree V2HI_type_node;

  /* Vector types based on HS SIMD elements.  */
  V4HI_type_node = build_vector_type_for_mode (intHI_type_node, V4HImode);
  V2SI_type_node = build_vector_type_for_mode (intSI_type_node, V2SImode);
  V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);

  tree pcvoid_type_node
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_CONST));
  tree V8HI_type_node = build_vector_type_for_mode (intHI_type_node,
						    V8HImode);

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_pcvoid_int
    = build_function_type_list (integer_type_node, pcvoid_type_node,
				integer_type_node, NULL_TREE);
  tree void_ftype_usint_usint
    = build_function_type_list (void_type_node, long_unsigned_type_node,
				long_unsigned_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree usint_ftype_usint
    = build_function_type_list (long_unsigned_type_node,
				long_unsigned_type_node, NULL_TREE);
  tree void_ftype_usint
    = build_function_type_list (void_type_node, long_unsigned_type_node,
				NULL_TREE);
  tree int_ftype_void
    = build_function_type_list (integer_type_node, void_type_node,
				NULL_TREE);
  tree void_ftype_int
    = build_function_type_list (void_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				NULL_TREE);

  /* Old ARC SIMD types.  */
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_int
    = build_function_type_list (V8HI_type_node, V8HI_type_node,
				integer_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_int_int
    = build_function_type_list (V8HI_type_node, V8HI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);
  tree void_ftype_v8hi_int_int
    = build_function_type_list (void_type_node, V8HI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);
  tree void_ftype_v8hi_int_int_int
    = build_function_type_list (void_type_node, V8HI_type_node,
				integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v8hi_ftype_int_int
    = build_function_type_list (V8HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree void_ftype_int_int
    = build_function_type_list (void_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node,
				NULL_TREE);
  /* ARCv2 SIMD types.  */
  tree long_ftype_v4hi_v4hi
    = build_function_type_list (long_long_integer_type_node,
				V4HI_type_node, V4HI_type_node, NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2si_ftype_v2hi_v2hi
    = build_function_type_list (V2SI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2si_ftype_v2si_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_v4hi
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, V4HI_type_node, NULL_TREE);
  tree long_ftype_v2si_v2hi
    = build_function_type_list (long_long_integer_type_node,
				V2SI_type_node, V2HI_type_node, NULL_TREE);

  /* Add the builtins.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, MASK)			\
  {									\
    int id = ARC_BUILTIN_ ## NAME;					\
    const char *Name = "__builtin_arc_" #NAME;				\
    char *name = (char*) alloca (1 + strlen (Name));			\
									\
    gcc_assert (id < ARC_BUILTIN_COUNT);				\
    if (MASK)								\
      arc_bdesc[id].fndecl						\
	= add_builtin_function (arc_tolower(name, Name), TYPE, id,	\
				BUILT_IN_MD, NULL, NULL_TREE);		\
  }
#include "builtins.def"
#undef DEF_BUILTIN
}
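
/* Illustrative example (a hypothetical builtins.def entry, for exposition
   only): a line such as

     DEF_BUILTIN (NOP, 0, void_ftype_void, nopv, 1)

   would expand in the enum to ARC_BUILTIN_NOP, in arc_bdesc to
   { CODE_FOR_nopv, 0, NULL_TREE }, and in arc_init_builtins to a call
   registering "__builtin_arc_nop" with type void_ftype_void.  */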
6954
6955/* Helper to expand __builtin_arc_aligned (void* val, int
6956 alignval). */
6957
6958static rtx
6959arc_expand_builtin_aligned (tree exp)
6960{
6961 tree arg0 = CALL_EXPR_ARG (exp, 0);
6962 tree arg1 = CALL_EXPR_ARG (exp, 1);
6963 fold (arg1);
6964 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6965 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6966
6967 if (!CONST_INT_P (op1))
6968 {
6969 /* If we can't fold the alignment to a constant integer
6970 whilst optimizing, this is probably a user error. */
6971 if (optimize)
6972 warning (0, "%<__builtin_arc_aligned%> with non-constant alignment");
6973 }
6974 else
6975 {
6976 HOST_WIDE_INT alignTest = INTVAL (op1);
6977 /* Check alignTest is positive, and a power of two. */
6978 if (alignTest <= 0 || alignTest != (alignTest & -alignTest))
6979 {
6980 error ("invalid alignment value for %<__builtin_arc_aligned%>");
6981 return NULL_RTX;
6982 }
6983
6984 if (CONST_INT_P (op0))
6985 {
6986 HOST_WIDE_INT pnt = INTVAL (op0);
6987
6988 if ((pnt & (alignTest - 1)) == 0)
6989 return const1_rtx;
6990 }
6991 else
6992 {
6993 unsigned align = get_pointer_alignment (arg0);
6994 unsigned numBits = alignTest * BITS_PER_UNIT;
6995
6996 if (align && align >= numBits)
6997 return const1_rtx;
6998 /* Another attempt to ascertain alignment. Check the type
6999 we are pointing to. */
7000 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7001 && TYPE_ALIGN (TREE_TYPE (TREE_TYPE (arg0))) >= numBits)
7002 return const1_rtx;
7003 }
7004 }
7005
7006 /* Default to false. */
7007 return const0_rtx;
7008}
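/* Usage sketch (hypothetical user code, not part of this file):

     int aligned4 (void *p)
     {
       return __builtin_arc_aligned (p, 4);
     }

   evaluates to 1 when the pointer's known alignment is at least four
   bytes and to 0 otherwise; a non-constant alignment argument is
   warned about when optimizing and conservatively yields 0.  */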
7009
7010/* Helper arc_expand_builtin, generates a pattern for the given icode
7011 and arguments. */
7012
7013static rtx_insn *
7014apply_GEN_FCN (enum insn_code icode, rtx *arg)
7015{
7016 switch (insn_data[icode].n_generator_args)
7017 {
7018 case 0:
7019 return GEN_FCN (icode) ();
7020 case 1:
7021 return GEN_FCN (icode) (arg[0]);
7022 case 2:
7023 return GEN_FCN (icode) (arg[0], arg[1]);
7024 case 3:
7025 return GEN_FCN (icode) (arg[0], arg[1], arg[2]);
7026 case 4:
7027 return GEN_FCN (icode) (arg[0], arg[1], arg[2], arg[3]);
7028 case 5:
7029 return GEN_FCN (icode) (arg[0], arg[1], arg[2], arg[3], arg[4]);
7030 default:
7031 gcc_unreachable ();
7032 }
7033}
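/* For example, a regular builtin whose insn pattern has one output
   and two inputs reaches this helper with n_generator_args == 3 and
   is emitted as GEN_FCN (icode) (arg[0], arg[1], arg[2]); the arity
   always comes from insn_data, never from the call expression.  */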
7034
7035/* Expand an expression EXP that calls a built-in function,
7036 with result going to TARGET if that's convenient
7037 (and in mode MODE if that's convenient).
7038 SUBTARGET may be used as the target for computing one of EXP's operands.
7039 IGNORE is nonzero if the value is to be ignored. */
7040
7041static rtx
7042arc_expand_builtin (tree exp,
7043 rtx target,
7044 rtx subtarget ATTRIBUTE_UNUSED,
7045 machine_mode mode ATTRIBUTE_UNUSED,
7046 int ignore ATTRIBUTE_UNUSED)
7047{
7048 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7049 unsigned int id = DECL_MD_FUNCTION_CODE (fndecl);
7050 const struct arc_builtin_description *d = &arc_bdesc[id];
7051 int i, j, n_args = call_expr_nargs (exp);
7052 rtx pat = NULL_RTX;
7053 rtx xop[5];
7054 enum insn_code icode = d->icode;
7055 machine_mode tmode = insn_data[icode].operand[0].mode;
7056 int nonvoid;
7057 tree arg0;
7058 tree arg1;
7059 tree arg2;
7060 tree arg3;
7061 rtx op0;
7062 rtx op1;
7063 rtx op2;
7064 rtx op3;
7065 rtx op4;
7066 machine_mode mode0;
7067 machine_mode mode1;
7068 machine_mode mode2;
7069 machine_mode mode3;
7070 machine_mode mode4;
7071
7072 if (id >= ARC_BUILTIN_COUNT)
7073 internal_error ("bad builtin fcode");
7074
7075 /* 1st part: Expand special builtins. */
7076 switch (id)
7077 {
7078 case ARC_BUILTIN_NOP:
7079 emit_insn (gen_nopv ());
7080 return NULL_RTX;
7081
7082 case ARC_BUILTIN_RTIE:
7083 case ARC_BUILTIN_SYNC:
7084 case ARC_BUILTIN_BRK:
7085 case ARC_BUILTIN_SWI:
7086 case ARC_BUILTIN_UNIMP_S:
7087 gcc_assert (icode != 0);
7088 emit_insn (GEN_FCN (icode) (const1_rtx));
7089 return NULL_RTX;
7090
7091 case ARC_BUILTIN_ALIGNED:
7092 return arc_expand_builtin_aligned (exp);
7093
7094 case ARC_BUILTIN_CLRI:
7095 target = gen_reg_rtx (SImode);
7096 emit_insn (gen_clri (target, const1_rtx));
526b7aee
SV
7097 return target;
7098
7099 case ARC_BUILTIN_TRAP_S:
7100 case ARC_BUILTIN_SLEEP:
7101 arg0 = CALL_EXPR_ARG (exp, 0);
7102 fold (arg0);
7103 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7104
7105 gcc_assert (icode != 0);
7106 emit_insn (GEN_FCN (icode) (op0));
7107 return NULL_RTX;
7108
7109 case ARC_BUILTIN_VDORUN:
7110 case ARC_BUILTIN_VDIRUN:
7111 arg0 = CALL_EXPR_ARG (exp, 0);
7112 arg1 = CALL_EXPR_ARG (exp, 1);
7113 op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
7114 op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
7115
7116 target = gen_rtx_REG (SImode, (id == ARC_BUILTIN_VDIRUN) ? 131 : 139);
7117
7118 mode0 = insn_data[icode].operand[1].mode;
7119 mode1 = insn_data[icode].operand[2].mode;
7120
7121 if (!insn_data[icode].operand[1].predicate (op0, mode0))
7122 op0 = copy_to_mode_reg (mode0, op0);
7123
7124 if (!insn_data[icode].operand[2].predicate (op1, mode1))
7125 op1 = copy_to_mode_reg (mode1, op1);
7126
7127 pat = GEN_FCN (icode) (target, op0, op1);
7128 if (!pat)
7129 return NULL_RTX;
7130
7131 emit_insn (pat);
7132 return NULL_RTX;
7133
7134 case ARC_BUILTIN_VDIWR:
7135 case ARC_BUILTIN_VDOWR:
7136 arg0 = CALL_EXPR_ARG (exp, 0);
7137 arg1 = CALL_EXPR_ARG (exp, 1);
7138 op0 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
7139 op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
7140
7141 if (!CONST_INT_P (op0)
7142 || !(UNSIGNED_INT3 (INTVAL (op0))))
7143 error ("operand 1 should be an unsigned 3-bit immediate");
7144
7145 mode1 = insn_data[icode].operand[1].mode;
7146
7147 if (icode == CODE_FOR_vdiwr_insn)
7148 target = gen_rtx_REG (SImode,
7149 ARC_FIRST_SIMD_DMA_CONFIG_IN_REG + INTVAL (op0));
7150 else if (icode == CODE_FOR_vdowr_insn)
7151 target = gen_rtx_REG (SImode,
7152 ARC_FIRST_SIMD_DMA_CONFIG_OUT_REG + INTVAL (op0));
7153 else
7154 gcc_unreachable ();
7155
7156 if (!insn_data[icode].operand[2].predicate (op1, mode1))
7157 op1 = copy_to_mode_reg (mode1, op1);
7158
7159 pat = GEN_FCN (icode) (target, op1);
7160 if (!pat)
7161 return NULL_RTX;
7162
7163 emit_insn (pat);
7164 return NULL_RTX;
7165
7166 case ARC_BUILTIN_VASRW:
7167 case ARC_BUILTIN_VSR8:
7168 case ARC_BUILTIN_VSR8AW:
7169 arg0 = CALL_EXPR_ARG (exp, 0);
7170 arg1 = CALL_EXPR_ARG (exp, 1);
7171 op0 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
7172 op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
7173 op2 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
7174
7175 target = gen_reg_rtx (V8HImode);
7176 mode0 = insn_data[icode].operand[1].mode;
7177 mode1 = insn_data[icode].operand[2].mode;
7178
7179 if (!insn_data[icode].operand[1].predicate (op0, mode0))
7180 op0 = copy_to_mode_reg (mode0, op0);
7181
7182 if ((!insn_data[icode].operand[2].predicate (op1, mode1))
7183 || !(UNSIGNED_INT3 (INTVAL (op1))))
7184 error ("operand 2 should be an unsigned 3-bit value (I0-I7)");
7185
7186 pat = GEN_FCN (icode) (target, op0, op1, op2);
7187 if (!pat)
7188 return NULL_RTX;
7189
7190 emit_insn (pat);
7191 return target;
7192
7193 case ARC_BUILTIN_VLD32WH:
7194 case ARC_BUILTIN_VLD32WL:
7195 case ARC_BUILTIN_VLD64:
7196 case ARC_BUILTIN_VLD32:
7197 rtx src_vreg;
7198 icode = d->icode;
7199 arg0 = CALL_EXPR_ARG (exp, 0); /* source vreg. */
7200 arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7. */
7201 arg2 = CALL_EXPR_ARG (exp, 2); /* u8. */
7202
7203 src_vreg = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
7204 op0 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
7205 op1 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL);
7206 op2 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
7207
7208 /* target <- src vreg. */
7209 emit_insn (gen_move_insn (target, src_vreg));
7210
7211 /* target <- vec_concat: target, mem (Ib, u8). */
7212 mode0 = insn_data[icode].operand[3].mode;
7213 mode1 = insn_data[icode].operand[1].mode;
7214
7215 if ((!insn_data[icode].operand[3].predicate (op0, mode0))
7216 || !(UNSIGNED_INT3 (INTVAL (op0))))
7217 error ("operand 1 should be an unsigned 3-bit value (I0-I7)");
7218
7219 if ((!insn_data[icode].operand[1].predicate (op1, mode1))
7220 || !(UNSIGNED_INT8 (INTVAL (op1))))
7221 error ("operand 2 should be an unsigned 8-bit value");
7222
7223 pat = GEN_FCN (icode) (target, op1, op2, op0);
7224 if (!pat)
7225 return NULL_RTX;
7226
7227 emit_insn (pat);
7228 return target;
7229
7230 case ARC_BUILTIN_VLD64W:
7231 case ARC_BUILTIN_VLD128:
7232 arg0 = CALL_EXPR_ARG (exp, 0); /* dest vreg. */
7233 arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7. */
7234
7235 op0 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
7236 op1 = expand_expr (arg0, NULL_RTX, SImode, EXPAND_NORMAL);
7237 op2 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
7238
7239 /* target <- src vreg. */
7240 target = gen_reg_rtx (V8HImode);
7241
7242 /* target <- vec_concat: target, mem (Ib, u8). */
7243 mode0 = insn_data[icode].operand[1].mode;
7244 mode1 = insn_data[icode].operand[2].mode;
7245 mode2 = insn_data[icode].operand[3].mode;
7246
7247 if ((!insn_data[icode].operand[2].predicate (op1, mode1))
7248 || !(UNSIGNED_INT3 (INTVAL (op1))))
7249 error ("operand 1 should be an unsigned 3-bit value (I0-I7)");
7250
7251 if ((!insn_data[icode].operand[3].predicate (op2, mode2))
7252 || !(UNSIGNED_INT8 (INTVAL (op2))))
7253 error ("operand 2 should be an unsigned 8-bit value");
7254
7255 pat = GEN_FCN (icode) (target, op0, op1, op2);
7256
7257 if (!pat)
7258 return NULL_RTX;
526b7aee 7259
7260 emit_insn (pat);
7261 return target;
7262
7263 case ARC_BUILTIN_VST128:
7264 case ARC_BUILTIN_VST64:
7265 arg0 = CALL_EXPR_ARG (exp, 0); /* src vreg. */
7266 arg1 = CALL_EXPR_ARG (exp, 1); /* [I]0-7. */
7267 arg2 = CALL_EXPR_ARG (exp, 2); /* u8. */
7268
7269 op0 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
7270 op1 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
7271 op2 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL);
7272 op3 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
526b7aee
SV
7273
7274 mode0 = insn_data[icode].operand[0].mode;
7275 mode1 = insn_data[icode].operand[1].mode;
7276 mode2 = insn_data[icode].operand[2].mode;
7277 mode3 = insn_data[icode].operand[3].mode;
7278
7279 if ((!insn_data[icode].operand[1].predicate (op1, mode1))
7280 || !(UNSIGNED_INT3 (INTVAL (op1))))
7281 error ("operand 2 should be an unsigned 3-bit value (I0-I7)");
7282
7283 if ((!insn_data[icode].operand[2].predicate (op2, mode2))
7284 || !(UNSIGNED_INT8 (INTVAL (op2))))
7285 error ("operand 3 should be an unsigned 8-bit value");
7286
7287 if (!insn_data[icode].operand[3].predicate (op3, mode3))
7288 op3 = copy_to_mode_reg (mode3, op3);
7289
7290 pat = GEN_FCN (icode) (op0, op1, op2, op3);
7291 if (!pat)
7292 return NULL_RTX;
7293
7294 emit_insn (pat);
7295 return NULL_RTX;
7296
7297 case ARC_BUILTIN_VST16_N:
7298 case ARC_BUILTIN_VST32_N:
7299 arg0 = CALL_EXPR_ARG (exp, 0); /* source vreg. */
7300 arg1 = CALL_EXPR_ARG (exp, 1); /* u3. */
7301 arg2 = CALL_EXPR_ARG (exp, 2); /* [I]0-7. */
7302 arg3 = CALL_EXPR_ARG (exp, 3); /* u8. */
7303
7304 op0 = expand_expr (arg3, NULL_RTX, SImode, EXPAND_NORMAL);
7305 op1 = gen_rtx_REG (V8HImode, ARC_FIRST_SIMD_VR_REG);
7306 op2 = expand_expr (arg2, NULL_RTX, SImode, EXPAND_NORMAL);
7307 op3 = expand_expr (arg0, NULL_RTX, V8HImode, EXPAND_NORMAL);
7308 op4 = expand_expr (arg1, NULL_RTX, SImode, EXPAND_NORMAL);
7309
7310 mode0 = insn_data[icode].operand[0].mode;
7311 mode2 = insn_data[icode].operand[2].mode;
7312 mode3 = insn_data[icode].operand[3].mode;
7313 mode4 = insn_data[icode].operand[4].mode;
7314
7315 /* Do some correctness checks for the operands. */
7316 if ((!insn_data[icode].operand[0].predicate (op0, mode0))
7317 || !(UNSIGNED_INT8 (INTVAL (op0))))
7318 error ("operand 4 should be an unsigned 8-bit value (0-255)");
7319
7320 if ((!insn_data[icode].operand[2].predicate (op2, mode2))
7321 || !(UNSIGNED_INT3 (INTVAL (op2))))
7322 error ("operand 3 should be an unsigned 3-bit value (I0-I7)");
7323
7324 if (!insn_data[icode].operand[3].predicate (op3, mode3))
7325 op3 = copy_to_mode_reg (mode3, op3);
7326
7327 if ((!insn_data[icode].operand[4].predicate (op4, mode4))
7328 || !(UNSIGNED_INT3 (INTVAL (op4))))
7329 error ("operand 2 should be an unsigned 3-bit value (subreg 0-7)");
7330 else if (icode == CODE_FOR_vst32_n_insn
7331 && ((INTVAL (op4) % 2) != 0))
7332 error ("operand 2 should be an even 3-bit value (subreg 0,2,4,6)");
7333
7334 pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
7335 if (!pat)
7336 return NULL_RTX;
7337
7338 emit_insn (pat);
7339 return NULL_RTX;
7340
7341 default:
7342 break;
7343 }
7344
7345 /* 2nd part: Expand regular builtins. */
7346 if (icode == 0)
7347 internal_error ("bad builtin fcode");
7348
7349 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
7350 j = 0;
7351
7352 if (nonvoid)
7353 {
7354 if (target == NULL_RTX
7355 || GET_MODE (target) != tmode
7356 || !insn_data[icode].operand[0].predicate (target, tmode))
7357 {
7358 target = gen_reg_rtx (tmode);
7359 }
7360 xop[j++] = target;
7361 }
7362
7363 gcc_assert (n_args <= 4);
7364 for (i = 0; i < n_args; i++, j++)
7365 {
7366 tree arg = CALL_EXPR_ARG (exp, i);
7367 machine_mode mode = insn_data[icode].operand[j].mode;
7368 rtx op = expand_expr (arg, NULL_RTX, mode, EXPAND_NORMAL);
7369 machine_mode opmode = GET_MODE (op);
7370 char c = insn_data[icode].operand[j].constraint[0];
7371
7372 /* SIMD extension requires exact immediate operand match. */
7373 if ((id > ARC_BUILTIN_SIMD_BEGIN)
7374 && (id < ARC_BUILTIN_SIMD_END)
7375 && (c != 'v')
7376 && (c != 'r'))
7377 {
7378 if (!CONST_INT_P (op))
7379 error ("builtin requires an immediate for operand %d", j);
7380 switch (c)
7381 {
7382 case 'L':
7383 if (!satisfies_constraint_L (op))
7384 error ("operand %d should be a 6 bit unsigned immediate", j);
7385 break;
7386 case 'P':
7387 if (!satisfies_constraint_P (op))
7388 error ("operand %d should be an 8 bit unsigned immediate", j);
7389 break;
7390 case 'K':
7391 if (!satisfies_constraint_K (op))
7392 error ("operand %d should be a 3 bit unsigned immediate", j);
7393 break;
7394 default:
7395 error ("unknown builtin immediate operand type for operand %d",
7396 j);
7397 }
7398 }
7399
7400 if (CONST_INT_P (op))
7401 opmode = mode;
7402
7403 if ((opmode == SImode) && (mode == HImode))
7404 {
7405 opmode = HImode;
7406 op = gen_lowpart (HImode, op);
7407 }
7408
7409 /* In case the insn wants input operands in modes different from
7410 the result, abort. */
7411 gcc_assert (opmode == mode || opmode == VOIDmode);
7412
7413 if (!insn_data[icode].operand[i + nonvoid].predicate (op, mode))
7414 op = copy_to_mode_reg (mode, op);
7415
7416 xop[j] = op;
7417 }
7418
7419 pat = apply_GEN_FCN (icode, xop);
7420 if (pat == NULL_RTX)
7421 return NULL_RTX;
7422
7423 emit_insn (pat);
7424
7425 if (nonvoid)
7426 return target;
7427 else
7428 return const0_rtx;
7429}
7430
7431/* Return true if operands[opno] is a valid compile-time constant to be
7432 used as register number in the code for builtins. Else it flags an error
7433 and returns false. */
7434
7435bool
7436check_if_valid_regno_const (rtx *operands, int opno)
7437{
7438
7439 switch (GET_CODE (operands[opno]))
7440 {
7441 case SYMBOL_REF :
7442 case CONST :
7443 case CONST_INT :
7444 return true;
7445 default:
7446 error ("register number must be a compile-time constant. "
7447 "Try giving higher optimization levels");
7448 break;
7449 }
7450 return false;
7451}
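/* Illustration (hypothetical user code): for a builtin taking a core
   register number, e.g.

     int v = __builtin_arc_core_read (10);   // CONST_INT: accepted

   a CONST_INT (or a symbol that folds to one) passes, while a value
   that stays a run-time variable triggers the error above, hence the
   hint about higher optimization levels.  */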
7452
7453/* Return true if it is ok to make a tail-call to DECL. */
7454
7455static bool
7456 arc_function_ok_for_sibcall (tree decl,
7457 tree exp ATTRIBUTE_UNUSED)
7458{
7459 tree attrs = NULL_TREE;
7460
7461 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
7462 if (ARC_INTERRUPT_P (arc_compute_function_type (cfun)))
7463 return false;
7464
7465 if (decl)
7466 {
7467 attrs = TYPE_ATTRIBUTES (TREE_TYPE (decl));
7468
7469 if (lookup_attribute ("jli_always", attrs))
7470 return false;
7471 if (lookup_attribute ("jli_fixed", attrs))
7472 return false;
7473 if (lookup_attribute ("secure_call", attrs))
7474 return false;
7475 }
7476
7477 /* Everything else is ok. */
7478 return true;
7479}
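/* Illustration (hypothetical declaration): a call to

     void f (void) __attribute__ ((jli_always));

   is never turned into a sibcall; JLI and secure-call functions must
   go through their dedicated call sequences rather than a plain
   branch.  */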
7480
7481/* Output code to add DELTA to the first argument, and then jump
7482 to FUNCTION. Used for C++ multiple inheritance. */
7483
7484static void
7485arc_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
7486 HOST_WIDE_INT delta,
7487 HOST_WIDE_INT vcall_offset,
7488 tree function)
7489{
7490 const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk));
7491 int mi_delta = delta;
7492 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
7493 int shift = 0;
7494 int this_regno
7495 = aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function) ? 1 : 0;
7496 rtx fnaddr;
7497
7498 assemble_start_function (thunk, fnname);
7499
7500 if (mi_delta < 0)
7501 mi_delta = - mi_delta;
7502
7503 /* Add DELTA. When possible use a plain add, otherwise load it into
7504 a register first. */
7505
7506 while (mi_delta != 0)
7507 {
7508 if ((mi_delta & (3 << shift)) == 0)
7509 shift += 2;
7510 else
7511 {
7512 asm_fprintf (file, "\t%s\t%s, %s, %d\n",
7513 mi_op, reg_names[this_regno], reg_names[this_regno],
7514 mi_delta & (0xff << shift));
7515 mi_delta &= ~(0xff << shift);
7516 shift += 8;
7517 }
7518 }
7519
7520 /* If needed, add *(*THIS + VCALL_OFFSET) to THIS. */
7521 if (vcall_offset != 0)
7522 {
7523 /* ld r12,[this] --> temp = *this
7524 add r12,r12,vcall_offset --> temp = *(*this + vcall_offset)
7525 ld r12,[r12]
7526 add this,this,r12 --> this+ = *(*this + vcall_offset) */
7527 asm_fprintf (file, "\tld\t%s, [%s]\n",
7528 ARC_TEMP_SCRATCH_REG, reg_names[this_regno]);
7529 asm_fprintf (file, "\tadd\t%s, %s, " HOST_WIDE_INT_PRINT_DEC "\n",
7530 ARC_TEMP_SCRATCH_REG, ARC_TEMP_SCRATCH_REG, vcall_offset);
7531 asm_fprintf (file, "\tld\t%s, [%s]\n",
7532 ARC_TEMP_SCRATCH_REG, ARC_TEMP_SCRATCH_REG);
7533 asm_fprintf (file, "\tadd\t%s, %s, %s\n", reg_names[this_regno],
7534 reg_names[this_regno], ARC_TEMP_SCRATCH_REG);
7535 }
7536
7537 fnaddr = XEXP (DECL_RTL (function), 0);
7538
7539 if (arc_is_longcall_p (fnaddr))
7540 {
7541 if (flag_pic)
7542 {
7543 asm_fprintf (file, "\tld\t%s, [pcl, @",
7544 ARC_TEMP_SCRATCH_REG);
7545 assemble_name (file, XSTR (fnaddr, 0));
7546 fputs ("@gotpc]\n", file);
7547 asm_fprintf (file, "\tj\t[%s]", ARC_TEMP_SCRATCH_REG);
7548 }
7549 else
7550 {
7551 fputs ("\tj\t@", file);
7552 assemble_name (file, XSTR (fnaddr, 0));
7553 }
7554 }
7555 else
7556 {
7557 fputs ("\tb\t@", file);
7558 assemble_name (file, XSTR (fnaddr, 0));
7559 if (flag_pic)
7560 fputs ("@plt\n", file);
7561 }
7562 fputc ('\n', file);
7563 assemble_end_function (thunk, fnname);
7564}
7565
7566/* Return true if a 32 bit "long_call" should be generated for
7567 this calling SYM_REF. We generate a long_call if the function:
7568
7569 a. has an __attribute__((long_call))
7570 or b. the -mlong-calls command line switch has been specified
7571
7572 However we do not generate a long call if the function has an
7573 __attribute__ ((short_call)) or __attribute__ ((medium_call))
7574
7575 This function will be called by C fragments contained in the machine
7576 description file. */
7577
7578bool
7579arc_is_longcall_p (rtx sym_ref)
7580{
7581 if (GET_CODE (sym_ref) != SYMBOL_REF)
7582 return false;
7583
7584 return (SYMBOL_REF_LONG_CALL_P (sym_ref)
7585 || (TARGET_LONG_CALLS_SET
7586 && !SYMBOL_REF_SHORT_CALL_P (sym_ref)
7587 && !SYMBOL_REF_MEDIUM_CALL_P (sym_ref)));
7588
7589}
7590
7591/* Likewise for short calls. */
7592
7593bool
7594arc_is_shortcall_p (rtx sym_ref)
7595{
7596 if (GET_CODE (sym_ref) != SYMBOL_REF)
7597 return false;
7598
7599 return (SYMBOL_REF_SHORT_CALL_P (sym_ref)
7600 || (!TARGET_LONG_CALLS_SET && !TARGET_MEDIUM_CALLS
7601 && !SYMBOL_REF_LONG_CALL_P (sym_ref)
7602 && !SYMBOL_REF_MEDIUM_CALL_P (sym_ref)));
7603
7604}
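/* Illustration (hypothetical declarations):

     void far_fn (void) __attribute__ ((long_call));
     void near_fn (void) __attribute__ ((short_call));

   far_fn is always called through the long 32-bit sequence, near_fn
   through the short pc-relative one; plain declarations follow the
   -mlong-calls / -mmedium-calls defaults.  */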
7605
7606/* Worker function for TARGET_RETURN_IN_MEMORY. */
7607
7608static bool
7609arc_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7610{
7611 if (AGGREGATE_TYPE_P (type) || TREE_ADDRESSABLE (type))
7612 return true;
7613 else
7614 {
7615 HOST_WIDE_INT size = int_size_in_bytes (type);
7616 return (size == -1 || size > (TARGET_V2 ? 16 : 8));
7617 }
7618}
7619
7620 static bool
7621arc_pass_by_reference (cumulative_args_t, const function_arg_info &arg)
7622{
7623 return (arg.type != 0
7624 && (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST
7625 || TREE_ADDRESSABLE (arg.type)));
7626}
7627
7628/* Implement TARGET_CAN_USE_DOLOOP_P. */
7629
7630static bool
7631arc_can_use_doloop_p (const widest_int &,
7632 const widest_int &iterations_max,
7633 unsigned int loop_depth, bool entered_at_top)
7634{
7635 /* Considering limitations in the hardware, only use doloop
7636 for innermost loops which must be entered from the top. */
7637 if (loop_depth > 1 || !entered_at_top)
7638 return false;
7639
7640 /* Check for lp_count width boundary. */
7641 if (arc_lpcwidth != 32
7642 && (wi::gtu_p (iterations_max, ((1 << arc_lpcwidth) - 1))
7643 || wi::eq_p (iterations_max, 0)))
7644 return false;
7645 return true;
7646}
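/* A worked reading of the width check above (no extra constraint is
   added here): with -mlpc-width=16, a candidate loop whose maximum
   iteration count exceeds 65535, or which may iterate zero times, is
   rejected, since lp_count could not represent the count.  */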
7647
7648 /* Return NULL if INSN is valid within a low-overhead loop. Otherwise
7649 return why doloop cannot be applied. */
7650
7651static const char *
7652 arc_invalid_within_doloop (const rtx_insn *insn)
7653{
7654 if (CALL_P (insn))
7655 return "Function call in the loop.";
7656
7657 /* FIXME! add here all the ZOL exceptions. */
7658 return NULL;
7659}
7660
7661/* Return the next active insn, skipping inline assembly code. */
7662
7663static rtx_insn *
7664arc_active_insn (rtx_insn *insn)
7665{
7666 while (insn)
7667 {
7668 insn = NEXT_INSN (insn);
7669 if (insn == 0
7670 || (active_insn_p (insn)
7671 && NONDEBUG_INSN_P (insn)
7672 && !NOTE_P (insn)
7673 && GET_CODE (PATTERN (insn)) != UNSPEC_VOLATILE
7674 && GET_CODE (PATTERN (insn)) != PARALLEL))
7675 break;
7676 }
7677 return insn;
7678}
7679
7680/* Search for a sequence made out of two stores and a given number of
7681 loads, insert a nop if required. */
7682
7683static void
7684check_store_cacheline_hazard (void)
7685{
7686 rtx_insn *insn, *succ0, *insn1;
7687 bool found = false;
7688
7689 for (insn = get_insns (); insn; insn = arc_active_insn (insn))
7690 {
7691 succ0 = arc_active_insn (insn);
7692
7693 if (!succ0)
7694 return;
7695
7696 if (!single_set (insn))
7697 continue;
7698
7699 if (get_attr_type (insn) != TYPE_STORE)
7700 continue;
7701
7702 /* Found at least two consecutive stores. Goto the end of the
7703 store sequence. */
7704 for (insn1 = succ0; insn1; insn1 = arc_active_insn (insn1))
7705 if (!single_set (insn1) || get_attr_type (insn1) != TYPE_STORE)
7706 break;
7707
7708 /* Save where we are. */
7709 succ0 = insn1;
7710
7711 /* Now, check the next two instructions for the following cases:
7712 1. next instruction is a LD => insert 2 nops between store
7713 sequence and load.
7714 2. next-next instruction is a LD => insert 1 nop after the store
7715 sequence. */
7716 if (insn1 && single_set (insn1)
7717 && (get_attr_type (insn1) == TYPE_LOAD))
7718 {
7719 found = true;
7720 emit_insn_before (gen_nopv (), insn1);
7721 emit_insn_before (gen_nopv (), insn1);
7722 }
7723 else
7724 {
7725 if (insn1 && (get_attr_type (insn1) == TYPE_COMPARE))
7726 {
7727 /* REG_SAVE_NOTE is used by Haifa scheduler, we are in
7728 reorg, so it is safe to reuse it for avoiding the
7729 current compare insn to be part of a BRcc
7730 optimization. */
7731 add_reg_note (insn1, REG_SAVE_NOTE, GEN_INT (3));
7732 }
7733 insn1 = arc_active_insn (insn1);
7734 if (insn1 && single_set (insn1)
7735 && (get_attr_type (insn1) == TYPE_LOAD))
7736 {
7737 found = true;
7738 emit_insn_before (gen_nopv (), insn1);
7739 }
7740 }
7741
7742 if (found)
7743 {
7744 insn = insn1;
7745 found = false;
7746 }
7747 else
7748 insn = succ0;
7749 }
7750}
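/* A sketch of the transformation above on assembly (illustrative
   only; registers are arbitrary):

     st   r0,[r10]             st   r0,[r10]
     st   r1,[r11]      ==>    st   r1,[r11]
     ld   r2,[r12]             nop
                               nop
                               ld   r2,[r12]

   A single nop is inserted instead when some other instruction
   already sits between the last store and the load.  */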
7751
7752/* Return true if a load instruction (CONSUMER) uses the same address as a
7753 store instruction (PRODUCER). This function is used to avoid st/ld
7754 address hazard in ARC700 cores. */
7755
7756static bool
7757arc_store_addr_hazard_internal_p (rtx_insn* producer, rtx_insn* consumer)
7758{
7759 rtx in_set, out_set;
7760 rtx out_addr, in_addr;
7761
7762 if (!producer)
7763 return false;
7764
7765 if (!consumer)
7766 return false;
7767
7768 /* Peel the producer and the consumer for the address. */
7769 out_set = single_set (producer);
7770 if (out_set)
7771 {
7772 out_addr = SET_DEST (out_set);
7773 if (!out_addr)
7774 return false;
7775 if (GET_CODE (out_addr) == ZERO_EXTEND
7776 || GET_CODE (out_addr) == SIGN_EXTEND)
7777 out_addr = XEXP (out_addr, 0);
7778
7779 if (!MEM_P (out_addr))
7780 return false;
7781
7782 in_set = single_set (consumer);
7783 if (in_set)
7784 {
7785 in_addr = SET_SRC (in_set);
7786 if (!in_addr)
7787 return false;
7788 if (GET_CODE (in_addr) == ZERO_EXTEND
7789 || GET_CODE (in_addr) == SIGN_EXTEND)
7790 in_addr = XEXP (in_addr, 0);
7791
7792 if (!MEM_P (in_addr))
7793 return false;
7794 /* Get rid of the MEM and check if the addresses are
7795 equivalent. */
7796 in_addr = XEXP (in_addr, 0);
7797 out_addr = XEXP (out_addr, 0);
7798
7799 return exp_equiv_p (in_addr, out_addr, 0, true);
7800 }
7801 }
7802 return false;
7803}
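/* E.g. (illustrative RTL): a producer like
     (set (mem:SI (reg:SI 10)) (reg:SI 0))
   followed by a consumer like
     (set (reg:SI 1) (mem:SI (reg:SI 10)))
   matches, because both MEM addresses are exp_equiv_p; differing
   addresses, or insns that are not single sets, do not.  */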
7804
7805 /* Return TRUE if we have a store address hazard. */
7806
7807bool
7808arc_store_addr_hazard_p (rtx_insn* producer, rtx_insn* consumer)
7809{
7810 if (TARGET_ARC700 && (arc_tune != ARC_TUNE_ARC7XX))
7811 return true;
7812 return arc_store_addr_hazard_internal_p (producer, consumer);
7813}
7814
7815/* The same functionality as arc_hazard. It is called in machine
7816 reorg before any other optimization. Hence, the NOP size is taken
7817 into account when doing branch shortening. */
7818
7819static void
7820workaround_arc_anomaly (void)
7821{
7822 rtx_insn *insn, *succ0;
7823
7824 /* For any architecture: call arc_hazard here. */
7825 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7826 {
7827 succ0 = next_real_insn (insn);
7828 if (arc_hazard (insn, succ0))
7829 {
7830 emit_insn_before (gen_nopv (), succ0);
7831 }
7832 }
7833
7834 if (!TARGET_ARC700)
7835 return;
7836
7837 /* Old A7 are suffering of a cache hazard, and we need to insert two
7838 nops between any sequence of stores and a load. */
7839 if (arc_tune != ARC_TUNE_ARC7XX)
7840 check_store_cacheline_hazard ();
7841}
7842
7843/* A callback for the hw-doloop pass. Called when a loop we have discovered
7844 turns out not to be optimizable; we have to split the loop_end pattern into
7845 a subtract and a test. */
7846
7847static void
7848hwloop_fail (hwloop_info loop)
7849{
7850 rtx test;
7851 rtx insn = loop->loop_end;
7852
7853 if (TARGET_DBNZ
7854 && (loop->length && (loop->length <= ARC_MAX_LOOP_LENGTH))
7855 && REG_P (loop->iter_reg))
7856 {
7857 /* TARGET_V2 core3 has dbnz instructions. */
7858 test = gen_dbnz (loop->iter_reg, loop->start_label);
7859 insn = emit_jump_insn_before (test, loop->loop_end);
7860 }
7861 else if (REG_P (loop->iter_reg) && (REGNO (loop->iter_reg) == LP_COUNT))
7862 {
7863 /* We have the lp_count as loop iterator, try to use it. */
7864 emit_insn_before (gen_loop_fail (), loop->loop_end);
7865 test = gen_rtx_NE (VOIDmode, gen_rtx_REG (CC_ZNmode, CC_REG),
7866 const0_rtx);
7867 test = gen_rtx_IF_THEN_ELSE (VOIDmode, test,
7868 gen_rtx_LABEL_REF (Pmode, loop->start_label),
7869 pc_rtx);
7870 insn = emit_jump_insn_before (gen_rtx_SET (pc_rtx, test),
7871 loop->loop_end);
7872 }
7873 else
7874 {
7875 emit_insn_before (gen_addsi3 (loop->iter_reg,
7876 loop->iter_reg,
7877 constm1_rtx),
7878 loop->loop_end);
7879 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
7880 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
7881 loop->iter_reg,
7882 const0_rtx,
7883 loop->start_label),
7884 loop->loop_end);
7885 }
7886 JUMP_LABEL (insn) = loop->start_label;
7887 LABEL_NUSES (loop->start_label)++;
7888 delete_insn (loop->loop_end);
7889}
7890
7891/* Return the next insn after INSN that is not a NOTE, but stop the
7892 search before we enter another basic block. This routine does not
7893 look inside SEQUENCEs. */
7894
7895static rtx_insn *
7896next_nonnote_insn_bb (rtx_insn *insn)
7897{
7898 while (insn)
7899 {
7900 insn = NEXT_INSN (insn);
7901 if (insn == 0 || !NOTE_P (insn))
7902 break;
7903 if (NOTE_INSN_BASIC_BLOCK_P (insn))
7904 return NULL;
7905 }
7906
7907 return insn;
7908}
7909
7910/* Optimize LOOP. */
7911
7912static bool
7913hwloop_optimize (hwloop_info loop)
7914{
7915 int i;
7916 edge entry_edge;
7917 basic_block entry_bb, bb;
7918 rtx iter_reg;
7919 rtx_insn *insn, *seq, *entry_after, *last_insn, *end_label;
7920 unsigned int length;
7921 bool need_fix = false;
7922 rtx lp_reg = gen_rtx_REG (SImode, LP_COUNT);
7923
7924 if (loop->depth > 1)
7925 {
7926 if (dump_file)
7927 fprintf (dump_file, ";; loop %d is not innermost\n",
7928 loop->loop_no);
7929 return false;
7930 }
7931
7932 if (!loop->incoming_dest)
7933 {
7934 if (dump_file)
7935 fprintf (dump_file, ";; loop %d has more than one entry\n",
7936 loop->loop_no);
7937 return false;
7938 }
7939
7940 if (loop->incoming_dest != loop->head)
7941 {
7942 if (dump_file)
7943 fprintf (dump_file, ";; loop %d is not entered from head\n",
7944 loop->loop_no);
7945 return false;
7946 }
7947
7948 if (loop->has_call || loop->has_asm)
7949 {
7950 if (dump_file)
7951 fprintf (dump_file, ";; loop %d has invalid insn\n",
7952 loop->loop_no);
7953 return false;
7954 }
7955
7956 /* Scan all the blocks to make sure they don't use iter_reg. */
7957 if (loop->iter_reg_used || loop->iter_reg_used_outside)
7958 {
7959 if (dump_file)
7960 fprintf (dump_file, ";; loop %d uses iterator\n",
7961 loop->loop_no);
7962 return false;
7963 }
7964
7965 /* Check if start_label appears before doloop_end. */
7966 length = 0;
7967 for (insn = loop->start_label;
7968 insn && insn != loop->loop_end;
7969 insn = NEXT_INSN (insn))
7970 {
7971 length += NONDEBUG_INSN_P (insn) ? get_attr_length (insn) : 0;
7972 if (JUMP_TABLES_IN_TEXT_SECTION
7973 && JUMP_TABLE_DATA_P (insn))
7974 {
7975 if (dump_file)
7976 fprintf (dump_file, ";; loop %d has a jump table\n",
7977 loop->loop_no);
7978 return false;
7979 }
7980 }
7981
7982 if (!insn)
7983 {
7984 if (dump_file)
7985 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
7986 loop->loop_no);
7987 return false;
7988 }
7989
7990 loop->length = length;
7991 if (loop->length > ARC_MAX_LOOP_LENGTH)
7992 {
7993 if (dump_file)
7994 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
7995 return false;
7996 }
7997 else if (!loop->length)
7998 {
7999 if (dump_file)
8000 fprintf (dump_file, ";; loop %d is empty\n", loop->loop_no);
8001 return false;
8002 }
8003
8004 /* Check if we use a register or not. */
8005 if (!REG_P (loop->iter_reg))
8006 {
8007 if (dump_file)
8008 fprintf (dump_file, ";; loop %d iterator is MEM\n",
8009 loop->loop_no);
8010 return false;
8011 }
8012
8022 /* Check if loop register is lpcount. */
8023 if (REG_P (loop->iter_reg) && (REGNO (loop->iter_reg)) != LP_COUNT)
8024 {
8025 if (dump_file)
8026 fprintf (dump_file, ";; loop %d doesn't use lp_count as loop"
8027 " iterator\n",
8028 loop->loop_no);
8029 /* This loop doesn't use the lp_count, check though if we can
8030 fix it. */
8031 if (TEST_HARD_REG_BIT (loop->regs_set_in_loop, LP_COUNT)
8032 /* In rare cases we may have LP_COUNT alive. */
8033 || (loop->incoming_src
8034 && REGNO_REG_SET_P (df_get_live_out (loop->incoming_src),
8035 LP_COUNT)))
8036 {
8037 if (dump_file)
8038 fprintf (dump_file, ";; loop %d, lp_count is alive", loop->loop_no);
8039 return false;
8040 }
8041 else
8042 need_fix = true;
8043 }
8044
8045 /* Check for a control-like instruction as the last instruction of a
8046 ZOL. */
8047 bb = loop->tail;
8048 last_insn = PREV_INSN (loop->loop_end);
8049
8050 while (1)
8051 {
8052 for (; last_insn != BB_HEAD (bb);
8053 last_insn = PREV_INSN (last_insn))
8054 if (NONDEBUG_INSN_P (last_insn))
8055 break;
8056
8057 if (last_insn != BB_HEAD (bb))
8058 break;
8059
8060 if (single_pred_p (bb)
8061 && single_pred_edge (bb)->flags & EDGE_FALLTHRU
8062 && single_pred (bb) != ENTRY_BLOCK_PTR_FOR_FN (cfun))
8063 {
8064 bb = single_pred (bb);
8065 last_insn = BB_END (bb);
8066 continue;
8067 }
8068 else
8069 {
8070 last_insn = NULL;
8071 break;
8072 }
8073 }
8074
8075 if (!last_insn)
8076 {
8077 if (dump_file)
8078 fprintf (dump_file, ";; loop %d has no last instruction\n",
8079 loop->loop_no);
8080 return false;
8081 }
8082
8083 if ((TARGET_ARC600_FAMILY || TARGET_HS)
8084 && INSN_P (last_insn)
8085 && (JUMP_P (last_insn) || CALL_P (last_insn)
8086 || GET_CODE (PATTERN (last_insn)) == SEQUENCE
8087 /* At this stage we can have (insn (clobber (mem:BLK
8088 (reg)))) instructions, ignore them. */
8089 || (GET_CODE (PATTERN (last_insn)) != CLOBBER
8090 && (get_attr_type (last_insn) == TYPE_BRCC
8091 || get_attr_type (last_insn) == TYPE_BRCC_NO_DELAY_SLOT))))
8092 {
8093 if (loop->length + 2 > ARC_MAX_LOOP_LENGTH)
8094 {
8095 if (dump_file)
8096 fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
8097 return false;
8098 }
8099 if (dump_file)
8100 fprintf (dump_file, ";; loop %d has a control like last insn; "
8101 "add a nop\n",
8102 loop->loop_no);
8103
8104 last_insn = emit_insn_after (gen_nopv (), last_insn);
8105 }
8106
8107 if (LABEL_P (last_insn))
8108 {
8109 if (dump_file)
8110 fprintf (dump_file, ";; loop %d has a label as last insn; "
8111 "add a nop\n",
8112 loop->loop_no);
8113 last_insn = emit_insn_after (gen_nopv (), last_insn);
8114 }
8115
8116 /* SAVE_NOTE is used by haifa scheduler. However, we are after it
8117 and we can use it to indicate the last ZOL instruction cannot be
8118 part of a delay slot. */
8119 add_reg_note (last_insn, REG_SAVE_NOTE, GEN_INT (2));
8120
8121 loop->last_insn = last_insn;
8122
8123 /* Get the loop iteration register. */
8124 iter_reg = loop->iter_reg;
8125
8126 gcc_assert (REG_P (iter_reg));
8127
8128 entry_edge = NULL;
8129
8130 FOR_EACH_VEC_SAFE_ELT (loop->incoming, i, entry_edge)
8131 if (entry_edge->flags & EDGE_FALLTHRU)
8132 break;
8133
8134 if (entry_edge == NULL)
8135 {
8136 if (dump_file)
8137 fprintf (dump_file, ";; loop %d has no fallthru edge jumping "
8138 "into the loop\n",
8139 loop->loop_no);
8140 return false;
8141 }
8142 /* The loop is good. */
8143 end_label = gen_label_rtx ();
8144 loop->end_label = end_label;
8145
8146 /* Place the zero_cost_loop_start instruction before the loop. */
8147 entry_bb = entry_edge->src;
8148
8149 start_sequence ();
8150
8151 if (need_fix)
8152 {
8153 /* The loop uses a R-register, but the lp_count is free, thus
8154 use lp_count. */
8155 emit_insn (gen_rtx_SET (lp_reg, iter_reg));
8156 SET_HARD_REG_BIT (loop->regs_set_in_loop, LP_COUNT);
8157 iter_reg = lp_reg;
8158 if (dump_file)
8159 {
8160 fprintf (dump_file, ";; fix loop %d to use lp_count\n",
8161 loop->loop_no);
8162 }
8163 }
8164
8165 insn = emit_insn (gen_arc_lp (loop->start_label,
8166 loop->end_label));
8167
8168 seq = get_insns ();
8169 end_sequence ();
8170
8171 entry_after = BB_END (entry_bb);
8172 if (!single_succ_p (entry_bb) || vec_safe_length (loop->incoming) > 1
8173 || !entry_after)
8174 {
8175 basic_block new_bb;
8176 edge e;
8177 edge_iterator ei;
8178
8179 emit_insn_before (seq, BB_HEAD (loop->head));
8180 seq = emit_label_before (gen_label_rtx (), seq);
8181 new_bb = create_basic_block (seq, insn, entry_bb);
8182 FOR_EACH_EDGE (e, ei, loop->incoming)
8183 {
8184 if (!(e->flags & EDGE_FALLTHRU))
8185 redirect_edge_and_branch_force (e, new_bb);
8186 else
8187 redirect_edge_succ (e, new_bb);
8188 }
8189
8190 make_edge (new_bb, loop->head, 0);
8191 }
8192 else
8193 {
8194#if 0
8195 while (DEBUG_INSN_P (entry_after)
8196 || (NOTE_P (entry_after)
8197 && NOTE_KIND (entry_after) != NOTE_INSN_BASIC_BLOCK
8198 /* Make sure we don't split a call and its corresponding
8199 CALL_ARG_LOCATION note. */
8200 && NOTE_KIND (entry_after) != NOTE_INSN_CALL_ARG_LOCATION))
8201 entry_after = NEXT_INSN (entry_after);
8202#endif
8203 entry_after = next_nonnote_insn_bb (entry_after);
8204
8205 gcc_assert (entry_after);
8206 emit_insn_before (seq, entry_after);
8207 }
8208
8209 /* Insert the loop end label before the last instruction of the
8210 loop. */
8211 emit_label_after (end_label, loop->last_insn);
8212 /* Make sure we mark the beginning and end labels as used. */
8213 LABEL_NUSES (loop->end_label)++;
8214 LABEL_NUSES (loop->start_label)++;
8215
8216 return true;
8217}
8218
8219/* A callback for the hw-doloop pass. This function examines INSN; if
8220 it is a loop_end pattern we recognize, return the reg rtx for the
8221 loop counter. Otherwise, return NULL_RTX. */
8222
8223static rtx
8224hwloop_pattern_reg (rtx_insn *insn)
8225{
8226 rtx reg;
8227
8228 if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
8229 return NULL_RTX;
8230
8231 reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
8232 if (!REG_P (reg))
8233 return NULL_RTX;
8234 return reg;
8235}
8236
8237static struct hw_doloop_hooks arc_doloop_hooks =
8238{
8239 hwloop_pattern_reg,
8240 hwloop_optimize,
8241 hwloop_fail
8242};
8243
8244/* Run from machine_dependent_reorg, this pass looks for doloop_end insns
8245 and tries to rewrite the RTL of these loops so that proper ARC
8246 hardware loops are generated. */
8247
8248static void
8249arc_reorg_loops (void)
8250{
8251 reorg_loops (true, &arc_doloop_hooks);
8252}
8253
8254/* Scan all calls and add symbols to be emitted in the jli section if
8255 needed. */
8256
8257static void
8258jli_call_scan (void)
8259{
8260 rtx_insn *insn;
8261
8262 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8263 {
8264 if (!CALL_P (insn))
8265 continue;
8266
8267 rtx pat = PATTERN (insn);
8268 if (GET_CODE (pat) == COND_EXEC)
8269 pat = COND_EXEC_CODE (pat);
8270 pat = XVECEXP (pat, 0, 0);
8271 if (GET_CODE (pat) == SET)
8272 pat = SET_SRC (pat);
8273
8274 pat = XEXP (XEXP (pat, 0), 0);
8275 if (GET_CODE (pat) == SYMBOL_REF
8276 && arc_is_jli_call_p (pat))
8277 arc_add_jli_section (pat);
8278 }
8279}
8280
8281/* Add padding if necessary to avoid a mispredict. A return could
8282 happen immediately after the function start. A call/return and
8283 return/return must be 6 bytes apart to avoid mispredict. */
8284
8285static void
8286pad_return (void)
8287{
8288 rtx_insn *insn;
8289 long offset;
8290
8291 if (!TARGET_PAD_RETURN)
8292 return;
8293
8294 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8295 {
8296 rtx_insn *prev0 = prev_active_insn (insn);
8297 bool wantlong = false;
8298
8299 if (!INSN_P (insn) || GET_CODE (PATTERN (insn)) != SIMPLE_RETURN)
8300 continue;
8301
8302 if (!prev0)
8303 {
8304 prev0 = emit_insn_before (gen_nopv (), insn);
8305 /* REG_SAVE_NOTE is used by Haifa scheduler, we are in reorg
8306 so it is safe to reuse it for forcing a particular length
8307 for an instruction. */
8308 add_reg_note (prev0, REG_SAVE_NOTE, GEN_INT (1));
8309 emit_insn_before (gen_nopv (), insn);
8310 continue;
8311 }
8312 offset = get_attr_length (prev0);
8313
8314 if (get_attr_length (prev0) == 2
8315 && get_attr_iscompact (prev0) != ISCOMPACT_TRUE)
8316 {
8317 /* Force long version of the insn. */
8318 wantlong = true;
8319 offset += 2;
8320 }
8321
8322 rtx_insn *prev = prev_active_insn (prev0);
8323 if (prev)
8324 offset += get_attr_length (prev);
8325
8326 prev = prev_active_insn (prev);
8327 if (prev)
8328 offset += get_attr_length (prev);
8329
8330 switch (offset)
8331 {
8332 case 2:
8333 prev = emit_insn_before (gen_nopv (), insn);
8334 add_reg_note (prev, REG_SAVE_NOTE, GEN_INT (1));
8335 break;
8336 case 4:
8337 emit_insn_before (gen_nopv (), insn);
8338 break;
8339 default:
8340 continue;
8341 }
8342
8343 if (wantlong)
8344 add_reg_note (prev0, REG_SAVE_NOTE, GEN_INT (1));
8345
8346 /* Emit a blockage to avoid delay slot scheduling. */
8347 emit_insn_before (gen_blockage (), insn);
8348 }
8349}
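/* A sketch of the padding above (illustrative only):

     bl   @foo                 bl   @foo
     j_s  [blink]       ==>    nop
                               nop
                               j_s  [blink]

   so a return never ends up within 6 bytes of a preceding call or
   return when this pass is enabled.  */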
8350
8351static int arc_reorg_in_progress = 0;
8352
8353/* ARC's machine specific reorg function. */
8354
8355static void
8356arc_reorg (void)
8357{
8358 rtx_insn *insn;
8359 rtx pattern;
8360 rtx pc_target;
8361 long offset;
8362 int changed;
8363
8364 cfun->machine->arc_reorg_started = 1;
8365 arc_reorg_in_progress = 1;
8366
8367 compute_bb_for_insn ();
8368
8369 df_analyze ();
8370
8371 /* Doloop optimization. */
8372 arc_reorg_loops ();
8373
8374 workaround_arc_anomaly ();
8375 jli_call_scan ();
8376 pad_return ();
8377
8378/* FIXME: should anticipate ccfsm action, generate special patterns for
8379 to-be-deleted branches that have no delay slot and have at least the
8380 length of the size increase forced on other insns that are conditionalized.
8381 This can also have an insn_list inside that enumerates insns which are
8382 not actually conditionalized because the destinations are dead in the
8383 not-execute case.
8384 Could also tag branches that we want to be unaligned if they get no delay
8385 slot, or even ones that we don't want to do delay slot scheduling for
8386 because we can unalign them.
8387
8388 However, there are cases when conditional execution is only possible after
8389 delay slot scheduling:
8390
8391 - If a delay slot is filled with a nocond/set insn from above, the previous
8392 basic block can become eligible for conditional execution.
8393 - If a delay slot is filled with a nocond insn from the fall-through path,
8394 the branch with that delay slot can become eligible for conditional
8395 execution (however, with the same sort of data flow analysis that dbr
8396 does, we could have figured out before that we don't need to
8397 conditionalize this insn.)
8398 - If a delay slot insn is filled with an insn from the target, the
8399 target label gets its uses decremented (even deleted if falling to zero),
8400 thus possibly creating more condexec opportunities there.
8401 Therefore, we should still be prepared to apply condexec optimization on
8402 non-prepared branches if the size increase of conditionalized insns is no
8403 more than the size saved from eliminating the branch. An invocation option
8404 could also be used to reserve a bit of extra size for condbranches so that
8405 this'll work more often (could also test in arc_reorg if the block is
8406 'close enough' to be eligible for condexec to make this likely, and
8407 estimate required size increase). */
8408 /* Generate BRcc insns, by combining cmp and Bcc insns wherever possible. */
8409 if (TARGET_NO_BRCC_SET)
8410 return;
8411
8412 do
8413 {
8414 init_insn_lengths();
8415 changed = 0;
8416
8417 if (optimize > 1 && !TARGET_NO_COND_EXEC)
8418 {
8419 arc_ifcvt ();
8420 unsigned int flags = pass_data_arc_ifcvt.todo_flags_finish;
8421 df_finish_pass ((flags & TODO_df_verify) != 0);
8422
8423 if (dump_file)
8424 {
8425 fprintf (dump_file, ";; After if conversion:\n\n");
8426 print_rtl (dump_file, get_insns ());
8427 }
8428 }
8429
8430 /* Call shorten_branches to calculate the insn lengths. */
8431 shorten_branches (get_insns());
8432 cfun->machine->ccfsm_current_insn = NULL_RTX;
8433
8434 if (!INSN_ADDRESSES_SET_P())
8435 fatal_error (input_location,
8436 "insn addresses not set after shorten_branches");
8437
8438 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8439 {
8440 rtx label;
8441 enum attr_type insn_type;
8442
8443 /* If a non-jump insn (or a casesi jump table), continue. */
8444 if (GET_CODE (insn) != JUMP_INSN ||
8445 GET_CODE (PATTERN (insn)) == ADDR_VEC
8446 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
8447 continue;
8448
8449 /* If we already have a brcc, note if it is suitable for brcc_s.
8450 Be a bit generous with the brcc_s range so that we can take
8451 advantage of any code shortening from delay slot scheduling. */
8452 if (recog_memoized (insn) == CODE_FOR_cbranchsi4_scratch)
8453 {
8454 rtx pat = PATTERN (insn);
8455 rtx op = XEXP (SET_SRC (XVECEXP (pat, 0, 0)), 0);
8456 rtx *ccp = &XEXP (XVECEXP (pat, 0, 1), 0);
8457
8458 offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
8459 if ((offset >= -140 && offset < 140)
8460 && rtx_equal_p (XEXP (op, 1), const0_rtx)
8461 && compact_register_operand (XEXP (op, 0), VOIDmode)
8462 && equality_comparison_operator (op, VOIDmode))
8463 PUT_MODE (*ccp, CC_Zmode);
8464 else if (GET_MODE (*ccp) == CC_Zmode)
8465 PUT_MODE (*ccp, CC_ZNmode);
8466 continue;
8467 }
8468 if ((insn_type = get_attr_type (insn)) == TYPE_BRCC
8469 || insn_type == TYPE_BRCC_NO_DELAY_SLOT)
8470 continue;
8471
8472 /* OK. so we have a jump insn. */
8473 /* We need to check that it is a bcc. */
8474 /* Bcc => set (pc) (if_then_else ) */
8475 pattern = PATTERN (insn);
8476 if (GET_CODE (pattern) != SET
8477 || GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE
8478 || ANY_RETURN_P (XEXP (SET_SRC (pattern), 1)))
8479 continue;
8480
8481 /* Now check if the jump is beyond the s9 range. */
8482 if (CROSSING_JUMP_P (insn))
8483 continue;
8484 offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
8485
8486 if (offset > 253 || offset < -254)
8487 continue;
8488
8489 pc_target = SET_SRC (pattern);
8490
8491 /* Avoid FPU instructions. */
8492 if ((GET_MODE (XEXP (XEXP (pc_target, 0), 0)) == CC_FPUmode)
8493 || (GET_MODE (XEXP (XEXP (pc_target, 0), 0)) == CC_FPUEmode)
8494 || (GET_MODE (XEXP (XEXP (pc_target, 0), 0)) == CC_FPU_UNEQmode))
8495 continue;
8496
8497 /* Now go back and search for the set cc insn. */
8498
8499 label = XEXP (pc_target, 1);
8500
8501 {
8502 rtx pat;
8503 rtx_insn *scan, *link_insn = NULL;
8504
8505 for (scan = PREV_INSN (insn);
8506 scan && GET_CODE (scan) != CODE_LABEL;
8507 scan = PREV_INSN (scan))
8508 {
8509 if (! INSN_P (scan))
8510 continue;
8511 pat = PATTERN (scan);
8512 if (GET_CODE (pat) == SET
8513 && cc_register (SET_DEST (pat), VOIDmode))
8514 {
8515 link_insn = scan;
8516 break;
8517 }
8518 }
8519 if (!link_insn)
8520 continue;
8521 else
8522 {
8523 /* Check if this is a data dependency. */
8524 rtx op, cc_clob_rtx, op0, op1, brcc_insn, note;
8525 rtx cmp0, cmp1;
8526
8527 /* Make sure we can use it for brcc insns. */
8528 if (find_reg_note (link_insn, REG_SAVE_NOTE, GEN_INT (3)))
8529 continue;
8530
8531 /* OK, this is the set cc; copy the args here. */
8532 op = XEXP (pc_target, 0);
8533
8534 op0 = cmp0 = XEXP (SET_SRC (pat), 0);
8535 op1 = cmp1 = XEXP (SET_SRC (pat), 1);
8536 if (GET_CODE (op0) == ZERO_EXTRACT
8537 && XEXP (op0, 1) == const1_rtx
8538 && (GET_CODE (op) == EQ
8539 || GET_CODE (op) == NE))
8540 {
8541 /* btst / b{eq,ne} -> bbit{0,1} */
8542 op0 = XEXP (cmp0, 0);
8543 op1 = XEXP (cmp0, 2);
8544 }
8545 else if (!register_operand (op0, VOIDmode)
8546 || !general_operand (op1, VOIDmode))
8547 continue;
8548 /* Be careful not to break what cmpsfpx_raw is
8549 trying to create for checking equality of
8550 single-precision floats. */
8551 else if (TARGET_SPFP
8552 && GET_MODE (op0) == SFmode
8553 && GET_MODE (op1) == SFmode)
8554 continue;
8555
8556 /* None of the two cmp operands should be set between the
8557 cmp and the branch. */
8558 if (reg_set_between_p (op0, link_insn, insn))
8559 continue;
8560
8561 if (reg_set_between_p (op1, link_insn, insn))
8562 continue;
8563
8564 /* Since the MODE check does not work, check that this is
8565 CC reg's last set location before insn, and also no
8566 instruction between the cmp and branch uses the
8567 condition codes. */
8568 if ((reg_set_between_p (SET_DEST (pat), link_insn, insn))
8569 || (reg_used_between_p (SET_DEST (pat), link_insn, insn)))
8570 continue;
8571
8572 /* CC reg should be dead after insn. */
8573 if (!find_regno_note (insn, REG_DEAD, CC_REG))
8574 continue;
8575
8576 op = gen_rtx_fmt_ee (GET_CODE (op),
8577 GET_MODE (op), cmp0, cmp1);
8578 /* If we create a LIMM where there was none before,
8579 we only benefit if we can avoid a scheduling bubble
8580 for the ARC600. Otherwise, we'd only forgo chances
8581 at short insn generation, and risk out-of-range
8582 branches. */
8583 if (!brcc_nolimm_operator (op, VOIDmode)
8584 && !long_immediate_operand (op1, VOIDmode)
8585 && (TARGET_ARC700
8586 || (TARGET_V2 && optimize_size)
8587 || next_active_insn (link_insn) != insn))
8588 continue;
8589
8590 /* Emit bbit / brcc (or brcc_s if possible).
8591 CC_Zmode indicates that brcc_s is possible. */
8592
8593 if (op0 != cmp0)
8594 cc_clob_rtx = gen_rtx_REG (CC_ZNmode, CC_REG);
8595 else if ((offset >= -140 && offset < 140)
8596 && rtx_equal_p (op1, const0_rtx)
8597 && compact_register_operand (op0, VOIDmode)
8598 && (GET_CODE (op) == EQ
8599 || GET_CODE (op) == NE))
8600 cc_clob_rtx = gen_rtx_REG (CC_Zmode, CC_REG);
8601 else
8602 cc_clob_rtx = gen_rtx_REG (CCmode, CC_REG);
8603
8604 brcc_insn
8605 = gen_rtx_IF_THEN_ELSE (VOIDmode, op, label, pc_rtx);
8606 brcc_insn = gen_rtx_SET (pc_rtx, brcc_insn);
8607 cc_clob_rtx = gen_rtx_CLOBBER (VOIDmode, cc_clob_rtx);
8608 brcc_insn
8609 = gen_rtx_PARALLEL
8610 (VOIDmode, gen_rtvec (2, brcc_insn, cc_clob_rtx));
8611 brcc_insn = emit_jump_insn_before (brcc_insn, insn);
8612
8613 JUMP_LABEL (brcc_insn) = JUMP_LABEL (insn);
8614 note = find_reg_note (insn, REG_BR_PROB, 0);
8615 if (note)
8616 {
8617 XEXP (note, 1) = REG_NOTES (brcc_insn);
8618 REG_NOTES (brcc_insn) = note;
8619 }
8620 note = find_reg_note (link_insn, REG_DEAD, op0);
8621 if (note)
8622 {
8623 remove_note (link_insn, note);
8624 XEXP (note, 1) = REG_NOTES (brcc_insn);
8625 REG_NOTES (brcc_insn) = note;
8626 }
8627 note = find_reg_note (link_insn, REG_DEAD, op1);
8628 if (note)
8629 {
8630 XEXP (note, 1) = REG_NOTES (brcc_insn);
8631 REG_NOTES (brcc_insn) = note;
8632 }
8633
8634 changed = 1;
8635
8636 /* Delete the bcc insn. */
8637 set_insn_deleted (insn);
8638
8639 /* Delete the cmp insn. */
8640 set_insn_deleted (link_insn);
8641
8642 }
8643 }
8644 }
8645 /* Clear out insn_addresses. */
8646 INSN_ADDRESSES_FREE ();
8647
8648 } while (changed);
8649
8650 if (INSN_ADDRESSES_SET_P())
8651 fatal_error (input_location, "insn addresses not freed");
8652
8653 arc_reorg_in_progress = 0;
8654}
8655
8656 /* Check if the operands are valid for BRcc.d generation
8657 Valid Brcc.d patterns are
8658 Brcc.d b, c, s9
8659 Brcc.d b, u6, s9
8660
8661 For cc={GT, LE, GTU, LEU}, u6=63 cannot be allowed,
8662 since they are encoded by the assembler as {GE, LT, HS, LS} 64, which
8663 does not have a delay slot
8664
8665 Assumed precondition: Second operand is either a register or a u6 value. */
8666
8667bool
8668valid_brcc_with_delay_p (rtx *operands)
8669{
8670 if (optimize_size && GET_MODE (operands[4]) == CC_Zmode)
8671 return false;
8672 return brcc_nolimm_operator (operands[0], VOIDmode);
8673}
8674
8675/* Implement TARGET_IN_SMALL_DATA_P. Return true if it would be safe to
8676 access DECL using %gp_rel(...)($gp). */
8677
8678static bool
8679arc_in_small_data_p (const_tree decl)
8680{
8681 HOST_WIDE_INT size;
8682 tree attr;
8683
8684 /* Only variables are going into small data area. */
8685 if (TREE_CODE (decl) != VAR_DECL)
8686 return false;
8687
8688 if (TARGET_NO_SDATA_SET)
8689 return false;
8690
8691 /* Disable sdata references to weak variables. */
8692 if (DECL_WEAK (decl))
8693 return false;
8694
8695 /* Don't put constants into the small data section: we want them to
8696 be in ROM rather than RAM. */
8697 if (TREE_READONLY (decl))
8698 return false;
8699
8700 /* To ensure -mvolatile-cache works ld.di does not have a
8701 gp-relative variant. */
8702 if (!TARGET_VOLATILE_CACHE_SET
8703 && TREE_THIS_VOLATILE (decl))
8704 return false;
8705
8706 /* Likewise for uncached data. */
8707 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
8708 if (lookup_attribute ("uncached", attr))
8709 return false;
8710
8711 /* Likewise for aux regs. */
8712 attr = DECL_ATTRIBUTES (decl);
8713 if (lookup_attribute ("aux", attr))
8714 return false;
8715
8716 if (DECL_SECTION_NAME (decl) != 0)
8717 {
8718 const char *name = DECL_SECTION_NAME (decl);
8719 if (strcmp (name, ".sdata") == 0
8720 || strcmp (name, ".sbss") == 0)
8721 return true;
8722 }
8723 /* If it's not public, there's no need to put it in the small data
8724 section. */
8725 else if (TREE_PUBLIC (decl))
8726 {
8727 size = int_size_in_bytes (TREE_TYPE (decl));
8728 return (size > 0 && size <= g_switch_value);
8729 }
8730 return false;
8731}
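/* Illustration (hypothetical declarations; assumes the default
   small-data threshold):

     int counter;              // public, small: eligible for sdata
     const int limit = 42;     // read-only: kept out, belongs in ROM
     int big[1024];            // larger than g_switch_value: kept out

   Objects the user already placed in .sdata or .sbss by section name
   are accepted regardless of size.  */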
8732
8733/* Return true if OP is an acceptable memory operand for ARCompact
8734 16-bit gp-relative load instructions.
8735*/
8736/* volatile cache option still to be handled. */
8737
8738bool
8739 compact_sda_memory_operand (rtx op, machine_mode mode, bool short_p)
8740{
8741 rtx addr;
8742 int size;
8743 int align = 0;
8744 int mask = 0;
8745
8746 /* Eliminate non-memory operations. */
8747 if (GET_CODE (op) != MEM)
8748 return false;
8749
8750 if (mode == VOIDmode)
8751 mode = GET_MODE (op);
8752
8753 size = GET_MODE_SIZE (mode);
8754
8755 /* dword operations really put out 2 instructions, so eliminate them. */
8756 if (size > UNITS_PER_WORD)
8757 return false;
8758
8759 /* Decode the address now. */
8760 addr = XEXP (op, 0);
8761
8762 if (!legitimate_small_data_address_p (addr, mode))
8763 return false;
8764
8765 if (!short_p || size == 1)
8766 return true;
8767
8768 /* Now check for the alignment, the short loads using gp require the
8769 addresses to be aligned. */
8770 align = get_symbol_alignment (addr);
8771 switch (mode)
8772 {
8773 case E_HImode:
8774 mask = 1;
8775 break;
8776 default:
8777 mask = 3;
8778 break;
8779 }
8780
8781 if (align && ((align & mask) == 0))
8782 return true;
8783 return false;
8784}
8785
8786/* Return TRUE if PAT is accessing an aux-reg. */
8787
8788static bool
8789arc_is_aux_reg_p (rtx pat)
8790{
8791 tree attrs = NULL_TREE;
8792 tree addr;
8793
8794 if (!MEM_P (pat))
8795 return false;
8796
8797 /* Get the memory attributes. */
8798 addr = MEM_EXPR (pat);
8799 if (!addr)
8800 return false;
8801
8802 /* Get the attributes. */
8803 if (TREE_CODE (addr) == VAR_DECL)
8804 attrs = DECL_ATTRIBUTES (addr);
8805 else if (TREE_CODE (addr) == MEM_REF)
8806 attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_OPERAND (addr, 0)));
8807 else
8808 return false;
8809
8810 if (lookup_attribute ("aux", attrs))
8811 return true;
8812 return false;
8813}
8814
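/* A sketch of the user-level view (hypothetical declaration; the aux
   register number 0x100 is made up for illustration):

       volatile int my_aux __attribute__((aux (0x100)));

   A MEM whose MEM_EXPR is such a VAR_DECL is recognized here, so that
   accesses can be rewritten as LR/SR instructions instead of ordinary
   loads and stores (see prepare_move_operands below).  */
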
/* Implement ASM_OUTPUT_ALIGNED_DECL_LOCAL.  */

void
arc_asm_output_aligned_decl_local (FILE * stream, tree decl, const char * name,
                                   unsigned HOST_WIDE_INT size,
                                   unsigned HOST_WIDE_INT align,
                                   unsigned HOST_WIDE_INT globalize_p)
{
  int in_small_data = arc_in_small_data_p (decl);
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);

  /* Don't output aux-reg symbols.  */
  if (mem != NULL_RTX && MEM_P (mem)
      && SYMBOL_REF_P (XEXP (mem, 0))
      && arc_is_aux_reg_p (mem))
    return;

  if (in_small_data)
    switch_to_section (get_named_section (NULL, ".sbss", 0));
  /* named_section (0,".sbss",0); */
  else
    switch_to_section (bss_section);

  if (globalize_p)
    (*targetm.asm_out.globalize_label) (stream, name);

  ASM_OUTPUT_ALIGN (stream, floor_log2 ((align) / BITS_PER_UNIT));
  ASM_OUTPUT_TYPE_DIRECTIVE (stream, name, "object");
  ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
  ASM_OUTPUT_LABEL (stream, name);

  if (size != 0)
    ASM_OUTPUT_SKIP (stream, size);
}

/* Return true if IN is a PLUS of a base register and a constant that
   is a multiple of 512, i.e. an address reload can keep intact.  */

static bool
arc_preserve_reload_p (rtx in)
{
  return (GET_CODE (in) == PLUS
          && RTX_OK_FOR_BASE_P (XEXP (in, 0), true)
          && CONST_INT_P (XEXP (in, 1))
          && !((INTVAL (XEXP (in, 1)) & 511)));
}

/* Implement TARGET_REGISTER_MOVE_COST.  */

static int
arc_register_move_cost (machine_mode,
                        reg_class_t from_class, reg_class_t to_class)
{
  /* Force an attempt to 'mov Dy,Dx' to spill.  */
  if ((TARGET_ARC700 || TARGET_EM) && TARGET_DPFP
      && from_class == DOUBLE_REGS && to_class == DOUBLE_REGS)
    return 100;

  return 2;
}

/* Emit code for an addsi3 instruction with OPERANDS.
   COND_P indicates if this will use conditional execution.
   Return the length of the instruction.
   If OUTPUT_P is false, don't actually output the instruction, just return
   its length.  */

int
arc_output_addsi (rtx *operands, bool cond_p, bool output_p)
{
  char format[35];

  int match = operands_match_p (operands[0], operands[1]);
  int match2 = operands_match_p (operands[0], operands[2]);
  int intval = (REG_P (operands[2]) ? 1
                : CONST_INT_P (operands[2]) ? INTVAL (operands[2]) : 0xbadc057);
  int neg_intval = -intval;
  int short_0 = satisfies_constraint_Rcq (operands[0]);
  int short_p = (!cond_p && short_0 && satisfies_constraint_Rcq (operands[1]));
  int ret = 0;

#define REG_H_P(OP) (REG_P (OP) && ((TARGET_V2 && REGNO (OP) <= 31 \
                                     && REGNO (OP) != 30) \
                                    || !TARGET_V2))

#define ADDSI_OUTPUT1(FORMAT) do {\
  if (output_p) \
    output_asm_insn (FORMAT, operands);\
  return ret; \
} while (0)
#define ADDSI_OUTPUT(LIST) do {\
  if (output_p) \
    sprintf LIST;\
  ADDSI_OUTPUT1 (format);\
  return ret; \
} while (0)

  /* First try to emit a 16 bit insn.  */
  ret = 2;
  if (!cond_p
      /* If we are actually about to output this insn, don't try a 16 bit
         variant if we already decided that we don't want that
         (I.e. we upsized this insn to align some following insn.)
         E.g. add_s r0,sp,70 is 16 bit, but add r0,sp,70 requires a LIMM -
         but add1 r0,sp,35 doesn't.  */
      && (!output_p || (get_attr_length (current_output_insn) & 2)))
    {
      /* Generate add_s a,b,c; add_s b,b,u7; add_s c,b,u3; add_s b,b,h
         patterns.  */
      if (short_p
          && ((REG_H_P (operands[2])
               && (match || satisfies_constraint_Rcq (operands[2])))
              || (CONST_INT_P (operands[2])
                  && ((unsigned) intval <= (match ? 127 : 7)))))
        ADDSI_OUTPUT1 ("add%? %0,%1,%2 ;1");

      /* Generate add_s b,b,h patterns.  */
      if (short_0 && match2 && REG_H_P (operands[1]))
        ADDSI_OUTPUT1 ("add%? %0,%2,%1 ;2");

      /* Generate add_s b,sp,u7; add_s sp,sp,u7 patterns.  */
      if ((short_0 || REGNO (operands[0]) == STACK_POINTER_REGNUM)
          && REGNO (operands[1]) == STACK_POINTER_REGNUM && !(intval & ~124))
        ADDSI_OUTPUT1 ("add%? %0,%1,%2 ;3");

      if ((short_p && (unsigned) neg_intval <= (match ? 31 : 7))
          || (REGNO (operands[0]) == STACK_POINTER_REGNUM
              && match && !(neg_intval & ~124)))
        ADDSI_OUTPUT1 ("sub%? %0,%1,%n2 ;4");

      /* Generate add_s h,h,s3 patterns.  */
      if (REG_H_P (operands[0]) && match && TARGET_V2
          && CONST_INT_P (operands[2]) && (intval >= -1 && intval <= 6))
        ADDSI_OUTPUT1 ("add%? %0,%1,%2 ;5");

      /* Generate add_s r0,b,u6; add_s r1,b,u6 patterns.  */
      if (TARGET_CODE_DENSITY && REG_P (operands[0]) && REG_P (operands[1])
          && ((REGNO (operands[0]) == 0) || (REGNO (operands[0]) == 1))
          && satisfies_constraint_Rcq (operands[1])
          && satisfies_constraint_L (operands[2]))
        ADDSI_OUTPUT1 ("add%? %0,%1,%2 ;6");
    }

  /* Now try to emit a 32 bit insn without long immediate.  */
  ret = 4;
  if (!match && match2 && REG_P (operands[1]))
    ADDSI_OUTPUT1 ("add%? %0,%2,%1");
  if (match || !cond_p)
    {
      int limit = (match && !cond_p) ? 0x7ff : 0x3f;
      int range_factor = neg_intval & intval;
      int shift;

      if (intval == (HOST_WIDE_INT) (HOST_WIDE_INT_M1U << 31))
        ADDSI_OUTPUT1 ("bxor%? %0,%1,31");

      /* If we can use a straight add / sub instead of a {add,sub}[123] of
         the same size, do so - the insn latency is lower.  */
      /* -0x800 is a 12-bit constant for add /add3 / sub / sub3, but
         0x800 is not.  */
      if ((intval >= 0 && intval <= limit)
          || (intval == -0x800 && limit == 0x7ff))
        ADDSI_OUTPUT1 ("add%? %0,%1,%2");
      else if ((intval < 0 && neg_intval <= limit)
               || (intval == 0x800 && limit == 0x7ff))
        ADDSI_OUTPUT1 ("sub%? %0,%1,%n2");
      shift = range_factor >= 8 ? 3 : (range_factor >> 1);
      gcc_assert (shift == 0 || shift == 1 || shift == 2 || shift == 3);
      gcc_assert ((((1 << shift) - 1) & intval) == 0);
      if (((intval < 0 && intval != -0x4000)
           /* sub[123] is slower than add_s / sub, only use it if it
              avoids a long immediate.  */
           && neg_intval <= limit << shift)
          || (intval == 0x4000 && limit == 0x7ff))
        ADDSI_OUTPUT ((format, "sub%d%%? %%0,%%1,%d",
                       shift, neg_intval >> shift));
      else if ((intval >= 0 && intval <= limit << shift)
               || (intval == -0x4000 && limit == 0x7ff))
        ADDSI_OUTPUT ((format, "add%d%%? %%0,%%1,%d", shift, intval >> shift));
    }
  /* Try to emit a 16 bit opcode with long immediate.  */
  ret = 6;
  if (short_p && match)
    ADDSI_OUTPUT1 ("add%? %0,%1,%2");

  /* We have to use a 32 bit opcode, and with a long immediate.  */
  ret = 8;
  ADDSI_OUTPUT1 (intval < 0 ? "sub%? %0,%1,%n2" : "add%? %0,%1,%2");
}

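/* Worked example (assumed semantics of the scaled-operand forms: addN
   shifts its third operand left by N before adding, e.g. "add2 a,b,u6"
   computes a = b + (u6 << 2)): for intval == 140 when the 12-bit range
   is unavailable (limit == 0x3f), a plain "add" cannot encode 140, but
   range_factor selects shift == 2 and the code emits "add2 %0,%1,35",
   since 35 << 2 == 140 - still a 4-byte insn, with no long immediate.  */
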
/* Emit code for a commutative_cond_exec instruction with OPERANDS.
   Return the length of the instruction.
   If OUTPUT_P is false, don't actually output the instruction, just return
   its length.  */

int
arc_output_commutative_cond_exec (rtx *operands, bool output_p)
{
  enum rtx_code commutative_op = GET_CODE (operands[3]);
  const char *pat = NULL;

  /* Canonical rtl should not have a constant in the first operand position.  */
  gcc_assert (!CONSTANT_P (operands[1]));

  switch (commutative_op)
    {
    case AND:
      if (satisfies_constraint_C1p (operands[2]))
        pat = "bmsk%? %0,%1,%Z2";
      else if (satisfies_constraint_C2p (operands[2]))
        {
          operands[2] = GEN_INT ((~INTVAL (operands[2])));
          pat = "bmskn%? %0,%1,%Z2";
        }
      else if (satisfies_constraint_Ccp (operands[2]))
        pat = "bclr%? %0,%1,%M2";
      else if (satisfies_constraint_CnL (operands[2]))
        pat = "bic%? %0,%1,%n2-1";
      break;
    case IOR:
      if (satisfies_constraint_C0p (operands[2]))
        pat = "bset%? %0,%1,%z2";
      break;
    case XOR:
      if (satisfies_constraint_C0p (operands[2]))
        pat = "bxor%? %0,%1,%z2";
      break;
    case PLUS:
      return arc_output_addsi (operands, true, output_p);
    default: break;
    }
  if (output_p)
    output_asm_insn (pat ? pat : "%O3.%d5 %0,%1,%2", operands);
  if (pat || REG_P (operands[2]) || satisfies_constraint_L (operands[2]))
    return 4;
  return 8;
}

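/* Sketch of the constant-to-instruction mapping above (operand values
   are hypothetical): an AND with 0x000000ff (a power of two minus one)
   becomes "bmsk r0,r1,7", an AND with ~0x00000010 (all bits set but
   one) becomes "bclr r0,r1,4", and an IOR with 0x00000020 (a single
   bit) becomes "bset r0,r1,5" - each a 4-byte insn, where the generic
   form with a long immediate would take 8 bytes.  */
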
/* Helper function of arc_expand_cpymem.  ADDR points to a chunk of memory.
   Emit code and return a potentially modified address such that offsets
   up to SIZE can be added to it to yield a legitimate address.
   If REUSE is set, ADDR is a register that may be modified.  */

static rtx
force_offsettable (rtx addr, HOST_WIDE_INT size, bool reuse)
{
  rtx base = addr;
  rtx offs = const0_rtx;

  if (GET_CODE (base) == PLUS)
    {
      offs = XEXP (base, 1);
      base = XEXP (base, 0);
    }
  if (!REG_P (base)
      || (REGNO (base) != STACK_POINTER_REGNUM
          && REGNO_PTR_FRAME_P (REGNO (base)))
      || !CONST_INT_P (offs) || !SMALL_INT (INTVAL (offs))
      || !SMALL_INT (INTVAL (offs) + size))
    {
      if (reuse)
        emit_insn (gen_add2_insn (addr, offs));
      else
        addr = copy_to_mode_reg (Pmode, addr);
    }
  return addr;
}

/* Like move_by_pieces, but take account of load latency, and actual
   offset ranges.  Return true on success.  */

bool
arc_expand_cpymem (rtx *operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];
  rtx dst_addr, src_addr;
  HOST_WIDE_INT size;
  int align = INTVAL (operands[3]);
  unsigned n_pieces;
  int piece = align;
  rtx store[2];
  rtx tmpx[2];
  int i;

  if (!CONST_INT_P (operands[2]))
    return false;
  size = INTVAL (operands[2]);
  /* move_by_pieces_ninsns is static, so we can't use it.  */
  if (align >= 4)
    {
      if (TARGET_LL64)
        n_pieces = (size + 4) / 8U + ((size >> 1) & 1) + (size & 1);
      else
        n_pieces = (size + 2) / 4U + (size & 1);
    }
  else if (align == 2)
    n_pieces = (size + 1) / 2U;
  else
    n_pieces = size;
  if (n_pieces >= (unsigned int) (optimize_size ? 3 : 15))
    return false;
  /* Force 32 bit aligned and larger datum to use 64 bit transfers, if
     possible.  */
  if (TARGET_LL64 && (piece >= 4) && (size >= 8))
    piece = 8;
  else if (piece > 4)
    piece = 4;
  dst_addr = force_offsettable (XEXP (operands[0], 0), size, 0);
  src_addr = force_offsettable (XEXP (operands[1], 0), size, 0);
  store[0] = store[1] = NULL_RTX;
  tmpx[0] = tmpx[1] = NULL_RTX;
  for (i = 0; size > 0; i ^= 1, size -= piece)
    {
      rtx tmp;
      machine_mode mode;

      while (piece > size)
        piece >>= 1;
      mode = smallest_int_mode_for_size (piece * BITS_PER_UNIT);
      /* If we don't re-use temporaries, the scheduler gets carried away,
         and the register pressure gets unnecessarily high.  */
      if (0 && tmpx[i] && GET_MODE (tmpx[i]) == mode)
        tmp = tmpx[i];
      else
        tmpx[i] = tmp = gen_reg_rtx (mode);
      dst_addr = force_offsettable (dst_addr, piece, 1);
      src_addr = force_offsettable (src_addr, piece, 1);
      if (store[i])
        emit_insn (store[i]);
      emit_move_insn (tmp, change_address (src, mode, src_addr));
      store[i] = gen_move_insn (change_address (dst, mode, dst_addr), tmp);
      dst_addr = plus_constant (Pmode, dst_addr, piece);
      src_addr = plus_constant (Pmode, src_addr, piece);
    }
  if (store[i])
    emit_insn (store[i]);
  if (store[i ^ 1])
    emit_insn (store[i ^ 1]);
  return true;
}

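/* Worked example (hypothetical call): copying 13 bytes with align == 4
   on a TARGET_LL64 core gives n_pieces == (13 + 4) / 8 + 0 + 1 == 3,
   below the speed limit of 15, and the loop then emits one DImode, one
   SImode and one QImode transfer (pieces 8, 4 and 1), alternating the
   two tmpx registers so that each load is separated from its store,
   hiding load latency.  */
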
/* Return true if PAT accesses a variable whose "aux" attribute carries
   an explicit register number, storing that number in *AUXR.  */

static bool
arc_get_aux_arg (rtx pat, int *auxr)
{
  tree attr, addr = MEM_EXPR (pat);
  if (TREE_CODE (addr) != VAR_DECL)
    return false;

  attr = DECL_ATTRIBUTES (addr);
  if (lookup_attribute ("aux", attr))
    {
      tree arg = TREE_VALUE (attr);
      if (arg)
        {
          *auxr = TREE_INT_CST_LOW (TREE_VALUE (arg));
          return true;
        }
    }

  return false;
}

/* Prepare operands for move in MODE.  Return true iff the move has
   been emitted.  */

bool
prepare_move_operands (rtx *operands, machine_mode mode)
{
  if ((MEM_P (operands[0]) || MEM_P (operands[1]))
      && SCALAR_INT_MODE_P (mode))
    {
      /* First handle the aux attribute.  */
      if (mode == SImode)
        {
          rtx tmp;
          int auxr = 0;
          if (MEM_P (operands[0]) && arc_is_aux_reg_p (operands[0]))
            {
              /* Save operation.  */
              if (arc_get_aux_arg (operands[0], &auxr))
                {
                  tmp = gen_reg_rtx (SImode);
                  emit_move_insn (tmp, GEN_INT (auxr));
                }
              else
                tmp = XEXP (operands[0], 0);

              operands[1] = force_reg (SImode, operands[1]);
              emit_insn (gen_rtx_UNSPEC_VOLATILE
                         (VOIDmode, gen_rtvec (2, operands[1], tmp),
                          VUNSPEC_ARC_SR));
              return true;
            }
          if (MEM_P (operands[1]) && arc_is_aux_reg_p (operands[1]))
            {
              if (arc_get_aux_arg (operands[1], &auxr))
                {
                  tmp = gen_reg_rtx (SImode);
                  emit_move_insn (tmp, GEN_INT (auxr));
                }
              else
                {
                  tmp = XEXP (operands[1], 0);
                  gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
                }
              /* Load operation.  */
              gcc_assert (REG_P (operands[0]));
              emit_insn (gen_rtx_SET (operands[0],
                                      gen_rtx_UNSPEC_VOLATILE
                                      (SImode, gen_rtvec (1, tmp),
                                       VUNSPEC_ARC_LR)));
              return true;
            }
        }
      /* Second, handle uncached memory.  */
      if (arc_is_uncached_mem_p (operands[0]))
        {
          if (!REG_P (operands[1]))
            operands[1] = force_reg (mode, operands[1]);
          emit_insn (gen_rtx_UNSPEC_VOLATILE
                     (VOIDmode, gen_rtvec (2, operands[0], operands[1]),
                      VUNSPEC_ARC_STDI));
          return true;
        }
      if (arc_is_uncached_mem_p (operands[1]))
        {
          rtx tmp = operands[0];

          if (MEM_P (operands[0]))
            tmp = gen_reg_rtx (mode);

          emit_insn (gen_rtx_SET
                     (tmp,
                      gen_rtx_UNSPEC_VOLATILE
                      (mode, gen_rtvec (1, operands[1]),
                       VUNSPEC_ARC_LDDI)));
          if (MEM_P (operands[0]))
            {
              operands[1] = tmp;
              return false;
            }
          return true;
        }
    }

  if (GET_CODE (operands[1]) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
      if (MEM_P (operands[0]))
        operands[1] = force_reg (mode, operands[1]);
      else if (model)
        operands[1] = arc_legitimize_tls_address (operands[1], model);
    }

  operands[1] = arc_legitimize_pic_address (operands[1]);

  /* Store instructions are limited, they only accept as address an
     immediate, a register or a register plus a small immediate.  */
  if (MEM_P (operands[0])
      && !move_dest_operand (operands[0], mode))
    {
      rtx tmp0 = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
      rtx tmp1 = change_address (operands[0], mode, tmp0);
      MEM_COPY_ATTRIBUTES (tmp1, operands[0]);
      operands[0] = tmp1;
    }

  /* Check if it is constant but it is not legitimized.  */
  if (CONSTANT_P (operands[1])
      && !arc_legitimate_constant_p (mode, operands[1]))
    operands[1] = force_reg (mode, XEXP (operands[1], 0));
  else if (MEM_P (operands[0])
           && ((CONSTANT_P (operands[1])
                && !satisfies_constraint_Cm3 (operands[1]))
               || MEM_P (operands[1])))
    operands[1] = force_reg (mode, operands[1]);

  return false;
}

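/* End-to-end sketch of the aux path above (hypothetical user code; the
   register number 0x100 is made up):

       volatile int ctl __attribute__((aux (0x100)));
       void f (void) { ctl = 1; }

   The store to ctl reaches this function as an SImode move to an
   aux-reg MEM; arc_get_aux_arg recovers 0x100, and the move is
   rewritten into a VUNSPEC_ARC_SR unspec that is later emitted as an
   "sr" instruction instead of an ordinary "st".  */
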
/* Output a library call to a function called FNAME that has been arranged
   to be local to any dso.  */

const char *
arc_output_libcall (const char *fname)
{
  unsigned len = strlen (fname);
  static char buf[64];

  gcc_assert (len < sizeof buf - 35);
  if (TARGET_LONG_CALLS_SET
      || (TARGET_MEDIUM_CALLS && arc_ccfsm_cond_exec_p ()))
    {
      if (flag_pic)
        sprintf (buf, "add r12,pcl,@%s@pcl\n\tjl%%!%%* [r12]", fname);
      else
        sprintf (buf, "jl%%! @%s", fname);
    }
  else
    sprintf (buf, "bl%%!%%* @%s", fname);
  return buf;
}

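/* For example (illustrative fname): for fname == "__udivsi3" the
   default case yields "bl%!%* @__udivsi3", while -mlong-calls with
   -fpic yields "add r12,pcl,@__udivsi3@pcl" followed by an indirect
   "jl [r12]", since a direct bl might then be out of range.  */
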
/* Return the SImode highpart of the DImode value IN.  */

rtx
disi_highpart (rtx in)
{
  return simplify_gen_subreg (SImode, in, DImode, TARGET_BIG_ENDIAN ? 0 : 4);
}

/* Return length adjustment for INSN.
   For ARC600:
   A write to a core reg greater or equal to 32 must not be immediately
   followed by a use.  Anticipate the length requirement to insert a nop
   between PRED and SUCC to prevent a hazard.  */

static int
arc600_corereg_hazard (rtx_insn *pred, rtx_insn *succ)
{
  if (!TARGET_ARC600)
    return 0;
  if (GET_CODE (PATTERN (pred)) == SEQUENCE)
    pred = as_a <rtx_sequence *> (PATTERN (pred))->insn (1);
  if (GET_CODE (PATTERN (succ)) == SEQUENCE)
    succ = as_a <rtx_sequence *> (PATTERN (succ))->insn (0);
  if (recog_memoized (pred) == CODE_FOR_mulsi_600
      || recog_memoized (pred) == CODE_FOR_umul_600
      || recog_memoized (pred) == CODE_FOR_mac_600
      || recog_memoized (pred) == CODE_FOR_mul64_600
      || recog_memoized (pred) == CODE_FOR_mac64_600
      || recog_memoized (pred) == CODE_FOR_umul64_600
      || recog_memoized (pred) == CODE_FOR_umac64_600)
    return 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (pred), NONCONST)
    {
      const_rtx x = *iter;
      switch (GET_CODE (x))
        {
        case SET: case POST_INC: case POST_DEC: case PRE_INC: case PRE_DEC:
          break;
        default:
          /* This is also fine for PRE/POST_MODIFY, because they
             contain a SET.  */
          continue;
        }
      rtx dest = XEXP (x, 0);
      /* Check if this sets an extension register.  N.B. we use 61 for the
         condition codes, which is definitely not an extension register.  */
      if (REG_P (dest) && REGNO (dest) >= 32 && REGNO (dest) < 61
          /* Check if the same register is used by the PAT.  */
          && (refers_to_regno_p
              (REGNO (dest),
               REGNO (dest) + (GET_MODE_SIZE (GET_MODE (dest)) + 3) / 4U,
               PATTERN (succ), 0)))
        return 4;
    }
  return 0;
}

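/* Illustrative hazard (made-up insns): on ARC600, a sequence such as

       mov r40,r2     ; write to extension core reg 40
       add r3,r40,1   ; immediate read of the same reg

   needs a nop between the two instructions; the 4 returned above
   accounts for that extra insn when lengths are computed.  */
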
/* Given a rtx, check if it is an assembly instruction or not.  */

static int
arc_asm_insn_p (rtx x)
{
  int i, j;

  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case ASM_OPERANDS:
    case ASM_INPUT:
      return 1;

    case SET:
      return arc_asm_insn_p (SET_SRC (x));

    case PARALLEL:
      j = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        j += arc_asm_insn_p (XVECEXP (x, 0, i));
      if (j > 0)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}

/* For ARC600:
   A write to a core reg greater or equal to 32 must not be immediately
   followed by a use.  Anticipate the length requirement to insert a nop
   between PRED and SUCC to prevent a hazard.  */

int
arc_hazard (rtx_insn *pred, rtx_insn *succ)
{
  if (!pred || !INSN_P (pred) || !succ || !INSN_P (succ))
    return 0;

  if (TARGET_ARC600)
    return arc600_corereg_hazard (pred, succ);

  return 0;
}

/* Return length adjustment for INSN.  */

int
arc_adjust_insn_length (rtx_insn *insn, int len, bool)
{
  if (!INSN_P (insn))
    return len;
  /* We already handle sequences by ignoring the delay sequence flag.  */
  if (GET_CODE (PATTERN (insn)) == SEQUENCE)
    return len;

  /* Check for a return with but one preceding insn since function
     start / call.  */
  if (TARGET_PAD_RETURN
      && JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) != ADDR_VEC
      && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
      && get_attr_type (insn) == TYPE_RETURN)
    {
      rtx_insn *prev = prev_active_insn (insn);

      if (!prev || !(prev = prev_active_insn (prev))
          || ((NONJUMP_INSN_P (prev)
               && GET_CODE (PATTERN (prev)) == SEQUENCE)
              ? CALL_ATTR (as_a <rtx_sequence *> (PATTERN (prev))->insn (0),
                           NON_SIBCALL)
              : CALL_ATTR (prev, NON_SIBCALL)))
        return len + 4;
    }
  if (TARGET_ARC600)
    {
      rtx_insn *succ = next_real_insn (insn);

      /* On the ARC600, a write to an extension register must be separated
         from a read.  */
      if (succ && INSN_P (succ))
        len += arc600_corereg_hazard (insn, succ);
    }

  /* Restore extracted operands - otherwise splitters like the addsi3_mixed one
     can go awry.  */
  extract_constrain_insn_cached (insn);

  return len;
}

/* Return a copy of COND from *STATEP, inverted if that is indicated by the
   CC field of *STATEP.  */

static rtx
arc_get_ccfsm_cond (struct arc_ccfsm *statep, bool reverse)
{
  rtx cond = statep->cond;
  int raw_cc = get_arc_condition_code (cond);
  if (reverse)
    raw_cc = ARC_INVERSE_CONDITION_CODE (raw_cc);

  if (statep->cc == raw_cc)
    return copy_rtx (cond);

  gcc_assert (ARC_INVERSE_CONDITION_CODE (raw_cc) == statep->cc);

  machine_mode ccm = GET_MODE (XEXP (cond, 0));
  enum rtx_code code = reverse_condition (GET_CODE (cond));
  if (code == UNKNOWN || ccm == CC_FP_GTmode || ccm == CC_FP_GEmode)
    code = reverse_condition_maybe_unordered (GET_CODE (cond));

  return gen_rtx_fmt_ee (code, GET_MODE (cond),
                         copy_rtx (XEXP (cond, 0)), copy_rtx (XEXP (cond, 1)));
}

/* Return version of PAT conditionalized with COND, which is part of INSN.
   ANNULLED indicates if INSN is an annulled delay-slot insn.
   Register further changes if necessary.  */

static rtx
conditionalize_nonjump (rtx pat, rtx cond, rtx insn, bool annulled)
{
  /* For commutative operators, we generally prefer to have
     the first source match the destination.  */
  if (GET_CODE (pat) == SET)
    {
      rtx src = SET_SRC (pat);

      if (COMMUTATIVE_P (src))
        {
          rtx src0 = XEXP (src, 0);
          rtx src1 = XEXP (src, 1);
          rtx dst = SET_DEST (pat);

          if (rtx_equal_p (src1, dst) && !rtx_equal_p (src0, dst)
              /* Leave add_n alone - the canonical form is to
                 have the complex summand first.  */
              && REG_P (src0))
            pat = gen_rtx_SET (dst,
                               gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src),
                                               src1, src0));
        }
    }

  /* dwarf2out.c:dwarf2out_frame_debug_expr doesn't know
     what to do with COND_EXEC.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* If this is the delay slot insn of an annulled branch,
         dwarf2out.c:scan_trace understands the annulling semantics
         without the COND_EXEC.  */
      gcc_assert (annulled);
      rtx note = alloc_reg_note (REG_FRAME_RELATED_EXPR, pat,
                                 REG_NOTES (insn));
      validate_change (insn, &REG_NOTES (insn), note, 1);
    }
  pat = gen_rtx_COND_EXEC (VOIDmode, cond, pat);
  return pat;
}

/* Use the ccfsm machinery to do if conversion.  */

static unsigned
arc_ifcvt (void)
{
  struct arc_ccfsm *statep = &cfun->machine->ccfsm_current;

  memset (statep, 0, sizeof *statep);
  for (rtx_insn *insn = get_insns (); insn; insn = next_insn (insn))
    {
      arc_ccfsm_advance (insn, statep);

      switch (statep->state)
        {
        case 0:
          break;
        case 1: case 2:
          {
            /* Deleted branch.  */
            arc_ccfsm_post_advance (insn, statep);
            gcc_assert (!IN_RANGE (statep->state, 1, 2));
            rtx_insn *seq = NEXT_INSN (PREV_INSN (insn));
            if (GET_CODE (PATTERN (seq)) == SEQUENCE)
              {
                rtx slot = XVECEXP (PATTERN (seq), 0, 1);
                rtx pat = PATTERN (slot);
                if (INSN_ANNULLED_BRANCH_P (insn))
                  {
                    rtx cond
                      = arc_get_ccfsm_cond (statep, INSN_FROM_TARGET_P (slot));
                    pat = gen_rtx_COND_EXEC (VOIDmode, cond, pat);
                  }
                if (!validate_change (seq, &PATTERN (seq), pat, 0))
                  gcc_unreachable ();
                PUT_CODE (slot, NOTE);
                NOTE_KIND (slot) = NOTE_INSN_DELETED;
              }
            else
              {
                set_insn_deleted (insn);
              }
            continue;
          }
        case 3:
          if (LABEL_P (insn)
              && statep->target_label == CODE_LABEL_NUMBER (insn))
            {
              arc_ccfsm_post_advance (insn, statep);
              if (--LABEL_NUSES (insn) == 0)
                delete_insn (insn);
              continue;
            }
          /* Fall through.  */
        case 4: case 5:
          if (!NONDEBUG_INSN_P (insn))
            break;

          /* Conditionalized insn.  */

          rtx_insn *prev, *pprev;
          rtx *patp, pat, cond;
          bool annulled; annulled = false;

          /* If this is a delay slot insn in a non-annulled branch,
             don't conditionalize it.  N.B., this should be fine for
             conditional return too.  However, don't do this for
             unconditional branches, as these would be encountered when
             processing an 'else' part.  */
          prev = PREV_INSN (insn);
          pprev = PREV_INSN (prev);
          if (pprev && NEXT_INSN (NEXT_INSN (pprev)) == NEXT_INSN (insn)
              && JUMP_P (prev) && get_attr_cond (prev) == COND_USE)
            {
              if (!INSN_ANNULLED_BRANCH_P (prev))
                break;
              annulled = true;
            }

          patp = &PATTERN (insn);
          pat = *patp;
          cond = arc_get_ccfsm_cond (statep, INSN_FROM_TARGET_P (insn));
          if (NONJUMP_INSN_P (insn) || CALL_P (insn))
            {
              /* ??? don't conditionalize if all side effects are dead
                 in the not-execute case.  */

              pat = conditionalize_nonjump (pat, cond, insn, annulled);
            }
          else if (simplejump_p (insn))
            {
              patp = &SET_SRC (pat);
              pat = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, *patp, pc_rtx);
            }
          else if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
            {
              pat = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, pat, pc_rtx);
              pat = gen_rtx_SET (pc_rtx, pat);
            }
          else
            gcc_unreachable ();
          validate_change (insn, patp, pat, 1);
          if (!apply_change_group ())
            gcc_unreachable ();
          if (JUMP_P (insn))
            {
              rtx_insn *next = next_nonnote_insn (insn);
              if (GET_CODE (next) == BARRIER)
                delete_insn (next);
              if (statep->state == 3)
                continue;
            }
          break;
        default:
          gcc_unreachable ();
        }
      arc_ccfsm_post_advance (insn, statep);
    }
  return 0;
}

/* Find annulled delay insns and convert them to use the appropriate predicate.
   This allows branch shortening to size up these insns properly.  */

static unsigned
arc_predicate_delay_insns (void)
{
  for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat, jump, dlay, src, cond, *patp;
      int reverse;

      if (!NONJUMP_INSN_P (insn)
          || GET_CODE (pat = PATTERN (insn)) != SEQUENCE)
        continue;
      jump = XVECEXP (pat, 0, 0);
      dlay = XVECEXP (pat, 0, 1);
      if (!JUMP_P (jump) || !INSN_ANNULLED_BRANCH_P (jump))
        continue;
      /* If the branch insn does the annulling, leave the delay insn alone.  */
      if (!TARGET_AT_DBR_CONDEXEC && !INSN_FROM_TARGET_P (dlay))
        continue;
      /* ??? Could also leave DLAY un-conditionalized if its target is dead
         on the other path.  */
      gcc_assert (GET_CODE (PATTERN (jump)) == SET);
      gcc_assert (SET_DEST (PATTERN (jump)) == pc_rtx);
      src = SET_SRC (PATTERN (jump));
      gcc_assert (GET_CODE (src) == IF_THEN_ELSE);
      cond = XEXP (src, 0);
      if (XEXP (src, 2) == pc_rtx)
        reverse = 0;
      else if (XEXP (src, 1) == pc_rtx)
        reverse = 1;
      else
        gcc_unreachable ();
      if (reverse != !INSN_FROM_TARGET_P (dlay))
        {
          machine_mode ccm = GET_MODE (XEXP (cond, 0));
          enum rtx_code code = reverse_condition (GET_CODE (cond));
          if (code == UNKNOWN || ccm == CC_FP_GTmode || ccm == CC_FP_GEmode)
            code = reverse_condition_maybe_unordered (GET_CODE (cond));

          cond = gen_rtx_fmt_ee (code, GET_MODE (cond),
                                 copy_rtx (XEXP (cond, 0)),
                                 copy_rtx (XEXP (cond, 1)));
        }
      else
        cond = copy_rtx (cond);
      patp = &PATTERN (dlay);
      pat = *patp;
      pat = conditionalize_nonjump (pat, cond, dlay, true);
      validate_change (dlay, patp, pat, 1);
      if (!apply_change_group ())
        gcc_unreachable ();
    }
  return 0;
}

/* For ARC600: If a write to a core reg >=32 appears in a delay slot
   (other than of a forward brcc), it creates a hazard when there is a read
   of the same register at the branch target.  We can't know what is at the
   branch target of calls, and for branches, we don't really know before the
   end of delay slot scheduling, either.  Not only can individual instructions
   be hoisted out into a delay slot, a basic block can also be emptied this
   way, and branch and/or fall through targets be redirected.  Hence we don't
   want such writes in a delay slot.  */

/* Return nonzero iff INSN writes to an extension core register.  */

int
arc_write_ext_corereg (rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    {
      const_rtx x = *iter;
      switch (GET_CODE (x))
        {
        case SET: case POST_INC: case POST_DEC: case PRE_INC: case PRE_DEC:
          break;
        default:
          /* This is also fine for PRE/POST_MODIFY, because they
             contain a SET.  */
          continue;
        }
      const_rtx dest = XEXP (x, 0);
      if (REG_P (dest) && REGNO (dest) >= 32 && REGNO (dest) < 61)
        return 1;
    }
  return 0;
}

/* This is like the hook, but returns NULL when it can't / won't generate
   a legitimate address.  */

static rtx
arc_legitimize_address_0 (rtx x, rtx oldx ATTRIBUTE_UNUSED,
                          machine_mode mode)
{
  rtx addr, inner;

  addr = x;
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1))
      && ((GET_CODE (XEXP (addr, 0)) == SYMBOL_REF
           && !SYMBOL_REF_FUNCTION_P (XEXP (addr, 0)))
          || (REG_P (XEXP (addr, 0))
              && (INTVAL (XEXP (addr, 1)) & 252))))
    {
      HOST_WIDE_INT offs, upper;
      int size = GET_MODE_SIZE (mode);

      offs = INTVAL (XEXP (addr, 1));
      upper = (offs + 256 * size) & ~511 * size;
      inner = plus_constant (Pmode, XEXP (addr, 0), upper);
#if 0 /* ??? this produces worse code for EEMBC idctrn01 */
      if (GET_CODE (x) == CONST)
        inner = gen_rtx_CONST (Pmode, inner);
#endif
      addr = plus_constant (Pmode, force_reg (Pmode, inner), offs - upper);
      x = addr;
    }
  else if (GET_CODE (addr) == SYMBOL_REF && !SYMBOL_REF_FUNCTION_P (addr))
    x = force_reg (Pmode, x);
  if (memory_address_p ((machine_mode) mode, x))
    return x;
  return NULL_RTX;
}

static rtx
arc_legitimize_address (rtx orig_x, rtx oldx, machine_mode mode)
{
  rtx new_x = arc_legitimize_address_0 (orig_x, oldx, mode);

  if (new_x)
    return new_x;
  return orig_x;
}

static rtx
arc_delegitimize_address_0 (rtx op)
{
  switch (GET_CODE (op))
    {
    case CONST:
      return arc_delegitimize_address_0 (XEXP (op, 0));

    case UNSPEC:
      switch (XINT (op, 1))
        {
        case ARC_UNSPEC_GOT:
        case ARC_UNSPEC_GOTOFFPC:
          return XVECEXP (op, 0, 0);
        default:
          break;
        }
      break;

    case PLUS:
      {
        rtx t1 = arc_delegitimize_address_0 (XEXP (op, 0));
        rtx t2 = XEXP (op, 1);

        if (t1 && t2)
          return gen_rtx_PLUS (GET_MODE (op), t1, t2);
        break;
      }

    default:
      break;
    }
  return NULL_RTX;
}

static rtx
arc_delegitimize_address (rtx orig_x)
{
  rtx x = orig_x;

  if (MEM_P (x))
    x = XEXP (x, 0);

  x = arc_delegitimize_address_0 (x);
  if (!x)
    return orig_x;

  if (MEM_P (orig_x))
    x = replace_equiv_address_nv (orig_x, x);
  return x;
}

/* Return a REG rtx for acc1.  N.B. the gcc-internal representation may
   differ from the hardware register number in order to allow the generic
   code to correctly split the concatenation of acc1 and acc2.  */

rtx
gen_acc1 (void)
{
  return gen_rtx_REG (SImode, TARGET_BIG_ENDIAN ? 56 : 57);
}

/* Return a REG rtx for acc2.  N.B. the gcc-internal representation may
   differ from the hardware register number in order to allow the generic
   code to correctly split the concatenation of acc1 and acc2.  */

rtx
gen_acc2 (void)
{
  return gen_rtx_REG (SImode, TARGET_BIG_ENDIAN ? 57 : 56);
}

/* FIXME: a parameter should be added, and code added to final.c,
   to reproduce this functionality in shorten_branches.  */
#if 0
/* Return nonzero iff BRANCH should be unaligned if possible by upsizing
   a previous instruction.  */
int
arc_unalign_branch_p (rtx branch)
{
  rtx note;

  if (!TARGET_UNALIGN_BRANCH)
    return 0;
  /* Do not do this if we have a filled delay slot.  */
  if (get_attr_delay_slot_filled (branch) == DELAY_SLOT_FILLED_YES
      && !NEXT_INSN (branch)->deleted ())
    return 0;
  note = find_reg_note (branch, REG_BR_PROB, 0);
  return (!note
          || (arc_unalign_prob_threshold && !br_prob_note_reliable_p (note))
          || INTVAL (XEXP (note, 0)) < arc_unalign_prob_threshold);
}
#endif

/* When estimating sizes during arc_reorg, when optimizing for speed, there
   are three reasons why we need to consider branches to be length 6:
   - annul-false delay slot insns are implemented using conditional execution,
     thus preventing short insn formation where used.
   - for ARC600: annul-true delay slot insns are implemented where possible
     using conditional execution, preventing short insn formation where used.
   - for ARC700: likely or somewhat likely taken branches are made long and
     unaligned if possible to avoid branch penalty.  */

bool
arc_branch_size_unknown_p (void)
{
  return !optimize_size && arc_reorg_in_progress;
}

/* The usual; we set up our machine_function data.  */

static struct machine_function *
arc_init_machine_status (void)
{
  struct machine_function *machine;
  machine = ggc_cleared_alloc<machine_function> ();
  machine->fn_type = ARC_FUNCTION_UNKNOWN;

  return machine;
}

/* Implements INIT_EXPANDERS.  We just set up to call the above
   function.  */

void
arc_init_expanders (void)
{
  init_machine_status = arc_init_machine_status;
}

/* Check if OP is a proper parallel of a millicode call pattern.  OFFSET
   indicates a number of elements to ignore - that allows having a
   sibcall pattern that starts with (return).  LOAD_P is zero for store
   multiple (for prologues), and one for load multiples (for epilogues),
   and two for load multiples where no final clobber of blink is required.
   We also skip the first load / store element since this is supposed to
   be checked in the instruction pattern.  */

int
arc_check_millicode (rtx op, int offset, int load_p)
{
  int len = XVECLEN (op, 0) - offset;
  int i;

  if (load_p == 2)
    {
      if (len < 2 || len > 13)
        return 0;
      load_p = 1;
    }
  else
    {
      rtx elt = XVECEXP (op, 0, --len);

      if (GET_CODE (elt) != CLOBBER
          || !REG_P (XEXP (elt, 0))
          || REGNO (XEXP (elt, 0)) != RETURN_ADDR_REGNUM
          || len < 3 || len > 13)
        return 0;
    }
  for (i = 1; i < len; i++)
    {
      rtx elt = XVECEXP (op, 0, i + offset);
      rtx reg, mem, addr;

      if (GET_CODE (elt) != SET)
        return 0;
      mem = XEXP (elt, load_p);
      reg = XEXP (elt, 1 - load_p);
      if (!REG_P (reg) || REGNO (reg) != 13U + i || !MEM_P (mem))
        return 0;
      addr = XEXP (mem, 0);
      if (GET_CODE (addr) != PLUS
          || !rtx_equal_p (stack_pointer_rtx, XEXP (addr, 0))
          || !CONST_INT_P (XEXP (addr, 1)) || INTVAL (XEXP (addr, 1)) != i * 4)
        return 0;
    }
  return 1;
}

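/* Shape of an accepted store-multiple parallel (illustrative RTL for a
   prologue saving r13-r15; register and offset choices follow the
   checks above):

       (parallel [(set (mem (plus sp 0)) (reg r13))  ; checked by the pattern
                  (set (mem (plus sp 4)) (reg r14))  ; i == 1
                  (set (mem (plus sp 8)) (reg r15))  ; i == 2
                  (clobber (reg blink))])

   The loop starts at i == 1 because element 0 is validated by the insn
   pattern itself.  */
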
/* Accessor functions for cfun->machine->unalign.  */

void
arc_clear_unalign (void)
{
  if (cfun)
    cfun->machine->unalign = 0;
}

void
arc_toggle_unalign (void)
{
  cfun->machine->unalign ^= 2;
}

/* Operands 0..2 are the operands of an addsi which uses a 12 bit
   constant in operand 2, but which would require a LIMM because of
   operand mismatch.
   Operands 3 and 4 are new SET_SRCs for operand 0.  */

void
split_addsi (rtx *operands)
{
  int val = INTVAL (operands[2]);

  /* Try for two short insns first.  Lengths being equal, we prefer
     expansions with shorter register lifetimes.  */
  if (val > 127 && val <= 255
      && satisfies_constraint_Rcq (operands[0]))
    {
      operands[3] = operands[2];
      operands[4] = gen_rtx_PLUS (SImode, operands[0], operands[1]);
    }
  else
    {
      operands[3] = operands[1];
      operands[4] = gen_rtx_PLUS (SImode, operands[0], operands[2]);
    }
}

/* Operands 0..2 are the operands of a subsi which uses a 12 bit
   constant in operand 1, but which would require a LIMM because of
   operand mismatch.
   Operands 3 and 4 are new SET_SRCs for operand 0.  */

void
split_subsi (rtx *operands)
{
  int val = INTVAL (operands[1]);

  /* Try for two short insns first.  Lengths being equal, we prefer
     expansions with shorter register lifetimes.  */
  if (satisfies_constraint_Rcq (operands[0])
      && satisfies_constraint_Rcq (operands[2]))
    {
      if (val >= -31 && val <= 127)
        {
          operands[3] = gen_rtx_NEG (SImode, operands[2]);
          operands[4] = gen_rtx_PLUS (SImode, operands[0], operands[1]);
          return;
        }
      else if (val >= 0 && val < 255)
        {
          operands[3] = operands[1];
          operands[4] = gen_rtx_MINUS (SImode, operands[0], operands[2]);
          return;
        }
    }
  /* If the destination is not an ARCompact16 register, we might
     still have a chance to make a short insn if the source is;
     we need to start with a reg-reg move for this.  */
  operands[3] = operands[2];
  operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[0]);
}

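/* Example of the first split_addsi case (hypothetical operands): for
   "r0 = r1 + 200" with r0 a short-insn (Rcq) register, the splitter
   yields "mov_s r0,200" followed by "add_s r0,r0,r1" - two 16-bit
   insns - instead of one 32-bit add dragging a long immediate.  */
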
/* Handle DOUBLE_REGS uses.
   Operand 0: destination register
   Operand 1: source register  */

static bool
arc_process_double_reg_moves (rtx *operands)
{
  enum usesDxState { none, srcDx, destDx, maxDx };
  enum usesDxState state = none;
  rtx dest = operands[0];
  rtx src = operands[1];

  if (refers_to_regno_p (40, 44, src, 0))
    {
      state = srcDx;
      gcc_assert (REG_P (dest));
    }
  if (refers_to_regno_p (40, 44, dest, 0))
    {
      /* Via arc_register_move_cost, we should never see D,D moves.  */
      gcc_assert (REG_P (src));
      gcc_assert (state == none);
      state = destDx;
    }

  if (state == none)
    return false;

  if (state == srcDx)
    {
      /* Without the LR insn, we need to split this into a
         sequence of insns which will use the DEXCLx and DADDHxy
         insns to be able to read the Dx register in question.  */
      if (TARGET_DPFP_DISABLE_LRSR)
        {
          /* gen *movdf_insn_nolrsr  */
          rtx set = gen_rtx_SET (dest, src);
          rtx use1 = gen_rtx_USE (VOIDmode, const1_rtx);
          emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, use1)));
        }
      else
        {
          /* When we have 'mov D, r' or 'mov D, D' then get the target
             register pair for use with LR insn.  */
          rtx destHigh = simplify_gen_subreg (SImode, dest, DFmode,
                                              TARGET_BIG_ENDIAN ? 0 : 4);
          rtx destLow = simplify_gen_subreg (SImode, dest, DFmode,
                                             TARGET_BIG_ENDIAN ? 4 : 0);

          /* Produce the two LR insns to get the high and low parts.  */
          emit_insn (gen_rtx_SET (destHigh,
                                  gen_rtx_UNSPEC_VOLATILE (Pmode,
                                                           gen_rtvec (1, src),
                                                           VUNSPEC_ARC_LR_HIGH)));
          emit_insn (gen_rtx_SET (destLow,
                                  gen_rtx_UNSPEC_VOLATILE (Pmode,
                                                           gen_rtvec (1, src),
                                                           VUNSPEC_ARC_LR)));
        }
    }
  else if (state == destDx)
    {
      /* When we have 'mov r, D' or 'mov D, D' and we have access to the
         LR insn get the target register pair.  */
      rtx srcHigh = simplify_gen_subreg (SImode, src, DFmode,
                                         TARGET_BIG_ENDIAN ? 0 : 4);
      rtx srcLow = simplify_gen_subreg (SImode, src, DFmode,
                                        TARGET_BIG_ENDIAN ? 4 : 0);

      emit_insn (gen_dexcl_2op (dest, srcHigh, srcLow));
    }
  else
    gcc_unreachable ();

  return true;
}

/* Operands 0..1 are the operands of a 64 bit move instruction.
   Split it into two moves with operands 2/3 and 4/5.  */

void
arc_split_move (rtx *operands)
{
  machine_mode mode = GET_MODE (operands[0]);
  int i;
  int swap = 0;
  rtx xop[4];

  if (TARGET_DPFP)
    {
      if (arc_process_double_reg_moves (operands))
        return;
    }

  if (TARGET_LL64
      && ((memory_operand (operands[0], mode)
           && (even_register_operand (operands[1], mode)
               || satisfies_constraint_Cm3 (operands[1])))
          || (memory_operand (operands[1], mode)
              && even_register_operand (operands[0], mode))))
    {
      emit_move_insn (operands[0], operands[1]);
      return;
    }

  if (TARGET_PLUS_QMACW
      && even_register_operand (operands[0], mode)
      && even_register_operand (operands[1], mode))
    {
      emit_move_insn (operands[0], operands[1]);
      return;
    }

  if (TARGET_PLUS_QMACW
      && GET_CODE (operands[1]) == CONST_VECTOR)
    {
      HOST_WIDE_INT intval0, intval1;
      if (GET_MODE (operands[1]) == V2SImode)
        {
          intval0 = INTVAL (XVECEXP (operands[1], 0, 0));
          intval1 = INTVAL (XVECEXP (operands[1], 0, 1));
        }
      else
        {
          intval1 = INTVAL (XVECEXP (operands[1], 0, 3)) << 16;
          intval1 |= INTVAL (XVECEXP (operands[1], 0, 2)) & 0xFFFF;
          intval0 = INTVAL (XVECEXP (operands[1], 0, 1)) << 16;
          intval0 |= INTVAL (XVECEXP (operands[1], 0, 0)) & 0xFFFF;
        }
      xop[0] = gen_rtx_REG (SImode, REGNO (operands[0]));
      xop[3] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
      xop[2] = GEN_INT (trunc_int_for_mode (intval0, SImode));
      xop[1] = GEN_INT (trunc_int_for_mode (intval1, SImode));
      emit_move_insn (xop[0], xop[2]);
      emit_move_insn (xop[3], xop[1]);
      return;
    }

  for (i = 0; i < 2; i++)
    {
      if (MEM_P (operands[i]) && auto_inc_p (XEXP (operands[i], 0)))
        {
          rtx addr = XEXP (operands[i], 0);
          rtx r, o;
          enum rtx_code code;

          gcc_assert (!reg_overlap_mentioned_p (operands[0], addr));
          switch (GET_CODE (addr))
            {
            case PRE_DEC: o = GEN_INT (-8); goto pre_modify;
            case PRE_INC: o = GEN_INT (8); goto pre_modify;
            case PRE_MODIFY: o = XEXP (XEXP (addr, 1), 1);
            pre_modify:
              code = PRE_MODIFY;
              break;
            case POST_DEC: o = GEN_INT (-8); goto post_modify;
            case POST_INC: o = GEN_INT (8); goto post_modify;
            case POST_MODIFY: o = XEXP (XEXP (addr, 1), 1);
            post_modify:
              code = POST_MODIFY;
              swap = 2;
              break;
            default:
              gcc_unreachable ();
            }
          r = XEXP (addr, 0);
          xop[0+i] = adjust_automodify_address_nv
            (operands[i], SImode,
             gen_rtx_fmt_ee (code, Pmode, r,
                             gen_rtx_PLUS (Pmode, r, o)),
             0);
          xop[2+i] = adjust_automodify_address_nv
            (operands[i], SImode, plus_constant (Pmode, r, 4), 4);
        }
      else
        {
          xop[0+i] = operand_subword (operands[i], 0, 0, mode);
          xop[2+i] = operand_subword (operands[i], 1, 0, mode);
        }
    }
  if (reg_overlap_mentioned_p (xop[0], xop[3]))
    {
      swap = 2;
      gcc_assert (!reg_overlap_mentioned_p (xop[2], xop[1]));
    }

  emit_move_insn (xop[0 + swap], xop[1 + swap]);
  emit_move_insn (xop[2 - swap], xop[3 - swap]);
}

/* Select between the instruction output templates s_tmpl (for short INSNs)
   and l_tmpl (for long INSNs).  */

const char *
arc_short_long (rtx_insn *insn, const char *s_tmpl, const char *l_tmpl)
{
  int is_short = arc_verify_short (insn, cfun->machine->unalign, -1);

  extract_constrain_insn_cached (insn);
  return is_short ? s_tmpl : l_tmpl;
}

/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

rtx
arc_regno_use_in (unsigned int regno, rtx x)
{
  const char *fmt;
  int i, j;
  rtx tem;

  if (REG_P (x) && refers_to_regno_p (regno, x))
    return x;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if ((tem = regno_use_in (regno, XEXP (x, i))))
            return tem;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
            return tem;
    }

  return NULL_RTX;
}

/* Return the integer value of the "type" attribute for INSN, or -1 if
   INSN can't have attributes.  */

static int
arc_attr_type (rtx_insn *insn)
{
  if (NONJUMP_INSN_P (insn)
      ? (GET_CODE (PATTERN (insn)) == USE
         || GET_CODE (PATTERN (insn)) == CLOBBER)
      : JUMP_P (insn)
      ? (GET_CODE (PATTERN (insn)) == ADDR_VEC
         || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
      : !CALL_P (insn))
    return -1;
  return get_attr_type (insn);
}

/* Code has a minimum p2 alignment of 1, which we must restore after
   an ADDR_DIFF_VEC.  */

int
arc_label_align (rtx_insn *label)
{
  if (align_labels.levels[0].log < 1)
    {
      rtx_insn *next = next_nonnote_nondebug_insn (label);
      if (INSN_P (next) && recog_memoized (next) >= 0)
        return 1;
    }
  return align_labels.levels[0].log;
}

/* Return true if LABEL is in executable code.  */

bool
arc_text_label (rtx_insn *label)
{
  rtx_insn *next;

  /* ??? We use deleted labels like they were still there, see
     gcc.c-torture/compile/20000326-2.c .  */
  gcc_assert (GET_CODE (label) == CODE_LABEL
              || (GET_CODE (label) == NOTE
                  && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL));
  next = next_nonnote_insn (label);
  if (next)
    return (!JUMP_TABLE_DATA_P (next)
            || GET_CODE (PATTERN (next)) != ADDR_VEC);
  else if (!PREV_INSN (label))
    /* ??? sometimes text labels get inserted very late, see
       gcc.dg/torture/stackalign/comp-goto-1.c */
    return true;
  return false;
}

/* Without this, gcc.dg/tree-prof/bb-reorg.c fails to assemble
   when compiling with -O2 -freorder-blocks-and-partition -fprofile-use
   -D_PROFILE_USE; delay branch scheduling then follows a crossing jump
   to redirect two breqs.  */

static bool
arc_can_follow_jump (const rtx_insn *follower, const rtx_insn *followee)
{
  /* ??? get_attr_type is declared to take an rtx.  */
  union { const rtx_insn *c; rtx_insn *r; } u;

  u.c = follower;
  if (CROSSING_JUMP_P (followee))
    switch (get_attr_type (u.r))
      {
      case TYPE_BRANCH:
        if (get_attr_length (u.r) != 2)
          break;
        /* Fall through.  */
      case TYPE_BRCC:
      case TYPE_BRCC_NO_DELAY_SLOT:
        return false;
      default:
        return true;
      }
  return true;
}

/* Implement EPILOGUE_USES.
   Return true if REGNO should be added to the deemed uses of the epilogue.

   We have to make sure all the register restore instructions are
   known to be live in interrupt functions, plus the blink register if
   it is clobbered by the isr.  */

bool
arc_epilogue_uses (int regno)
{
  unsigned int fn_type;
  fn_type = arc_compute_function_type (cfun);

  if (regno == arc_tp_regno)
    return true;

  if (regno == RETURN_ADDR_REGNUM)
    return true;

  if (regno == arc_return_address_register (fn_type))
    return true;

  if (epilogue_completed && ARC_INTERRUPT_P (fn_type))
    {
      /* An interrupt function restores more registers.  */
      if (df_regs_ever_live_p (regno) || call_used_or_fixed_reg_p (regno))
        return true;
    }

  return false;
}

/* Helper for EH_USES macro.  */

bool
arc_eh_uses (int regno)
{
  if (regno == arc_tp_regno)
    return true;
  return false;
}

/* Return true if we use LRA instead of the reload pass.  */

bool
arc_lra_p (void)
{
  return arc_lra_flag;
}

/* ??? Should we define TARGET_REGISTER_PRIORITY?  We might prefer to use
   Rcq registers, because some insns are shorter with them.  OTOH we already
   have separate alternatives for this purpose, and other insns don't
   mind, so maybe we should rather prefer the other registers?
   We need more data, and we can only get that if we allow people to
   try all options.  */

static int
arc_register_priority (int r)
{
  switch (arc_lra_priority_tag)
    {
    case ARC_LRA_PRIORITY_NONE:
      return 0;
    case ARC_LRA_PRIORITY_NONCOMPACT:
      return ((((r & 7) ^ 4) - 4) & 15) != r;
    case ARC_LRA_PRIORITY_COMPACT:
      return ((((r & 7) ^ 4) - 4) & 15) == r;
    default:
      gcc_unreachable ();
    }
}

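/* The bit-twiddling above tests membership in the compact register set
   {r0-r3, r12-r15}, i.e. the registers reachable by most 16-bit
   encodings.  Worked examples: for r == 13, (((13 & 7) ^ 4) - 4) & 15
   evaluates to 13, so r13 counts as compact; for r == 8 it evaluates
   to 0 != 8, so r8 does not.  */
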
static reg_class_t
arc_spill_class (reg_class_t /* orig_class */, machine_mode)
{
  return GENERAL_REGS;
}

bool
arc_legitimize_reload_address (rtx *p, machine_mode mode, int opnum,
                               int itype)
{
  rtx x = *p;
  enum reload_type type = (enum reload_type) itype;

  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1))
      && (RTX_OK_FOR_BASE_P (XEXP (x, 0), true)
          || (REG_P (XEXP (x, 0))
              && reg_equiv_constant (REGNO (XEXP (x, 0))))))
    {
      int scale = GET_MODE_SIZE (mode);
      int shift;
      rtx index_rtx = XEXP (x, 1);
      HOST_WIDE_INT offset = INTVAL (index_rtx), offset_base;
      rtx reg, sum, sum2;

      if (scale > 4)
        scale = 4;
      if ((scale - 1) & offset)
        scale = 1;
      shift = scale >> 1;
      offset_base
        = ((offset + (256 << shift))
           & ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) -512 << shift)));
      /* Sometimes the normal form does not suit DImode.  We
         could avoid that by using smaller ranges, but that
         would give less optimized code when SImode is
         prevalent.  */
      if (GET_MODE_SIZE (mode) + offset - offset_base <= (256 << shift))
        {
          int regno;

          reg = XEXP (x, 0);
          regno = REGNO (reg);
          sum2 = sum = plus_constant (Pmode, reg, offset_base);

          if (reg_equiv_constant (regno))
            {
              sum2 = plus_constant (Pmode, reg_equiv_constant (regno),
                                    offset_base);
              if (GET_CODE (sum2) == PLUS)
                sum2 = gen_rtx_CONST (Pmode, sum2);
            }
          *p = gen_rtx_PLUS (Pmode, sum, GEN_INT (offset - offset_base));
          push_reload (sum2, NULL_RTX, &XEXP (*p, 0), NULL,
                       BASE_REG_CLASS, Pmode, VOIDmode, 0, 0, opnum,
                       type);
          return true;
        }
    }
  /* We must re-recognize what we created before.  */
  else if (GET_CODE (x) == PLUS
           && GET_CODE (XEXP (x, 0)) == PLUS
           && CONST_INT_P (XEXP (XEXP (x, 0), 1))
           && REG_P (XEXP (XEXP (x, 0), 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* Because this address is so complex, we know it must have
         been created by LEGITIMIZE_RELOAD_ADDRESS before; thus,
         it is already unshared, and needs no further unsharing.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0, opnum, type);
      return true;
    }
  return false;
}

/* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P.  */

static bool
arc_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
                                    unsigned int align,
                                    enum by_pieces_operation op,
                                    bool speed_p)
{
  /* Let the cpymem expander handle small block moves.  */
  if (op == MOVE_BY_PIECES)
    return false;

  return default_use_by_pieces_infrastructure_p (size, align, op, speed_p);
}

/* Emit a (pre) memory barrier around an atomic sequence according to
   MODEL.  */

static void
arc_pre_atomic_barrier (enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    emit_insn (gen_memory_barrier ());
}

/* Emit a (post) memory barrier around an atomic sequence according to
   MODEL.  */

static void
arc_post_atomic_barrier (enum memmodel model)
{
  if (need_atomic_barrier_p (model, false))
    emit_insn (gen_memory_barrier ());
}

/* Emit INSN as a jump, and annotate it as very unlikely to be taken.  */

static void
emit_unlikely_jump (rtx insn)
{
  rtx_insn *jump = emit_jump_insn (insn);
  add_reg_br_prob_note (jump, profile_probability::very_unlikely ());
}

/* Expand code to perform an 8- or 16-bit compare and swap by doing a
   32-bit compare and swap on the word containing the byte or
   half-word.  The difference between a weak and a strong CAS is that
   the weak version may simply fail.  The strong version relies on two
   loops: one checks whether the SCOND op succeeded, the other checks
   whether the 32-bit location containing the 8- or 16-bit datum was
   changed by another thread.  The first loop is implemented by the
   atomic_compare_and_swapsi_1 pattern; the second loop is implemented
   by this routine.  */
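
/* For illustration (a little-endian sketch, not emitted code): for a
   QImode datum at address 0x1003, ADDR below becomes 0x1000, OFF
   becomes 3 bytes = 24 bits, and MASK becomes 0xff << 24, so the
   compare and swap is done on the full word at 0x1000.  */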

static void
arc_expand_compare_and_swap_qh (rtx bool_result, rtx result, rtx mem,
				rtx oldval, rtx newval, rtx weak,
				rtx mod_s, rtx mod_f)
{
  rtx addr1 = force_reg (Pmode, XEXP (mem, 0));
  rtx addr = gen_reg_rtx (Pmode);
  rtx off = gen_reg_rtx (SImode);
  rtx oldv = gen_reg_rtx (SImode);
  rtx newv = gen_reg_rtx (SImode);
  rtx oldvalue = gen_reg_rtx (SImode);
  rtx newvalue = gen_reg_rtx (SImode);
  rtx res = gen_reg_rtx (SImode);
  rtx resv = gen_reg_rtx (SImode);
  rtx memsi, val, mask, end_label, loop_label, cc, x;
  machine_mode mode;
  bool is_weak = (weak != const0_rtx);

  /* Truncate the address.  */
  emit_insn (gen_rtx_SET (addr,
			  gen_rtx_AND (Pmode, addr1, GEN_INT (-4))));

  /* Compute the datum offset.  */
  emit_insn (gen_rtx_SET (off,
			  gen_rtx_AND (SImode, addr1, GEN_INT (3))));
  if (TARGET_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (off,
			    gen_rtx_MINUS (SImode,
					   (GET_MODE (mem) == QImode) ?
					   GEN_INT (3) : GEN_INT (2), off)));

  /* Normal read from truncated address.  */
  memsi = gen_rtx_MEM (SImode, addr);
  set_mem_alias_set (memsi, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (memsi) = MEM_VOLATILE_P (mem);

  val = copy_to_reg (memsi);

  /* Convert the offset to bits.  */
  emit_insn (gen_rtx_SET (off,
			  gen_rtx_ASHIFT (SImode, off, GEN_INT (3))));

  /* Get the proper mask.  */
  if (GET_MODE (mem) == QImode)
    mask = force_reg (SImode, GEN_INT (0xff));
  else
    mask = force_reg (SImode, GEN_INT (0xffff));

  emit_insn (gen_rtx_SET (mask,
			  gen_rtx_ASHIFT (SImode, mask, off)));

  /* Prepare the old and new values.  */
  emit_insn (gen_rtx_SET (val,
			  gen_rtx_AND (SImode, gen_rtx_NOT (SImode, mask),
				       val)));

  oldval = gen_lowpart (SImode, oldval);
  emit_insn (gen_rtx_SET (oldv,
			  gen_rtx_ASHIFT (SImode, oldval, off)));

  newval = gen_lowpart_common (SImode, newval);
  emit_insn (gen_rtx_SET (newv,
			  gen_rtx_ASHIFT (SImode, newval, off)));

  emit_insn (gen_rtx_SET (oldv,
			  gen_rtx_AND (SImode, oldv, mask)));

  emit_insn (gen_rtx_SET (newv,
			  gen_rtx_AND (SImode, newv, mask)));

  if (!is_weak)
    {
      end_label = gen_label_rtx ();
      loop_label = gen_label_rtx ();
      emit_label (loop_label);
    }

  /* Make the old and new values.  */
  emit_insn (gen_rtx_SET (oldvalue,
			  gen_rtx_IOR (SImode, oldv, val)));

  emit_insn (gen_rtx_SET (newvalue,
			  gen_rtx_IOR (SImode, newv, val)));

  /* Try a 32-bit atomic compare and swap.  It clobbers the CC
     register.  */
  emit_insn (gen_atomic_compare_and_swapsi_1 (res, memsi, oldvalue, newvalue,
					      weak, mod_s, mod_f));

  /* Regardless of the weakness of the operation, a proper boolean
     result needs to be provided.  */
  x = gen_rtx_REG (CC_Zmode, CC_REG);
  x = gen_rtx_EQ (SImode, x, const0_rtx);
  emit_insn (gen_rtx_SET (bool_result, x));

  if (!is_weak)
    {
      /* Check the result: if the atomic op succeeded, go to the end
	 label.  */
      x = gen_rtx_REG (CC_Zmode, CC_REG);
      x = gen_rtx_EQ (VOIDmode, x, const0_rtx);
      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
				gen_rtx_LABEL_REF (Pmode, end_label), pc_rtx);
      emit_jump_insn (gen_rtx_SET (pc_rtx, x));

      /* Wait for the right moment when the accessed 32-bit location
	 is stable.  */
      emit_insn (gen_rtx_SET (resv,
			      gen_rtx_AND (SImode, gen_rtx_NOT (SImode, mask),
					   res)));
      mode = SELECT_CC_MODE (NE, resv, val);
      cc = gen_rtx_REG (mode, CC_REG);
      emit_insn (gen_rtx_SET (cc, gen_rtx_COMPARE (mode, resv, val)));

      /* Set the new value of the 32-bit location, properly masked.  */
      emit_insn (gen_rtx_SET (val, resv));

      /* Try again if the location is unstable.  Fall through if only
	 the scond op failed.  */
      x = gen_rtx_NE (VOIDmode, cc, const0_rtx);
      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
				gen_rtx_LABEL_REF (Pmode, loop_label), pc_rtx);
      emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));

      emit_label (end_label);
    }

  /* Finally, return the result properly extracted for the given mode.  */
  emit_insn (gen_rtx_SET (res,
			  gen_rtx_AND (SImode, res, mask)));

  emit_insn (gen_rtx_SET (res,
			  gen_rtx_LSHIFTRT (SImode, res, off)));

  emit_move_insn (result, gen_lowpart (GET_MODE (result), res));
}

/* Helper function used by "atomic_compare_and_swap" expand
   pattern.  */

void
arc_expand_compare_and_swap (rtx operands[])
{
  rtx bval, rval, mem, oldval, newval, is_weak, mod_s, mod_f, x;
  machine_mode mode;

  bval = operands[0];
  rval = operands[1];
  mem = operands[2];
  oldval = operands[3];
  newval = operands[4];
  is_weak = operands[5];
  mod_s = operands[6];
  mod_f = operands[7];
  mode = GET_MODE (mem);

  if (reg_overlap_mentioned_p (rval, oldval))
    oldval = copy_to_reg (oldval);

  if (mode == SImode)
    {
      emit_insn (gen_atomic_compare_and_swapsi_1 (rval, mem, oldval, newval,
						  is_weak, mod_s, mod_f));
      x = gen_rtx_REG (CC_Zmode, CC_REG);
      x = gen_rtx_EQ (SImode, x, const0_rtx);
      emit_insn (gen_rtx_SET (bval, x));
    }
  else
    {
      arc_expand_compare_and_swap_qh (bval, rval, mem, oldval, newval,
				      is_weak, mod_s, mod_f);
    }
}

/* Helper function used by the "atomic_compare_and_swapsi_1"
   pattern.  */
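
/* The emitted sequence is roughly the following (a hand-written
   sketch in ARC assembly, with the memory barriers omitted):

     1: llock  rval, [mem]
        brne   rval, oldval, 2f  ; not the expected value: bail out
        scond  newval, [mem]     ; the store clobbers the CC register
        bnz    1b                ; strong CAS only: retry on failure
     2:  */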

void
arc_split_compare_and_swap (rtx operands[])
{
  rtx rval, mem, oldval, newval;
  machine_mode mode;
  enum memmodel mod_s, mod_f;
  bool is_weak;
  rtx label1, label2, x, cond;

  rval = operands[0];
  mem = operands[1];
  oldval = operands[2];
  newval = operands[3];
  is_weak = (operands[4] != const0_rtx);
  mod_s = (enum memmodel) INTVAL (operands[5]);
  mod_f = (enum memmodel) INTVAL (operands[6]);
  mode = GET_MODE (mem);

  /* ARC atomic ops work only with 32-bit aligned memories.  */
  gcc_assert (mode == SImode);

  arc_pre_atomic_barrier (mod_s);

  label1 = NULL_RTX;
  if (!is_weak)
    {
      label1 = gen_label_rtx ();
      emit_label (label1);
    }
  label2 = gen_label_rtx ();

  /* Load exclusive.  */
  emit_insn (gen_arc_load_exclusivesi (rval, mem));

  /* Check if it is oldval.  */
  mode = SELECT_CC_MODE (NE, rval, oldval);
  cond = gen_rtx_REG (mode, CC_REG);
  emit_insn (gen_rtx_SET (cond, gen_rtx_COMPARE (mode, rval, oldval)));

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
			    gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
  emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));

  /* Exclusively store new item.  Store clobbers CC reg.  */
  emit_insn (gen_arc_store_exclusivesi (mem, newval));

  if (!is_weak)
    {
      /* Check the result of the store.  */
      cond = gen_rtx_REG (CC_Zmode, CC_REG);
      x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
				gen_rtx_LABEL_REF (Pmode, label1), pc_rtx);
      emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
    }

  if (mod_f != MEMMODEL_RELAXED)
    emit_label (label2);

  arc_post_atomic_barrier (mod_s);

  if (mod_f == MEMMODEL_RELAXED)
    emit_label (label2);
}

/* Expand an atomic fetch-and-operate pattern.  CODE is the binary operation
   to perform.  MEM is the memory on which to operate.  VAL is the second
   operand of the binary operator.  BEFORE and AFTER are optional locations to
   return the value of MEM either before or after the operation.  MODEL_RTX
   is a CONST_INT containing the memory model to use.  */
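
/* For instance, an atomic fetch-and-add expands to roughly the
   following loop (a hand-written sketch, barriers omitted):

     1: llock  before, [mem]
        add    after, before, val
        scond  after, [mem]      ; the store clobbers the CC register
        bnz    1b                ; retry until the store succeeds  */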

void
arc_expand_atomic_op (enum rtx_code code, rtx mem, rtx val,
		      rtx orig_before, rtx orig_after, rtx model_rtx)
{
  enum memmodel model = (enum memmodel) INTVAL (model_rtx);
  machine_mode mode = GET_MODE (mem);
  rtx label, x, cond;
  rtx before = orig_before, after = orig_after;

  /* ARC atomic ops work only with 32-bit aligned memories.  */
  gcc_assert (mode == SImode);

  arc_pre_atomic_barrier (model);

  label = gen_label_rtx ();
  emit_label (label);
  label = gen_rtx_LABEL_REF (VOIDmode, label);

  if (before == NULL_RTX)
    before = gen_reg_rtx (mode);

  if (after == NULL_RTX)
    after = gen_reg_rtx (mode);

  /* Load exclusive.  */
  emit_insn (gen_arc_load_exclusivesi (before, mem));

  switch (code)
    {
    case NOT:
      x = gen_rtx_AND (mode, before, val);
      emit_insn (gen_rtx_SET (after, x));
      x = gen_rtx_NOT (mode, after);
      emit_insn (gen_rtx_SET (after, x));
      break;

    case MINUS:
      if (CONST_INT_P (val))
	{
	  val = GEN_INT (-INTVAL (val));
	  code = PLUS;
	}

      /* FALLTHRU.  */
    default:
      x = gen_rtx_fmt_ee (code, mode, before, val);
      emit_insn (gen_rtx_SET (after, x));
      break;
    }

  /* Exclusively store new item.  Store clobbers CC reg.  */
  emit_insn (gen_arc_store_exclusivesi (mem, after));

  /* Check the result of the store.  */
  cond = gen_rtx_REG (CC_Zmode, CC_REG);
  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
			    label, pc_rtx);
  emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));

  arc_post_atomic_barrier (model);
}

/* Implement TARGET_NO_SPECULATION_IN_DELAY_SLOTS_P.  */

static bool
arc_no_speculation_in_delay_slots_p ()
{
  return true;
}

/* Return a parallel of registers to represent where to find the
   register pieces if required, otherwise NULL_RTX.  */
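
/* For example, a DImode value living in r0 is described to DWARF as
   the pair of SImode registers r0 and r1.  */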

static rtx
arc_dwarf_register_span (rtx rtl)
{
  machine_mode mode = GET_MODE (rtl);
  unsigned regno;
  rtx p;

  if (GET_MODE_SIZE (mode) != 8)
    return NULL_RTX;

  p = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (2));
  regno = REGNO (rtl);
  XVECEXP (p, 0, 0) = gen_rtx_REG (SImode, regno);
  XVECEXP (p, 0, 1) = gen_rtx_REG (SImode, regno + 1);

  return p;
}

/* Return true if OP is an acceptable memory operand for ARCompact
   16-bit load instructions of MODE.

   AV2SHORT: TRUE if the address needs to fit into the new ARCv2
   short, non-scaled instructions.

   SCALED: TRUE if the address can be scaled.  */
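
/* For illustration (a hand-written sketch, not from a dump): an
   SImode access through [r0] or [r0,4] is compact when r0 is one of
   the compact registers, and [sp,8] is compact as well, while a
   negative offset such as [r0,-4] is always rejected.  */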

bool
compact_memory_operand_p (rtx op, machine_mode mode,
			  bool av2short, bool scaled)
{
  rtx addr, plus0, plus1;
  int size, off;

  /* Eliminate non-memory operations.  */
  if (GET_CODE (op) != MEM)
    return 0;

  /* .di instructions have no 16-bit form.  */
  if (MEM_VOLATILE_P (op) && !TARGET_VOLATILE_CACHE_SET)
    return false;

  /* Likewise for uncached types.  */
  if (arc_is_uncached_mem_p (op))
    return false;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  size = GET_MODE_SIZE (mode);

  /* dword operations really put out 2 instructions, so eliminate
     them.  */
  if (size > UNITS_PER_WORD)
    return false;

  /* Decode the address now.  */
  addr = XEXP (op, 0);
  switch (GET_CODE (addr))
    {
    case REG:
      return (REGNO (addr) >= FIRST_PSEUDO_REGISTER
	      || COMPACT_GP_REG_P (REGNO (addr))
	      || (SP_REG_P (REGNO (addr)) && (size != 2)));
    case PLUS:
      plus0 = XEXP (addr, 0);
      plus1 = XEXP (addr, 1);

      if ((GET_CODE (plus0) == REG)
	  && ((REGNO (plus0) >= FIRST_PSEUDO_REGISTER)
	      || COMPACT_GP_REG_P (REGNO (plus0)))
	  && ((GET_CODE (plus1) == REG)
	      && ((REGNO (plus1) >= FIRST_PSEUDO_REGISTER)
		  || COMPACT_GP_REG_P (REGNO (plus1)))))
	{
	  return !av2short;
	}

      if ((GET_CODE (plus0) == REG)
	  && ((REGNO (plus0) >= FIRST_PSEUDO_REGISTER)
	      || (COMPACT_GP_REG_P (REGNO (plus0)) && !av2short)
	      || (IN_RANGE (REGNO (plus0), 0, 31) && av2short))
	  && (GET_CODE (plus1) == CONST_INT))
	{
	  bool valid = false;

	  off = INTVAL (plus1);

	  /* Negative offsets are not supported in 16-bit load/store
	     insns.  */
	  if (off < 0)
	    return 0;

	  /* Only u5 immediates allowed in code density instructions.  */
	  if (av2short)
	    {
	      switch (size)
		{
		case 1:
		  return false;
		case 2:
		  /* This is an ldh_s.x instruction, check the u6
		     immediate.  */
		  if (COMPACT_GP_REG_P (REGNO (plus0)))
		    valid = true;
		  break;
		case 4:
		  /* Only u5 immediates allowed in 32-bit access code
		     density instructions.  */
		  if (REGNO (plus0) <= 31)
		    return ((off < 32) && (off % 4 == 0));
		  break;
		default:
		  return false;
		}
	    }
	  else
	    if (COMPACT_GP_REG_P (REGNO (plus0)))
	      valid = true;

	  if (valid)
	    {
	      switch (size)
		{
		case 1:
		  return (off < 32);
		case 2:
		  /* The 6-bit constant gets shifted to fit the real
		     5-bit field.  Check also for the alignment.  */
		  return ((off < 64) && (off % 2 == 0));
		case 4:
		  return ((off < 128) && (off % 4 == 0));
		default:
		  return false;
		}
	    }
	}

      if (REG_P (plus0) && CONST_INT_P (plus1)
	  && ((REGNO (plus0) >= FIRST_PSEUDO_REGISTER)
	      || SP_REG_P (REGNO (plus0)))
	  && !av2short)
	{
	  off = INTVAL (plus1);
	  return ((size != 2) && (off >= 0 && off < 128) && (off % 4 == 0));
	}

      if ((GET_CODE (plus0) == MULT)
	  && (GET_CODE (XEXP (plus0, 0)) == REG)
	  && ((REGNO (XEXP (plus0, 0)) >= FIRST_PSEUDO_REGISTER)
	      || COMPACT_GP_REG_P (REGNO (XEXP (plus0, 0))))
	  && (GET_CODE (plus1) == REG)
	  && ((REGNO (plus1) >= FIRST_PSEUDO_REGISTER)
	      || COMPACT_GP_REG_P (REGNO (plus1))))
	return scaled;
    default:
      break;
      /* TODO: 'gp' and 'pcl' are to be supported as base address
	 operands for 16-bit load instructions.  */
    }
  return false;
}

/* Return nonzero if a jli call should be generated for a call from
   the current function to DECL.  */

bool
arc_is_jli_call_p (rtx pat)
{
  tree attrs;
  tree decl = SYMBOL_REF_DECL (pat);

  /* If it is not a well defined public function then return false.  */
  if (!decl || !SYMBOL_REF_FUNCTION_P (pat) || !TREE_PUBLIC (decl))
    return false;

  attrs = TYPE_ATTRIBUTES (TREE_TYPE (decl));
  if (lookup_attribute ("jli_always", attrs))
    return true;

  if (lookup_attribute ("jli_fixed", attrs))
    return true;

  return TARGET_JLI_ALWAYS;
}

/* Handle the "jli" attribute; arguments as in struct
   attribute_spec.handler.  */
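
/* A plausible use (an illustrative assumption, not taken from this
   file) would be

     int foo (int) __attribute__ ((jli_fixed (2)));

   where the integer constant selects the fixed JLI table entry.  */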

static tree
arc_handle_jli_attribute (tree *node ATTRIBUTE_UNUSED,
			  tree name, tree args, int,
			  bool *no_add_attrs)
{
  if (!TARGET_V2)
    {
      warning (OPT_Wattributes,
	       "%qE attribute only valid for ARCv2 architecture",
	       name);
      *no_add_attrs = true;
    }

  if (args == NULL_TREE)
    {
      warning (OPT_Wattributes,
	       "argument of %qE attribute is missing",
	       name);
      *no_add_attrs = true;
    }
  else
    {
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add_attrs = true;
	}
      /* FIXME! add range check.  TREE_INT_CST_LOW (arg) */
    }
  return NULL_TREE;
}

/* Handle the "secure_call" attribute; arguments as in struct
   attribute_spec.handler.  */

static tree
arc_handle_secure_attribute (tree *node ATTRIBUTE_UNUSED,
			     tree name, tree args, int,
			     bool *no_add_attrs)
{
  if (!TARGET_EM)
    {
      warning (OPT_Wattributes,
	       "%qE attribute only valid for ARC EM architecture",
	       name);
      *no_add_attrs = true;
    }

  if (args == NULL_TREE)
    {
      warning (OPT_Wattributes,
	       "argument of %qE attribute is missing",
	       name);
      *no_add_attrs = true;
    }
  else
    {
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
	{
	  warning (0, "%qE attribute allows only an integer constant argument",
		   name);
	  *no_add_attrs = true;
	}
    }
  return NULL_TREE;
}

/* Return nonzero if the symbol is a secure function.  */

bool
arc_is_secure_call_p (rtx pat)
{
  tree attrs;
  tree decl = SYMBOL_REF_DECL (pat);

  if (!decl)
    return false;

  attrs = TYPE_ATTRIBUTES (TREE_TYPE (decl));
  if (lookup_attribute ("secure_call", attrs))
    return true;

  return false;
}

/* Handle "uncached" qualifier.  */
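
/* A typical use (an illustrative assumption) marks a whole type so
   that every access through it bypasses the data cache:

     struct regs { int ctrl; } __attribute__ ((uncached));  */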

static tree
arc_handle_uncached_attribute (tree *node,
			       tree name, tree args,
			       int flags ATTRIBUTE_UNUSED,
			       bool *no_add_attrs)
{
  if (DECL_P (*node) && TREE_CODE (*node) != TYPE_DECL)
    {
      error ("%qE attribute only applies to types",
	     name);
      *no_add_attrs = true;
    }
  else if (args)
    {
      warning (OPT_Wattributes, "argument of %qE attribute ignored", name);
    }
  return NULL_TREE;
}

/* Return TRUE if PAT is a memory access to uncached data.  */

bool
arc_is_uncached_mem_p (rtx pat)
{
  tree attrs = NULL_TREE;
  tree addr;

  if (!MEM_P (pat))
    return false;

  /* Get the memory attributes.  */
  addr = MEM_EXPR (pat);
  if (!addr)
    return false;

  /* Get the attributes.  */
  if (TREE_CODE (addr) == MEM_REF
      || TREE_CODE (addr) == VAR_DECL)
    {
      attrs = TYPE_ATTRIBUTES (TREE_TYPE (addr));
      if (lookup_attribute ("uncached", attrs))
	return true;
    }
  if (TREE_CODE (addr) == MEM_REF)
    {
      attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_OPERAND (addr, 0)));
      if (lookup_attribute ("uncached", attrs))
	return true;
      attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_OPERAND (addr, 1)));
      if (lookup_attribute ("uncached", attrs))
	return true;
    }

  /* Check the definitions of the structs.  */
  while (handled_component_p (addr))
    {
      if (TREE_CODE (addr) == COMPONENT_REF)
	{
	  attrs = TYPE_ATTRIBUTES (TREE_TYPE (addr));
	  if (lookup_attribute ("uncached", attrs))
	    return true;
	  attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_OPERAND (addr, 0)));
	  if (lookup_attribute ("uncached", attrs))
	    return true;
	  attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_OPERAND (addr, 1)));
	  if (lookup_attribute ("uncached", attrs))
	    return true;
	}
      addr = TREE_OPERAND (addr, 0);
    }
  return false;
}

/* Handle the "aux" attribute.  The auxiliary registers are addressed
   using the special instructions lr and sr.  The attribute 'aux'
   indicates that a variable refers to the aux-regs and which register
   number is desired.  */
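
/* A typical use (an illustrative assumption; the register number is
   target specific) would be

     volatile int counter __attribute__ ((aux (0x21)));

   so that reads and writes of 'counter' become lr/sr accesses to
   auxiliary register 0x21.  */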

static tree
arc_handle_aux_attribute (tree *node,
			  tree name, tree args, int,
			  bool *no_add_attrs)
{
  /* Isn't it better to use address spaces for the aux-regs?  */
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) != VAR_DECL)
	{
	  error ("%qE attribute only applies to variables", name);
	  *no_add_attrs = true;
	}
      else if (args)
	{
	  if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	    TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
	  tree arg = TREE_VALUE (args);
	  if (TREE_CODE (arg) != INTEGER_CST)
	    {
	      warning (OPT_Wattributes, "%qE attribute allows only an integer "
		       "constant argument", name);
	      *no_add_attrs = true;
	    }
	  /* FIXME! add range check.  TREE_INT_CST_LOW (arg) */
	}

      if (TREE_CODE (*node) == VAR_DECL)
	{
	  tree fntype = TREE_TYPE (*node);
	  if (fntype && TREE_CODE (fntype) == POINTER_TYPE)
	    {
	      tree attrs = tree_cons (get_identifier ("aux"), NULL_TREE,
				      TYPE_ATTRIBUTES (fntype));
	      TYPE_ATTRIBUTES (fntype) = attrs;
	    }
	}
    }
  return NULL_TREE;
}

/* Implement TARGET_USE_ANCHORS_FOR_SYMBOL_P.  We don't want to use
   anchors for small data: the GP register acts as an anchor in that
   case.  We also don't want to use them for PC-relative accesses,
   where the PC acts as an anchor.  Also prohibit TLS symbols from
   using anchors.  */

static bool
arc_use_anchors_for_symbol_p (const_rtx symbol)
{
  if (SYMBOL_REF_TLS_MODEL (symbol))
    return false;

  if (flag_pic)
    return false;

  if (SYMBOL_REF_SMALL_P (symbol))
    return false;

  return default_use_anchors_for_symbol_p (symbol);
}

/* Return true if SUBST can't safely replace its equivalent during RA.  */
static bool
arc_cannot_substitute_mem_equiv_p (rtx)
{
  /* If SUBST is mem[base+index], the address may not fit the ISA,
     thus return true.  */
  return true;
}

/* Checks whether the operands are valid for use in an LDD/STD
   instruction.  Assumes that RT and RT2 are REG.  This is guaranteed
   by the patterns.  Assumes that the address in the base register RN
   is word aligned.  The pattern guarantees that both memory accesses
   use the same base register, the offsets are constants within the
   range, and the gap between the offsets is 4.  If reload is
   complete, then check that the registers are legal.  */

static bool
operands_ok_ldd_std (rtx rt, rtx rt2, HOST_WIDE_INT offset)
{
  unsigned int t, t2;

  if (!reload_completed)
    return true;

  if (!(SMALL_INT_RANGE (offset, (GET_MODE_SIZE (DImode) - 1) & (~0x03),
			 (offset & (GET_MODE_SIZE (DImode) - 1) & 3
			  ? 0 : -(-GET_MODE_SIZE (DImode) | (~0x03)) >> 1))))
    return false;

  t = REGNO (rt);
  t2 = REGNO (rt2);

  if ((t2 == PCL_REG)
      || (t % 2 != 0)  /* First destination register is not even.  */
      || (t2 != t + 1))
    return false;

  return true;
}

/* Helper for gen_operands_ldd_std.  Returns true iff the memory
   operand MEM's address contains an immediate offset from the base
   register and has no side effects, in which case it sets BASE and
   OFFSET accordingly.  */

static bool
mem_ok_for_ldd_std (rtx mem, rtx *base, rtx *offset)
{
  rtx addr;

  gcc_assert (base != NULL && offset != NULL);

  /* TODO: Handle more general memory operand patterns, such as
     PRE_DEC and PRE_INC.  */

  if (side_effects_p (mem))
    return false;

  /* Can't deal with subregs.  */
  if (GET_CODE (mem) == SUBREG)
    return false;

  gcc_assert (MEM_P (mem));

  *offset = const0_rtx;

  addr = XEXP (mem, 0);

  /* If addr isn't valid for DImode, then we can't handle it.  */
  if (!arc_legitimate_address_p (DImode, addr,
				 reload_in_progress || reload_completed))
    return false;

  if (REG_P (addr))
    {
      *base = addr;
      return true;
    }
  else if (GET_CODE (addr) == PLUS || GET_CODE (addr) == MINUS)
    {
      *base = XEXP (addr, 0);
      *offset = XEXP (addr, 1);
      return (REG_P (*base) && CONST_INT_P (*offset));
    }

  return false;
}

/* Called from peephole2 to replace two word-size accesses with a
   single LDD/STD instruction.  Returns true iff we can generate a new
   instruction sequence.  That is, both accesses use the same base
   register and the gap between constant offsets is 4.  OPERANDS are
   the operands found by the peephole matcher; OPERANDS[0,1] are
   register operands, and OPERANDS[2,3] are the corresponding memory
   operands.  LOAD indicates whether the access is load or store.  */
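
/* For example (a hand-written sketch, not compiler output), the pair

     ld r0,[r13,4]
     ld r1,[r13,8]

   can be rewritten as the single 'ldd r0,[r13,4]', whereas the same
   pair loading into r1 and r2 cannot, because the first destination
   register of an LDD must be even.  */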

bool
gen_operands_ldd_std (rtx *operands, bool load, bool commute)
{
  int i, gap;
  HOST_WIDE_INT offsets[2], offset;
  int nops = 2;
  rtx cur_base, cur_offset, tmp;
  rtx base = NULL_RTX;

  /* Check that the memory references are immediate offsets from the
     same base register.  Extract the base register, the destination
     registers, and the corresponding memory offsets.  */
  for (i = 0; i < nops; i++)
    {
      if (!mem_ok_for_ldd_std (operands[nops + i], &cur_base, &cur_offset))
	return false;

      if (i == 0)
	base = cur_base;
      else if (REGNO (base) != REGNO (cur_base))
	return false;

      offsets[i] = INTVAL (cur_offset);
      if (GET_CODE (operands[i]) == SUBREG)
	{
	  tmp = SUBREG_REG (operands[i]);
	  gcc_assert (GET_MODE (operands[i]) == GET_MODE (tmp));
	  operands[i] = tmp;
	}
    }

  /* Make sure there is no dependency between the individual loads.  */
  if (load && REGNO (operands[0]) == REGNO (base))
    return false; /* RAW.  */

  if (load && REGNO (operands[0]) == REGNO (operands[1]))
    return false; /* WAW.  */

  /* Make sure the instructions are ordered with lower memory access
     first.  */
  if (offsets[0] > offsets[1])
    {
      gap = offsets[0] - offsets[1];
      offset = offsets[1];

      /* Swap the instructions such that lower memory is accessed
	 first.  */
      std::swap (operands[0], operands[1]);
      std::swap (operands[2], operands[3]);
    }
  else
    {
      gap = offsets[1] - offsets[0];
      offset = offsets[0];
    }

  /* Make sure accesses are to consecutive memory locations.  */
  if (gap != 4)
    return false;

  /* Make sure we generate legal instructions.  */
  if (operands_ok_ldd_std (operands[0], operands[1], offset))
    return true;

  if (load && commute)
    {
      /* Try reordering registers.  */
      std::swap (operands[0], operands[1]);
      if (operands_ok_ldd_std (operands[0], operands[1], offset))
	return true;
    }

  return false;
}

/* This order of allocation is used when we compile for size.  It
   allocates first the registers which are most likely to end up in
   a short instruction.  */
static const int size_alloc_order[] =
{
  0, 1, 2, 3, 12, 13, 14, 15,
  4, 5, 6, 7, 8, 9, 10, 11
};

/* Adjust register allocation order when compiling for size.  */
void
arc_adjust_reg_alloc_order (void)
{
  const int arc_default_alloc_order[] = REG_ALLOC_ORDER;
  memcpy (reg_alloc_order, arc_default_alloc_order, sizeof (reg_alloc_order));
  if (optimize_size)
    memcpy (reg_alloc_order, size_alloc_order, sizeof (size_alloc_order));
}

/* Implement TARGET_MEMORY_MOVE_COST.  */

static int
arc_memory_move_cost (machine_mode mode,
		      reg_class_t rclass ATTRIBUTE_UNUSED,
		      bool in ATTRIBUTE_UNUSED)
{
  if ((GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
      || ((GET_MODE_SIZE (mode) <= UNITS_PER_WORD * 2) && TARGET_LL64))
    return 6;

  return (2 * GET_MODE_SIZE (mode));
}

/* Split an OR instruction into multiple BSET/OR instructions in an
   attempt to avoid long immediate constants.  The following
   strategies are employed when the destination is a 'q' reg.

   1. if there are up to three bits set in the mask, a succession of
   three bset instructions will be emitted:
   OR rA, rB, mask ->
   BSET(_S) rA,rB,mask1/BSET_S rA,rA,mask2/BSET_S rA,rA,mask3

   2. if the lower 6 bits of the mask are set and there is only one
   bit set in the upper remaining bits then we will emit one bset and
   one OR instruction:
   OR rA, rB, mask -> OR rA,rB,mask1/BSET_S rA,mask2

   3. otherwise an OR with limm will be emitted.  */
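
/* As a worked illustration (a sketch, not compiler output): for
   mask = 0x8000003f, strategy 2 applies and the split is

     or     rA,rB,0x3f   ; the low 6 bits fit a u6 operand
     bset_s rA,rA,31     ; set the single remaining bit  */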

void
arc_split_ior (rtx *operands)
{
  unsigned HOST_WIDE_INT mask, maskx;
  rtx op1 = operands[1];

  gcc_assert (CONST_INT_P (operands[2]));
  mask = INTVAL (operands[2]) & 0xffffffff;

  if (__builtin_popcount (mask) > 3 || (mask & 0x3f))
    {
      maskx = mask & 0x3f;
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_IOR (SImode, op1, GEN_INT (maskx))));
      op1 = operands[0];
      mask &= ~maskx;
    }

  switch (__builtin_popcount (mask))
    {
    case 3:
      maskx = 1 << (__builtin_ffs (mask) - 1);
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_IOR (SImode, op1, GEN_INT (maskx))));
      mask &= ~maskx;
      op1 = operands[0];
      /* FALLTHRU */
    case 2:
      maskx = 1 << (__builtin_ffs (mask) - 1);
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_IOR (SImode, op1, GEN_INT (maskx))));
      mask &= ~maskx;
      op1 = operands[0];
      /* FALLTHRU */
    case 1:
      maskx = 1 << (__builtin_ffs (mask) - 1);
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_IOR (SImode, op1, GEN_INT (maskx))));
      break;
    case 0:
      break;
    default:
      gcc_unreachable ();
    }
}

/* Helper to check C0x constraint.  */

bool
arc_check_ior_const (HOST_WIDE_INT ival)
{
  unsigned int mask = (unsigned int) (ival & 0xffffffff);

  if (UNSIGNED_INT6 (ival)
      || IS_POWEROF2_P (mask))
    return false;
  if (__builtin_popcount (mask) <= 3)
    return true;
  if (__builtin_popcount (mask & ~0x3f) <= 1)
    return true;
  return false;
}

/* Split a mov with a long immediate into smaller, size-friendly
   instructions.  */
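
/* For instance (a sketch, not compiler output), 'mov rA,0x2a000000'
   would need a 32-bit limm, but rotating the u6 constant 0x2a right
   by 8 bits yields the same value, so case 1 below emits the shorter
   'ror8 rA,0x2a' instead.  */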

bool
arc_split_mov_const (rtx *operands)
{
  unsigned HOST_WIDE_INT ival;
  HOST_WIDE_INT shimm;
  machine_mode mode = GET_MODE (operands[0]);

  /* Manage a constant.  */
  gcc_assert (CONST_INT_P (operands[1]));
  ival = INTVAL (operands[1]) & 0xffffffff;

  /* 1. Check if we can just rotate the limm by 8 using ROR8.  */
  if (TARGET_BARREL_SHIFTER && TARGET_V2
      && ((ival & ~0x3f000000) == 0))
    {
      shimm = (ival >> 24) & 0x3f;
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_ROTATERT (mode, GEN_INT (shimm),
						GEN_INT (8))));
      return true;
    }
  /* 2. Check if we can just shift by 8 to fit into the u6 of LSL8.  */
  if (TARGET_BARREL_SHIFTER && TARGET_V2
      && ((ival & ~0x3f00) == 0))
    {
      shimm = (ival >> 8) & 0x3f;
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_ASHIFT (mode, GEN_INT (shimm),
					      GEN_INT (8))));
      return true;
    }

  /* 3. Check if we can just shift by 16 to fit into the u6 of LSL16.  */
  if (TARGET_BARREL_SHIFTER && TARGET_V2
      && ((ival & ~0x3f0000) == 0))
    {
      shimm = (ival >> 16) & 0x3f;
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_ASHIFT (mode, GEN_INT (shimm),
					      GEN_INT (16))));
      return true;
    }

  /* 4. Check if we can do something like mov_s h,u8 / asl_s ra,h,#nb.  */
  if (((ival >> (__builtin_ffs (ival) - 1)) & 0xffffff00) == 0
      && TARGET_BARREL_SHIFTER)
    {
      HOST_WIDE_INT shift = __builtin_ffs (ival);
      shimm = (ival >> (shift - 1)) & 0xff;
      emit_insn (gen_rtx_SET (operands[0], GEN_INT (shimm)));
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_ASHIFT (mode, operands[0],
					      GEN_INT (shift - 1))));
      return true;
    }

  /* 5. Check if we can just rotate the limm, useful when no barrel
     shifter is present.  */
  if ((ival & ~0x8000001f) == 0)
    {
      shimm = (ival * 2 + 1) & 0x3f;
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_ROTATERT (mode, GEN_INT (shimm),
						const1_rtx)));
      return true;
    }

  /* 6. Check if we can do something with bmask.  */
  if (IS_POWEROF2_P (ival + 1))
    {
      emit_insn (gen_rtx_SET (operands[0], constm1_rtx));
      emit_insn (gen_rtx_SET (operands[0],
			      gen_rtx_AND (mode, operands[0],
					   GEN_INT (ival))));
      return true;
    }

  gcc_unreachable ();
}

/* Helper to check Cax constraint.  */

bool
arc_check_mov_const (HOST_WIDE_INT ival)
{
  ival = ival & 0xffffffff;

  if (SIGNED_INT12 (ival))
    return false;

  if ((ival & ~0x8000001f) == 0)
    return true;

  if (IS_POWEROF2_P (ival + 1))
    return true;

  /* The next rules require a barrel shifter.  */
  if (!TARGET_BARREL_SHIFTER)
    return false;

  if (((ival >> (__builtin_ffs (ival) - 1)) & 0xffffff00) == 0)
    return true;

  if ((ival & ~0x3f00) == 0)
    return true;

  if ((ival & ~0x3f0000) == 0)
    return true;

  if ((ival & ~0x3f000000) == 0)
    return true;

  return false;
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */

bool
arc_can_use_return_insn (void)
{
  return (reload_completed && cfun->machine->frame_info.total_size == 0
	  && !ARC_INTERRUPT_P (arc_compute_function_type (cfun)));
}

/* Helper for INSN_COST.

   Per Segher Boessenkool: rtx_costs computes the cost for any rtx (an
   insn, a set, a set source, any random piece of one).  set_src_cost,
   set_rtx_cost, etc. are helper functions that use that.

   Those functions do not work for parallels.  Also, costs are not
   additive like this simplified model assumes.  Also, more complex
   backends tend to miss many cases in their rtx_costs function.

   Many passes that want costs want to know the cost of a full insn.  Like
   combine.  That's why I created insn_cost: it solves all of the above
   problems.  */

static int
arc_insn_cost (rtx_insn *insn, bool speed)
{
  int cost;
  if (recog_memoized (insn) < 0)
    return 0;

  /* If optimizing for size, we want the insn size.  */
  if (!speed)
    return get_attr_length (insn);

  /* Use cost if provided.  */
  cost = get_attr_cost (insn);
  if (cost > 0)
    return cost;

  /* For speed make a simple cost model: memory access is more
     expensive than any other instruction.  */
  enum attr_type type = get_attr_type (insn);

  switch (type)
    {
    case TYPE_LOAD:
    case TYPE_STORE:
      cost = COSTS_N_INSNS (2);
      break;

    default:
      cost = COSTS_N_INSNS (1);
      break;
    }

  return cost;
}

#undef TARGET_USE_ANCHORS_FOR_SYMBOL_P
#define TARGET_USE_ANCHORS_FOR_SYMBOL_P arc_use_anchors_for_symbol_p

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

#undef TARGET_CANNOT_SUBSTITUTE_MEM_EQUIV_P
#define TARGET_CANNOT_SUBSTITUTE_MEM_EQUIV_P arc_cannot_substitute_mem_equiv_p

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE arc_asm_trampoline_template

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST arc_register_move_cost

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST arc_memory_move_cost

#undef TARGET_INSN_COST
#define TARGET_INSN_COST arc_insn_cost

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-arc.h"