/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
   2002, 2003, 2004  Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"

/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

const struct attribute_spec arm_attribute_table[];

/* Forward function declarations.  */
static arm_stack_offsets *arm_get_frame_offsets (void);
static void arm_add_gc_roots (void);
static int arm_gen_constant (enum rtx_code, enum machine_mode, rtx,
			     HOST_WIDE_INT, rtx, rtx, int, int);
static unsigned bit_count (unsigned long);
static int arm_address_register_rtx_p (rtx, int);
static int arm_legitimate_index_p (enum machine_mode, rtx, RTX_CODE, int);
static int thumb_base_register_rtx_p (rtx, enum machine_mode, int);
inline static int thumb_index_register_rtx_p (rtx, int);
static int thumb_far_jump_used_p (void);
static bool thumb_force_lr_save (void);
static unsigned long thumb_compute_save_reg_mask (void);
static int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
static rtx emit_multi_reg_push (int);
static rtx emit_sfm (int, int);
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer (rtx, unsigned int, int);
#endif
static const char *fp_const_from_val (REAL_VALUE_TYPE *);
static arm_cc get_arm_condition_code (rtx);
static HOST_WIDE_INT int_log2 (HOST_WIDE_INT);
static rtx is_jump_table (rtx);
static const char *output_multi_immediate (rtx *, const char *, const char *,
					   int, HOST_WIDE_INT);
static void print_multi_reg (FILE *, const char *, int, int);
static const char *shift_op (rtx, HOST_WIDE_INT *);
static struct machine_function *arm_init_machine_status (void);
static int number_of_first_bit_set (int);
static void replace_symbols_in_block (tree, rtx, rtx);
static void thumb_exit (FILE *, int);
static void thumb_pushpop (FILE *, int, int, int *, int);
static rtx is_jump_table (rtx);
static HOST_WIDE_INT get_jump_table_size (rtx);
static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_forward_ref (Mfix *);
static Mnode *move_minipool_fix_backward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_backward_ref (Mfix *);
static void assign_minipool_offsets (Mfix *);
static void arm_print_value (FILE *, rtx);
static void dump_minipool (rtx);
static int arm_barrier_cost (rtx);
static Mfix *create_fix_barrier (Mfix *, HOST_WIDE_INT);
static void push_minipool_barrier (rtx, HOST_WIDE_INT);
static void push_minipool_fix (rtx, HOST_WIDE_INT, rtx *, enum machine_mode,
			       rtx);
static void arm_reorg (void);
static bool note_invalid_constants (rtx, HOST_WIDE_INT, int);
static int current_file_function_operand (rtx);
static unsigned long arm_compute_save_reg0_reg12_mask (void);
static unsigned long arm_compute_save_reg_mask (void);
static unsigned long arm_isr_value (tree);
static unsigned long arm_compute_func_type (void);
static tree arm_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree arm_handle_isr_attribute (tree *, tree, tree, int, bool *);
static void arm_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arm_output_function_prologue (FILE *, HOST_WIDE_INT);
static void thumb_output_function_prologue (FILE *, HOST_WIDE_INT);
static int arm_comp_type_attributes (tree, tree);
static void arm_set_default_type_attributes (tree);
static int arm_adjust_cost (rtx, rtx, rtx, int);
static int count_insns_for_constant (HOST_WIDE_INT, int);
static int arm_get_strip_length (int);
static bool arm_function_ok_for_sibcall (tree, tree);
static void arm_internal_label (FILE *, const char *, unsigned long);
static void arm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				 tree);
static int arm_rtx_costs_1 (rtx, enum rtx_code, enum rtx_code);
static bool arm_slowmul_rtx_costs (rtx, int, int, int *);
static bool arm_fastmul_rtx_costs (rtx, int, int, int *);
static bool arm_xscale_rtx_costs (rtx, int, int, int *);
static bool arm_9e_rtx_costs (rtx, int, int, int *);
static int arm_address_cost (rtx);
static bool arm_memory_load_p (rtx);
static bool arm_cirrus_insn_p (rtx);
static void cirrus_reorg (rtx);
static void arm_init_builtins (void);
static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void arm_init_iwmmxt_builtins (void);
static rtx safe_vector_operand (rtx, enum machine_mode);
static rtx arm_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx arm_expand_unop_builtin (enum insn_code, tree, rtx, int);
static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void emit_constant_insn (rtx cond, rtx pattern);

#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section (const char *, unsigned int);
#endif
#ifndef ARM_PE
static void arm_encode_section_info (tree, rtx, int);
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label (FILE *, const char *);
static void aof_dump_imports (FILE *);
static void aof_dump_pic_table (FILE *);
static void aof_file_start (void);
static void aof_file_end (void);
#endif
static rtx arm_struct_value_rtx (tree, int);
static void arm_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
					tree, int *, int);
static bool arm_pass_by_reference (CUMULATIVE_ARGS *,
				   enum machine_mode, tree, bool);
static bool arm_promote_prototypes (tree);
static bool arm_default_short_enums (void);
static bool arm_align_anon_bitfield (void);

static tree arm_cxx_guard_type (void);
static bool arm_cxx_guard_mask_bit (void);
static tree arm_get_cookie_size (tree);
static bool arm_cookie_has_size (void);
static bool arm_cxx_cdtor_returns_this (void);
static void arm_init_libfuncs (void);

\f
/* Initialize the GCC target structure.  */
#if TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START aof_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END aof_file_end
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* This will be overridden in arm_override_options.  */
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_slowmul_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arm_address_cost

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG arm_reorg

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS arm_init_libfuncs

#undef  TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef  TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES arm_promote_prototypes
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE arm_pass_by_reference

#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX arm_struct_value_rtx

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS arm_setup_incoming_varargs

#undef  TARGET_DEFAULT_SHORT_ENUMS
#define TARGET_DEFAULT_SHORT_ENUMS arm_default_short_enums

#undef  TARGET_ALIGN_ANON_BITFIELD
#define TARGET_ALIGN_ANON_BITFIELD arm_align_anon_bitfield

#undef  TARGET_CXX_GUARD_TYPE
#define TARGET_CXX_GUARD_TYPE arm_cxx_guard_type

#undef  TARGET_CXX_GUARD_MASK_BIT
#define TARGET_CXX_GUARD_MASK_BIT arm_cxx_guard_mask_bit

#undef  TARGET_CXX_GET_COOKIE_SIZE
#define TARGET_CXX_GET_COOKIE_SIZE arm_get_cookie_size

#undef  TARGET_CXX_COOKIE_HAS_SIZE
#define TARGET_CXX_COOKIE_HAS_SIZE arm_cookie_has_size

#undef  TARGET_CXX_CDTOR_RETURNS_THIS
#define TARGET_CXX_CDTOR_RETURNS_THIS arm_cxx_cdtor_returns_this

struct gcc_target targetm = TARGET_INITIALIZER;
\f
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* The processor for which instructions should be scheduled.  */
enum processor_type arm_tune = arm_none;

/* Which floating point model to use.  */
enum arm_fp_model arm_fp_model;

/* Which floating point hardware is available.  */
enum fputype arm_fpu_arch;

/* Which floating point hardware to schedule for.  */
enum fputype arm_fpu_tune;

/* Whether to use floating point hardware.  */
enum float_abi_type arm_float_abi;

/* Which ABI to use.  */
enum arm_abi_type arm_abi;

/* Set by the -mfpu=... option.  */
const char * target_fpu_name = NULL;

/* Set by the -mfpe=... option.  */
const char * target_fpe_name = NULL;

/* Set by the -mfloat-abi=... option.  */
const char * target_float_abi_name = NULL;

/* Set by the -mabi=... option.  */
const char * target_abi_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_ARCH3M     (1 << 1)        /* Extended multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */
#define FL_CIRRUS     (1 << 11)       /* Cirrus/DSP.  */
#define FL_ARCH6      (1 << 12)       /* Architecture rel 6.  Adds
                                         media instructions.  */
#define FL_VFPV2      (1 << 13)       /* Vector Floating Point V2.  */

#define FL_IWMMXT     (1 << 29)       /* XScale v2 or "Intel Wireless MMX technology".  */

#define FL_FOR_ARCH2    0
#define FL_FOR_ARCH3    FL_MODE32
#define FL_FOR_ARCH3M   (FL_FOR_ARCH3 | FL_ARCH3M)
#define FL_FOR_ARCH4    (FL_FOR_ARCH3M | FL_ARCH4)
#define FL_FOR_ARCH4T   (FL_FOR_ARCH4 | FL_THUMB)
#define FL_FOR_ARCH5    (FL_FOR_ARCH4 | FL_ARCH5)
#define FL_FOR_ARCH5T   (FL_FOR_ARCH5 | FL_THUMB)
#define FL_FOR_ARCH5E   (FL_FOR_ARCH5 | FL_ARCH5E)
#define FL_FOR_ARCH5TE  (FL_FOR_ARCH5E | FL_THUMB)
#define FL_FOR_ARCH5TEJ FL_FOR_ARCH5TE
#define FL_FOR_ARCH6    (FL_FOR_ARCH5TE | FL_ARCH6)
#define FL_FOR_ARCH6J   FL_FOR_ARCH6

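/* For example, FL_FOR_ARCH5TE expands through the chain above to

     FL_MODE32 | FL_ARCH3M | FL_ARCH4 | FL_ARCH5 | FL_ARCH5E | FL_THUMB

   so each architecture's flag set is cumulative over its
   predecessors.  (Expansion shown for illustration only.)  */
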
/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this chip supports the ARM Architecture 3M extensions.  */
int arm_arch3m = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 4t extensions.  */
int arm_arch4t = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip supports the ARM Architecture 6 extensions.  */
int arm_arch6 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is a Cirrus variant.  */
int arm_arch_cirrus = 0;

/* Nonzero if this chip supports Intel Wireless MMX technology.  */
int arm_arch_iwmmxt = 0;

/* Nonzero if this chip is an XScale.  */
int arm_arch_xscale = 0;

/* Nonzero if tuning for XScale.  */
int arm_tune_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* Nonzero if we should define __THUMB_INTERWORK__ in the
   preprocessor.
   XXX This is a bit of a hack, it's intended to help work around
   problems in GLD which doesn't understand that armv5t code is
   interworking clean.  */
int arm_cpp_interwork = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};
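
/* The table is ordered so that inverting the low bit of an index
   yields the index of the inverse condition (eq<->ne, cs<->cc, and
   so on); for instance arm_condition_codes[ARM_EQ ^ 1] is "ne".
   (Illustrative note; cf. ARM_INVERSE_CONDITION_CODE in arm.h.)  */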

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  const char *const name;
  enum processor_type core;
  const char *arch;
  const unsigned long flags;
  bool (* rtx_costs) (rtx, int, int, int *);
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */
#define ARM_CORE(NAME, ARCH, FLAGS, COSTS) \
  {#NAME, arm_none, #ARCH, FLAGS | FL_FOR_ARCH##ARCH, arm_##COSTS##_rtx_costs},
#include "arm-cores.def"
#undef ARM_CORE
  {NULL, arm_none, NULL, 0, NULL}
};

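/* As an illustration (assuming a representative arm-cores.def entry
   such as ARM_CORE(arm7tdmi, 4T, FL_CO_PROC, slowmul)), the macro
   above would expand that entry to

     {"arm7tdmi", arm_none, "4T", FL_CO_PROC | FL_FOR_ARCH4T,
      arm_slowmul_rtx_costs},

   filling in the cumulative architecture flags and the cost function
   automatically.  */
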
static const struct processors all_architectures[] =
{
  /* ARM Architectures */
  /* We don't specify rtx_costs here as it will be figured out
     from the core.  */

  {"armv2",   arm2,       "2",   FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH2, NULL},
  {"armv2a",  arm2,       "2",   FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH2, NULL},
  {"armv3",   arm6,       "3",   FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH3, NULL},
  {"armv3m",  arm7m,      "3M",  FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH3M, NULL},
  {"armv4",   arm7tdmi,   "4",   FL_CO_PROC | FL_MODE26 | FL_FOR_ARCH4, NULL},
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  {"armv4t",  arm7tdmi,   "4T",  FL_CO_PROC | FL_FOR_ARCH4T, NULL},
  {"armv5",   arm10tdmi,  "5",   FL_CO_PROC | FL_FOR_ARCH5, NULL},
  {"armv5t",  arm10tdmi,  "5T",  FL_CO_PROC | FL_FOR_ARCH5T, NULL},
  {"armv5e",  arm1026ejs, "5E",  FL_CO_PROC | FL_FOR_ARCH5E, NULL},
  {"armv5te", arm1026ejs, "5TE", FL_CO_PROC | FL_FOR_ARCH5TE, NULL},
  {"armv6",   arm1136js,  "6",   FL_CO_PROC | FL_FOR_ARCH6, NULL},
  {"armv6j",  arm1136js,  "6J",  FL_CO_PROC | FL_FOR_ARCH6J, NULL},
  {"ep9312",  ep9312,     "4T",  FL_LDSCHED | FL_CIRRUS | FL_FOR_ARCH4, NULL},
  {"iwmmxt",  iwmmxt,     "5TE", FL_LDSCHED | FL_STRONG | FL_FOR_ARCH5TE | FL_XSCALE | FL_IWMMXT, NULL},
  {NULL, arm_none, NULL, 0, NULL}
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};

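/* For example (a hypothetical command line), "-march=armv5te
   -mtune=xscale" leaves arm_select[1].string pointing at "armv5te"
   and arm_select[2].string at "xscale"; arm_override_options below
   then looks each string up in the associated processors table.  */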

/* The name of the preprocessor macro to define for this architecture.  */

char arm_arch_name[] = "__ARM_ARCH_0UNK__";

struct fpu_desc
{
  const char * name;
  enum fputype fpu;
};


/* Available values for -mfpu=.  */

static const struct fpu_desc all_fpus[] =
{
  {"fpa",       FPUTYPE_FPA},
  {"fpe2",      FPUTYPE_FPA_EMU2},
  {"fpe3",      FPUTYPE_FPA_EMU3},
  {"maverick",  FPUTYPE_MAVERICK},
  {"vfp",       FPUTYPE_VFP}
};


/* Floating point models used by the different hardware.
   See fputype in arm.h.  */

static const enum fputype fp_model_for_fpu[] =
{
  /* No FP hardware.  */
  ARM_FP_MODEL_UNKNOWN,         /* FPUTYPE_NONE  */
  ARM_FP_MODEL_FPA,             /* FPUTYPE_FPA  */
  ARM_FP_MODEL_FPA,             /* FPUTYPE_FPA_EMU2  */
  ARM_FP_MODEL_FPA,             /* FPUTYPE_FPA_EMU3  */
  ARM_FP_MODEL_MAVERICK,        /* FPUTYPE_MAVERICK  */
  ARM_FP_MODEL_VFP              /* FPUTYPE_VFP  */
};


struct float_abi
{
  const char * name;
  enum float_abi_type abi_type;
};


/* Available values for -mfloat-abi=.  */

static const struct float_abi all_float_abis[] =
{
  {"soft",      ARM_FLOAT_ABI_SOFT},
  {"softfp",    ARM_FLOAT_ABI_SOFTFP},
  {"hard",      ARM_FLOAT_ABI_HARD}
};


struct abi_name
{
  const char *name;
  enum arm_abi_type abi_type;
};


/* Available values for -mabi=.  */

static const struct abi_name arm_all_abis[] =
{
  {"apcs-gnu",  ARM_ABI_APCS},
  {"atpcs",     ARM_ABI_ATPCS},
  {"aapcs",     ARM_ABI_AAPCS},
  {"iwmmxt",    ARM_ABI_IWMMXT}
};

/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (unsigned long value)
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}

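/* To sketch the loop above for VALUE == 0x29 (binary 101001):

     0x29 & 0x28 == 0x28   count = 1
     0x28 & 0x27 == 0x20   count = 2
     0x20 & 0x1f == 0x00   count = 3

   Each AND clears exactly one set bit, so the loop iterates once per
   set bit rather than once per bit position.  */
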
/* Set up library functions unique to ARM.  */

static void
arm_init_libfuncs (void)
{
  /* There are no special library functions unless we are using the
     ARM BPABI.  */
  if (!TARGET_BPABI)
    return;

  /* The functions below are described in Section 4 of the "Run-Time
     ABI for the ARM architecture", Version 1.0.  */

  /* Double-precision floating-point arithmetic.  Table 2.  */
  set_optab_libfunc (add_optab, DFmode, "__aeabi_dadd");
  set_optab_libfunc (sdiv_optab, DFmode, "__aeabi_ddiv");
  set_optab_libfunc (smul_optab, DFmode, "__aeabi_dmul");
  set_optab_libfunc (neg_optab, DFmode, "__aeabi_dneg");
  set_optab_libfunc (sub_optab, DFmode, "__aeabi_dsub");

  /* Double-precision comparisons.  Table 3.  */
  set_optab_libfunc (eq_optab, DFmode, "__aeabi_dcmpeq");
  set_optab_libfunc (ne_optab, DFmode, NULL);
  set_optab_libfunc (lt_optab, DFmode, "__aeabi_dcmplt");
  set_optab_libfunc (le_optab, DFmode, "__aeabi_dcmple");
  set_optab_libfunc (ge_optab, DFmode, "__aeabi_dcmpge");
  set_optab_libfunc (gt_optab, DFmode, "__aeabi_dcmpgt");
  set_optab_libfunc (unord_optab, DFmode, "__aeabi_dcmpun");

  /* Single-precision floating-point arithmetic.  Table 4.  */
  set_optab_libfunc (add_optab, SFmode, "__aeabi_fadd");
  set_optab_libfunc (sdiv_optab, SFmode, "__aeabi_fdiv");
  set_optab_libfunc (smul_optab, SFmode, "__aeabi_fmul");
  set_optab_libfunc (neg_optab, SFmode, "__aeabi_fneg");
  set_optab_libfunc (sub_optab, SFmode, "__aeabi_fsub");

  /* Single-precision comparisons.  Table 5.  */
  set_optab_libfunc (eq_optab, SFmode, "__aeabi_fcmpeq");
  set_optab_libfunc (ne_optab, SFmode, NULL);
  set_optab_libfunc (lt_optab, SFmode, "__aeabi_fcmplt");
  set_optab_libfunc (le_optab, SFmode, "__aeabi_fcmple");
  set_optab_libfunc (ge_optab, SFmode, "__aeabi_fcmpge");
  set_optab_libfunc (gt_optab, SFmode, "__aeabi_fcmpgt");
  set_optab_libfunc (unord_optab, SFmode, "__aeabi_fcmpun");

  /* Floating-point to integer conversions.  Table 6.  */
  set_conv_libfunc (sfix_optab, SImode, DFmode, "__aeabi_d2iz");
  set_conv_libfunc (ufix_optab, SImode, DFmode, "__aeabi_d2uiz");
  set_conv_libfunc (sfix_optab, DImode, DFmode, "__aeabi_d2lz");
  set_conv_libfunc (ufix_optab, DImode, DFmode, "__aeabi_d2ulz");
  set_conv_libfunc (sfix_optab, SImode, SFmode, "__aeabi_f2iz");
  set_conv_libfunc (ufix_optab, SImode, SFmode, "__aeabi_f2uiz");
  set_conv_libfunc (sfix_optab, DImode, SFmode, "__aeabi_f2lz");
  set_conv_libfunc (ufix_optab, DImode, SFmode, "__aeabi_f2ulz");

  /* Conversions between floating types.  Table 7.  */
  set_conv_libfunc (trunc_optab, SFmode, DFmode, "__aeabi_d2f");
  set_conv_libfunc (sext_optab, DFmode, SFmode, "__aeabi_f2d");

  /* Integer to floating-point conversions.  Table 8.  */
  set_conv_libfunc (sfloat_optab, DFmode, SImode, "__aeabi_i2d");
  set_conv_libfunc (ufloat_optab, DFmode, SImode, "__aeabi_ui2d");
  set_conv_libfunc (sfloat_optab, DFmode, DImode, "__aeabi_l2d");
  set_conv_libfunc (ufloat_optab, DFmode, DImode, "__aeabi_ul2d");
  set_conv_libfunc (sfloat_optab, SFmode, SImode, "__aeabi_i2f");
  set_conv_libfunc (ufloat_optab, SFmode, SImode, "__aeabi_ui2f");
  set_conv_libfunc (sfloat_optab, SFmode, DImode, "__aeabi_l2f");
  set_conv_libfunc (ufloat_optab, SFmode, DImode, "__aeabi_ul2f");

  /* Long long.  Table 9.  */
  set_optab_libfunc (smul_optab, DImode, "__aeabi_lmul");
  set_optab_libfunc (sdivmod_optab, DImode, "__aeabi_ldivmod");
  set_optab_libfunc (udivmod_optab, DImode, "__aeabi_uldivmod");
  set_optab_libfunc (ashl_optab, DImode, "__aeabi_llsl");
  set_optab_libfunc (lshr_optab, DImode, "__aeabi_llsr");
  set_optab_libfunc (ashr_optab, DImode, "__aeabi_lasr");
  set_optab_libfunc (cmp_optab, DImode, "__aeabi_lcmp");
  set_optab_libfunc (ucmp_optab, DImode, "__aeabi_ulcmp");

  /* Integer (32/32->32) division.  \S 4.3.1.  */
  set_optab_libfunc (sdivmod_optab, SImode, "__aeabi_idivmod");
  set_optab_libfunc (udivmod_optab, SImode, "__aeabi_uidivmod");

  /* The divmod functions are designed so that they can be used for
     plain division, even though they return both the quotient and the
     remainder.  The quotient is returned in the usual location (i.e.,
     r0 for SImode, {r0, r1} for DImode), just as would be expected
     for an ordinary division routine.  Because the AAPCS calling
     conventions specify that all of { r0, r1, r2, r3 } are
     call-clobbered registers, there is no need to tell the compiler
     explicitly that those registers are clobbered by these
     routines.  */
  set_optab_libfunc (sdiv_optab, DImode, "__aeabi_ldivmod");
  set_optab_libfunc (udiv_optab, DImode, "__aeabi_uldivmod");
  set_optab_libfunc (sdiv_optab, SImode, "__aeabi_idivmod");
  set_optab_libfunc (udiv_optab, SImode, "__aeabi_uidivmod");
}

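/* To illustrate the reuse described above: a plain division such as

     int quotient = numerator / denominator;   (user code, for illustration)

   can be compiled into a call to __aeabi_idivmod, which returns the
   quotient in r0 and the remainder in r1; since only r0 is consumed,
   no separate quotient-only entry point is required.  */
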
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options (void)
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                /* Set the architecture define.  */
                if (i != 2)
                  sprintf (arm_arch_name, "__ARM_ARCH_%s__", sel->arch);

                /* Determine the processor core for which we should
                   tune code-generation.  */
                if (/* -mcpu= is a sensible default.  */
                    i == 0
                    /* If -march= is used, and -mcpu= has not been used,
                       assume that we should tune for a representative
                       CPU from that architecture.  */
                    || i == 1
                    /* -mtune= overrides -mcpu= and -march=.  */
                    || i == 2)
                  arm_tune = (enum processor_type) (sel - ptr->processors);

                if (i != 2)
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int        sought;
      enum processor_type cpu;

      cpu = TARGET_CPU_DEFAULT;
      if (cpu == arm_none)
        {
#ifdef SUBTARGET_CPU_DEFAULT
          /* Use the subtarget default CPU if none was specified by
             configure.  */
          cpu = SUBTARGET_CPU_DEFAULT;
#endif
          /* Default to ARM6.  */
          if (cpu == arm_none)
            cpu = arm6;
        }
      sel = &all_cores[cpu];

      insn_flags = sel->flags;

      /* Now check to see if the user has specified a command line
         switch that requires certain abilities from the CPU.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 If we cannot find a CPU that has both the
                 characteristics of the default CPU and the given
                 command line options, we scan the array again looking
                 for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
      sprintf (arm_arch_name, "__ARM_ARCH_%s__", sel->arch);
      if (arm_tune == arm_none)
        arm_tune = (enum processor_type) (sel - all_cores);
    }

  /* The processor for which we should tune should now have been
     chosen.  */
  if (arm_tune == arm_none)
    abort ();

  tune_flags = all_cores[(int)arm_tune].flags;
  targetm.rtx_costs = all_cores[(int)arm_tune].rtx_costs;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_arch3m = (insn_flags & FL_ARCH3M) != 0;
  arm_arch4 = (insn_flags & FL_ARCH4) != 0;
  arm_arch4t = arm_arch4 & ((insn_flags & FL_THUMB) != 0);
  arm_arch5 = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
  arm_arch6 = (insn_flags & FL_ARCH6) != 0;
  arm_arch_xscale = (insn_flags & FL_XSCALE) != 0;
  arm_arch_cirrus = (insn_flags & FL_CIRRUS) != 0;

  arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong = (tune_flags & FL_STRONG) != 0;
  thumb_code = (TARGET_ARM == 0);
  arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
                    && !(tune_flags & FL_ARCH4))) != 0;
  arm_tune_xscale = (tune_flags & FL_XSCALE) != 0;
  arm_arch_iwmmxt = (insn_flags & FL_IWMMXT) != 0;

  /* V5 code we generate is completely interworking capable, so we turn off
     TARGET_INTERWORK here to avoid many tests later on.  */

  /* XXX However, we must pass the right pre-processor defines to CPP
     or GLD can get confused.  This is a hack.  */
  if (TARGET_INTERWORK)
    arm_cpp_interwork = 1;

  if (arm_arch5)
    target_flags &= ~ARM_FLAG_INTERWORK;

  if (target_abi_name)
    {
      for (i = 0; i < ARRAY_SIZE (arm_all_abis); i++)
        {
          if (streq (arm_all_abis[i].name, target_abi_name))
            {
              arm_abi = arm_all_abis[i].abi_type;
              break;
            }
        }
      if (i == ARRAY_SIZE (arm_all_abis))
        error ("invalid ABI option: -mabi=%s", target_abi_name);
    }
  else
    arm_abi = ARM_DEFAULT_ABI;

  if (TARGET_IWMMXT && !ARM_DOUBLEWORD_ALIGN)
    error ("iwmmxt requires an AAPCS compatible ABI for proper operation");

  if (TARGET_IWMMXT_ABI && !TARGET_IWMMXT)
    error ("iwmmxt abi requires an iwmmxt capable cpu");

  arm_fp_model = ARM_FP_MODEL_UNKNOWN;
  if (target_fpu_name == NULL && target_fpe_name != NULL)
    {
      if (streq (target_fpe_name, "2"))
        target_fpu_name = "fpe2";
      else if (streq (target_fpe_name, "3"))
        target_fpu_name = "fpe3";
      else
        error ("invalid floating point emulation option: -mfpe=%s",
               target_fpe_name);
    }
  if (target_fpu_name != NULL)
    {
      /* The user specified a FPU.  */
      for (i = 0; i < ARRAY_SIZE (all_fpus); i++)
        {
          if (streq (all_fpus[i].name, target_fpu_name))
            {
              arm_fpu_arch = all_fpus[i].fpu;
              arm_fpu_tune = arm_fpu_arch;
              arm_fp_model = fp_model_for_fpu[arm_fpu_arch];
              break;
            }
        }
      if (arm_fp_model == ARM_FP_MODEL_UNKNOWN)
        error ("invalid floating point option: -mfpu=%s", target_fpu_name);
    }
  else
    {
#ifdef FPUTYPE_DEFAULT
      /* Use the default if it is specified for this platform.  */
      arm_fpu_arch = FPUTYPE_DEFAULT;
      arm_fpu_tune = FPUTYPE_DEFAULT;
#else
      /* Pick one based on CPU type.  */
      /* ??? Some targets assume FPA is the default.
      if ((insn_flags & FL_VFP) != 0)
        arm_fpu_arch = FPUTYPE_VFP;
      else
      */
      if (arm_arch_cirrus)
        arm_fpu_arch = FPUTYPE_MAVERICK;
      else
        arm_fpu_arch = FPUTYPE_FPA_EMU2;
#endif
      if (tune_flags & FL_CO_PROC && arm_fpu_arch == FPUTYPE_FPA_EMU2)
        arm_fpu_tune = FPUTYPE_FPA;
      else
        arm_fpu_tune = arm_fpu_arch;
      arm_fp_model = fp_model_for_fpu[arm_fpu_arch];
      if (arm_fp_model == ARM_FP_MODEL_UNKNOWN)
        abort ();
    }

  if (target_float_abi_name != NULL)
    {
      /* The user specified a FP ABI.  */
      for (i = 0; i < ARRAY_SIZE (all_float_abis); i++)
        {
          if (streq (all_float_abis[i].name, target_float_abi_name))
            {
              arm_float_abi = all_float_abis[i].abi_type;
              break;
            }
        }
      if (i == ARRAY_SIZE (all_float_abis))
        error ("invalid floating point abi: -mfloat-abi=%s",
               target_float_abi_name);
    }
  else
    {
      /* Use soft-float target flag.  */
      if (target_flags & ARM_FLAG_SOFT_FLOAT)
        arm_float_abi = ARM_FLOAT_ABI_SOFT;
      else
        arm_float_abi = ARM_FLOAT_ABI_HARD;
    }

  if (arm_float_abi == ARM_FLOAT_ABI_SOFTFP)
    sorry ("-mfloat-abi=softfp");
  /* If soft-float is specified then don't use FPU.  */
  if (TARGET_SOFT_FLOAT)
    arm_fpu_arch = FPUTYPE_NONE;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT
       || arm_fpu_tune == FPUTYPE_FPA_EMU2
       || arm_fpu_tune == FPUTYPE_FPA_EMU3)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  /* Override the default structure alignment for AAPCS ABI.  */
  if (arm_abi == ARM_ABI_AAPCS)
    arm_structure_size_boundary = 8;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32
          || (ARM_DOUBLEWORD_ALIGN && size == 64))
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to %s",
                 ARM_DOUBLEWORD_ALIGN ? "8, 32 or 64": "8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  if (optimize_size)
    {
      /* There's some dispute as to whether this should be 1 or 2.  However,
         experiments seem to show that in pathological cases a setting of
         1 degrades less severely than a setting of 2.  This could change if
         other parts of the compiler change their behavior.  */
      arm_constant_limit = 1;

      /* If optimizing for size, bump the number of instructions that we
         are prepared to conditionally execute (even on a StrongARM).  */
      max_insns_skipped = 6;
    }
  else
    {
      /* For processors with load scheduling, it never costs more than
         2 cycles to load a constant, and the load scheduler may well
         reduce that to 1.  */
      if (tune_flags & FL_LDSCHED)
        arm_constant_limit = 1;

      /* On XScale the longer latency of a load makes it more difficult
         to achieve a good schedule, so it's faster to synthesize
         constants that can be done in two insns.  */
      if (arm_tune_xscale)
        arm_constant_limit = 2;

      /* StrongARM has early execution of branches, so a sequence
         that is worth skipping is shorter.  */
      if (arm_is_strong)
        max_insns_skipped = 3;
    }

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots (void)
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
\f
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};

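/* For example (user source, not part of this file), an IRQ handler
   may be declared as

     void handler (void) __attribute__ ((interrupt ("IRQ")));

   The string argument is matched against the table above by
   arm_isr_value; if the argument is omitted, IRQ is assumed.  */
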
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (tree argument)
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}

/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type (void)
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && TREE_NOTHROW (current_function_decl)
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (cfun->static_chain_decl != NULL)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  a = lookup_attribute ("isr", attr);
  if (a == NULL_TREE)
    a = lookup_attribute ("interrupt", attr);

  if (a == NULL_TREE)
    type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
  else
    type |= arm_isr_value (TREE_VALUE (a));

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type (void)
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
\f
/* Return 1 if it is possible to return using a single instruction.
   If SIBLING is non-null, this is a test for a return before a sibling
   call.  SIBLING is the call insn, so we can examine its register usage.  */

int
use_return_insn (int iscond, rtx sibling)
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;
  unsigned HOST_WIDE_INT stack_adjust;
  arm_stack_offsets *offsets;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  offsets = arm_get_frame_offsets ();
  stack_adjust = offsets->outgoing_args - offsets->saved_regs;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return () */
      || current_function_calls_eh_return
      /* Or if the function calls alloca */
      || current_function_calls_alloca
      /* Or if there is a stack adjustment.  However, if the stack pointer
         is saved on the stack, we can use a pre-incrementing stack load.  */
      || !(stack_adjust == 0 || (frame_pointer_needed && stack_adjust == 4)))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Unfortunately, the insn

       ldmib sp, {..., sp, ...}

     triggers a bug on most SA-110 based devices, such that the stack
     pointer won't be correctly restored if the instruction takes a
     page fault.  We work around this problem by popping r3 along with
     the other registers, since that is never slower than executing
     another instruction.

     We test for !arm_arch5 here, because code for any architecture
     less than this could potentially be run on one of the buggy
     chips.  */
  if (stack_adjust == 4 && !arm_arch5)
    {
      /* Validate that r3 is a call-clobbered register (always true in
         the default ABI) ...  */
      if (!call_used_regs[3])
        return 0;

      /* ... that it isn't being used for a return value (always true
         until we implement return-in-regs), or for a tail-call
         argument ...  */
      if (sibling)
        {
          if (GET_CODE (sibling) != CALL_INSN)
            abort ();

          if (find_regno_fusage (sibling, USE, 3))
            return 0;
        }

      /* ... and that there are no call-saved registers in r0-r2
         (always true in the default ABI).  */
      if (saved_int_regs & 0x7)
        return 0;
    }

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
         conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
        return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPA regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT && TARGET_FPA)
    for (regno = FIRST_FPA_REGNUM; regno <= LAST_FPA_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  /* Likewise VFP regs.  */
  if (TARGET_HARD_FLOAT && TARGET_VFP)
    for (regno = FIRST_VFP_REGNUM; regno <= LAST_VFP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  if (TARGET_REALLY_IWMMXT)
    for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
      if (regs_ever_live[regno] && ! call_used_regs [regno])
        return 0;

  return 1;
}

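/* When use_return_insn returns 1 the whole epilogue can be folded
   into one instruction, e.g.

     ldmfd   sp!, {r4, r5, pc}   @ illustrative register list

   which restores the saved registers and returns in a single memory
   operation.  */
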
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (HOST_WIDE_INT i)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                       >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}

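/* The loop above implements the ARM rule that a data-processing
   immediate is an 8-bit value rotated right by an even amount.
   For example (values chosen for illustration):

     const_ok_for_arm (0x000000ff)   returns TRUE   (no rotation)
     const_ok_for_arm (0x00ff0000)   returns TRUE   (0xff ror 16)
     const_ok_for_arm (0x00ff00ff)   returns FALSE  (two 8-bit fields)  */
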
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (HOST_WIDE_INT i, enum rtx_code code)
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}

1456/* Emit a sequence of insns to handle a large constant.
1457 CODE is the code of the operation required, it can be any of SET, PLUS,
1458 IOR, AND, XOR, MINUS;
1459 MODE is the mode in which the operation is being performed;
1460 VAL is the integer to operate on;
1461 SOURCE is the other operand (a register, or a null-pointer for SET);
1462 SUBTARGETS means it is safe to create scratch registers if that will
2b835d68
RE
1463 either produce a simpler sequence, or we will want to cse the values.
1464 Return value is the number of insns emitted. */
e2c671ba
RE
1465
1466int
a406f566 1467arm_split_constant (enum rtx_code code, enum machine_mode mode, rtx insn,
e32bac5b 1468 HOST_WIDE_INT val, rtx target, rtx source, int subtargets)
2b835d68 1469{
a406f566
MM
1470 rtx cond;
1471
1472 if (insn && GET_CODE (PATTERN (insn)) == COND_EXEC)
1473 cond = COND_EXEC_TEST (PATTERN (insn));
1474 else
1475 cond = NULL_RTX;
1476
2b835d68
RE
1477 if (subtargets || code == SET
1478 || (GET_CODE (target) == REG && GET_CODE (source) == REG
1479 && REGNO (target) != REGNO (source)))
1480 {
4b632bf1 1481 /* After arm_reorg has been called, we can't fix up expensive
05713b80 1482 constants by pushing them into memory, so we must synthesize
4b632bf1
RE
1483 them in-line, regardless of the cost. This is only likely to
1484 be more costly on chips that have load delay slots and we are
1485 compiling without running the scheduler (so no splitting
aec3cfba
NC
1486 occurred before the final instruction emission).
1487
1488 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
aec3cfba 1489 */
5895f793 1490 if (!after_arm_reorg
a406f566
MM
1491 && !cond
1492 && (arm_gen_constant (code, mode, NULL_RTX, val, target, source,
1493 1, 0)
4b632bf1 1494 > arm_constant_limit + (code != SET)))
2b835d68
RE
1495 {
1496 if (code == SET)
1497 {
 1498 /* Currently SET is the only monadic value for CODE; all
 1499 the rest are dyadic. */
43cffd11 1500 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
2b835d68
RE
1501 return 1;
1502 }
1503 else
1504 {
1505 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
1506
43cffd11 1507 emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
2b835d68
RE
1508 /* For MINUS, the value is subtracted from, since we never
1509 have subtraction of a constant. */
1510 if (code == MINUS)
43cffd11 1511 emit_insn (gen_rtx_SET (VOIDmode, target,
d5b7b3ae 1512 gen_rtx_MINUS (mode, temp, source)));
2b835d68 1513 else
43cffd11 1514 emit_insn (gen_rtx_SET (VOIDmode, target,
1c563bed 1515 gen_rtx_fmt_ee (code, mode, source, temp)));
2b835d68
RE
1516 return 2;
1517 }
1518 }
1519 }
1520
a406f566
MM
1521 return arm_gen_constant (code, mode, cond, val, target, source, subtargets,
1522 1);
2b835d68
RE
1523}
1524
ceebdb09 1525static int
e32bac5b 1526count_insns_for_constant (HOST_WIDE_INT remainder, int i)
ceebdb09
PB
1527{
1528 HOST_WIDE_INT temp1;
1529 int num_insns = 0;
1530 do
1531 {
1532 int end;
1533
1534 if (i <= 0)
1535 i += 32;
1536 if (remainder & (3 << (i - 2)))
1537 {
1538 end = i - 8;
1539 if (end < 0)
1540 end += 32;
1541 temp1 = remainder & ((0x0ff << end)
1542 | ((i < end) ? (0xff >> (32 - end)) : 0));
1543 remainder &= ~temp1;
1544 num_insns++;
1545 i -= 6;
1546 }
1547 i -= 2;
1548 } while (remainder);
1549 return num_insns;
1550}
1551
a406f566
MM
1552/* Emit an instruction with the indicated PATTERN. If COND is
1553 non-NULL, conditionalize the execution of the instruction on COND
1554 being true. */
1555
1556static void
1557emit_constant_insn (rtx cond, rtx pattern)
1558{
1559 if (cond)
1560 pattern = gen_rtx_COND_EXEC (VOIDmode, copy_rtx (cond), pattern);
1561 emit_insn (pattern);
1562}
1563
2b835d68
RE
1564/* As above, but extra parameter GENERATE which, if clear, suppresses
1565 RTL generation. */
1d6e90ac 1566
d5b7b3ae 1567static int
a406f566 1568arm_gen_constant (enum rtx_code code, enum machine_mode mode, rtx cond,
e32bac5b
RE
1569 HOST_WIDE_INT val, rtx target, rtx source, int subtargets,
1570 int generate)
e2c671ba 1571{
e2c671ba
RE
1572 int can_invert = 0;
1573 int can_negate = 0;
1574 int can_negate_initial = 0;
1575 int can_shift = 0;
1576 int i;
1577 int num_bits_set = 0;
1578 int set_sign_bit_copies = 0;
1579 int clear_sign_bit_copies = 0;
1580 int clear_zero_bit_copies = 0;
1581 int set_zero_bit_copies = 0;
1582 int insns = 0;
e2c671ba 1583 unsigned HOST_WIDE_INT temp1, temp2;
30cf4896 1584 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
e2c671ba 1585
d5b7b3ae 1586 /* Find out which operations are safe for a given CODE. Also do a quick
e2c671ba
RE
1587 check for degenerate cases; these can occur when DImode operations
1588 are split. */
1589 switch (code)
1590 {
1591 case SET:
1592 can_invert = 1;
1593 can_shift = 1;
1594 can_negate = 1;
1595 break;
1596
1597 case PLUS:
1598 can_negate = 1;
1599 can_negate_initial = 1;
1600 break;
1601
1602 case IOR:
30cf4896 1603 if (remainder == 0xffffffff)
e2c671ba 1604 {
2b835d68 1605 if (generate)
a406f566
MM
1606 emit_constant_insn (cond,
1607 gen_rtx_SET (VOIDmode, target,
1608 GEN_INT (ARM_SIGN_EXTEND (val))));
e2c671ba
RE
1609 return 1;
1610 }
1611 if (remainder == 0)
1612 {
1613 if (reload_completed && rtx_equal_p (target, source))
1614 return 0;
2b835d68 1615 if (generate)
a406f566
MM
1616 emit_constant_insn (cond,
1617 gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
1618 return 1;
1619 }
1620 break;
1621
1622 case AND:
1623 if (remainder == 0)
1624 {
2b835d68 1625 if (generate)
a406f566
MM
1626 emit_constant_insn (cond,
1627 gen_rtx_SET (VOIDmode, target, const0_rtx));
e2c671ba
RE
1628 return 1;
1629 }
30cf4896 1630 if (remainder == 0xffffffff)
e2c671ba
RE
1631 {
1632 if (reload_completed && rtx_equal_p (target, source))
1633 return 0;
2b835d68 1634 if (generate)
a406f566
MM
1635 emit_constant_insn (cond,
1636 gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
1637 return 1;
1638 }
1639 can_invert = 1;
1640 break;
1641
1642 case XOR:
1643 if (remainder == 0)
1644 {
1645 if (reload_completed && rtx_equal_p (target, source))
1646 return 0;
2b835d68 1647 if (generate)
a406f566
MM
1648 emit_constant_insn (cond,
1649 gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
1650 return 1;
1651 }
30cf4896 1652 if (remainder == 0xffffffff)
e2c671ba 1653 {
2b835d68 1654 if (generate)
a406f566
MM
1655 emit_constant_insn (cond,
1656 gen_rtx_SET (VOIDmode, target,
1657 gen_rtx_NOT (mode, source)));
e2c671ba
RE
1658 return 1;
1659 }
1660
1661 /* We don't know how to handle this yet below. */
1662 abort ();
1663
1664 case MINUS:
1665 /* We treat MINUS as (val - source), since (source - val) is always
1666 passed as (source + (-val)). */
1667 if (remainder == 0)
1668 {
2b835d68 1669 if (generate)
a406f566
MM
1670 emit_constant_insn (cond,
1671 gen_rtx_SET (VOIDmode, target,
1672 gen_rtx_NEG (mode, source)));
e2c671ba
RE
1673 return 1;
1674 }
1675 if (const_ok_for_arm (val))
1676 {
2b835d68 1677 if (generate)
a406f566
MM
1678 emit_constant_insn (cond,
1679 gen_rtx_SET (VOIDmode, target,
1680 gen_rtx_MINUS (mode, GEN_INT (val),
1681 source)));
e2c671ba
RE
1682 return 1;
1683 }
1684 can_negate = 1;
1685
1686 break;
1687
1688 default:
1689 abort ();
1690 }
1691
6354dc9b 1692 /* If we can do it in one insn get out quickly. */
e2c671ba
RE
1693 if (const_ok_for_arm (val)
1694 || (can_negate_initial && const_ok_for_arm (-val))
1695 || (can_invert && const_ok_for_arm (~val)))
1696 {
2b835d68 1697 if (generate)
a406f566
MM
1698 emit_constant_insn (cond,
1699 gen_rtx_SET (VOIDmode, target,
1700 (source
1701 ? gen_rtx_fmt_ee (code, mode, source,
1702 GEN_INT (val))
1703 : GEN_INT (val))));
e2c671ba
RE
1704 return 1;
1705 }
1706
e2c671ba 1707 /* Calculate a few attributes that may be useful for specific
6354dc9b 1708 optimizations. */
e2c671ba
RE
1709 for (i = 31; i >= 0; i--)
1710 {
1711 if ((remainder & (1 << i)) == 0)
1712 clear_sign_bit_copies++;
1713 else
1714 break;
1715 }
1716
1717 for (i = 31; i >= 0; i--)
1718 {
1719 if ((remainder & (1 << i)) != 0)
1720 set_sign_bit_copies++;
1721 else
1722 break;
1723 }
1724
1725 for (i = 0; i <= 31; i++)
1726 {
1727 if ((remainder & (1 << i)) == 0)
1728 clear_zero_bit_copies++;
1729 else
1730 break;
1731 }
1732
1733 for (i = 0; i <= 31; i++)
1734 {
1735 if ((remainder & (1 << i)) != 0)
1736 set_zero_bit_copies++;
1737 else
1738 break;
1739 }
1740
1741 switch (code)
1742 {
1743 case SET:
1744 /* See if we can do this by sign_extending a constant that is known
 1745 to be negative. This is a good way of doing it, since the shift
1746 may well merge into a subsequent insn. */
1747 if (set_sign_bit_copies > 1)
1748 {
1749 if (const_ok_for_arm
1750 (temp1 = ARM_SIGN_EXTEND (remainder
1751 << (set_sign_bit_copies - 1))))
1752 {
2b835d68
RE
1753 if (generate)
1754 {
d499463f 1755 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
a406f566
MM
1756 emit_constant_insn (cond,
1757 gen_rtx_SET (VOIDmode, new_src,
1758 GEN_INT (temp1)));
1759 emit_constant_insn (cond,
1760 gen_ashrsi3 (target, new_src,
1761 GEN_INT (set_sign_bit_copies - 1)));
2b835d68 1762 }
e2c671ba
RE
1763 return 2;
1764 }
1765 /* For an inverted constant, we will need to set the low bits,
1766 these will be shifted out of harm's way. */
1767 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
1768 if (const_ok_for_arm (~temp1))
1769 {
2b835d68
RE
1770 if (generate)
1771 {
d499463f 1772 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
a406f566
MM
1773 emit_constant_insn (cond,
1774 gen_rtx_SET (VOIDmode, new_src,
1775 GEN_INT (temp1)));
1776 emit_constant_insn (cond,
1777 gen_ashrsi3 (target, new_src,
1778 GEN_INT (set_sign_bit_copies - 1)));
2b835d68 1779 }
e2c671ba
RE
1780 return 2;
1781 }
1782 }
1783
1784 /* See if we can generate this by setting the bottom (or the top)
1785 16 bits, and then shifting these into the other half of the
1786 word. We only look for the simplest cases, to do more would cost
1787 too much. Be careful, however, not to generate this when the
1788 alternative would take fewer insns. */
30cf4896 1789 if (val & 0xffff0000)
e2c671ba 1790 {
30cf4896 1791 temp1 = remainder & 0xffff0000;
e2c671ba
RE
1792 temp2 = remainder & 0x0000ffff;
1793
6354dc9b 1794 /* Overlaps outside this range are best done using other methods. */
e2c671ba
RE
1795 for (i = 9; i < 24; i++)
1796 {
30cf4896 1797 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
5895f793 1798 && !const_ok_for_arm (temp2))
e2c671ba 1799 {
d499463f
RE
1800 rtx new_src = (subtargets
1801 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1802 : target);
a406f566 1803 insns = arm_gen_constant (code, mode, cond, temp2, new_src,
2b835d68 1804 source, subtargets, generate);
e2c671ba 1805 source = new_src;
2b835d68 1806 if (generate)
a406f566
MM
1807 emit_constant_insn
1808 (cond,
1809 gen_rtx_SET
1810 (VOIDmode, target,
1811 gen_rtx_IOR (mode,
1812 gen_rtx_ASHIFT (mode, source,
1813 GEN_INT (i)),
1814 source)));
e2c671ba
RE
1815 return insns + 1;
1816 }
1817 }
1818
6354dc9b 1819 /* Don't duplicate cases already considered. */
e2c671ba
RE
1820 for (i = 17; i < 24; i++)
1821 {
1822 if (((temp1 | (temp1 >> i)) == remainder)
5895f793 1823 && !const_ok_for_arm (temp1))
e2c671ba 1824 {
d499463f
RE
1825 rtx new_src = (subtargets
1826 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1827 : target);
a406f566 1828 insns = arm_gen_constant (code, mode, cond, temp1, new_src,
2b835d68 1829 source, subtargets, generate);
e2c671ba 1830 source = new_src;
2b835d68 1831 if (generate)
a406f566
MM
1832 emit_constant_insn
1833 (cond,
1834 gen_rtx_SET (VOIDmode, target,
43cffd11
RE
1835 gen_rtx_IOR
1836 (mode,
1837 gen_rtx_LSHIFTRT (mode, source,
1838 GEN_INT (i)),
1839 source)));
e2c671ba
RE
1840 return insns + 1;
1841 }
1842 }
1843 }
1844 break;
1845
1846 case IOR:
1847 case XOR:
7b64da89
RE
1848 /* If we have IOR or XOR, and the constant can be loaded in a
1849 single instruction, and we can find a temporary to put it in,
e2c671ba
RE
1850 then this can be done in two instructions instead of 3-4. */
1851 if (subtargets
d499463f 1852 /* TARGET can't be NULL if SUBTARGETS is 0. */
5895f793 1853 || (reload_completed && !reg_mentioned_p (target, source)))
e2c671ba 1854 {
5895f793 1855 if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
e2c671ba 1856 {
2b835d68
RE
1857 if (generate)
1858 {
1859 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
e2c671ba 1860
a406f566
MM
1861 emit_constant_insn (cond,
1862 gen_rtx_SET (VOIDmode, sub,
1863 GEN_INT (val)));
1864 emit_constant_insn (cond,
1865 gen_rtx_SET (VOIDmode, target,
1866 gen_rtx_fmt_ee (code, mode,
1867 source, sub)));
2b835d68 1868 }
e2c671ba
RE
1869 return 2;
1870 }
1871 }
1872
1873 if (code == XOR)
1874 break;
1875
1876 if (set_sign_bit_copies > 8
1877 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1878 {
2b835d68
RE
1879 if (generate)
1880 {
1881 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1882 rtx shift = GEN_INT (set_sign_bit_copies);
1883
a406f566
MM
1884 emit_constant_insn
1885 (cond,
1886 gen_rtx_SET (VOIDmode, sub,
1887 gen_rtx_NOT (mode,
1888 gen_rtx_ASHIFT (mode,
1889 source,
1890 shift))));
1891 emit_constant_insn
1892 (cond,
1893 gen_rtx_SET (VOIDmode, target,
1894 gen_rtx_NOT (mode,
1895 gen_rtx_LSHIFTRT (mode, sub,
1896 shift))));
2b835d68 1897 }
e2c671ba
RE
1898 return 2;
1899 }
1900
1901 if (set_zero_bit_copies > 8
1902 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1903 {
2b835d68
RE
1904 if (generate)
1905 {
1906 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1907 rtx shift = GEN_INT (set_zero_bit_copies);
1908
a406f566
MM
1909 emit_constant_insn
1910 (cond,
1911 gen_rtx_SET (VOIDmode, sub,
1912 gen_rtx_NOT (mode,
1913 gen_rtx_LSHIFTRT (mode,
1914 source,
1915 shift))));
1916 emit_constant_insn
1917 (cond,
1918 gen_rtx_SET (VOIDmode, target,
1919 gen_rtx_NOT (mode,
1920 gen_rtx_ASHIFT (mode, sub,
1921 shift))));
2b835d68 1922 }
e2c671ba
RE
1923 return 2;
1924 }
1925
5895f793 1926 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
e2c671ba 1927 {
2b835d68
RE
1928 if (generate)
1929 {
1930 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
a406f566
MM
1931 emit_constant_insn (cond,
1932 gen_rtx_SET (VOIDmode, sub,
1933 gen_rtx_NOT (mode, source)));
2b835d68
RE
1934 source = sub;
1935 if (subtargets)
1936 sub = gen_reg_rtx (mode);
a406f566
MM
1937 emit_constant_insn (cond,
1938 gen_rtx_SET (VOIDmode, sub,
1939 gen_rtx_AND (mode, source,
1940 GEN_INT (temp1))));
1941 emit_constant_insn (cond,
1942 gen_rtx_SET (VOIDmode, target,
1943 gen_rtx_NOT (mode, sub)));
2b835d68 1944 }
e2c671ba
RE
1945 return 3;
1946 }
1947 break;
1948
1949 case AND:
1950 /* See if two shifts will do 2 or more insn's worth of work. */
1951 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1952 {
30cf4896 1953 HOST_WIDE_INT shift_mask = ((0xffffffff
e2c671ba 1954 << (32 - clear_sign_bit_copies))
30cf4896 1955 & 0xffffffff);
e2c671ba 1956
30cf4896 1957 if ((remainder | shift_mask) != 0xffffffff)
e2c671ba 1958 {
2b835d68
RE
1959 if (generate)
1960 {
d499463f 1961 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
a406f566
MM
1962 insns = arm_gen_constant (AND, mode, cond,
1963 remainder | shift_mask,
d499463f
RE
1964 new_src, source, subtargets, 1);
1965 source = new_src;
2b835d68
RE
1966 }
1967 else
d499463f
RE
1968 {
1969 rtx targ = subtargets ? NULL_RTX : target;
a406f566
MM
1970 insns = arm_gen_constant (AND, mode, cond,
1971 remainder | shift_mask,
d499463f
RE
1972 targ, source, subtargets, 0);
1973 }
2b835d68
RE
1974 }
1975
1976 if (generate)
1977 {
d499463f
RE
1978 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1979 rtx shift = GEN_INT (clear_sign_bit_copies);
1980
1981 emit_insn (gen_ashlsi3 (new_src, source, shift));
1982 emit_insn (gen_lshrsi3 (target, new_src, shift));
e2c671ba
RE
1983 }
1984
e2c671ba
RE
1985 return insns + 2;
1986 }
1987
1988 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1989 {
1990 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
e2c671ba 1991
30cf4896 1992 if ((remainder | shift_mask) != 0xffffffff)
e2c671ba 1993 {
2b835d68
RE
1994 if (generate)
1995 {
d499463f
RE
1996 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1997
a406f566
MM
1998 insns = arm_gen_constant (AND, mode, cond,
1999 remainder | shift_mask,
d499463f
RE
2000 new_src, source, subtargets, 1);
2001 source = new_src;
2b835d68
RE
2002 }
2003 else
d499463f
RE
2004 {
2005 rtx targ = subtargets ? NULL_RTX : target;
2006
a406f566
MM
2007 insns = arm_gen_constant (AND, mode, cond,
2008 remainder | shift_mask,
d499463f
RE
2009 targ, source, subtargets, 0);
2010 }
2b835d68
RE
2011 }
2012
2013 if (generate)
2014 {
d499463f
RE
2015 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2016 rtx shift = GEN_INT (clear_zero_bit_copies);
2017
2018 emit_insn (gen_lshrsi3 (new_src, source, shift));
2019 emit_insn (gen_ashlsi3 (target, new_src, shift));
e2c671ba
RE
2020 }
2021
e2c671ba
RE
2022 return insns + 2;
2023 }
2024
2025 break;
2026
2027 default:
2028 break;
2029 }
2030
2031 for (i = 0; i < 32; i++)
2032 if (remainder & (1 << i))
2033 num_bits_set++;
2034
2035 if (code == AND || (can_invert && num_bits_set > 16))
30cf4896 2036 remainder = (~remainder) & 0xffffffff;
e2c671ba 2037 else if (code == PLUS && num_bits_set > 16)
30cf4896 2038 remainder = (-remainder) & 0xffffffff;
e2c671ba
RE
2039 else
2040 {
2041 can_invert = 0;
2042 can_negate = 0;
2043 }
2044
 2045 /* Now try to find a way of doing the job in either two or three
2046 instructions.
2047 We start by looking for the largest block of zeros that are aligned on
 2048 a 2-bit boundary; we then fill up the temps, wrapping around to the
2049 top of the word when we drop off the bottom.
6354dc9b 2050 In the worst case this code should produce no more than four insns. */
e2c671ba
RE
2051 {
2052 int best_start = 0;
2053 int best_consecutive_zeros = 0;
2054
2055 for (i = 0; i < 32; i += 2)
2056 {
2057 int consecutive_zeros = 0;
2058
5895f793 2059 if (!(remainder & (3 << i)))
e2c671ba 2060 {
5895f793 2061 while ((i < 32) && !(remainder & (3 << i)))
e2c671ba
RE
2062 {
2063 consecutive_zeros += 2;
2064 i += 2;
2065 }
2066 if (consecutive_zeros > best_consecutive_zeros)
2067 {
2068 best_consecutive_zeros = consecutive_zeros;
2069 best_start = i - consecutive_zeros;
2070 }
2071 i -= 2;
2072 }
2073 }
2074
ceebdb09
PB
2075 /* So long as it won't require any more insns to do so, it's
2076 desirable to emit a small constant (in bits 0...9) in the last
2077 insn. This way there is more chance that it can be combined with
2078 a later addressing insn to form a pre-indexed load or store
2079 operation. Consider:
2080
2081 *((volatile int *)0xe0000100) = 1;
2082 *((volatile int *)0xe0000110) = 2;
2083
2084 We want this to wind up as:
2085
2086 mov rA, #0xe0000000
2087 mov rB, #1
2088 str rB, [rA, #0x100]
2089 mov rB, #2
2090 str rB, [rA, #0x110]
2091
2092 rather than having to synthesize both large constants from scratch.
2093
2094 Therefore, we calculate how many insns would be required to emit
2095 the constant starting from `best_start', and also starting from
 2096 zero (i.e. with bit 31 first to be output). If `best_start' doesn't
2097 yield a shorter sequence, we may as well use zero. */
2098 if (best_start != 0
2099 && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
2100 && (count_insns_for_constant (remainder, 0) <=
2101 count_insns_for_constant (remainder, best_start)))
2102 best_start = 0;
2103
2104 /* Now start emitting the insns. */
e2c671ba
RE
2105 i = best_start;
2106 do
2107 {
2108 int end;
2109
2110 if (i <= 0)
2111 i += 32;
2112 if (remainder & (3 << (i - 2)))
2113 {
2114 end = i - 8;
2115 if (end < 0)
2116 end += 32;
2117 temp1 = remainder & ((0x0ff << end)
2118 | ((i < end) ? (0xff >> (32 - end)) : 0));
2119 remainder &= ~temp1;
2120
d499463f 2121 if (generate)
e2c671ba 2122 {
9503f3d1
RH
2123 rtx new_src, temp1_rtx;
2124
2125 if (code == SET || code == MINUS)
2126 {
2127 new_src = (subtargets ? gen_reg_rtx (mode) : target);
96ae8197 2128 if (can_invert && code != MINUS)
9503f3d1
RH
2129 temp1 = ~temp1;
2130 }
2131 else
2132 {
96ae8197 2133 if (remainder && subtargets)
9503f3d1 2134 new_src = gen_reg_rtx (mode);
96ae8197
NC
2135 else
2136 new_src = target;
9503f3d1
RH
2137 if (can_invert)
2138 temp1 = ~temp1;
2139 else if (can_negate)
2140 temp1 = -temp1;
2141 }
2142
2143 temp1 = trunc_int_for_mode (temp1, mode);
2144 temp1_rtx = GEN_INT (temp1);
d499463f
RE
2145
2146 if (code == SET)
9503f3d1 2147 ;
d499463f 2148 else if (code == MINUS)
9503f3d1 2149 temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
d499463f 2150 else
9503f3d1
RH
2151 temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);
2152
a406f566
MM
2153 emit_constant_insn (cond,
2154 gen_rtx_SET (VOIDmode, new_src,
2155 temp1_rtx));
d499463f 2156 source = new_src;
e2c671ba
RE
2157 }
2158
d499463f
RE
2159 if (code == SET)
2160 {
2161 can_invert = 0;
2162 code = PLUS;
2163 }
2164 else if (code == MINUS)
2165 code = PLUS;
2166
e2c671ba 2167 insns++;
e2c671ba
RE
2168 i -= 6;
2169 }
2170 i -= 2;
1d6e90ac
NC
2171 }
2172 while (remainder);
e2c671ba 2173 }
1d6e90ac 2174
e2c671ba
RE
2175 return insns;
2176}
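
/* A stand-alone sketch of the chunk-emission loop above (illustrative
   only, not part of GCC): it splits a constant into 8-bit fields on
   2-bit boundaries, as the final loop does with best_start == 0.  For
   0x12345678 it prints 0x12000000, 0x00344000, 0x00001640 and
   0x00000038, i.e. one mov plus three orrs.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int remainder = 0x12345678;
  int i = 0;	/* Corresponds to best_start.  */

  do
    {
      if (i <= 0)
	i += 32;
      if (remainder & (3u << (i - 2)))
	{
	  int end = i - 8;
	  unsigned int chunk;

	  if (end < 0)
	    end += 32;
	  chunk = remainder & ((0xffu << end)
			       | ((i < end) ? (0xffu >> (32 - end)) : 0));
	  remainder &= ~chunk;
	  printf ("chunk 0x%08x\n", chunk);
	  i -= 6;
	}
      i -= 2;
    }
  while (remainder);

  return 0;
}
#endif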
2177
bd9c7e23
RE
2178/* Canonicalize a comparison so that we are more likely to recognize it.
2179 This can be done for a few constant compares, where we can make the
2180 immediate value easier to load. */
1d6e90ac 2181
bd9c7e23 2182enum rtx_code
e32bac5b 2183arm_canonicalize_comparison (enum rtx_code code, rtx * op1)
bd9c7e23 2184{
ad076f4e 2185 unsigned HOST_WIDE_INT i = INTVAL (*op1);
bd9c7e23
RE
2186
2187 switch (code)
2188 {
2189 case EQ:
2190 case NE:
2191 return code;
2192
2193 case GT:
2194 case LE:
30cf4896 2195 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
5895f793 2196 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
bd9c7e23 2197 {
5895f793 2198 *op1 = GEN_INT (i + 1);
bd9c7e23
RE
2199 return code == GT ? GE : LT;
2200 }
2201 break;
2202
2203 case GE:
2204 case LT:
30cf4896 2205 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
5895f793 2206 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
bd9c7e23 2207 {
5895f793 2208 *op1 = GEN_INT (i - 1);
bd9c7e23
RE
2209 return code == GE ? GT : LE;
2210 }
2211 break;
2212
2213 case GTU:
2214 case LEU:
30cf4896 2215 if (i != ~((unsigned HOST_WIDE_INT) 0)
5895f793 2216 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
bd9c7e23
RE
2217 {
2218 *op1 = GEN_INT (i + 1);
2219 return code == GTU ? GEU : LTU;
2220 }
2221 break;
2222
2223 case GEU:
2224 case LTU:
2225 if (i != 0
5895f793 2226 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
bd9c7e23
RE
2227 {
2228 *op1 = GEN_INT (i - 1);
2229 return code == GEU ? GTU : LEU;
2230 }
2231 break;
2232
2233 default:
2234 abort ();
2235 }
2236
2237 return code;
2238}
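
/* A stand-alone check of the identity the GT/LE case above relies on
   (illustrative only, not part of GCC): 0x3ff is not a valid ARM
   immediate but 0x400 is, so "cmp x, #0x3ff" with GT can be recast as
   "cmp x, #0x400" with GE.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;

  for (x = 0x3f0; x < 0x410; x++)
    assert ((x > 0x3ff) == (x >= 0x400));
  return 0;
}
#endif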
bd9c7e23 2239
d4453b7a
PB
2240
2241/* Define how to find the value returned by a function. */
2242
 2243 rtx arm_function_value (tree type, tree func ATTRIBUTE_UNUSED)
2244{
2245 enum machine_mode mode;
2246 int unsignedp ATTRIBUTE_UNUSED;
2247 rtx r ATTRIBUTE_UNUSED;
2248
2249
2250 mode = TYPE_MODE (type);
2251 /* Promote integer types. */
2252 if (INTEGRAL_TYPE_P (type))
2253 PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
 2254 return LIBCALL_VALUE (mode);
2255}
2256
2257
f5a1b0d2
NC
2258/* Decide whether a type should be returned in memory (true)
2259 or in a register (false). This is called by the macro
2260 RETURN_IN_MEMORY. */
2b835d68 2261int
e32bac5b 2262arm_return_in_memory (tree type)
2b835d68 2263{
dc0ba55a
JT
2264 HOST_WIDE_INT size;
2265
5895f793 2266 if (!AGGREGATE_TYPE_P (type))
9e291dbe 2267 /* All simple types are returned in registers. */
d7d01975 2268 return 0;
dc0ba55a
JT
2269
2270 size = int_size_in_bytes (type);
2271
5848830f 2272 if (arm_abi != ARM_ABI_APCS)
dc0ba55a 2273 {
5848830f 2274 /* ATPCS and later return aggregate types in memory only if they are
dc0ba55a
JT
2275 larger than a word (or are variable size). */
2276 return (size < 0 || size > UNITS_PER_WORD);
2277 }
d5b7b3ae 2278
6bc82793 2279 /* For the arm-wince targets we choose to be compatible with Microsoft's
d5b7b3ae
RE
2280 ARM and Thumb compilers, which always return aggregates in memory. */
2281#ifndef ARM_WINCE
e529bd42
NC
2282 /* All structures/unions bigger than one word are returned in memory.
2283 Also catch the case where int_size_in_bytes returns -1. In this case
6bc82793 2284 the aggregate is either huge or of variable size, and in either case
e529bd42 2285 we will want to return it via memory and not in a register. */
dc0ba55a 2286 if (size < 0 || size > UNITS_PER_WORD)
d7d01975 2287 return 1;
d5b7b3ae 2288
d7d01975 2289 if (TREE_CODE (type) == RECORD_TYPE)
2b835d68
RE
2290 {
2291 tree field;
2292
3a2ea258
RE
2293 /* For a struct the APCS says that we only return in a register
2294 if the type is 'integer like' and every addressable element
2295 has an offset of zero. For practical purposes this means
 2296 that the structure can have at most one non-bit-field element
2297 and that this element must be the first one in the structure. */
2298
f5a1b0d2
NC
2299 /* Find the first field, ignoring non FIELD_DECL things which will
2300 have been created by C++. */
2301 for (field = TYPE_FIELDS (type);
2302 field && TREE_CODE (field) != FIELD_DECL;
2303 field = TREE_CHAIN (field))
2304 continue;
2305
2306 if (field == NULL)
9e291dbe 2307 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
f5a1b0d2 2308
d5b7b3ae
RE
2309 /* Check that the first field is valid for returning in a register. */
2310
 2311 /* ... Floats are not allowed. */
9e291dbe 2312 if (FLOAT_TYPE_P (TREE_TYPE (field)))
3a2ea258
RE
2313 return 1;
2314
d5b7b3ae
RE
2315 /* ... Aggregates that are not themselves valid for returning in
2316 a register are not allowed. */
9e291dbe 2317 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
3a2ea258 2318 return 1;
6f7ebcbb 2319
3a2ea258
RE
2320 /* Now check the remaining fields, if any. Only bitfields are allowed,
2321 since they are not addressable. */
f5a1b0d2
NC
2322 for (field = TREE_CHAIN (field);
2323 field;
2324 field = TREE_CHAIN (field))
2325 {
2326 if (TREE_CODE (field) != FIELD_DECL)
2327 continue;
2328
5895f793 2329 if (!DECL_BIT_FIELD_TYPE (field))
f5a1b0d2
NC
2330 return 1;
2331 }
2b835d68
RE
2332
2333 return 0;
2334 }
d7d01975
NC
2335
2336 if (TREE_CODE (type) == UNION_TYPE)
2b835d68
RE
2337 {
2338 tree field;
2339
2340 /* Unions can be returned in registers if every element is
2341 integral, or can be returned in an integer register. */
f5a1b0d2
NC
2342 for (field = TYPE_FIELDS (type);
2343 field;
2344 field = TREE_CHAIN (field))
2b835d68 2345 {
f5a1b0d2
NC
2346 if (TREE_CODE (field) != FIELD_DECL)
2347 continue;
2348
6cc8c0b3
NC
2349 if (FLOAT_TYPE_P (TREE_TYPE (field)))
2350 return 1;
2351
f5a1b0d2 2352 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
2b835d68
RE
2353 return 1;
2354 }
f5a1b0d2 2355
2b835d68
RE
2356 return 0;
2357 }
d5b7b3ae 2358#endif /* not ARM_WINCE */
f5a1b0d2 2359
d5b7b3ae 2360 /* Return all other types in memory. */
2b835d68
RE
2361 return 1;
2362}
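
/* Illustrative examples (not from the sources above) of how the APCS
   rules classify return types; actual placement also depends on the
   selected ABI and target:

     struct s1 { int i; };                  integer-like: in r0
     struct s2 { int i : 8; int j : 24; };  first field plus bit-fields: r0
     struct m1 { float f; };                first field is a float: memory
     struct m2 { int i; int j; };           second field addressable: memory
     struct m3 { int a[3]; };               larger than a word: memory  */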
2363
d6b4baa4 2364/* Indicate whether or not words of a double are in big-endian order. */
3717da94
JT
2365
2366int
e32bac5b 2367arm_float_words_big_endian (void)
3717da94 2368{
9b66ebb1 2369 if (TARGET_MAVERICK)
9b6b54e2 2370 return 0;
3717da94
JT
2371
 2372 /* For FPA, float words are always big-endian. For VFP, float words
2373 follow the memory system mode. */
2374
9b66ebb1 2375 if (TARGET_FPA)
3717da94 2376 {
3717da94
JT
2377 return 1;
2378 }
2379
2380 if (TARGET_VFP)
2381 return (TARGET_BIG_END ? 1 : 0);
2382
2383 return 1;
2384}
2385
82e9d970
PB
2386/* Initialize a variable CUM of type CUMULATIVE_ARGS
2387 for a call to a function whose data type is FNTYPE.
2388 For a library call, FNTYPE is NULL. */
2389void
e32bac5b
RE
2390arm_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
2391 rtx libname ATTRIBUTE_UNUSED,
2392 tree fndecl ATTRIBUTE_UNUSED)
82e9d970
PB
2393{
2394 /* On the ARM, the offset starts at 0. */
61f71b34 2395 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype), fntype)) ? 1 : 0);
5a9335ef 2396 pcum->iwmmxt_nregs = 0;
5848830f 2397 pcum->can_split = true;
c27ba912 2398
82e9d970
PB
2399 pcum->call_cookie = CALL_NORMAL;
2400
2401 if (TARGET_LONG_CALLS)
2402 pcum->call_cookie = CALL_LONG;
2403
2404 /* Check for long call/short call attributes. The attributes
2405 override any command line option. */
2406 if (fntype)
2407 {
2408 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
2409 pcum->call_cookie = CALL_SHORT;
2410 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
2411 pcum->call_cookie = CALL_LONG;
2412 }
5a9335ef
NC
2413
2414 /* Varargs vectors are treated the same as long long.
2415 named_count avoids having to change the way arm handles 'named' */
2416 pcum->named_count = 0;
2417 pcum->nargs = 0;
2418
2419 if (TARGET_REALLY_IWMMXT && fntype)
2420 {
2421 tree fn_arg;
2422
2423 for (fn_arg = TYPE_ARG_TYPES (fntype);
2424 fn_arg;
2425 fn_arg = TREE_CHAIN (fn_arg))
2426 pcum->named_count += 1;
2427
2428 if (! pcum->named_count)
2429 pcum->named_count = INT_MAX;
2430 }
82e9d970
PB
2431}
2432
5848830f
PB
2433
2434/* Return true if mode/type need doubleword alignment. */
2435bool
2436arm_needs_doubleword_align (enum machine_mode mode, tree type)
2437{
65a939f7
PB
2438 return (GET_MODE_ALIGNMENT (mode) > PARM_BOUNDARY
2439 || (type && TYPE_ALIGN (type) > PARM_BOUNDARY));
5848830f
PB
2440}
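
/* Illustrative example (not from the sources above), assuming the
   AAPCS-style ABI in which this predicate is used: in

     void f (int a, long long ll);

   the long long needs doubleword alignment, so LL skips r1 and lands
   in the even pair r2/r3, leaving r1 unused.  */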
2441
2442
82e9d970
PB
2443/* Determine where to put an argument to a function.
2444 Value is zero to push the argument on the stack,
2445 or a hard register in which to store the argument.
2446
2447 MODE is the argument's machine mode.
2448 TYPE is the data type of the argument (as a tree).
2449 This is null for libcalls where that information may
2450 not be available.
2451 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2452 the preceding args and about the function being called.
2453 NAMED is nonzero if this argument is a named parameter
2454 (otherwise it is an extra parameter matching an ellipsis). */
1d6e90ac 2455
82e9d970 2456rtx
e32bac5b 2457arm_function_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
5848830f 2458 tree type, int named)
82e9d970 2459{
5848830f
PB
2460 int nregs;
2461
2462 /* Varargs vectors are treated the same as long long.
 2463 named_count avoids having to change the way arm handles 'named'. */
2464 if (TARGET_IWMMXT_ABI
2465 && VECTOR_MODE_SUPPORTED_P (mode)
2466 && pcum->named_count > pcum->nargs + 1)
5a9335ef 2467 {
5848830f
PB
2468 if (pcum->iwmmxt_nregs <= 9)
2469 return gen_rtx_REG (mode, pcum->iwmmxt_nregs + FIRST_IWMMXT_REGNUM);
2470 else
5a9335ef 2471 {
5848830f
PB
2472 pcum->can_split = false;
2473 return NULL_RTX;
5a9335ef 2474 }
5a9335ef
NC
2475 }
2476
5848830f
PB
2477 /* Put doubleword aligned quantities in even register pairs. */
2478 if (pcum->nregs & 1
2479 && ARM_DOUBLEWORD_ALIGN
2480 && arm_needs_doubleword_align (mode, type))
2481 pcum->nregs++;
2482
82e9d970
PB
2483 if (mode == VOIDmode)
2484 /* Compute operand 2 of the call insn. */
2485 return GEN_INT (pcum->call_cookie);
5848830f 2486
666c27b9 2487 /* Only allow splitting an arg between regs and memory if all preceding
5848830f
PB
2488 args were allocated to regs. For args passed by reference we only count
2489 the reference pointer. */
2490 if (pcum->can_split)
2491 nregs = 1;
2492 else
2493 nregs = ARM_NUM_REGS2 (mode, type);
2494
2495 if (!named || pcum->nregs + nregs > NUM_ARG_REGS)
82e9d970
PB
2496 return NULL_RTX;
2497
2498 return gen_rtx_REG (mode, pcum->nregs);
2499}
1741620c
JD
2500
2501/* Variable sized types are passed by reference. This is a GCC
2502 extension to the ARM ABI. */
2503
8cd5a4e0
RH
2504static bool
2505arm_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
2506 enum machine_mode mode ATTRIBUTE_UNUSED,
2507 tree type, bool named ATTRIBUTE_UNUSED)
1741620c
JD
2508{
2509 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
2510}
82e9d970 2511\f
c27ba912
DM
2512/* Encode the current state of the #pragma [no_]long_calls. */
2513typedef enum
82e9d970 2514{
c27ba912
DM
 2515 OFF, /* No #pragma [no_]long_calls is in effect. */
2516 LONG, /* #pragma long_calls is in effect. */
2517 SHORT /* #pragma no_long_calls is in effect. */
2518} arm_pragma_enum;
82e9d970 2519
c27ba912 2520static arm_pragma_enum arm_pragma_long_calls = OFF;
82e9d970 2521
8b97c5f8 2522void
e32bac5b 2523arm_pr_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
82e9d970 2524{
8b97c5f8
ZW
2525 arm_pragma_long_calls = LONG;
2526}
2527
2528void
e32bac5b 2529arm_pr_no_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
8b97c5f8
ZW
2530{
2531 arm_pragma_long_calls = SHORT;
2532}
2533
2534void
e32bac5b 2535arm_pr_long_calls_off (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
8b97c5f8
ZW
2536{
2537 arm_pragma_long_calls = OFF;
82e9d970
PB
2538}
2539\f
91d231cb
JM
2540/* Table of machine attributes. */
2541const struct attribute_spec arm_attribute_table[] =
82e9d970 2542{
91d231cb 2543 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
82e9d970
PB
2544 /* Function calls made to this symbol must be done indirectly, because
2545 it may lie outside of the 26 bit addressing range of a normal function
2546 call. */
91d231cb 2547 { "long_call", 0, 0, false, true, true, NULL },
82e9d970
PB
2548 /* Whereas these functions are always known to reside within the 26 bit
2549 addressing range. */
91d231cb 2550 { "short_call", 0, 0, false, true, true, NULL },
6d3d9133 2551 /* Interrupt Service Routines have special prologue and epilogue requirements. */
91d231cb
JM
2552 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2553 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2554 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2555#ifdef ARM_PE
2556 /* ARM/PE has three new attributes:
2557 interfacearm - ?
2558 dllexport - for exporting a function/variable that will live in a dll
2559 dllimport - for importing a function/variable from a dll
2560
2561 Microsoft allows multiple declspecs in one __declspec, separating
2562 them with spaces. We do NOT support this. Instead, use __declspec
2563 multiple times.
2564 */
2565 { "dllimport", 0, 0, true, false, false, NULL },
2566 { "dllexport", 0, 0, true, false, false, NULL },
2567 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
b2ca3702
MM
2568#elif TARGET_DLLIMPORT_DECL_ATTRIBUTES
2569 { "dllimport", 0, 0, false, false, false, handle_dll_attribute },
2570 { "dllexport", 0, 0, false, false, false, handle_dll_attribute },
91d231cb
JM
2571#endif
2572 { NULL, 0, 0, false, false, false, NULL }
2573};
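
/* Illustrative user-level usage (not part of this file) of the machine
   attributes declared above:  */
#if 0
void far_away (void) __attribute__ ((long_call));
void nearby (void) __attribute__ ((short_call));
void irq_handler (void) __attribute__ ((interrupt ("IRQ")));
void no_prologue (void) __attribute__ ((naked));
#endif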
6d3d9133 2574
91d231cb
JM
2575/* Handle an attribute requiring a FUNCTION_DECL;
2576 arguments as in struct attribute_spec.handler. */
2577static tree
e32bac5b
RE
2578arm_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
2579 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
91d231cb
JM
2580{
2581 if (TREE_CODE (*node) != FUNCTION_DECL)
2582 {
2583 warning ("`%s' attribute only applies to functions",
2584 IDENTIFIER_POINTER (name));
2585 *no_add_attrs = true;
2586 }
2587
2588 return NULL_TREE;
2589}
2590
2591/* Handle an "interrupt" or "isr" attribute;
2592 arguments as in struct attribute_spec.handler. */
2593static tree
e32bac5b
RE
2594arm_handle_isr_attribute (tree *node, tree name, tree args, int flags,
2595 bool *no_add_attrs)
91d231cb
JM
2596{
2597 if (DECL_P (*node))
2598 {
2599 if (TREE_CODE (*node) != FUNCTION_DECL)
2600 {
2601 warning ("`%s' attribute only applies to functions",
2602 IDENTIFIER_POINTER (name));
2603 *no_add_attrs = true;
2604 }
 2605 /* FIXME: the argument, if any, is checked for type attributes;
2606 should it be checked for decl ones? */
2607 }
2608 else
2609 {
2610 if (TREE_CODE (*node) == FUNCTION_TYPE
2611 || TREE_CODE (*node) == METHOD_TYPE)
2612 {
2613 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2614 {
2615 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2616 *no_add_attrs = true;
2617 }
2618 }
2619 else if (TREE_CODE (*node) == POINTER_TYPE
2620 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2621 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2622 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2623 {
2624 *node = build_type_copy (*node);
1d6e90ac
NC
2625 TREE_TYPE (*node) = build_type_attribute_variant
2626 (TREE_TYPE (*node),
2627 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
91d231cb
JM
2628 *no_add_attrs = true;
2629 }
2630 else
2631 {
2632 /* Possibly pass this attribute on from the type to a decl. */
2633 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2634 | (int) ATTR_FLAG_FUNCTION_NEXT
2635 | (int) ATTR_FLAG_ARRAY_NEXT))
2636 {
2637 *no_add_attrs = true;
2638 return tree_cons (name, args, NULL_TREE);
2639 }
2640 else
2641 {
2642 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2643 }
2644 }
2645 }
2646
2647 return NULL_TREE;
82e9d970
PB
2648}
2649
2650/* Return 0 if the attributes for two types are incompatible, 1 if they
2651 are compatible, and 2 if they are nearly compatible (which causes a
2652 warning to be generated). */
8d8e52be 2653static int
e32bac5b 2654arm_comp_type_attributes (tree type1, tree type2)
82e9d970 2655{
1cb8d58a 2656 int l1, l2, s1, s2;
bd7fc26f 2657
82e9d970
PB
2658 /* Check for mismatch of non-default calling convention. */
2659 if (TREE_CODE (type1) != FUNCTION_TYPE)
2660 return 1;
2661
2662 /* Check for mismatched call attributes. */
1cb8d58a
NC
2663 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2664 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2665 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2666 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
2667
2668 /* Only bother to check if an attribute is defined. */
2669 if (l1 | l2 | s1 | s2)
2670 {
2671 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 2672 if ((l1 != l2) || (s1 != s2))
bd7fc26f 2673 return 0;
82e9d970 2674
bd7fc26f
NC
2675 /* Disallow mixed attributes. */
2676 if ((l1 & s2) || (l2 & s1))
2677 return 0;
2678 }
2679
6d3d9133
NC
2680 /* Check for mismatched ISR attribute. */
2681 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2682 if (! l1)
2683 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2684 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2685 if (! l2)
 2686 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2687 if (l1 != l2)
2688 return 0;
2689
bd7fc26f 2690 return 1;
82e9d970
PB
2691}
2692
c27ba912
DM
2693/* Encode long_call or short_call attribute by prefixing
2694 symbol name in DECL with a special character FLAG. */
2695void
e32bac5b 2696arm_encode_call_attribute (tree decl, int flag)
c27ba912 2697{
3cce094d 2698 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b 2699 int len = strlen (str);
d19fb8e3 2700 char * newstr;
c27ba912 2701
c27ba912
DM
2702 /* Do not allow weak functions to be treated as short call. */
2703 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2704 return;
c27ba912 2705
520a57c8
ZW
2706 newstr = alloca (len + 2);
2707 newstr[0] = flag;
2708 strcpy (newstr + 1, str);
c27ba912 2709
6d3d9133 2710 newstr = (char *) ggc_alloc_string (newstr, len + 1);
c27ba912
DM
2711 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2712}
2713
2714/* Assigns default attributes to newly defined type. This is used to
2715 set short_call/long_call attributes for function types of
2716 functions defined inside corresponding #pragma scopes. */
8d8e52be 2717static void
e32bac5b 2718arm_set_default_type_attributes (tree type)
c27ba912
DM
2719{
2720 /* Add __attribute__ ((long_call)) to all functions, when
2721 inside #pragma long_calls or __attribute__ ((short_call)),
2722 when inside #pragma no_long_calls. */
2723 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2724 {
2725 tree type_attr_list, attr_name;
2726 type_attr_list = TYPE_ATTRIBUTES (type);
2727
2728 if (arm_pragma_long_calls == LONG)
2729 attr_name = get_identifier ("long_call");
2730 else if (arm_pragma_long_calls == SHORT)
2731 attr_name = get_identifier ("short_call");
2732 else
2733 return;
2734
2735 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2736 TYPE_ATTRIBUTES (type) = type_attr_list;
2737 }
2738}
2739\f
2740/* Return 1 if the operand is a SYMBOL_REF for a function known to be
6bc82793 2741 defined within the current compilation unit. If this cannot be
c27ba912
DM
2742 determined, then 0 is returned. */
2743static int
e32bac5b 2744current_file_function_operand (rtx sym_ref)
c27ba912
DM
2745{
2746 /* This is a bit of a fib. A function will have a short call flag
 2747 applied to its name if it has the short call attribute, or if it has
2748 already been defined within the current compilation unit. */
2749 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2750 return 1;
2751
6d77b53e 2752 /* The current function is always defined within the current compilation
a77655b1 2753 unit. If it is a weak definition, however, then this may not be the real
d6a7951f 2754 definition of the function, and so we have to say no. */
c27ba912 2755 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
5895f793 2756 && !DECL_WEAK (current_function_decl))
c27ba912
DM
2757 return 1;
2758
2759 /* We cannot make the determination - default to returning 0. */
2760 return 0;
2761}
2762
825dda42 2763/* Return nonzero if a 32 bit "long_call" should be generated for
c27ba912
DM
2764 this call. We generate a long_call if the function:
2765
 2766 a. has an __attribute__((long_call))
2767 or b. is within the scope of a #pragma long_calls
2768 or c. the -mlong-calls command line switch has been specified
a77655b1
NC
 2769 and either:
2770 1. -ffunction-sections is in effect
2771 or 2. the current function has __attribute__ ((section))
2772 or 3. the target function has __attribute__ ((section))
c27ba912
DM
2773
2774 However we do not generate a long call if the function:
2775
2776 d. has an __attribute__ ((short_call))
2777 or e. is inside the scope of a #pragma no_long_calls
a77655b1 2778 or f. is defined within the current compilation unit.
c27ba912
DM
2779
2780 This function will be called by C fragments contained in the machine
a77655b1 2781 description file. SYM_REF and CALL_COOKIE correspond to the matched
c27ba912
DM
2782 rtl operands. CALL_SYMBOL is used to distinguish between
2783 two different callers of the function. It is set to 1 in the
2784 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2785 and "call_value" patterns. This is because of the difference in the
2786 SYM_REFs passed by these patterns. */
2787int
e32bac5b 2788arm_is_longcall_p (rtx sym_ref, int call_cookie, int call_symbol)
c27ba912 2789{
5895f793 2790 if (!call_symbol)
c27ba912
DM
2791 {
2792 if (GET_CODE (sym_ref) != MEM)
2793 return 0;
2794
2795 sym_ref = XEXP (sym_ref, 0);
2796 }
2797
2798 if (GET_CODE (sym_ref) != SYMBOL_REF)
2799 return 0;
2800
2801 if (call_cookie & CALL_SHORT)
2802 return 0;
2803
a77655b1
NC
2804 if (TARGET_LONG_CALLS)
2805 {
2806 if (flag_function_sections
2807 || DECL_SECTION_NAME (current_function_decl))
 2808 /* c.3 is handled by the definition of the
2809 ARM_DECLARE_FUNCTION_SIZE macro. */
2810 return 1;
2811 }
2812
87e27392 2813 if (current_file_function_operand (sym_ref))
c27ba912
DM
2814 return 0;
2815
2816 return (call_cookie & CALL_LONG)
2817 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2818 || TARGET_LONG_CALLS;
2819}
f99fce0c 2820
825dda42 2821/* Return nonzero if it is ok to make a tail-call to DECL. */
4977bab6 2822static bool
e32bac5b 2823arm_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
f99fce0c
RE
2824{
2825 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2826
5a9335ef
NC
2827 if (cfun->machine->sibcall_blocked)
2828 return false;
2829
f99fce0c
RE
2830 /* Never tailcall something for which we have no decl, or if we
2831 are in Thumb mode. */
2832 if (decl == NULL || TARGET_THUMB)
4977bab6 2833 return false;
f99fce0c
RE
2834
2835 /* Get the calling method. */
2836 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2837 call_type = CALL_SHORT;
2838 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2839 call_type = CALL_LONG;
2840
2841 /* Cannot tail-call to long calls, since these are out of range of
2842 a branch instruction. However, if not compiling PIC, we know
2843 we can reach the symbol if it is in this compilation unit. */
5895f793 2844 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
4977bab6 2845 return false;
f99fce0c
RE
2846
2847 /* If we are interworking and the function is not declared static
2848 then we can't tail-call it unless we know that it exists in this
2849 compilation unit (since it might be a Thumb routine). */
5895f793 2850 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
4977bab6 2851 return false;
f99fce0c 2852
6d3d9133
NC
2853 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2854 if (IS_INTERRUPT (arm_current_func_type ()))
4977bab6 2855 return false;
6d3d9133 2856
f99fce0c 2857 /* Everything else is ok. */
4977bab6 2858 return true;
f99fce0c
RE
2859}
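
/* An illustrative case (not from the sources above): under the rules
   in arm_function_ok_for_sibcall, the call below can become a direct
   branch when compiling ARM code, since the callee has a decl and is
   written out in this unit; it would be rejected if the callee were
   long_call, or if the caller were an ISR or Thumb code.  */
#if 0
static int callee (int x) { return x + 1; }

int
caller (int x)
{
  return callee (x);	/* Candidate tail call.  */
}
#endif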
2860
82e9d970 2861\f
6b990f6b
RE
2862/* Addressing mode support functions. */
2863
0b4be7de 2864/* Return nonzero if X is a legitimate immediate operand when compiling
6b990f6b 2865 for PIC. */
32de079a 2866int
e32bac5b 2867legitimate_pic_operand_p (rtx x)
32de079a 2868{
d5b7b3ae
RE
2869 if (CONSTANT_P (x)
2870 && flag_pic
32de079a
RE
2871 && (GET_CODE (x) == SYMBOL_REF
2872 || (GET_CODE (x) == CONST
2873 && GET_CODE (XEXP (x, 0)) == PLUS
2874 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2875 return 0;
2876
2877 return 1;
2878}
2879
2880rtx
e32bac5b 2881legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
32de079a 2882{
a3c48721
RE
2883 if (GET_CODE (orig) == SYMBOL_REF
2884 || GET_CODE (orig) == LABEL_REF)
32de079a 2885 {
5f37d07c 2886#ifndef AOF_ASSEMBLER
32de079a 2887 rtx pic_ref, address;
5f37d07c 2888#endif
32de079a
RE
2889 rtx insn;
2890 int subregs = 0;
2891
2892 if (reg == 0)
2893 {
893f3d5b 2894 if (no_new_pseudos)
32de079a
RE
2895 abort ();
2896 else
2897 reg = gen_reg_rtx (Pmode);
2898
2899 subregs = 1;
2900 }
2901
2902#ifdef AOF_ASSEMBLER
2903 /* The AOF assembler can generate relocations for these directly, and
6354dc9b 2904 understands that the PIC register has to be added into the offset. */
32de079a
RE
2905 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2906#else
2907 if (subregs)
2908 address = gen_reg_rtx (Pmode);
2909 else
2910 address = reg;
2911
4bec9f7d
NC
2912 if (TARGET_ARM)
2913 emit_insn (gen_pic_load_addr_arm (address, orig));
2914 else
2915 emit_insn (gen_pic_load_addr_thumb (address, orig));
32de079a 2916
14f583b8
PB
2917 if ((GET_CODE (orig) == LABEL_REF
2918 || (GET_CODE (orig) == SYMBOL_REF &&
94428622 2919 SYMBOL_REF_LOCAL_P (orig)))
14f583b8 2920 && NEED_GOT_RELOC)
a3c48721
RE
2921 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2922 else
2923 {
2924 pic_ref = gen_rtx_MEM (Pmode,
2925 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2926 address));
389fdba0 2927 MEM_READONLY_P (pic_ref) = 1;
a3c48721
RE
2928 }
2929
32de079a
RE
2930 insn = emit_move_insn (reg, pic_ref);
2931#endif
2932 current_function_uses_pic_offset_table = 1;
2933 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2934 by loop. */
43cffd11
RE
2935 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2936 REG_NOTES (insn));
32de079a
RE
2937 return reg;
2938 }
2939 else if (GET_CODE (orig) == CONST)
2940 {
2941 rtx base, offset;
2942
2943 if (GET_CODE (XEXP (orig, 0)) == PLUS
2944 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2945 return orig;
2946
2947 if (reg == 0)
2948 {
893f3d5b 2949 if (no_new_pseudos)
32de079a
RE
2950 abort ();
2951 else
2952 reg = gen_reg_rtx (Pmode);
2953 }
2954
2955 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2956 {
2957 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2958 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2959 base == reg ? 0 : reg);
2960 }
2961 else
2962 abort ();
2963
2964 if (GET_CODE (offset) == CONST_INT)
2965 {
 2966 /* The base register doesn't really matter; we only want to
2967 test the index for the appropriate mode. */
1e1ab407 2968 if (!arm_legitimate_index_p (mode, offset, SET, 0))
6b990f6b
RE
2969 {
2970 if (!no_new_pseudos)
2971 offset = force_reg (Pmode, offset);
2972 else
2973 abort ();
2974 }
32de079a 2975
32de079a 2976 if (GET_CODE (offset) == CONST_INT)
ed8908e7 2977 return plus_constant (base, INTVAL (offset));
32de079a
RE
2978 }
2979
2980 if (GET_MODE_SIZE (mode) > 4
2981 && (GET_MODE_CLASS (mode) == MODE_INT
2982 || TARGET_SOFT_FLOAT))
2983 {
2984 emit_insn (gen_addsi3 (reg, base, offset));
2985 return reg;
2986 }
2987
43cffd11 2988 return gen_rtx_PLUS (Pmode, base, offset);
32de079a 2989 }
32de079a
RE
2990
2991 return orig;
2992}
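
/* Illustrative pseudo-code (not from the sources above) for the
   non-AOF path: a global symbol is reached through one GOT load,

     offset  = <GOT offset of sym>	(pic_load_addr_*)
     address = *(rPIC + offset)		(the MEM built above)

   while labels and symbols local to this unit skip the load and use
   rPIC + offset directly.  */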
2993
57934c39
PB
2994
2995/* Find a spare low register. */
2996
2997static int
2998thumb_find_work_register (int live_regs_mask)
2999{
3000 int reg;
3001
3002 /* Use a spare arg register. */
3003 if (!regs_ever_live[LAST_ARG_REGNUM])
3004 return LAST_ARG_REGNUM;
3005
3006 /* Look for a pushed register. */
3007 for (reg = 0; reg < LAST_LO_REGNUM; reg++)
3008 if (live_regs_mask & (1 << reg))
3009 return reg;
3010
3011 /* Something went wrong. */
3012 abort ();
3013}
3014
876f13b0
PB
3015
3016/* Generate code to load the PIC register. */
3017
32de079a 3018void
876f13b0 3019arm_load_pic_register (void)
32de079a
RE
3020{
3021#ifndef AOF_ASSEMBLER
876f13b0 3022 rtx l1, pic_tmp, pic_tmp2, pic_rtx;
32de079a
RE
3023 rtx global_offset_table;
3024
ed0e6530 3025 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
32de079a
RE
3026 return;
3027
5895f793 3028 if (!flag_pic)
32de079a
RE
3029 abort ();
3030
32de079a
RE
3031 l1 = gen_label_rtx ();
3032
43cffd11 3033 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
dfa08768 3034 /* On the ARM the PC register contains 'dot + 8' at the time of the
d5b7b3ae
RE
 3035 addition; on the Thumb it is 'dot + 4'. */
3036 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
84306176
PB
3037 if (GOT_PCREL)
3038 pic_tmp2 = gen_rtx_CONST (VOIDmode,
43cffd11 3039 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
84306176
PB
3040 else
3041 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
43cffd11
RE
3042
3043 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
f5a1b0d2 3044
d5b7b3ae 3045 if (TARGET_ARM)
4bec9f7d
NC
3046 {
3047 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
3048 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
3049 }
d5b7b3ae 3050 else
4bec9f7d 3051 {
876f13b0
PB
3052 if (REGNO (pic_offset_table_rtx) > LAST_LO_REGNUM)
3053 {
3054 int reg;
3055
 3056 /* We will have pushed the pic register, so we should always be
3057 able to find a work register. */
3058 reg = thumb_find_work_register (thumb_compute_save_reg_mask ());
3059 pic_tmp = gen_rtx_REG (SImode, reg);
3060 emit_insn (gen_pic_load_addr_thumb (pic_tmp, pic_rtx));
3061 emit_insn (gen_movsi (pic_offset_table_rtx, pic_tmp));
3062 }
3063 else
3064 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
4bec9f7d
NC
3065 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
3066 }
32de079a 3067
32de079a
RE
3068 /* Need to emit this whether or not we obey regdecls,
3069 since setjmp/longjmp can cause life info to screw up. */
43cffd11 3070 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
32de079a
RE
3071#endif /* AOF_ASSEMBLER */
3072}
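
/* An illustrative sketch (not from the sources above) of the ARM-state
   sequence the function emits; rPIC stands for the PIC register and
   the pool constant encodes the 'dot + 8' rule:

	ldr	rPIC, .Loffset
     .LPIC0:
	add	rPIC, pc, rPIC
	...
     .Loffset:
	.word	_GLOBAL_OFFSET_TABLE_ - (.LPIC0 + 8)  */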
3073
876f13b0 3074
6b990f6b
RE
3075/* Return nonzero if X is valid as an ARM state addressing register. */
3076static int
e32bac5b 3077arm_address_register_rtx_p (rtx x, int strict_p)
6b990f6b
RE
3078{
3079 int regno;
3080
3081 if (GET_CODE (x) != REG)
3082 return 0;
3083
3084 regno = REGNO (x);
3085
3086 if (strict_p)
3087 return ARM_REGNO_OK_FOR_BASE_P (regno);
3088
3089 return (regno <= LAST_ARM_REGNUM
3090 || regno >= FIRST_PSEUDO_REGISTER
3091 || regno == FRAME_POINTER_REGNUM
3092 || regno == ARG_POINTER_REGNUM);
3093}
3094
3095/* Return nonzero if X is a valid ARM state address operand. */
3096int
1e1ab407
RE
3097arm_legitimate_address_p (enum machine_mode mode, rtx x, RTX_CODE outer,
3098 int strict_p)
6b990f6b 3099{
fdd695fd
PB
3100 bool use_ldrd;
3101 enum rtx_code code = GET_CODE (x);
3102
6b990f6b
RE
3103 if (arm_address_register_rtx_p (x, strict_p))
3104 return 1;
3105
fdd695fd
PB
3106 use_ldrd = (TARGET_LDRD
3107 && (mode == DImode
3108 || (mode == DFmode && (TARGET_SOFT_FLOAT || TARGET_VFP))));
3109
3110 if (code == POST_INC || code == PRE_DEC
3111 || ((code == PRE_INC || code == POST_DEC)
3112 && (use_ldrd || GET_MODE_SIZE (mode) <= 4)))
6b990f6b
RE
3113 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
3114
fdd695fd 3115 else if ((code == POST_MODIFY || code == PRE_MODIFY)
6b990f6b
RE
3116 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
3117 && GET_CODE (XEXP (x, 1)) == PLUS
386d3a16 3118 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
fdd695fd
PB
3119 {
3120 rtx addend = XEXP (XEXP (x, 1), 1);
3121
 3122 /* Don't allow ldrd post-increment by register because it's hard
 3123 to fix up invalid register choices. */
3124 if (use_ldrd
3125 && GET_CODE (x) == POST_MODIFY
3126 && GET_CODE (addend) == REG)
3127 return 0;
3128
3129 return ((use_ldrd || GET_MODE_SIZE (mode) <= 4)
3130 && arm_legitimate_index_p (mode, addend, outer, strict_p));
3131 }
6b990f6b
RE
3132
3133 /* After reload constants split into minipools will have addresses
3134 from a LABEL_REF. */
0bfb39ef 3135 else if (reload_completed
fdd695fd
PB
3136 && (code == LABEL_REF
3137 || (code == CONST
6b990f6b
RE
3138 && GET_CODE (XEXP (x, 0)) == PLUS
3139 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
3140 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
3141 return 1;
3142
3143 else if (mode == TImode)
3144 return 0;
3145
fdd695fd 3146 else if (code == PLUS)
6b990f6b
RE
3147 {
3148 rtx xop0 = XEXP (x, 0);
3149 rtx xop1 = XEXP (x, 1);
3150
3151 return ((arm_address_register_rtx_p (xop0, strict_p)
1e1ab407 3152 && arm_legitimate_index_p (mode, xop1, outer, strict_p))
6b990f6b 3153 || (arm_address_register_rtx_p (xop1, strict_p)
1e1ab407 3154 && arm_legitimate_index_p (mode, xop0, outer, strict_p)));
6b990f6b
RE
3155 }
3156
3157#if 0
 3158 /* Reload currently can't handle MINUS, so disable this for now. */
3159 else if (GET_CODE (x) == MINUS)
3160 {
3161 rtx xop0 = XEXP (x, 0);
3162 rtx xop1 = XEXP (x, 1);
3163
3164 return (arm_address_register_rtx_p (xop0, strict_p)
1e1ab407 3165 && arm_legitimate_index_p (mode, xop1, outer, strict_p));
6b990f6b
RE
3166 }
3167#endif
3168
3169 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
fdd695fd 3170 && code == SYMBOL_REF
6b990f6b
RE
3171 && CONSTANT_POOL_ADDRESS_P (x)
3172 && ! (flag_pic
3173 && symbol_mentioned_p (get_pool_constant (x))))
3174 return 1;
3175
6b990f6b
RE
3176 return 0;
3177}

/* Return nonzero if INDEX is valid for an address index operand in
   ARM state.  */
static int
arm_legitimate_index_p (enum machine_mode mode, rtx index, RTX_CODE outer,
			int strict_p)
{
  HOST_WIDE_INT range;
  enum rtx_code code = GET_CODE (index);

  /* Standard coprocessor addressing modes.  */
  if (TARGET_HARD_FLOAT
      && (TARGET_FPA || TARGET_MAVERICK)
      && (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || (TARGET_MAVERICK && mode == DImode)))
    return (code == CONST_INT && INTVAL (index) < 1024
	    && INTVAL (index) > -1024
	    && (INTVAL (index) & 3) == 0);

  if (TARGET_REALLY_IWMMXT && VALID_IWMMXT_REG_MODE (mode))
    return (code == CONST_INT
	    && INTVAL (index) < 1024
	    && INTVAL (index) > -1024
	    && (INTVAL (index) & 3) == 0);

  if (arm_address_register_rtx_p (index, strict_p)
      && (GET_MODE_SIZE (mode) <= 4))
    return 1;

  if (mode == DImode || mode == DFmode)
    {
      if (code == CONST_INT)
	{
	  HOST_WIDE_INT val = INTVAL (index);

	  if (TARGET_LDRD)
	    return val > -256 && val < 256;
	  else
	    return val > -4096 && val < 4092;
	}

      return TARGET_LDRD && arm_address_register_rtx_p (index, strict_p);
    }

  if (GET_MODE_SIZE (mode) <= 4
      && ! (arm_arch4
	    && (mode == HImode
		|| (mode == QImode && outer == SIGN_EXTEND))))
    {
      if (code == MULT)
	{
	  rtx xiop0 = XEXP (index, 0);
	  rtx xiop1 = XEXP (index, 1);

	  return ((arm_address_register_rtx_p (xiop0, strict_p)
		   && power_of_two_operand (xiop1, SImode))
		  || (arm_address_register_rtx_p (xiop1, strict_p)
		      && power_of_two_operand (xiop0, SImode)));
	}
      else if (code == LSHIFTRT || code == ASHIFTRT
	       || code == ASHIFT || code == ROTATERT)
	{
	  rtx op = XEXP (index, 1);

	  return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
		  && GET_CODE (op) == CONST_INT
		  && INTVAL (op) > 0
		  && INTVAL (op) <= 31);
	}
    }

  /* For ARM v4 we may be doing a sign-extend operation during the
     load.  */
  if (arm_arch4)
    {
      if (mode == HImode || (outer == SIGN_EXTEND && mode == QImode))
	range = 256;
      else
	range = 4096;
    }
  else
    range = (mode == HImode) ? 4095 : 4096;

  return (code == CONST_INT
	  && INTVAL (index) < range
	  && INTVAL (index) > -range);
}
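
/* Example (illustrative, derived directly from the checks above): on an
   arm_arch4 target a HImode constant index must lie strictly between
   -256 and 256, so (plus (reg) (const_int 255)) is accepted while an
   offset of 256 is not; for SImode the range widens to 4096, i.e.
   offsets up to +/-4095.  */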

/* Return nonzero if X is valid as a Thumb state base register.  */
static int
thumb_base_register_rtx_p (rtx x, enum machine_mode mode, int strict_p)
{
  int regno;

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);

  if (strict_p)
    return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);

  return (regno <= LAST_LO_REGNUM
	  || regno > LAST_VIRTUAL_REGISTER
	  || regno == FRAME_POINTER_REGNUM
	  || (GET_MODE_SIZE (mode) >= 4
	      && (regno == STACK_POINTER_REGNUM
		  || regno >= FIRST_PSEUDO_REGISTER
		  || x == hard_frame_pointer_rtx
		  || x == arg_pointer_rtx)));
}

/* Return nonzero if x is a legitimate index register.  This is the case
   for any base register that can access a QImode object.  */
inline static int
thumb_index_register_rtx_p (rtx x, int strict_p)
{
  return thumb_base_register_rtx_p (x, QImode, strict_p);
}

/* Return nonzero if x is a legitimate Thumb-state address.

   The AP may be eliminated to either the SP or the FP, so we use the
   least common denominator, e.g. SImode, and offsets from 0 to 64.

   ??? Verify whether the above is the right approach.

   ??? Also, the FP may be eliminated to the SP, so perhaps that
   needs special handling also.

   ??? Look at how the mips16 port solves this problem.  It probably uses
   better ways to solve some of these problems.

   Although it is not incorrect, we don't accept QImode and HImode
   addresses based on the frame pointer or arg pointer until the
   reload pass starts.  This is so that eliminating such addresses
   into stack based ones won't produce impossible code.  */
int
thumb_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
{
  /* ??? Not clear if this is right.  Experiment.  */
  if (GET_MODE_SIZE (mode) < 4
      && !(reload_in_progress || reload_completed)
      && (reg_mentioned_p (frame_pointer_rtx, x)
	  || reg_mentioned_p (arg_pointer_rtx, x)
	  || reg_mentioned_p (virtual_incoming_args_rtx, x)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, x)
	  || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
	  || reg_mentioned_p (virtual_stack_vars_rtx, x)))
    return 0;

  /* Accept any base register.  SP only in SImode or larger.  */
  else if (thumb_base_register_rtx_p (x, mode, strict_p))
    return 1;

  /* This is PC relative data before arm_reorg runs.  */
  else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
	   && GET_CODE (x) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
    return 1;

  /* This is PC relative data after arm_reorg runs.  */
  else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
	   && (GET_CODE (x) == LABEL_REF
	       || (GET_CODE (x) == CONST
		   && GET_CODE (XEXP (x, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
		   && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
    return 1;

  /* Post-inc indexing only supported for SImode and larger.  */
  else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
	   && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
    return 1;

  else if (GET_CODE (x) == PLUS)
    {
      /* REG+REG address can be any two index registers.  */
      /* We disallow FRAME+REG addressing since we know that FRAME
	 will be replaced with STACK, and SP relative addressing only
	 permits SP+OFFSET.  */
      if (GET_MODE_SIZE (mode) <= 4
	  && XEXP (x, 0) != frame_pointer_rtx
	  && XEXP (x, 1) != frame_pointer_rtx
	  && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
	  && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
	return 1;

      /* REG+const has 5-7 bit offset for non-SP registers.  */
      else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
		|| XEXP (x, 0) == arg_pointer_rtx)
	       && GET_CODE (XEXP (x, 1)) == CONST_INT
	       && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
	return 1;

      /* REG+const has 10 bit offset for SP, but only SImode and
	 larger is supported.  */
      /* ??? Should probably check for DI/DFmode overflow here
	 just like GO_IF_LEGITIMATE_OFFSET does.  */
      else if (GET_CODE (XEXP (x, 0)) == REG
	       && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
	       && GET_MODE_SIZE (mode) >= 4
	       && GET_CODE (XEXP (x, 1)) == CONST_INT
	       && INTVAL (XEXP (x, 1)) >= 0
	       && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
	       && (INTVAL (XEXP (x, 1)) & 3) == 0)
	return 1;

      else if (GET_CODE (XEXP (x, 0)) == REG
	       && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
	       && GET_MODE_SIZE (mode) >= 4
	       && GET_CODE (XEXP (x, 1)) == CONST_INT
	       && (INTVAL (XEXP (x, 1)) & 3) == 0)
	return 1;
    }

  else if (GET_MODE_CLASS (mode) != MODE_FLOAT
	   && GET_MODE_SIZE (mode) == 4
	   && GET_CODE (x) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (x)
	   && !(flag_pic
		&& symbol_mentioned_p (get_pool_constant (x))))
    return 1;

  return 0;
}
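
/* Example of the SP+OFFSET rule above (illustrative): for SImode,
   (plus (reg sp) (const_int 1020)) is legitimate, since 1020 + 4 <= 1024
   and the offset is word aligned, whereas an offset of 1021 fails both
   the alignment check and the 10-bit range check.  */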

/* Return nonzero if VAL can be used as an offset in a Thumb-state address
   instruction of mode MODE.  */
int
thumb_legitimate_offset_p (enum machine_mode mode, HOST_WIDE_INT val)
{
  switch (GET_MODE_SIZE (mode))
    {
    case 1:
      return val >= 0 && val < 32;

    case 2:
      return val >= 0 && val < 64 && (val & 1) == 0;

    default:
      return (val >= 0
	      && (val + GET_MODE_SIZE (mode)) <= 128
	      && (val & 3) == 0);
    }
}
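
/* In concrete terms (an illustrative summary of the switch above):
   QImode accepts offsets 0..31, HImode accepts the even offsets 0..62,
   and SImode -- the default case -- accepts offsets 0..124 that are
   multiples of four, since val + 4 must not exceed 128.  */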

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.  */
rtx
arm_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
{
  if (GET_CODE (x) == PLUS)
    {
      rtx xop0 = XEXP (x, 0);
      rtx xop1 = XEXP (x, 1);

      if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
	xop0 = force_reg (SImode, xop0);

      if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
	xop1 = force_reg (SImode, xop1);

      if (ARM_BASE_REGISTER_RTX_P (xop0)
	  && GET_CODE (xop1) == CONST_INT)
	{
	  HOST_WIDE_INT n, low_n;
	  rtx base_reg, val;
	  n = INTVAL (xop1);

	  /* VFP addressing modes actually allow greater offsets, but for
	     now we just stick with the lowest common denominator.  */
	  if (mode == DImode
	      || ((TARGET_SOFT_FLOAT || TARGET_VFP) && mode == DFmode))
	    {
	      low_n = n & 0x0f;
	      n &= ~0x0f;
	      if (low_n > 4)
		{
		  n += 16;
		  low_n -= 16;
		}
	    }
	  else
	    {
	      low_n = ((mode) == TImode ? 0
		       : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
	      n -= low_n;
	    }

	  base_reg = gen_reg_rtx (SImode);
	  val = force_operand (gen_rtx_PLUS (SImode, xop0,
					     GEN_INT (n)), NULL_RTX);
	  emit_move_insn (base_reg, val);
	  x = (low_n == 0 ? base_reg
	       : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
	}
      else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
	x = gen_rtx_PLUS (SImode, xop0, xop1);
    }

  /* XXX We don't allow MINUS any more -- see comment in
     arm_legitimate_address_p ().  */
  else if (GET_CODE (x) == MINUS)
    {
      rtx xop0 = XEXP (x, 0);
      rtx xop1 = XEXP (x, 1);

      if (CONSTANT_P (xop0))
	xop0 = force_reg (SImode, xop0);

      if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
	xop1 = force_reg (SImode, xop1);

      if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
	x = gen_rtx_MINUS (SImode, xop0, xop1);
    }

  if (flag_pic)
    {
      /* We need to find and carefully transform any SYMBOL and LABEL
	 references; so go back to the original address expression.  */
      rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);

      if (new_x != orig_x)
	x = new_x;
    }

  return x;
}
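
/* Worked example for the constant splitting above (illustrative): for
   an SImode access to base + 0x1234, low_n becomes 0x234 and n becomes
   0x1000; the code emits base_reg = base + 0x1000 and returns
   (plus base_reg (const_int 0x234)), which fits in the 12-bit offset
   field of a word load.  */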


/* Try machine-dependent ways of modifying an illegitimate Thumb address
   to be legitimate.  If we find one, return the new, valid address.  */
rtx
thumb_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
{
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (INTVAL (XEXP (x, 1)) >= 32 * GET_MODE_SIZE (mode)
	  || INTVAL (XEXP (x, 1)) < 0))
    {
      rtx xop0 = XEXP (x, 0);
      rtx xop1 = XEXP (x, 1);
      HOST_WIDE_INT offset = INTVAL (xop1);

      /* Try and fold the offset into a biasing of the base register and
	 then offsetting that.  Don't do this when optimizing for space
	 since it can cause too many CSEs.  */
      if (optimize_size && offset >= 0
	  && offset < 256 + 31 * GET_MODE_SIZE (mode))
	{
	  HOST_WIDE_INT delta;

	  if (offset >= 256)
	    delta = offset - (256 - GET_MODE_SIZE (mode));
	  else if (offset < 32 * GET_MODE_SIZE (mode) + 8)
	    delta = 31 * GET_MODE_SIZE (mode);
	  else
	    delta = offset & (~31 * GET_MODE_SIZE (mode));

	  xop0 = force_operand (plus_constant (xop0, offset - delta),
				NULL_RTX);
	  x = plus_constant (xop0, delta);
	}
      else if (offset < 0 && offset > -256)
	/* Small negative offsets are best done with a subtract before the
	   dereference; forcing these into a register normally takes two
	   instructions.  */
	x = force_operand (x, NULL_RTX);
      else
	{
	  /* For the remaining cases, force the constant into a register.  */
	  xop1 = force_reg (SImode, xop1);
	  x = gen_rtx_PLUS (SImode, xop0, xop1);
	}
    }
  else if (GET_CODE (x) == PLUS
	   && s_register_operand (XEXP (x, 1), SImode)
	   && !s_register_operand (XEXP (x, 0), SImode))
    {
      rtx xop0 = force_operand (XEXP (x, 0), NULL_RTX);

      x = gen_rtx_PLUS (SImode, xop0, XEXP (x, 1));
    }

  if (flag_pic)
    {
      /* We need to find and carefully transform any SYMBOL and LABEL
	 references; so go back to the original address expression.  */
      rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);

      if (new_x != orig_x)
	x = new_x;
    }

  return x;
}
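
/* Worked example for the biasing above (illustrative): an SImode
   reference at base + 130 takes the middle branch (130 < 32 * 4 + 8),
   giving delta = 124; the base is advanced by 6 and the final address
   becomes (new base) + 124, the largest in-range word offset.  */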


#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)			\
  (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
#endif
static inline int
thumb_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATERT:
    case PLUS:
    case MINUS:
    case COMPARE:
    case NEG:
    case NOT:
      return COSTS_N_INSNS (1);

    case MULT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  int cycles = 0;
	  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

	  while (i)
	    {
	      i >>= 2;
	      cycles++;
	    }
	  return COSTS_N_INSNS (2) + cycles;
	}
      return COSTS_N_INSNS (1) + 16;

    case SET:
      return (COSTS_N_INSNS (1)
	      + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
		     + (GET_CODE (SET_DEST (x)) == MEM)));

    case CONST_INT:
      if (outer == SET)
	{
	  if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
	    return 0;
	  if (thumb_shiftable_const (INTVAL (x)))
	    return COSTS_N_INSNS (2);
	  return COSTS_N_INSNS (3);
	}
      else if ((outer == PLUS || outer == COMPARE)
	       && INTVAL (x) < 256 && INTVAL (x) > -256)
	return 0;
      else if (outer == AND
	       && INTVAL (x) < 256 && INTVAL (x) >= -256)
	return COSTS_N_INSNS (1);
      else if (outer == ASHIFT || outer == ASHIFTRT
	       || outer == LSHIFTRT)
	return 0;
      return COSTS_N_INSNS (2);

    case CONST:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
      return COSTS_N_INSNS (3);

    case UDIV:
    case UMOD:
    case DIV:
    case MOD:
      return 100;

    case TRUNCATE:
      return 99;

    case AND:
    case XOR:
    case IOR:
      /* XXX guess.  */
      return 8;

    case MEM:
      /* XXX another guess.  */
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
		 ? 4 : 0));

    case IF_THEN_ELSE:
      /* XXX a guess.  */
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case ZERO_EXTEND:
      /* XXX still guessing.  */
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  return 99;
	}

    default:
      return 99;
    }
}
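
/* Example of the MULT cost loop above (illustrative): multiplying by
   0x100 shifts i right two bits at a time -- 0x100, 0x40, 0x10, 0x4,
   0x1, 0 -- so cycles ends up as 5 and the cost is COSTS_N_INSNS (2)
   + 5, modelling a multiplier that retires two bits of the constant
   per cycle.  */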


/* Worker routine for arm_rtx_costs.  */
static inline int
arm_rtx_costs_1 (rtx x, enum rtx_code code, enum rtx_code outer)
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (GET_CODE (x) == SYMBOL_REF
		 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      return optimize_size ? COSTS_N_INSNS (2) : 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
			&& const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && arm_const_double_rtx (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& arm_const_double_rtx (XEXP (x, 0))))
		   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& arm_const_double_rtx (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be split into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			      (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* This should have been handled by the CPU specific routines.  */
      abort ();

    case TRUNCATE:
      if (arm_arch3m && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case V8QImode:
	case V4HImode:
	case V2SImode:
	case V4QImode:
	case V2HImode:
	  return 1;

	default:
	  break;
	}
      abort ();

    case CONST_INT:
      if (const_ok_for_arm (INTVAL (x)))
	return outer == SET ? 2 : -1;
      else if (outer == AND
	       && const_ok_for_arm (~INTVAL (x)))
	return -1;
      else if ((outer == COMPARE
		|| outer == PLUS || outer == MINUS)
	       && const_ok_for_arm (-INTVAL (x)))
	return -1;
      else
	return 5;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 6;

    case CONST_DOUBLE:
      if (arm_const_double_rtx (x))
	return outer == SET ? 2 : -1;
      else if ((outer == COMPARE || outer == PLUS)
	       && neg_const_double_rtx_ok_for_fpa (x))
	return -1;
      return 7;

    default:
      return 99;
    }
}

/* RTX costs for cores with a slow MUL implementation.  */

static bool
arm_slowmul_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  if (TARGET_THUMB)
    {
      *total = thumb_rtx_costs (x, code, outer_code);
      return true;
    }

  switch (code)
    {
    case MULT:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	{
	  *total = 30;
	  return true;
	}

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int cost, const_ok = const_ok_for_arm (i);
	  int j, booth_unit_size;

	  /* Tune as appropriate.  */
	  cost = const_ok ? 4 : 8;
	  booth_unit_size = 2;
	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      cost += 2;
	    }

	  *total = cost;
	  return true;
	}

      *total = 30 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	       + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4);
      return true;

    default:
      *total = arm_rtx_costs_1 (x, code, outer_code);
      return true;
    }
}
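
/* Worked example (illustrative): for a multiply by the constant 0xff,
   const_ok_for_arm is true, so cost starts at 4; the loop then consumes
   the constant booth_unit_size (two) bits per step -- 0xff, 0x3f, 0xf,
   0x3, 0 -- adding 2 each time, for a total of 4 + 8 = 12.  This models
   a Booth-style multiplier that terminates early once the remaining
   bits of the multiplier are zero.  */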


/* RTX cost for cores with a fast multiply unit (M variants).  */

static bool
arm_fastmul_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  if (TARGET_THUMB)
    {
      *total = thumb_rtx_costs (x, code, outer_code);
      return true;
    }

  switch (code)
    {
    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all.  */
      if (mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	{
	  *total = 8;
	  return true;
	}

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	{
	  *total = 30;
	  return true;
	}

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int cost, const_ok = const_ok_for_arm (i);
	  int j, booth_unit_size;

	  /* Tune as appropriate.  */
	  cost = const_ok ? 4 : 8;
	  booth_unit_size = 8;
	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      cost += 2;
	    }

	  *total = cost;
	  return true;
	}

      *total = 8 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	       + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4);
      return true;

    default:
      *total = arm_rtx_costs_1 (x, code, outer_code);
      return true;
    }
}


/* RTX cost for XScale CPUs.  */

static bool
arm_xscale_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  if (TARGET_THUMB)
    {
      *total = thumb_rtx_costs (x, code, outer_code);
      return true;
    }

  switch (code)
    {
    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all.  */
      if (mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	{
	  *total = 8;
	  return true;
	}

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	{
	  *total = 30;
	  return true;
	}

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int cost, const_ok = const_ok_for_arm (i);
	  unsigned HOST_WIDE_INT masked_const;

	  /* The cost will be related to two insns.
	     First a load of the constant (MOV or LDR), then a multiply.  */
	  cost = 2;
	  if (! const_ok)
	    cost += 1;      /* LDR is probably more expensive because
			       of longer result latency.  */
	  masked_const = i & 0xffff8000;
	  if (masked_const != 0 && masked_const != 0xffff8000)
	    {
	      masked_const = i & 0xf8000000;
	      if (masked_const == 0 || masked_const == 0xf8000000)
		cost += 1;
	      else
		cost += 2;
	    }
	  *total = cost;
	  return true;
	}

      *total = 8 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	       + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4);
      return true;

    default:
      *total = arm_rtx_costs_1 (x, code, outer_code);
      return true;
    }
}
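
/* One reading of the masked_const tests above (illustrative): a
   multiplier whose significant bits fit in the low 15 bits, or are pure
   sign extension (0xffff8000), incurs no extra cost; one that still
   fits within the top-five-bit pattern adds one; anything wider adds
   two.  E.g. 0x4000 adds nothing, while 0x12345678 adds two on top of
   the load-plus-multiply base cost.  */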


/* RTX costs for 9e (and later) cores.  */

static bool
arm_9e_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);
  int nonreg_cost;
  int cost;

  if (TARGET_THUMB)
    {
      switch (code)
	{
	case MULT:
	  *total = COSTS_N_INSNS (3);
	  return true;

	default:
	  *total = thumb_rtx_costs (x, code, outer_code);
	  return true;
	}
    }

  switch (code)
    {
    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all.  */
      if (mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	{
	  *total = 3;
	  return true;
	}

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  *total = 30;
	  return true;
	}
      if (mode == DImode)
	{
	  cost = 7;
	  nonreg_cost = 8;
	}
      else
	{
	  cost = 2;
	  nonreg_cost = 4;
	}

      *total = cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : nonreg_cost)
	       + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : nonreg_cost);
      return true;

    default:
      *total = arm_rtx_costs_1 (x, code, outer_code);
      return true;
    }
}

/* All address computations that can be done are free, but rtx cost returns
   the same for practically all of them.  So we weight the different types
   of address here in the order (most preferred first):
   PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL.  */
static inline int
arm_arm_address_cost (rtx x)
{
  enum rtx_code c = GET_CODE (x);

  if (c == PRE_INC || c == PRE_DEC || c == POST_INC || c == POST_DEC)
    return 0;
  if (c == MEM || c == LABEL_REF || c == SYMBOL_REF)
    return 10;

  if (c == PLUS || c == MINUS)
    {
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	return 2;

      if (ARITHMETIC_P (XEXP (x, 0)) || ARITHMETIC_P (XEXP (x, 1)))
	return 3;

      return 4;
    }

  return 6;
}

static inline int
arm_thumb_address_cost (rtx x)
{
  enum rtx_code c = GET_CODE (x);

  if (c == REG)
    return 1;
  if (c == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;

  return 2;
}

static int
arm_address_cost (rtx x)
{
  return TARGET_ARM ? arm_arm_address_cost (x) : arm_thumb_address_cost (x);
}

static int
arm_adjust_cost (rtx insn, rtx link, rtx dep, int cost)
{
  rtx i_pat, d_pat;

  /* Some true dependencies can have a higher cost depending
     on precisely how certain input operands are used.  */
  if (arm_tune_xscale
      && REG_NOTE_KIND (link) == 0
      && recog_memoized (insn) >= 0
      && recog_memoized (dep) >= 0)
    {
      int shift_opnum = get_attr_shift (insn);
      enum attr_type attr_type = get_attr_type (dep);

      /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
	 operand for INSN.  If we have a shifted input operand and the
	 instruction we depend on is another ALU instruction, then we may
	 have to account for an additional stall.  */
      if (shift_opnum != 0
	  && (attr_type == TYPE_ALU_SHIFT || attr_type == TYPE_ALU_SHIFT_REG))
	{
	  rtx shifted_operand;
	  int opno;

	  /* Get the shifted operand.  */
	  extract_insn (insn);
	  shifted_operand = recog_data.operand[shift_opnum];

	  /* Iterate over all the operands in DEP.  If we write an operand
	     that overlaps with SHIFTED_OPERAND, then we have to increase
	     the cost of this dependency.  */
	  extract_insn (dep);
	  preprocess_constraints ();
	  for (opno = 0; opno < recog_data.n_operands; opno++)
	    {
	      /* We can ignore strict inputs.  */
	      if (recog_data.operand_type[opno] == OP_IN)
		continue;

	      if (reg_overlap_mentioned_p (recog_data.operand[opno],
					   shifted_operand))
		return 2;
	    }
	}
    }

  /* XXX This is not strictly true for the FPA.  */
  if (REG_NOTE_KIND (link) == REG_DEP_ANTI
      || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    return 0;

  /* Call insns don't incur a stall, even if they follow a load.  */
  if (REG_NOTE_KIND (link) == 0
      && GET_CODE (insn) == CALL_INSN)
    return 1;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      rtx src_mem = XEXP (SET_SRC (i_pat), 0);
      /* This is a load after a store; there is no conflict if the load
	 reads from a cached area.  Assume that loads from the stack, and
	 from the constant pool are cached, and that others will miss.
	 This is a hack.  */

      if ((GET_CODE (src_mem) == SYMBOL_REF
	   && CONSTANT_POOL_ADDRESS_P (src_mem))
	  || reg_mentioned_p (stack_pointer_rtx, src_mem)
	  || reg_mentioned_p (frame_pointer_rtx, src_mem)
	  || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
	return 1;
    }

  return cost;
}
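
/* Example of the XScale shift stall handled above (illustrative,
   hypothetical insns): if `mov r2, r3` is followed by
   `add r0, r1, r2, lsl #4`, the dependency writes r2, which overlaps
   the shifted operand of the ALU insn, so the cost is raised to 2 to
   model the extra stall.  */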

static int fp_consts_inited = 0;

/* Only zero is valid for VFP.  Other values are also valid for FPA.  */
static const char * const strings_fp[8] =
{
  "0",   "1",   "2",   "3",
  "4",   "5",   "0.5", "10"
};

static REAL_VALUE_TYPE values_fp[8];

static void
init_fp_table (void)
{
  int i;
  REAL_VALUE_TYPE r;

  if (TARGET_VFP)
    fp_consts_inited = 1;
  else
    fp_consts_inited = 8;

  for (i = 0; i < fp_consts_inited; i++)
    {
      r = REAL_VALUE_ATOF (strings_fp[i], DFmode);
      values_fp[i] = r;
    }
}

/* Return TRUE if rtx X is a valid immediate FP constant.  */
int
arm_const_double_rtx (rtx x)
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fp_consts_inited)
    init_fp_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < fp_consts_inited; i++)
    if (REAL_VALUES_EQUAL (r, values_fp[i]))
      return 1;

  return 0;
}

/* Return TRUE if rtx X is a valid immediate FPA constant when negated.  */
int
neg_const_double_rtx_ok_for_fpa (rtx x)
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fp_consts_inited)
    init_fp_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fp[i]))
      return 1;

  return 0;
}
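
/* Example (illustrative): on an FPA target, 10.0 satisfies
   arm_const_double_rtx directly (it is in strings_fp), while -5.0
   fails that test but satisfies neg_const_double_rtx_ok_for_fpa,
   since negating it yields the table entry 5.  Note that the negated
   test always scans all eight FPA entries, even when the table was
   only initialized with the single VFP value.  */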

/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading.  */
int
s_register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  /* XXX might have to check for lo regs only for thumb ???  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* A hard register operand (even before reload).  */
int
arm_hard_register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == REG
	  && REGNO (op) < FIRST_PSEUDO_REGISTER);
}

/* An ARM general register operand.  */
int
arm_general_register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  return (GET_CODE (op) == REG
	  && (REGNO (op) <= LAST_ARM_REGNUM
	      || REGNO (op) >= FIRST_PSEUDO_REGISTER));
}

/* Only accept reg, subreg(reg), const_int.  */
int
reg_or_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is an item in memory, given that we are in reload.  */
int
arm_reload_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int regno = true_regnum (op);

  return (!CONSTANT_P (op)
	  && (regno == -1
	      || (GET_CODE (op) == REG
		  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */
int
arm_rhs_operand (rtx op, enum machine_mode mode)
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the
   rhs of an ARM instruction, or a load.  */
int
arm_rhsm_operand (rtx op, enum machine_mode mode)
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
	  || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or for
   a constant that is valid when negated.  */
int
arm_add_operand (rtx op, enum machine_mode mode)
{
  if (TARGET_THUMB)
    return thumb_cmp_operand (op, mode);

  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (-INTVAL (op)))));
}

/* Return TRUE for valid ARM constants (or when valid if negated).  */
int
arm_addimm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT
	  && (const_ok_for_arm (INTVAL (op))
	      || const_ok_for_arm (-INTVAL (op))));
}

int
arm_not_operand (rtx op, enum machine_mode mode)
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (~INTVAL (op)))));
}

/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */
int
offsettable_memory_operand (rtx op, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
	  && GET_CODE (op) == MEM
	  && offsettable_address_p (reload_completed | reload_in_progress,
				    mode, XEXP (op, 0)));
}

/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */
int
alignable_memory_operand (rtx op, enum machine_mode mode)
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
	   || (GET_CODE (op) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (op)) == REG)
	   || (GET_CODE (op) == PLUS
	       && GET_CODE (XEXP (op, 1)) == CONST_INT
	       && (GET_CODE (reg = XEXP (op, 0)) == REG
		   || (GET_CODE (XEXP (op, 0)) == SUBREG
		       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
	  && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
}

/* Similar to s_register_operand, but does not allow hard integer
   registers.  */
int
f_register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) == FPA_REGS));
}

/* Return TRUE for valid operands for the rhs of a floating point insn.
   Allows regs or certain consts on FPA, just regs for everything else.  */
int
arm_float_rhs_operand (rtx op, enum machine_mode mode)
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (TARGET_FPA && GET_CODE (op) == CONST_DOUBLE)
    return arm_const_double_rtx (op);

  return FALSE;
}

int
arm_float_add_operand (rtx op, enum machine_mode mode)
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (TARGET_FPA && GET_CODE (op) == CONST_DOUBLE)
    return (arm_const_double_rtx (op)
	    || neg_const_double_rtx_ok_for_fpa (op));

  return FALSE;
}


/* Return TRUE if OP is suitable for the rhs of a floating point comparison.
   Depends on which FPU we are targeting.  */

int
arm_float_compare_operand (rtx op, enum machine_mode mode)
{
  if (TARGET_VFP)
    return vfp_compare_operand (op, mode);
  else
    return arm_float_rhs_operand (op, mode);
}

/* Return nonzero if OP is a valid Cirrus memory address pattern.  */
int
cirrus_memory_offset (rtx op)
{
  /* Reject eliminable registers.  */
  if (! (reload_in_progress || reload_completed)
      && (reg_mentioned_p (frame_pointer_rtx, op)
	  || reg_mentioned_p (arg_pointer_rtx, op)
	  || reg_mentioned_p (virtual_incoming_args_rtx, op)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, op)
	  || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
	  || reg_mentioned_p (virtual_stack_vars_rtx, op)))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      rtx ind;

      ind = XEXP (op, 0);

      /* Match: (mem (reg)).  */
      if (GET_CODE (ind) == REG)
	return 1;

      /* Match:
	 (mem (plus (reg)
		    (const))).  */
      if (GET_CODE (ind) == PLUS
	  && GET_CODE (XEXP (ind, 0)) == REG
	  && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
	  && GET_CODE (XEXP (ind, 1)) == CONST_INT)
	return 1;
    }

  return 0;
}

/* Return nonzero if OP is a memory operand whose address would be
   accepted for a QImode sign-extending load.  */
int
arm_extendqisi_mem_op (rtx op, enum machine_mode mode)
{
  if (!memory_operand (op, mode))
    return 0;

  return arm_legitimate_address_p (mode, XEXP (op, 0), SIGN_EXTEND, 0);
}

/* Return nonzero if OP is a Cirrus or general register.  */
int
cirrus_register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  return (GET_CODE (op) == REG
	  && (REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS
	      || REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS));
}

/* Return nonzero if OP is a Cirrus FP register.  */
int
cirrus_fp_register (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS));
}

/* Return nonzero if OP is a 6-bit constant (0..63).  */
int
cirrus_shift_const (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT
	  && INTVAL (op) >= 0
	  && INTVAL (op) < 64);
}


/* Return TRUE if OP is a valid VFP memory address pattern.
   WB is true if writeback address modes are allowed.  */

int
arm_coproc_mem_operand (rtx op, bool wb)
{
  rtx ind;

  /* Reject eliminable registers.  */
  if (! (reload_in_progress || reload_completed)
      && (reg_mentioned_p (frame_pointer_rtx, op)
	  || reg_mentioned_p (arg_pointer_rtx, op)
	  || reg_mentioned_p (virtual_incoming_args_rtx, op)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, op)
	  || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
	  || reg_mentioned_p (virtual_stack_vars_rtx, op)))
    return FALSE;

  /* Constants are converted into offsets from labels.  */
  if (GET_CODE (op) != MEM)
    return FALSE;

  ind = XEXP (op, 0);

  if (reload_completed
      && (GET_CODE (ind) == LABEL_REF
	  || (GET_CODE (ind) == CONST
	      && GET_CODE (XEXP (ind, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
	      && GET_CODE (XEXP (XEXP (ind, 0), 1)) == CONST_INT)))
    return TRUE;

  /* Match: (mem (reg)).  */
  if (GET_CODE (ind) == REG)
    return arm_address_register_rtx_p (ind, 0);

  /* Autoincrement addressing modes.  */
  if (wb
      && (GET_CODE (ind) == PRE_INC
	  || GET_CODE (ind) == POST_INC
	  || GET_CODE (ind) == PRE_DEC
	  || GET_CODE (ind) == POST_DEC))
    return arm_address_register_rtx_p (XEXP (ind, 0), 0);

  if (wb
      && (GET_CODE (ind) == POST_MODIFY || GET_CODE (ind) == PRE_MODIFY)
      && arm_address_register_rtx_p (XEXP (ind, 0), 0)
      && GET_CODE (XEXP (ind, 1)) == PLUS
      && rtx_equal_p (XEXP (XEXP (ind, 1), 0), XEXP (ind, 0)))
    ind = XEXP (ind, 1);

  /* Match:
     (plus (reg)
	   (const)).  */
  if (GET_CODE (ind) == PLUS
      && GET_CODE (XEXP (ind, 0)) == REG
      && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
      && GET_CODE (XEXP (ind, 1)) == CONST_INT
      && INTVAL (XEXP (ind, 1)) > -1024
      && INTVAL (XEXP (ind, 1)) < 1024
      && (INTVAL (XEXP (ind, 1)) & 3) == 0)
    return TRUE;

  return FALSE;
}


/* Return TRUE if OP is a REG or constant zero.  */
int
vfp_compare_operand (rtx op, enum machine_mode mode)
{
  if (s_register_operand (op, mode))
    return TRUE;

  return (GET_CODE (op) == CONST_DOUBLE
	  && arm_const_double_rtx (op));
}


/* Return GENERAL_REGS if a scratch register is required to reload x
   to/from VFP registers.  Otherwise return NO_REGS.  */

enum reg_class
vfp_secondary_reload_class (enum machine_mode mode, rtx x)
{
  if (arm_coproc_mem_operand (x, FALSE) || s_register_operand (x, mode))
    return NO_REGS;

  return GENERAL_REGS;
}

/* Return TRUE if INSN is an "LDR REG, ADDR" instruction.
   Used by the Cirrus Maverick code, which has to work around
   a hardware bug triggered by such instructions.  */
static bool
arm_memory_load_p (rtx insn)
{
  rtx body, lhs, rhs;

  if (insn == NULL_RTX || GET_CODE (insn) != INSN)
    return false;

  body = PATTERN (insn);

  if (GET_CODE (body) != SET)
    return false;

  lhs = XEXP (body, 0);
  rhs = XEXP (body, 1);

  lhs = REG_OR_SUBREG_RTX (lhs);

  /* If the destination is not a general purpose
     register we do not have to worry.  */
  if (GET_CODE (lhs) != REG
      || REGNO_REG_CLASS (REGNO (lhs)) != GENERAL_REGS)
    return false;

  /* As well as loads from memory we also have to react
     to loads of invalid constants which will be turned
     into loads from the minipool.  */
  return (GET_CODE (rhs) == MEM
	  || GET_CODE (rhs) == SYMBOL_REF
	  || note_invalid_constants (insn, -1, false));
}

/* Return TRUE if INSN is a Cirrus instruction.  */
static bool
arm_cirrus_insn_p (rtx insn)
{
  enum attr_cirrus attr;

  /* get_attr aborts on USE and CLOBBER.  */
  if (!insn
      || GET_CODE (insn) != INSN
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return 0;

  attr = get_attr_cirrus (insn);

  return attr != CIRRUS_NOT;
}

/* Cirrus reorg for invalid instruction combinations.  */
static void
cirrus_reorg (rtx first)
{
  enum attr_cirrus attr;
  rtx body = PATTERN (first);
  rtx t;
  int nops;

  /* Any branch must be followed by 2 non Cirrus instructions.  */
  if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
    {
      nops = 0;
      t = next_nonnote_insn (first);

      if (arm_cirrus_insn_p (t))
	++ nops;

      if (arm_cirrus_insn_p (next_nonnote_insn (t)))
	++ nops;

      while (nops --)
	emit_insn_after (gen_nop (), first);

      return;
    }

  /* (float (blah)) is in parallel with a clobber.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

  if (GET_CODE (body) == SET)
    {
      rtx lhs = XEXP (body, 0), rhs = XEXP (body, 1);

      /* cfldrd, cfldr64, cfstrd, cfstr64 must
	 be followed by a non Cirrus insn.  */
      if (get_attr_cirrus (first) == CIRRUS_DOUBLE)
	{
	  if (arm_cirrus_insn_p (next_nonnote_insn (first)))
	    emit_insn_after (gen_nop (), first);

	  return;
	}
      else if (arm_memory_load_p (first))
	{
	  unsigned int arm_regno;

	  /* Any ldr/cfmvdlr, ldr/cfmvdhr, ldr/cfmvsr, ldr/cfmv64lr,
	     ldr/cfmv64hr combination where the Rd field is the same
	     in both instructions must be split with a non Cirrus
	     insn.  Example:

	     ldr r0, blah
	     nop
	     cfmvsr mvf0, r0.  */

	  /* Get Arm register number for ldr insn.  */
	  if (GET_CODE (lhs) == REG)
	    arm_regno = REGNO (lhs);
	  else if (GET_CODE (rhs) == REG)
	    arm_regno = REGNO (rhs);
	  else
	    abort ();

	  /* Next insn.  */
	  first = next_nonnote_insn (first);

	  if (! arm_cirrus_insn_p (first))
	    return;

	  body = PATTERN (first);

	  /* (float (blah)) is in parallel with a clobber.  */
	  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0))
	    body = XVECEXP (body, 0, 0);

	  if (GET_CODE (body) == FLOAT)
	    body = XEXP (body, 0);

	  if (get_attr_cirrus (first) == CIRRUS_MOVE
	      && GET_CODE (XEXP (body, 1)) == REG
	      && arm_regno == REGNO (XEXP (body, 1)))
	    emit_insn_after (gen_nop (), first);

	  return;
	}
    }

  /* get_attr aborts on USE and CLOBBER.  */
  if (!first
      || GET_CODE (first) != INSN
      || GET_CODE (PATTERN (first)) == USE
      || GET_CODE (PATTERN (first)) == CLOBBER)
    return;

  attr = get_attr_cirrus (first);

  /* Any coprocessor compare instruction (cfcmps, cfcmpd, ...)
     must be followed by a non-coprocessor instruction.  */
  if (attr == CIRRUS_COMPARE)
    {
      nops = 0;

      t = next_nonnote_insn (first);

      if (arm_cirrus_insn_p (t))
	++ nops;

      if (arm_cirrus_insn_p (next_nonnote_insn (t)))
	++ nops;

      while (nops --)
	emit_insn_after (gen_nop (), first);

      return;
    }
}

/* Return nonzero if OP is a constant power of two.  */
int
power_of_two_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);

      return value != 0 && (value & (value - 1)) == 0;
    }

  return FALSE;
}
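
/* The bit trick above: value & (value - 1) clears the lowest set bit,
   so the result is zero exactly when a single bit is set.  E.g.
   8 & 7 == 0, while 12 & 11 == 8.  */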
cce8749e
CH
4999
5000/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 5001 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
5002 Note that this disallows MEM(REG+REG), but allows
5003 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e 5004int
e32bac5b 5005di_operand (rtx op, enum machine_mode mode)
cce8749e 5006{
ff9940b0 5007 if (s_register_operand (op, mode))
f3bb6135 5008 return TRUE;
cce8749e 5009
9ce71c6f
BS
5010 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
5011 return FALSE;
5012
e9c6b69b
NC
5013 if (GET_CODE (op) == SUBREG)
5014 op = SUBREG_REG (op);
5015
cce8749e
CH
5016 switch (GET_CODE (op))
5017 {
5018 case CONST_DOUBLE:
5019 case CONST_INT:
f3bb6135
RE
5020 return TRUE;
5021
cce8749e 5022 case MEM:
f3bb6135
RE
5023 return memory_address_p (DImode, XEXP (op, 0));
5024
cce8749e 5025 default:
f3bb6135 5026 return FALSE;
cce8749e 5027 }
f3bb6135 5028}
cce8749e 5029
d5b7b3ae
RE
5030/* Like di_operand, but don't accept constants. */
5031int
e32bac5b 5032nonimmediate_di_operand (rtx op, enum machine_mode mode)
d5b7b3ae
RE
5033{
5034 if (s_register_operand (op, mode))
5035 return TRUE;
5036
5037 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
5038 return FALSE;
5039
5040 if (GET_CODE (op) == SUBREG)
5041 op = SUBREG_REG (op);
5042
5043 if (GET_CODE (op) == MEM)
5044 return memory_address_p (DImode, XEXP (op, 0));
5045
5046 return FALSE;
5047}
5048
9b66ebb1 5049/* Return TRUE for a valid operand of a DFmode operation when soft-float.
e9c6b69b 5050 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
5051 Note that this disallows MEM(REG+REG), but allows
5052 MEM(PRE/POST_INC/DEC(REG)). */
f3139301 5053int
e32bac5b 5054soft_df_operand (rtx op, enum machine_mode mode)
f3139301
DE
5055{
5056 if (s_register_operand (op, mode))
4b02997f 5057 return TRUE;
f3139301 5058
9ce71c6f
BS
5059 if (mode != VOIDmode && GET_MODE (op) != mode)
5060 return FALSE;
5061
37b80d2e
BS
5062 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
5063 return FALSE;
5064
e9c6b69b
NC
5065 if (GET_CODE (op) == SUBREG)
5066 op = SUBREG_REG (op);
9ce71c6f 5067
f3139301
DE
5068 switch (GET_CODE (op))
5069 {
5070 case CONST_DOUBLE:
5071 return TRUE;
5072
5073 case MEM:
5074 return memory_address_p (DFmode, XEXP (op, 0));
5075
5076 default:
5077 return FALSE;
5078 }
5079}
5080
d5b7b3ae
RE
5081/* Like soft_df_operand, but don't accept constants. */
5082int
e32bac5b 5083nonimmediate_soft_df_operand (rtx op, enum machine_mode mode)
d5b7b3ae
RE
5084{
5085 if (s_register_operand (op, mode))
4b02997f 5086 return TRUE;
d5b7b3ae
RE
5087
5088 if (mode != VOIDmode && GET_MODE (op) != mode)
5089 return FALSE;
5090
5091 if (GET_CODE (op) == SUBREG)
5092 op = SUBREG_REG (op);
5093
5094 if (GET_CODE (op) == MEM)
5095 return memory_address_p (DFmode, XEXP (op, 0));
5096 return FALSE;
5097}
cce8749e 5098
d5b7b3ae 5099/* Return TRUE for valid index operands. */
cce8749e 5100int
e32bac5b 5101index_operand (rtx op, enum machine_mode mode)
cce8749e 5102{
d5b7b3ae 5103 return (s_register_operand (op, mode)
ff9940b0 5104 || (immediate_operand (op, mode)
d5b7b3ae
RE
5105 && (GET_CODE (op) != CONST_INT
5106 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
f3bb6135 5107}
cce8749e 5108
ff9940b0
RE
5109/* Return TRUE for valid shifts by a constant. This also accepts any
5110 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 5111 shift operator in this case was a mult. */
ff9940b0 5112int
e32bac5b 5113const_shift_operand (rtx op, enum machine_mode mode)
ff9940b0
RE
5114{
5115 return (power_of_two_operand (op, mode)
5116 || (immediate_operand (op, mode)
d5b7b3ae
RE
5117 && (GET_CODE (op) != CONST_INT
5118 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
f3bb6135 5119}
ff9940b0 5120
cce8749e
CH
5121/* Return TRUE for arithmetic operators which can be combined with a multiply
5122 (shift). */
cce8749e 5123int
e32bac5b 5124shiftable_operator (rtx x, enum machine_mode mode)
cce8749e 5125{
1d6e90ac
NC
5126 enum rtx_code code;
5127
cce8749e
CH
5128 if (GET_MODE (x) != mode)
5129 return FALSE;
cce8749e 5130
1d6e90ac
NC
5131 code = GET_CODE (x);
5132
5133 return (code == PLUS || code == MINUS
5134 || code == IOR || code == XOR || code == AND);
f3bb6135 5135}
cce8749e 5136
6ab589e0 5137/* Return TRUE for binary logical operators. */
6ab589e0 5138int
e32bac5b 5139logical_binary_operator (rtx x, enum machine_mode mode)
6ab589e0 5140{
1d6e90ac
NC
5141 enum rtx_code code;
5142
6ab589e0
JL
5143 if (GET_MODE (x) != mode)
5144 return FALSE;
6ab589e0 5145
1d6e90ac
NC
5146 code = GET_CODE (x);
5147
5148 return (code == IOR || code == XOR || code == AND);
6ab589e0
JL
5149}
5150
6354dc9b 5151/* Return TRUE for shift operators. */
cce8749e 5152int
e32bac5b 5153shift_operator (rtx x, enum machine_mode mode)
cce8749e 5154{
1d6e90ac
NC
5155 enum rtx_code code;
5156
cce8749e
CH
5157 if (GET_MODE (x) != mode)
5158 return FALSE;
cce8749e 5159
1d6e90ac 5160 code = GET_CODE (x);
f3bb6135 5161
1d6e90ac
NC
5162 if (code == MULT)
5163 return power_of_two_operand (XEXP (x, 1), mode);
5164
5165 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
5166 || code == ROTATERT);
f3bb6135 5167}
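/* Illustrative note (editorial addition, not in the original source):
   shift_operator accepts (ashiftrt r0 (const_int 2)) directly, and it
   also accepts (mult r0 (const_int 8)), since RTL sometimes represents
   a left shift by 3 as a multiply by 8 (see the comment on
   const_shift_operand above); power_of_two_operand performs that
   check on the second operand of the MULT. */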
ff9940b0 5168
6354dc9b
NC
5169/* Return TRUE if x is EQ or NE. */
5170int
e32bac5b 5171equality_operator (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
ff9940b0 5172{
f3bb6135 5173 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
5174}
5175
e45b72c4
RE
5176/* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
5177int
e32bac5b 5178arm_comparison_operator (rtx x, enum machine_mode mode)
e45b72c4
RE
5179{
5180 return (comparison_operator (x, mode)
5181 && GET_CODE (x) != LTGT
5182 && GET_CODE (x) != UNEQ);
5183}
5184
6354dc9b 5185/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
ff9940b0 5186int
e32bac5b 5187minmax_operator (rtx x, enum machine_mode mode)
ff9940b0
RE
5188{
5189 enum rtx_code code = GET_CODE (x);
5190
5191 if (GET_MODE (x) != mode)
5192 return FALSE;
f3bb6135 5193
ff9940b0 5194 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 5195}
ff9940b0 5196
ff9940b0 5197/* Return TRUE if this is the condition code register; if we aren't given
6354dc9b 5198 a mode, accept any class CCmode register. */
ff9940b0 5199int
e32bac5b 5200cc_register (rtx x, enum machine_mode mode)
ff9940b0
RE
5201{
5202 if (mode == VOIDmode)
5203 {
5204 mode = GET_MODE (x);
d5b7b3ae 5205
ff9940b0
RE
5206 if (GET_MODE_CLASS (mode) != MODE_CC)
5207 return FALSE;
5208 }
f3bb6135 5209
d5b7b3ae
RE
5210 if (GET_MODE (x) == mode
5211 && GET_CODE (x) == REG
5212 && REGNO (x) == CC_REGNUM)
ff9940b0 5213 return TRUE;
f3bb6135 5214
ff9940b0
RE
5215 return FALSE;
5216}
5bbe2d40
RE
5217
5218/* Return TRUE if this is the condition code register; if we aren't given
84ed5e79
RE
5219 a mode, accept any class CCmode register which indicates a dominance
5220 expression. */
5bbe2d40 5221int
e32bac5b 5222dominant_cc_register (rtx x, enum machine_mode mode)
5bbe2d40
RE
5223{
5224 if (mode == VOIDmode)
5225 {
5226 mode = GET_MODE (x);
d5b7b3ae 5227
84ed5e79 5228 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
5229 return FALSE;
5230 }
5231
e32bac5b 5232 if (mode != CC_DNEmode && mode != CC_DEQmode
84ed5e79
RE
5233 && mode != CC_DLEmode && mode != CC_DLTmode
5234 && mode != CC_DGEmode && mode != CC_DGTmode
5235 && mode != CC_DLEUmode && mode != CC_DLTUmode
5236 && mode != CC_DGEUmode && mode != CC_DGTUmode)
5237 return FALSE;
5238
d5b7b3ae 5239 return cc_register (x, mode);
5bbe2d40
RE
5240}
5241
2b835d68
RE
5242/* Return TRUE if X references a SYMBOL_REF. */
5243int
e32bac5b 5244symbol_mentioned_p (rtx x)
2b835d68 5245{
1d6e90ac
NC
5246 const char * fmt;
5247 int i;
2b835d68
RE
5248
5249 if (GET_CODE (x) == SYMBOL_REF)
5250 return 1;
5251
5252 fmt = GET_RTX_FORMAT (GET_CODE (x));
d5b7b3ae 5253
2b835d68
RE
5254 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5255 {
5256 if (fmt[i] == 'E')
5257 {
1d6e90ac 5258 int j;
2b835d68
RE
5259
5260 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5261 if (symbol_mentioned_p (XVECEXP (x, i, j)))
5262 return 1;
5263 }
5264 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
5265 return 1;
5266 }
5267
5268 return 0;
5269}
5270
5271/* Return TRUE if X references a LABEL_REF. */
5272int
e32bac5b 5273label_mentioned_p (rtx x)
2b835d68 5274{
1d6e90ac
NC
5275 const char * fmt;
5276 int i;
2b835d68
RE
5277
5278 if (GET_CODE (x) == LABEL_REF)
5279 return 1;
5280
5281 fmt = GET_RTX_FORMAT (GET_CODE (x));
5282 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5283 {
5284 if (fmt[i] == 'E')
5285 {
1d6e90ac 5286 int j;
2b835d68
RE
5287
5288 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5289 if (label_mentioned_p (XVECEXP (x, i, j)))
5290 return 1;
5291 }
5292 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
5293 return 1;
5294 }
5295
5296 return 0;
5297}
5298
ff9940b0 5299enum rtx_code
e32bac5b 5300minmax_code (rtx x)
ff9940b0
RE
5301{
5302 enum rtx_code code = GET_CODE (x);
5303
5304 if (code == SMAX)
5305 return GE;
f3bb6135 5306 else if (code == SMIN)
ff9940b0 5307 return LE;
f3bb6135 5308 else if (code == UMIN)
ff9940b0 5309 return LEU;
f3bb6135 5310 else if (code == UMAX)
ff9940b0 5311 return GEU;
f3bb6135 5312
ff9940b0
RE
5313 abort ();
5314}
5315
6354dc9b 5316/* Return 1 if memory locations are adjacent. */
f3bb6135 5317int
e32bac5b 5318adjacent_mem_locations (rtx a, rtx b)
ff9940b0 5319{
ff9940b0
RE
5320 if ((GET_CODE (XEXP (a, 0)) == REG
5321 || (GET_CODE (XEXP (a, 0)) == PLUS
5322 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
5323 && (GET_CODE (XEXP (b, 0)) == REG
5324 || (GET_CODE (XEXP (b, 0)) == PLUS
5325 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
5326 {
1d6e90ac
NC
5327 int val0 = 0, val1 = 0;
5328 int reg0, reg1;
5329
ff9940b0
RE
5330 if (GET_CODE (XEXP (a, 0)) == PLUS)
5331 {
1d6e90ac 5332 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
ff9940b0
RE
5333 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
5334 }
5335 else
5336 reg0 = REGNO (XEXP (a, 0));
1d6e90ac 5337
ff9940b0
RE
5338 if (GET_CODE (XEXP (b, 0)) == PLUS)
5339 {
1d6e90ac 5340 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
ff9940b0
RE
5341 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
5342 }
5343 else
5344 reg1 = REGNO (XEXP (b, 0));
1d6e90ac 5345
e32bac5b
RE
5346 /* Don't accept any offset that will require multiple
5347 instructions to handle, since this would cause the
5348 arith_adjacentmem pattern to output an overlong sequence. */
c75a3ddc
PB
5349 if (!const_ok_for_op (PLUS, val0) || !const_ok_for_op (PLUS, val1))
5350 return 0;
5351
ff9940b0
RE
5352 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
5353 }
5354 return 0;
5355}
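/* Worked example (editorial addition, not in the original source):
   given a base register rb,
     a = (mem:SI (plus:SI rb (const_int 8)))
     b = (mem:SI (plus:SI rb (const_int 12)))
   adjacent_mem_locations (a, b) returns 1: both references use the
   same base register and their offsets differ by exactly 4, in
   either order.  Offsets that need more than one insn to materialize
   are rejected by the const_ok_for_op test above. */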
5356
5357/* Return 1 if OP is a load multiple operation. It is known to be
6354dc9b 5358 a PARALLEL and the first section will be tested. */
f3bb6135 5359int
e32bac5b 5360load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
ff9940b0 5361{
f3bb6135 5362 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
5363 int dest_regno;
5364 rtx src_addr;
f3bb6135 5365 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
5366 rtx elt;
5367
5368 if (count <= 1
5369 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
5370 return 0;
5371
6354dc9b 5372 /* Check to see if this might be a write-back. */
ff9940b0
RE
5373 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
5374 {
5375 i++;
5376 base = 1;
5377
6354dc9b 5378 /* Now check it more carefully. */
ff9940b0
RE
5379 if (GET_CODE (SET_DEST (elt)) != REG
5380 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
ff9940b0 5381 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 5382 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 5383 return 0;
ff9940b0
RE
5384 }
5385
5386 /* Perform a quick check so we don't blow up below. */
5387 if (count <= i
5388 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
5389 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
5390 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
5391 return 0;
5392
5393 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
5394 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
5395
5396 for (; i < count; i++)
5397 {
ed4c4348 5398 elt = XVECEXP (op, 0, i);
ff9940b0
RE
5399
5400 if (GET_CODE (elt) != SET
5401 || GET_CODE (SET_DEST (elt)) != REG
5402 || GET_MODE (SET_DEST (elt)) != SImode
6354dc9b 5403 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
ff9940b0
RE
5404 || GET_CODE (SET_SRC (elt)) != MEM
5405 || GET_MODE (SET_SRC (elt)) != SImode
5406 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5895f793 5407 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
ff9940b0
RE
5408 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5409 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
5410 return 0;
5411 }
5412
5413 return 1;
5414}
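/* Illustrative shape (editorial addition, not in the original
   source) of a PARALLEL accepted above, without write-back:
     (parallel
       [(set (reg:SI 4) (mem:SI (reg:SI 0)))
        (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 0) (const_int 4))))
        (set (reg:SI 6) (mem:SI (plus:SI (reg:SI 0) (const_int 8))))])
   i.e. consecutive destination registers loaded from consecutive
   word offsets off a single base address. */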
5415
5416/* Return 1 if OP is a store multiple operation. It is known to be
6354dc9b 5417 a PARALLEL and the first section will be tested. */
f3bb6135 5418int
e32bac5b 5419store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
ff9940b0 5420{
f3bb6135 5421 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
5422 int src_regno;
5423 rtx dest_addr;
f3bb6135 5424 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
5425 rtx elt;
5426
5427 if (count <= 1
5428 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
5429 return 0;
5430
6354dc9b 5431 /* Check to see if this might be a write-back. */
ff9940b0
RE
5432 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
5433 {
5434 i++;
5435 base = 1;
5436
6354dc9b 5437 /* Now check it more carefully. */
ff9940b0
RE
5438 if (GET_CODE (SET_DEST (elt)) != REG
5439 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
ff9940b0 5440 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 5441 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 5442 return 0;
ff9940b0
RE
5443 }
5444
5445 /* Perform a quick check so we don't blow up below. */
5446 if (count <= i
5447 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
5448 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
5449 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
5450 return 0;
5451
5452 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
5453 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
5454
5455 for (; i < count; i++)
5456 {
5457 elt = XVECEXP (op, 0, i);
5458
5459 if (GET_CODE (elt) != SET
5460 || GET_CODE (SET_SRC (elt)) != REG
5461 || GET_MODE (SET_SRC (elt)) != SImode
6354dc9b 5462 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
ff9940b0
RE
5463 || GET_CODE (SET_DEST (elt)) != MEM
5464 || GET_MODE (SET_DEST (elt)) != SImode
5465 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5895f793 5466 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
ff9940b0
RE
5467 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5468 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
5469 return 0;
5470 }
5471
5472 return 1;
5473}
e2c671ba 5474
84ed5e79 5475int
e32bac5b
RE
5476load_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
5477 HOST_WIDE_INT *load_offset)
84ed5e79
RE
5478{
5479 int unsorted_regs[4];
5480 HOST_WIDE_INT unsorted_offsets[4];
5481 int order[4];
ad076f4e 5482 int base_reg = -1;
84ed5e79
RE
5483 int i;
5484
1d6e90ac
NC
5485 /* Can only handle 2, 3, or 4 insns at present,
5486 though could be easily extended if required. */
84ed5e79
RE
5487 if (nops < 2 || nops > 4)
5488 abort ();
5489
5490 /* Loop over the operands and check that the memory references are
5491 suitable (i.e. immediate offsets from the same base register). At
5492 the same time, extract the target register, and the memory
5493 offsets. */
5494 for (i = 0; i < nops; i++)
5495 {
5496 rtx reg;
5497 rtx offset;
5498
56636818
JL
5499 /* Convert a subreg of a mem into the mem itself. */
5500 if (GET_CODE (operands[nops + i]) == SUBREG)
4e26a7af 5501 operands[nops + i] = alter_subreg (operands + (nops + i));
56636818 5502
84ed5e79
RE
5503 if (GET_CODE (operands[nops + i]) != MEM)
5504 abort ();
5505
5506 /* Don't reorder volatile memory references; it doesn't seem worth
5507 looking for the case where the order is ok anyway. */
5508 if (MEM_VOLATILE_P (operands[nops + i]))
5509 return 0;
5510
5511 offset = const0_rtx;
5512
5513 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
5514 || (GET_CODE (reg) == SUBREG
5515 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
5516 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
5517 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
5518 == REG)
5519 || (GET_CODE (reg) == SUBREG
5520 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
5521 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
5522 == CONST_INT)))
5523 {
5524 if (i == 0)
5525 {
d5b7b3ae 5526 base_reg = REGNO (reg);
84ed5e79
RE
5527 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
5528 ? REGNO (operands[i])
5529 : REGNO (SUBREG_REG (operands[i])));
5530 order[0] = 0;
5531 }
5532 else
5533 {
6354dc9b 5534 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
5535 /* Not addressed from the same base register. */
5536 return 0;
5537
5538 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
5539 ? REGNO (operands[i])
5540 : REGNO (SUBREG_REG (operands[i])));
5541 if (unsorted_regs[i] < unsorted_regs[order[0]])
5542 order[0] = i;
5543 }
5544
5545 /* If it isn't an integer register, or if it overwrites the
5546 base register but isn't the last insn in the list, then
5547 we can't do this. */
5548 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
5549 || (i != nops - 1 && unsorted_regs[i] == base_reg))
5550 return 0;
5551
5552 unsorted_offsets[i] = INTVAL (offset);
5553 }
5554 else
5555 /* Not a suitable memory address. */
5556 return 0;
5557 }
5558
5559 /* All the useful information has now been extracted from the
5560 operands into unsorted_regs and unsorted_offsets; additionally,
5561 order[0] has been set to the lowest numbered register in the
5562 list. Sort the registers into order, and check that the memory
5563 offsets are ascending and adjacent. */
5564
5565 for (i = 1; i < nops; i++)
5566 {
5567 int j;
5568
5569 order[i] = order[i - 1];
5570 for (j = 0; j < nops; j++)
5571 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
5572 && (order[i] == order[i - 1]
5573 || unsorted_regs[j] < unsorted_regs[order[i]]))
5574 order[i] = j;
5575
5576 /* Have we found a suitable register? If not, one must be used more
5577 than once. */
5578 if (order[i] == order[i - 1])
5579 return 0;
5580
5581 /* Is the memory address adjacent and ascending? */
5582 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
5583 return 0;
5584 }
5585
5586 if (base)
5587 {
5588 *base = base_reg;
5589
5590 for (i = 0; i < nops; i++)
5591 regs[i] = unsorted_regs[order[i]];
5592
5593 *load_offset = unsorted_offsets[order[0]];
5594 }
5595
5596 if (unsorted_offsets[order[0]] == 0)
5597 return 1; /* ldmia */
5598
5599 if (unsorted_offsets[order[0]] == 4)
5600 return 2; /* ldmib */
5601
5602 if (unsorted_offsets[order[nops - 1]] == 0)
5603 return 3; /* ldmda */
5604
5605 if (unsorted_offsets[order[nops - 1]] == -4)
5606 return 4; /* ldmdb */
5607
949d79eb
RE
5608 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
5609 if the offset isn't small enough. The reason 2 ldrs are faster
5610 is because these ARMs are able to do more than one cache access
5611 in a single cycle. The ARM9 and StrongARM have Harvard caches,
5612 whilst the ARM8 has a double bandwidth cache. This means that
5613 these cores can do both an instruction fetch and a data fetch in
5614 a single cycle, so the trick of calculating the address into a
5615 scratch register (one of the result regs) and then doing a load
5616 multiple actually becomes slower (and no smaller in code size).
5617 That is the transformation
6cc8c0b3
NC
5618
5619 ldr rd1, [rbase + offset]
5620 ldr rd2, [rbase + offset + 4]
5621
5622 to
5623
5624 add rd1, rbase, offset
5625 ldmia rd1, {rd1, rd2}
5626
949d79eb
RE
5627 produces worse code -- '3 cycles + any stalls on rd2' instead of
5628 '2 cycles + any stalls on rd2'. On ARMs with only one cache
5629 access per cycle, the first sequence could never complete in less
5630 than 6 cycles, whereas the ldm sequence would only take 5 and
5631 would make better use of sequential accesses if not hitting the
5632 cache.
5633
5634 We cheat here and test 'arm_ld_sched' which we currently know to
5635 only be true for the ARM8, ARM9 and StrongARM. If this ever
5636 changes, then the test below needs to be reworked. */
f5a1b0d2 5637 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
5638 return 0;
5639
84ed5e79
RE
5640 /* Can't do it without setting up the offset, only do this if it takes
5641 no more than one insn. */
5642 return (const_ok_for_arm (unsorted_offsets[order[0]])
5643 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
5644}
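/* Worked example (editorial addition, not in the original source):
   for memory operands [rb, #4] and [rb, #8] feeding two target
   registers, the offsets are adjacent and ascending from one base,
   the lowest offset is 4, and the function returns 2, which
   emit_ldm_seq below maps to "ldm%?ib".  A return of 5 means the
   base must first be adjusted by a single add or sub (case 5 of
   emit_ldm_seq). */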
5645
cd2b33d0 5646const char *
e32bac5b 5647emit_ldm_seq (rtx *operands, int nops)
84ed5e79
RE
5648{
5649 int regs[4];
5650 int base_reg;
5651 HOST_WIDE_INT offset;
5652 char buf[100];
5653 int i;
5654
5655 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
5656 {
5657 case 1:
5658 strcpy (buf, "ldm%?ia\t");
5659 break;
5660
5661 case 2:
5662 strcpy (buf, "ldm%?ib\t");
5663 break;
5664
5665 case 3:
5666 strcpy (buf, "ldm%?da\t");
5667 break;
5668
5669 case 4:
5670 strcpy (buf, "ldm%?db\t");
5671 break;
5672
5673 case 5:
5674 if (offset >= 0)
5675 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
5676 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
5677 (long) offset);
5678 else
5679 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
5680 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
5681 (long) -offset);
5682 output_asm_insn (buf, operands);
5683 base_reg = regs[0];
5684 strcpy (buf, "ldm%?ia\t");
5685 break;
5686
5687 default:
5688 abort ();
5689 }
5690
5691 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
5692 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
5693
5694 for (i = 1; i < nops; i++)
5695 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
5696 reg_names[regs[i]]);
5697
5698 strcat (buf, "}\t%@ phole ldm");
5699
5700 output_asm_insn (buf, operands);
5701 return "";
5702}
5703
5704int
e32bac5b
RE
5705store_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
5706 HOST_WIDE_INT * load_offset)
84ed5e79
RE
5707{
5708 int unsorted_regs[4];
5709 HOST_WIDE_INT unsorted_offsets[4];
5710 int order[4];
ad076f4e 5711 int base_reg = -1;
84ed5e79
RE
5712 int i;
5713
5714 /* Can only handle 2, 3, or 4 insns at present, though could be easily
5715 extended if required. */
5716 if (nops < 2 || nops > 4)
5717 abort ();
5718
5719 /* Loop over the operands and check that the memory references are
5720 suitable (i.e. immediate offsets from the same base register). At
5721 the same time, extract the target register, and the memory
5722 offsets. */
5723 for (i = 0; i < nops; i++)
5724 {
5725 rtx reg;
5726 rtx offset;
5727
56636818
JL
5728 /* Convert a subreg of a mem into the mem itself. */
5729 if (GET_CODE (operands[nops + i]) == SUBREG)
4e26a7af 5730 operands[nops + i] = alter_subreg (operands + (nops + i));
56636818 5731
84ed5e79
RE
5732 if (GET_CODE (operands[nops + i]) != MEM)
5733 abort ();
5734
5735 /* Don't reorder volatile memory references; it doesn't seem worth
5736 looking for the case where the order is ok anyway. */
5737 if (MEM_VOLATILE_P (operands[nops + i]))
5738 return 0;
5739
5740 offset = const0_rtx;
5741
5742 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
5743 || (GET_CODE (reg) == SUBREG
5744 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
5745 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
5746 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
5747 == REG)
5748 || (GET_CODE (reg) == SUBREG
5749 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
5750 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
5751 == CONST_INT)))
5752 {
5753 if (i == 0)
5754 {
62b10bbc 5755 base_reg = REGNO (reg);
84ed5e79
RE
5756 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
5757 ? REGNO (operands[i])
5758 : REGNO (SUBREG_REG (operands[i])));
5759 order[0] = 0;
5760 }
5761 else
5762 {
6354dc9b 5763 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
5764 /* Not addressed from the same base register. */
5765 return 0;
5766
5767 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
5768 ? REGNO (operands[i])
5769 : REGNO (SUBREG_REG (operands[i])));
5770 if (unsorted_regs[i] < unsorted_regs[order[0]])
5771 order[0] = i;
5772 }
5773
5774 /* If it isn't an integer register, then we can't do this. */
5775 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
5776 return 0;
5777
5778 unsorted_offsets[i] = INTVAL (offset);
5779 }
5780 else
5781 /* Not a suitable memory address. */
5782 return 0;
5783 }
5784
5785 /* All the useful information has now been extracted from the
5786 operands into unsorted_regs and unsorted_offsets; additionally,
5787 order[0] has been set to the lowest numbered register in the
5788 list. Sort the registers into order, and check that the memory
5789 offsets are ascending and adjacent. */
5790
5791 for (i = 1; i < nops; i++)
5792 {
5793 int j;
5794
5795 order[i] = order[i - 1];
5796 for (j = 0; j < nops; j++)
5797 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
5798 && (order[i] == order[i - 1]
5799 || unsorted_regs[j] < unsorted_regs[order[i]]))
5800 order[i] = j;
5801
5802 /* Have we found a suitable register? If not, one must be used more
5803 than once. */
5804 if (order[i] == order[i - 1])
5805 return 0;
5806
5807 /* Is the memory address adjacent and ascending? */
5808 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
5809 return 0;
5810 }
5811
5812 if (base)
5813 {
5814 *base = base_reg;
5815
5816 for (i = 0; i < nops; i++)
5817 regs[i] = unsorted_regs[order[i]];
5818
5819 *load_offset = unsorted_offsets[order[0]];
5820 }
5821
5822 if (unsorted_offsets[order[0]] == 0)
5823 return 1; /* stmia */
5824
5825 if (unsorted_offsets[order[0]] == 4)
5826 return 2; /* stmib */
5827
5828 if (unsorted_offsets[order[nops - 1]] == 0)
5829 return 3; /* stmda */
5830
5831 if (unsorted_offsets[order[nops - 1]] == -4)
5832 return 4; /* stmdb */
5833
5834 return 0;
5835}
5836
cd2b33d0 5837const char *
e32bac5b 5838emit_stm_seq (rtx *operands, int nops)
84ed5e79
RE
5839{
5840 int regs[4];
5841 int base_reg;
5842 HOST_WIDE_INT offset;
5843 char buf[100];
5844 int i;
5845
5846 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
5847 {
5848 case 1:
5849 strcpy (buf, "stm%?ia\t");
5850 break;
5851
5852 case 2:
5853 strcpy (buf, "stm%?ib\t");
5854 break;
5855
5856 case 3:
5857 strcpy (buf, "stm%?da\t");
5858 break;
5859
5860 case 4:
5861 strcpy (buf, "stm%?db\t");
5862 break;
5863
5864 default:
5865 abort ();
5866 }
5867
5868 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
5869 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
5870
5871 for (i = 1; i < nops; i++)
5872 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
5873 reg_names[regs[i]]);
5874
5875 strcat (buf, "}\t%@ phole stm");
5876
5877 output_asm_insn (buf, operands);
5878 return "";
5879}
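/* For example (editorial addition, not in the original source), a
   two-register stmia sequence assembles the template
   "stm%?ia\trb, {r4, r5}\t%@ phole stm", where the output machinery
   later substitutes the %? condition suffix and the %@ assembler
   comment marker. */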
5880
e2c671ba 5881int
e32bac5b 5882multi_register_push (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
e2c671ba
RE
5883{
5884 if (GET_CODE (op) != PARALLEL
5885 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
5886 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
b15bca31 5887 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
e2c671ba
RE
5888 return 0;
5889
5890 return 1;
5891}
ff9940b0 5892\f
6354dc9b 5893/* Routines for use in generating RTL. */
1d6e90ac 5894
f3bb6135 5895rtx
e32bac5b
RE
5896arm_gen_load_multiple (int base_regno, int count, rtx from, int up,
5897 int write_back, int unchanging_p, int in_struct_p,
5898 int scalar_p)
ff9940b0
RE
5899{
5900 int i = 0, j;
5901 rtx result;
5902 int sign = up ? 1 : -1;
56636818 5903 rtx mem;
ff9940b0 5904
d19fb8e3 5905 /* XScale has load-store double instructions, but they have stricter
1e5f1716 5906 alignment requirements than load-store multiple, so we cannot
d19fb8e3
NC
5907 use them.
5908
5909 For XScale ldm requires 2 + NREGS cycles to complete and blocks
5910 the pipeline until completion.
5911
5912 NREGS CYCLES
5913 1 3
5914 2 4
5915 3 5
5916 4 6
5917
5918 An ldr instruction takes 1-3 cycles, but does not block the
5919 pipeline.
5920
5921 NREGS CYCLES
5922 1 1-3
5923 2 2-6
5924 3 3-9
5925 4 4-12
5926
5927 Best case ldr will always win. However, the more ldr instructions
5928 we issue, the less likely we are to be able to schedule them well.
5929 Using ldr instructions also increases code size.
5930
5931 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
5932 for counts of 3 or 4 regs. */
4b3c2e48 5933 if (arm_tune_xscale && count <= 2 && ! optimize_size)
d19fb8e3
NC
5934 {
5935 rtx seq;
5936
5937 start_sequence ();
5938
5939 for (i = 0; i < count; i++)
5940 {
5941 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
389fdba0 5942 MEM_READONLY_P (mem) = unchanging_p;
d19fb8e3
NC
5943 MEM_IN_STRUCT_P (mem) = in_struct_p;
5944 MEM_SCALAR_P (mem) = scalar_p;
5945 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
5946 }
5947
5948 if (write_back)
5949 emit_move_insn (from, plus_constant (from, count * 4 * sign));
5950
2f937369 5951 seq = get_insns ();
d19fb8e3
NC
5952 end_sequence ();
5953
5954 return seq;
5955 }
5956
43cffd11 5957 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 5958 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 5959 if (write_back)
f3bb6135 5960 {
ff9940b0 5961 XVECEXP (result, 0, 0)
43cffd11
RE
5962 = gen_rtx_SET (GET_MODE (from), from,
5963 plus_constant (from, count * 4 * sign));
ff9940b0
RE
5964 i = 1;
5965 count++;
f3bb6135
RE
5966 }
5967
ff9940b0 5968 for (j = 0; i < count; i++, j++)
f3bb6135 5969 {
43cffd11 5970 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
389fdba0 5971 MEM_READONLY_P (mem) = unchanging_p;
56636818 5972 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 5973 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
5974 XVECEXP (result, 0, i)
5975 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
5976 }
5977
ff9940b0
RE
5978 return result;
5979}
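/* Illustrative result (editorial addition, not in the original
   source): arm_gen_load_multiple (4, 2, from, TRUE, TRUE, ...)
   builds
     (parallel
       [(set from (plus from (const_int 8)))
        (set (reg:SI 4) (mem:SI from))
        (set (reg:SI 5) (mem:SI (plus from (const_int 4))))])
   whereas on XScale, with count <= 2 and not optimizing for size,
   the same request is emitted as separate SImode moves instead, per
   the cycle-count discussion above. */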
5980
f3bb6135 5981rtx
e32bac5b
RE
5982arm_gen_store_multiple (int base_regno, int count, rtx to, int up,
5983 int write_back, int unchanging_p, int in_struct_p,
5984 int scalar_p)
ff9940b0
RE
5985{
5986 int i = 0, j;
5987 rtx result;
5988 int sign = up ? 1 : -1;
56636818 5989 rtx mem;
ff9940b0 5990
d19fb8e3
NC
5991 /* See arm_gen_load_multiple for discussion of
5992 the pros/cons of ldm/stm usage for XScale. */
4b3c2e48 5993 if (arm_tune_xscale && count <= 2 && ! optimize_size)
d19fb8e3
NC
5994 {
5995 rtx seq;
5996
5997 start_sequence ();
5998
5999 for (i = 0; i < count; i++)
6000 {
6001 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
389fdba0 6002 MEM_READONLY_P (mem) = unchanging_p;
d19fb8e3
NC
6003 MEM_IN_STRUCT_P (mem) = in_struct_p;
6004 MEM_SCALAR_P (mem) = scalar_p;
6005 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
6006 }
6007
6008 if (write_back)
6009 emit_move_insn (to, plus_constant (to, count * 4 * sign));
6010
2f937369 6011 seq = get_insns ();
d19fb8e3
NC
6012 end_sequence ();
6013
6014 return seq;
6015 }
6016
43cffd11 6017 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 6018 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 6019 if (write_back)
f3bb6135 6020 {
ff9940b0 6021 XVECEXP (result, 0, 0)
43cffd11
RE
6022 = gen_rtx_SET (GET_MODE (to), to,
6023 plus_constant (to, count * 4 * sign));
ff9940b0
RE
6024 i = 1;
6025 count++;
f3bb6135
RE
6026 }
6027
ff9940b0 6028 for (j = 0; i < count; i++, j++)
f3bb6135 6029 {
43cffd11 6030 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
389fdba0 6031 MEM_READONLY_P (mem) = unchanging_p;
56636818 6032 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 6033 MEM_SCALAR_P (mem) = scalar_p;
56636818 6034
43cffd11
RE
6035 XVECEXP (result, 0, i)
6036 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
6037 }
6038
ff9940b0
RE
6039 return result;
6040}
6041
880e2516 6042int
70128ad9 6043arm_gen_movmemqi (rtx *operands)
880e2516
RE
6044{
6045 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 6046 int i;
880e2516 6047 rtx src, dst;
ad076f4e 6048 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 6049 rtx part_bytes_reg = NULL;
56636818
JL
6050 rtx mem;
6051 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 6052 int dst_scalar_p, src_scalar_p;
880e2516
RE
6053
6054 if (GET_CODE (operands[2]) != CONST_INT
6055 || GET_CODE (operands[3]) != CONST_INT
6056 || INTVAL (operands[2]) > 64
6057 || INTVAL (operands[3]) & 3)
6058 return 0;
6059
6060 st_dst = XEXP (operands[0], 0);
6061 st_src = XEXP (operands[1], 0);
56636818 6062
389fdba0 6063 dst_unchanging_p = MEM_READONLY_P (operands[0]);
56636818 6064 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 6065 dst_scalar_p = MEM_SCALAR_P (operands[0]);
389fdba0 6066 src_unchanging_p = MEM_READONLY_P (operands[1]);
56636818 6067 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 6068 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 6069
880e2516
RE
6070 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
6071 fin_src = src = copy_to_mode_reg (SImode, st_src);
6072
e9d7b180 6073 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
880e2516
RE
6074 out_words_to_go = INTVAL (operands[2]) / 4;
6075 last_bytes = INTVAL (operands[2]) & 3;
6076
6077 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 6078 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
6079
6080 for (i = 0; in_words_to_go >= 2; i+=4)
6081 {
bd9c7e23 6082 if (in_words_to_go > 4)
56636818 6083 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
6084 src_unchanging_p,
6085 src_in_struct_p,
6086 src_scalar_p));
bd9c7e23
RE
6087 else
6088 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 6089 FALSE, src_unchanging_p,
c6df88cb 6090 src_in_struct_p, src_scalar_p));
bd9c7e23 6091
880e2516
RE
6092 if (out_words_to_go)
6093 {
bd9c7e23 6094 if (out_words_to_go > 4)
56636818
JL
6095 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
6096 dst_unchanging_p,
c6df88cb
MM
6097 dst_in_struct_p,
6098 dst_scalar_p));
bd9c7e23
RE
6099 else if (out_words_to_go != 1)
6100 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
6101 dst, TRUE,
6102 (last_bytes == 0
56636818
JL
6103 ? FALSE : TRUE),
6104 dst_unchanging_p,
c6df88cb
MM
6105 dst_in_struct_p,
6106 dst_scalar_p));
880e2516
RE
6107 else
6108 {
43cffd11 6109 mem = gen_rtx_MEM (SImode, dst);
389fdba0 6110 MEM_READONLY_P (mem) = dst_unchanging_p;
56636818 6111 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 6112 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 6113 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
6114 if (last_bytes != 0)
6115 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
6116 }
6117 }
6118
6119 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
6120 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
6121 }
6122
6123 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
6124 if (out_words_to_go)
62b10bbc
NC
6125 {
6126 rtx sreg;
6127
6128 mem = gen_rtx_MEM (SImode, src);
389fdba0 6129 MEM_READONLY_P (mem) = src_unchanging_p;
62b10bbc
NC
6130 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
6131 MEM_SCALAR_P (mem) = src_scalar_p;
6132 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
6133 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
6134
6135 mem = gen_rtx_MEM (SImode, dst);
389fdba0 6136 MEM_READONLY_P (mem) = dst_unchanging_p;
62b10bbc
NC
6137 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
6138 MEM_SCALAR_P (mem) = dst_scalar_p;
6139 emit_move_insn (mem, sreg);
6140 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
6141 in_words_to_go--;
6142
6143 if (in_words_to_go) /* Sanity check */
6144 abort ();
6145 }
880e2516
RE
6146
6147 if (in_words_to_go)
6148 {
6149 if (in_words_to_go < 0)
6150 abort ();
6151
43cffd11 6152 mem = gen_rtx_MEM (SImode, src);
389fdba0 6153 MEM_READONLY_P (mem) = src_unchanging_p;
56636818 6154 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 6155 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 6156 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
6157 }
6158
d5b7b3ae
RE
6159 if (last_bytes && part_bytes_reg == NULL)
6160 abort ();
6161
880e2516
RE
6162 if (BYTES_BIG_ENDIAN && last_bytes)
6163 {
6164 rtx tmp = gen_reg_rtx (SImode);
6165
6354dc9b 6166 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
6167 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
6168 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
6169 part_bytes_reg = tmp;
6170
6171 while (last_bytes)
6172 {
43cffd11 6173 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
389fdba0 6174 MEM_READONLY_P (mem) = dst_unchanging_p;
56636818 6175 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 6176 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2
BS
6177 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
6178
880e2516
RE
6179 if (--last_bytes)
6180 {
6181 tmp = gen_reg_rtx (SImode);
6182 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
6183 part_bytes_reg = tmp;
6184 }
6185 }
6186
6187 }
6188 else
6189 {
d5b7b3ae 6190 if (last_bytes > 1)
880e2516 6191 {
d5b7b3ae 6192 mem = gen_rtx_MEM (HImode, dst);
389fdba0 6193 MEM_READONLY_P (mem) = dst_unchanging_p;
56636818 6194 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 6195 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2 6196 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
d5b7b3ae
RE
6197 last_bytes -= 2;
6198 if (last_bytes)
880e2516
RE
6199 {
6200 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 6201
a556fd39 6202 emit_insn (gen_addsi3 (dst, dst, const2_rtx));
d5b7b3ae 6203 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
6204 part_bytes_reg = tmp;
6205 }
6206 }
d5b7b3ae
RE
6207
6208 if (last_bytes)
6209 {
6210 mem = gen_rtx_MEM (QImode, dst);
389fdba0 6211 MEM_READONLY_P (mem) = dst_unchanging_p;
d5b7b3ae
RE
6212 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
6213 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2 6214 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
d5b7b3ae 6215 }
880e2516
RE
6216 }
6217
6218 return 1;
6219}
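/* Worked example (editorial addition, not in the original source):
   a 10-byte copy (INTVAL (operands[2]) == 10) gives
     in_words_to_go  = ARM_NUM_INTS (10) = 3
     out_words_to_go = 10 / 4            = 2
     last_bytes      = 10 & 3            = 2
   so three words are loaded, two are stored as words, and the final
   two bytes of the third word are stored through the HImode tail
   code above. */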
6220
5165176d
RE
6221/* Generate a memory reference for a half word, such that it will be loaded
6222 into the top 16 bits of the word. We can assume that the address is
6223 known to be alignable and of the form reg, or plus (reg, const). */
1d6e90ac 6224
5165176d 6225rtx
e32bac5b 6226arm_gen_rotated_half_load (rtx memref)
5165176d
RE
6227{
6228 HOST_WIDE_INT offset = 0;
6229 rtx base = XEXP (memref, 0);
6230
6231 if (GET_CODE (base) == PLUS)
6232 {
6233 offset = INTVAL (XEXP (base, 1));
6234 base = XEXP (base, 0);
6235 }
6236
956d6950 6237 /* If we aren't allowed to generate unaligned addresses, then fail. */
61f0ccff 6238 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0))
5165176d
RE
6239 return NULL;
6240
43cffd11 6241 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
6242
6243 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
6244 return base;
6245
43cffd11 6246 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
6247}
6248
03f1640c
RE
6249/* Select a dominance comparison mode if possible for a test of the general
6250 form (OP (COND_OR (X) (Y)) (const_int 0)). We support three forms.
6251 COND_OR == DOM_CC_X_AND_Y => (X && Y)
6252 COND_OR == DOM_CC_NX_OR_Y => ((! X) || Y)
6253 COND_OR == DOM_CC_X_OR_Y => (X || Y)
6254 In all cases OP will be either EQ or NE, but we don't need to know which
6255 here. If we are unable to support a dominance comparison we return
6256 CC mode. This will then fail to match for the RTL expressions that
6257 generate this call. */
03f1640c 6258enum machine_mode
e32bac5b 6259arm_select_dominance_cc_mode (rtx x, rtx y, HOST_WIDE_INT cond_or)
84ed5e79
RE
6260{
6261 enum rtx_code cond1, cond2;
6262 int swapped = 0;
6263
6264 /* Currently we will probably get the wrong result if the individual
6265 comparisons are not simple. This also ensures that it is safe to
956d6950 6266 reverse a comparison if necessary. */
84ed5e79
RE
6267 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
6268 != CCmode)
6269 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
6270 != CCmode))
6271 return CCmode;
6272
1646cf41
RE
6273 /* The if_then_else variant of this tests the second condition if the
6274 first passes, but is true if the first fails. Reverse the first
6275 condition to get a true "inclusive-or" expression. */
03f1640c 6276 if (cond_or == DOM_CC_NX_OR_Y)
84ed5e79
RE
6277 cond1 = reverse_condition (cond1);
6278
6279 /* If the comparisons are not equal, and one doesn't dominate the other,
6280 then we can't do this. */
6281 if (cond1 != cond2
5895f793
RE
6282 && !comparison_dominates_p (cond1, cond2)
6283 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
84ed5e79
RE
6284 return CCmode;
6285
6286 if (swapped)
6287 {
6288 enum rtx_code temp = cond1;
6289 cond1 = cond2;
6290 cond2 = temp;
6291 }
6292
6293 switch (cond1)
6294 {
6295 case EQ:
03f1640c 6296 if (cond2 == EQ || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
6297 return CC_DEQmode;
6298
6299 switch (cond2)
6300 {
6301 case LE: return CC_DLEmode;
6302 case LEU: return CC_DLEUmode;
6303 case GE: return CC_DGEmode;
6304 case GEU: return CC_DGEUmode;
ad076f4e 6305 default: break;
84ed5e79
RE
6306 }
6307
6308 break;
6309
6310 case LT:
03f1640c 6311 if (cond2 == LT || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
6312 return CC_DLTmode;
6313 if (cond2 == LE)
6314 return CC_DLEmode;
6315 if (cond2 == NE)
6316 return CC_DNEmode;
6317 break;
6318
6319 case GT:
03f1640c 6320 if (cond2 == GT || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
6321 return CC_DGTmode;
6322 if (cond2 == GE)
6323 return CC_DGEmode;
6324 if (cond2 == NE)
6325 return CC_DNEmode;
6326 break;
6327
6328 case LTU:
03f1640c 6329 if (cond2 == LTU || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
6330 return CC_DLTUmode;
6331 if (cond2 == LEU)
6332 return CC_DLEUmode;
6333 if (cond2 == NE)
6334 return CC_DNEmode;
6335 break;
6336
6337 case GTU:
03f1640c 6338 if (cond2 == GTU || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
6339 return CC_DGTUmode;
6340 if (cond2 == GEU)
6341 return CC_DGEUmode;
6342 if (cond2 == NE)
6343 return CC_DNEmode;
6344 break;
6345
6346 /* The remaining cases only occur when both comparisons are the
6347 same. */
6348 case NE:
6349 return CC_DNEmode;
6350
6351 case LE:
6352 return CC_DLEmode;
6353
6354 case GE:
6355 return CC_DGEmode;
6356
6357 case LEU:
6358 return CC_DLEUmode;
6359
6360 case GEU:
6361 return CC_DGEUmode;
ad076f4e
RE
6362
6363 default:
6364 break;
84ed5e79
RE
6365 }
6366
6367 abort ();
6368}
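/* Worked example (editorial addition, not in the original source):
   for (X || Y) with cond1 == LT and cond2 == LE, satisfying LT
   implies LE, so the pair is accepted and the switch above yields
   CC_DLEmode; with COND_OR == DOM_CC_X_AND_Y and both conditions LT,
   the result is CC_DLTmode. */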
6369
6370enum machine_mode
e32bac5b 6371arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
84ed5e79
RE
6372{
6373 /* All floating point compares return CCFP if it is an equality
6374 comparison, and CCFPE otherwise. */
6375 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
e45b72c4
RE
6376 {
6377 switch (op)
6378 {
6379 case EQ:
6380 case NE:
6381 case UNORDERED:
6382 case ORDERED:
6383 case UNLT:
6384 case UNLE:
6385 case UNGT:
6386 case UNGE:
6387 case UNEQ:
6388 case LTGT:
6389 return CCFPmode;
6390
6391 case LT:
6392 case LE:
6393 case GT:
6394 case GE:
9b66ebb1 6395 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
9b6b54e2 6396 return CCFPmode;
e45b72c4
RE
6397 return CCFPEmode;
6398
6399 default:
6400 abort ();
6401 }
6402 }
84ed5e79
RE
6403
6404 /* A compare with a shifted operand. Because of canonicalization, the
6405 comparison will have to be swapped when we emit the assembler. */
6406 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
6407 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
6408 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
6409 || GET_CODE (x) == ROTATERT))
6410 return CC_SWPmode;
6411
956d6950
JL
6412 /* This is a special case that is used by combine to allow a
6413 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 6414 followed by a comparison of the shifted integer (only valid for
956d6950 6415 equalities and unsigned inequalities). */
84ed5e79
RE
6416 if (GET_MODE (x) == SImode
6417 && GET_CODE (x) == ASHIFT
6418 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
6419 && GET_CODE (XEXP (x, 0)) == SUBREG
6420 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
6421 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
6422 && (op == EQ || op == NE
6423 || op == GEU || op == GTU || op == LTU || op == LEU)
6424 && GET_CODE (y) == CONST_INT)
6425 return CC_Zmode;
6426
1646cf41
RE
6427 /* A construct for a conditional compare, if the false arm contains
6428 0, then both conditions must be true, otherwise either condition
6429 must be true. Not all conditions are possible, so CCmode is
6430 returned if it can't be done. */
6431 if (GET_CODE (x) == IF_THEN_ELSE
6432 && (XEXP (x, 2) == const0_rtx
6433 || XEXP (x, 2) == const1_rtx)
ec8e098d
PB
6434 && COMPARISON_P (XEXP (x, 0))
6435 && COMPARISON_P (XEXP (x, 1)))
03f1640c
RE
6436 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
6437 INTVAL (XEXP (x, 2)));
1646cf41
RE
6438
6439 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
6440 if (GET_CODE (x) == AND
ec8e098d
PB
6441 && COMPARISON_P (XEXP (x, 0))
6442 && COMPARISON_P (XEXP (x, 1)))
03f1640c
RE
6443 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
6444 DOM_CC_X_AND_Y);
1646cf41
RE
6445
6446 if (GET_CODE (x) == IOR
ec8e098d
PB
6447 && COMPARISON_P (XEXP (x, 0))
6448 && COMPARISON_P (XEXP (x, 1)))
03f1640c
RE
6449 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
6450 DOM_CC_X_OR_Y);
1646cf41 6451
defc0463
RE
6452 /* An operation (on Thumb) where we want to test for a single bit.
6453 This is done by shifting that bit up into the top bit of a
6454 scratch register; we can then branch on the sign bit. */
6455 if (TARGET_THUMB
6456 && GET_MODE (x) == SImode
6457 && (op == EQ || op == NE)
6458 && (GET_CODE (x) == ZERO_EXTRACT))
6459 return CC_Nmode;
6460
84ed5e79
RE
6461 /* An operation that sets the condition codes as a side-effect, the
6462 V flag is not set correctly, so we can only use comparisons where
6463 this doesn't matter. (For LT and GE we can use "mi" and "pl"
defc0463 6464 instead.) */
84ed5e79
RE
6465 if (GET_MODE (x) == SImode
6466 && y == const0_rtx
6467 && (op == EQ || op == NE || op == LT || op == GE)
6468 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
6469 || GET_CODE (x) == AND || GET_CODE (x) == IOR
6470 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
6471 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
6472 || GET_CODE (x) == LSHIFTRT
6473 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
defc0463
RE
6474 || GET_CODE (x) == ROTATERT
6475 || (TARGET_ARM && GET_CODE (x) == ZERO_EXTRACT)))
84ed5e79
RE
6476 return CC_NOOVmode;
6477
84ed5e79
RE
6478 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
6479 return CC_Zmode;
6480
bd9c7e23
RE
6481 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
6482 && GET_CODE (x) == PLUS
6483 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
6484 return CC_Cmode;
6485
84ed5e79
RE
6486 return CCmode;
6487}
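/* Illustrative cases (editorial addition, not in the original
   source): an LTU or GEU compare of (plus:SI r0 r1) against r1
   selects CC_Cmode, since only the carry flag is meaningful there;
   a QImode equality compare selects CC_Zmode; anything not matched
   above falls back to plain CCmode. */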
6488
ff9940b0
RE
6489/* X and Y are two things to compare using CODE. Emit the compare insn and
6490 return the rtx for register 0 in the proper mode. FP means this is a
6491 floating point compare: I don't think that it is needed on the arm. */
ff9940b0 6492rtx
e32bac5b 6493arm_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
ff9940b0
RE
6494{
6495 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 6496 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 6497
43cffd11
RE
6498 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
6499 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
6500
6501 return cc_reg;
6502}
6503
fcd53748
JT
6504/* Generate a sequence of insns that will generate the correct return
6505 address mask depending on the physical architecture that the program
6506 is running on. */
fcd53748 6507rtx
e32bac5b 6508arm_gen_return_addr_mask (void)
fcd53748
JT
6509{
6510 rtx reg = gen_reg_rtx (Pmode);
6511
6512 emit_insn (gen_return_addr_mask (reg));
6513 return reg;
6514}
6515
0a81f500 6516void
e32bac5b 6517arm_reload_in_hi (rtx *operands)
0a81f500 6518{
f9cc092a
RE
6519 rtx ref = operands[1];
6520 rtx base, scratch;
6521 HOST_WIDE_INT offset = 0;
6522
6523 if (GET_CODE (ref) == SUBREG)
6524 {
ddef6bc7 6525 offset = SUBREG_BYTE (ref);
f9cc092a
RE
6526 ref = SUBREG_REG (ref);
6527 }
6528
6529 if (GET_CODE (ref) == REG)
6530 {
6531 /* We have a pseudo which has been spilt onto the stack; there
6532 are two cases here: the first where there is a simple
6533 stack-slot replacement and a second where the stack-slot is
6534 out of range, or is used as a subreg. */
6535 if (reg_equiv_mem[REGNO (ref)])
6536 {
6537 ref = reg_equiv_mem[REGNO (ref)];
6538 base = find_replacement (&XEXP (ref, 0));
6539 }
6540 else
6354dc9b 6541 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
6542 base = reg_equiv_address[REGNO (ref)];
6543 }
6544 else
6545 base = find_replacement (&XEXP (ref, 0));
0a81f500 6546
e5e809f4
JL
6547 /* Handle the case where the address is too complex to be offset by 1. */
6548 if (GET_CODE (base) == MINUS
6549 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
6550 {
f9cc092a 6551 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 6552
43cffd11 6553 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
6554 base = base_plus;
6555 }
f9cc092a
RE
6556 else if (GET_CODE (base) == PLUS)
6557 {
6354dc9b 6558 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
6559 HOST_WIDE_INT hi, lo;
6560
6561 offset += INTVAL (XEXP (base, 1));
6562 base = XEXP (base, 0);
6563
6354dc9b 6564 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
6565 /* Valid range for lo is -4095 -> 4095 */
6566 lo = (offset >= 0
6567 ? (offset & 0xfff)
6568 : -((-offset) & 0xfff));
6569
6570 /* Corner case: if lo is the max offset, then we would be out of range
6571 once we have added the additional 1 below, so bump the msb into the
6572 pre-loading insn(s). */
6573 if (lo == 4095)
6574 lo &= 0x7ff;
6575
30cf4896
KG
6576 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
6577 ^ (HOST_WIDE_INT) 0x80000000)
6578 - (HOST_WIDE_INT) 0x80000000);
f9cc092a
RE
6579
6580 if (hi + lo != offset)
6581 abort ();
6582
6583 if (hi != 0)
6584 {
6585 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6586
6587 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 6588 that require more than one insn. */
f9cc092a
RE
6589 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
6590 base = base_plus;
6591 offset = lo;
6592 }
6593 }
e5e809f4 6594
3a1944a6
RE
6595 /* Operands[2] may overlap operands[0] (though it won't overlap
6596 operands[1]), that's why we asked for a DImode reg -- so we can
6597 use the bit that does not overlap. */
6598 if (REGNO (operands[2]) == REGNO (operands[0]))
6599 scratch = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6600 else
6601 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
6602
f9cc092a
RE
6603 emit_insn (gen_zero_extendqisi2 (scratch,
6604 gen_rtx_MEM (QImode,
6605 plus_constant (base,
6606 offset))));
43cffd11
RE
6607 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
6608 gen_rtx_MEM (QImode,
f9cc092a
RE
6609 plus_constant (base,
6610 offset + 1))));
5895f793 6611 if (!BYTES_BIG_ENDIAN)
43cffd11
RE
6612 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
6613 gen_rtx_IOR (SImode,
6614 gen_rtx_ASHIFT
6615 (SImode,
6616 gen_rtx_SUBREG (SImode, operands[0], 0),
6617 GEN_INT (8)),
f9cc092a 6618 scratch)));
0a81f500 6619 else
43cffd11
RE
6620 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
6621 gen_rtx_IOR (SImode,
f9cc092a 6622 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
6623 GEN_INT (8)),
6624 gen_rtx_SUBREG (SImode, operands[0],
6625 0))));
0a81f500
RE
6626}
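/* Worked example of the offset splitting above (editorial addition,
   not in the original source): offset = 0x1234 splits into
   lo = 0x234 and hi = 0x1000; hi is added to the base with addsi3
   and lo becomes the remaining byte offset.  An offset of 4095
   would leave no room for the "+ 1" access, so the corner case
   trims lo to 0x7ff and moves the rest into hi. */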
6627
72ac76be 6628/* Handle storing a half-word to memory during reload by synthesizing it as two
f9cc092a
RE
6629 byte stores. Take care not to clobber the input values until after we
6630 have moved them somewhere safe. This code assumes that if the DImode
6631 scratch in operands[2] overlaps either the input value or output address
6632 in some way, then that value must die in this insn (we absolutely need
6633 two scratch registers for some corner cases). */
f3bb6135 6634void
e32bac5b 6635arm_reload_out_hi (rtx *operands)
af48348a 6636{
f9cc092a
RE
6637 rtx ref = operands[0];
6638 rtx outval = operands[1];
6639 rtx base, scratch;
6640 HOST_WIDE_INT offset = 0;
6641
6642 if (GET_CODE (ref) == SUBREG)
6643 {
ddef6bc7 6644 offset = SUBREG_BYTE (ref);
f9cc092a
RE
6645 ref = SUBREG_REG (ref);
6646 }
6647
f9cc092a
RE
6648 if (GET_CODE (ref) == REG)
6649 {
6650 /* We have a pseudo which has been spilt onto the stack; there
6651 are two cases here: the first where there is a simple
6652 stack-slot replacement and a second where the stack-slot is
6653 out of range, or is used as a subreg. */
6654 if (reg_equiv_mem[REGNO (ref)])
6655 {
6656 ref = reg_equiv_mem[REGNO (ref)];
6657 base = find_replacement (&XEXP (ref, 0));
6658 }
6659 else
6354dc9b 6660 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
6661 base = reg_equiv_address[REGNO (ref)];
6662 }
6663 else
6664 base = find_replacement (&XEXP (ref, 0));
6665
6666 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
6667
6668 /* Handle the case where the address is too complex to be offset by 1. */
6669 if (GET_CODE (base) == MINUS
6670 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
6671 {
6672 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6673
6674 /* Be careful not to destroy OUTVAL. */
6675 if (reg_overlap_mentioned_p (base_plus, outval))
6676 {
6677 /* Updating base_plus might destroy outval; see if we can
6678 swap the scratch and base_plus. */
5895f793 6679 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
6680 {
6681 rtx tmp = scratch;
6682 scratch = base_plus;
6683 base_plus = tmp;
6684 }
6685 else
6686 {
6687 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
6688
6689 /* Be conservative and copy OUTVAL into the scratch now;
6690 this should only be necessary if outval is a subreg
6691 of something larger than a word. */
6692 /* XXX Might this clobber base? I can't see how it can,
6693 since scratch is known to overlap with OUTVAL, and
6694 must be wider than a word. */
6695 emit_insn (gen_movhi (scratch_hi, outval));
6696 outval = scratch_hi;
6697 }
6698 }
6699
6700 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
6701 base = base_plus;
6702 }
6703 else if (GET_CODE (base) == PLUS)
6704 {
6354dc9b 6705 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
6706 HOST_WIDE_INT hi, lo;
6707
6708 offset += INTVAL (XEXP (base, 1));
6709 base = XEXP (base, 0);
6710
6354dc9b 6711 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
6712 /* Valid range for lo is -4095 -> 4095 */
6713 lo = (offset >= 0
6714 ? (offset & 0xfff)
6715 : -((-offset) & 0xfff));
6716
6717 /* Corner case: if lo is the max offset, then we would be out of range
6718 once we have added the additional 1 below, so bump the msb into the
6719 pre-loading insn(s). */
6720 if (lo == 4095)
6721 lo &= 0x7ff;
6722
30cf4896
KG
6723 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
6724 ^ (HOST_WIDE_INT) 0x80000000)
6725 - (HOST_WIDE_INT) 0x80000000);
f9cc092a
RE
6726
6727 if (hi + lo != offset)
6728 abort ();
6729
6730 if (hi != 0)
6731 {
6732 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6733
6734 /* Be careful not to destroy OUTVAL. */
6735 if (reg_overlap_mentioned_p (base_plus, outval))
6736 {
6737 /* Updating base_plus might destroy outval; see if we
6738 can swap the scratch and base_plus. */
5895f793 6739 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
6740 {
6741 rtx tmp = scratch;
6742 scratch = base_plus;
6743 base_plus = tmp;
6744 }
6745 else
6746 {
6747 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
6748
6749 /* Be conservative and copy outval into scratch now;
6750 this should only be necessary if outval is a
6751 subreg of something larger than a word. */
6752 /* XXX Might this clobber base? I can't see how it
6753 can, since scratch is known to overlap with
6754 outval. */
6755 emit_insn (gen_movhi (scratch_hi, outval));
6756 outval = scratch_hi;
6757 }
6758 }
6759
6760 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 6761 that require more than one insn. */
f9cc092a
RE
6762 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
6763 base = base_plus;
6764 offset = lo;
6765 }
6766 }
af48348a 6767
b5cc037f
RE
6768 if (BYTES_BIG_ENDIAN)
6769 {
f9cc092a
RE
6770 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
6771 plus_constant (base, offset + 1)),
5d5603e2 6772 gen_lowpart (QImode, outval)));
f9cc092a
RE
6773 emit_insn (gen_lshrsi3 (scratch,
6774 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 6775 GEN_INT (8)));
f9cc092a 6776 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5d5603e2 6777 gen_lowpart (QImode, scratch)));
b5cc037f
RE
6778 }
6779 else
6780 {
f9cc092a 6781 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5d5603e2 6782 gen_lowpart (QImode, outval)));
f9cc092a
RE
6783 emit_insn (gen_lshrsi3 (scratch,
6784 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 6785 GEN_INT (8)));
f9cc092a
RE
6786 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
6787 plus_constant (base, offset + 1)),
5d5603e2 6788 gen_lowpart (QImode, scratch)));
b5cc037f 6789 }
af48348a 6790}
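/* Byte-order summary (editorial addition, not in the original
   source): on a little-endian target the low byte of OUTVAL goes to
   [base + offset] and the byte (outval >> 8) to [base + offset + 1];
   on a big-endian target the two stores are issued the other way
   round, as in the two branches above. */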
2b835d68 6791\f
d5b7b3ae
RE
6792/* Print a symbolic form of X to the debug file, F. */
6793static void
e32bac5b 6794arm_print_value (FILE *f, rtx x)
d5b7b3ae
RE
6795{
6796 switch (GET_CODE (x))
6797 {
6798 case CONST_INT:
6799 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
6800 return;
6801
6802 case CONST_DOUBLE:
6803 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
6804 return;
6805
5a9335ef
NC
6806 case CONST_VECTOR:
6807 {
6808 int i;
6809
6810 fprintf (f, "<");
6811 for (i = 0; i < CONST_VECTOR_NUNITS (x); i++)
6812 {
6813 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (CONST_VECTOR_ELT (x, i)));
6814 if (i < (CONST_VECTOR_NUNITS (x) - 1))
6815 fputc (',', f);
6816 }
6817 fprintf (f, ">");
6818 }
6819 return;
6820
d5b7b3ae
RE
6821 case CONST_STRING:
6822 fprintf (f, "\"%s\"", XSTR (x, 0));
6823 return;
6824
6825 case SYMBOL_REF:
6826 fprintf (f, "`%s'", XSTR (x, 0));
6827 return;
6828
6829 case LABEL_REF:
6830 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
6831 return;
6832
6833 case CONST:
6834 arm_print_value (f, XEXP (x, 0));
6835 return;
6836
6837 case PLUS:
6838 arm_print_value (f, XEXP (x, 0));
6839 fprintf (f, "+");
6840 arm_print_value (f, XEXP (x, 1));
6841 return;
6842
6843 case PC:
6844 fprintf (f, "pc");
6845 return;
6846
6847 default:
6848 fprintf (f, "????");
6849 return;
6850 }
6851}
6852\f
2b835d68 6853/* Routines for manipulation of the constant pool. */
2b835d68 6854
949d79eb
RE
6855/* Arm instructions cannot load a large constant directly into a
6856 register; they have to come from a pc relative load. The constant
6857 must therefore be placed in the addressable range of the pc
6858 relative load. Depending on the precise pc relative load
6859 instruction the range is somewhere between 256 bytes and 4k. This
6860 means that we often have to dump a constant inside a function, and
2b835d68
RE
6861 generate code to branch around it.
6862
949d79eb
RE
6863 It is important to minimize this, since the branches will slow
6864 things down and make the code larger.
2b835d68 6865
949d79eb
RE
6866 Normally we can hide the table after an existing unconditional
6867 branch so that there is no interruption of the flow, but in the
6868 worst case the code looks like this:
2b835d68
RE
6869
6870 ldr rn, L1
949d79eb 6871 ...
2b835d68
RE
6872 b L2
6873 align
6874 L1: .long value
6875 L2:
949d79eb 6876 ...
2b835d68 6877
2b835d68 6878 ldr rn, L3
949d79eb 6879 ...
2b835d68
RE
6880 b L4
6881 align
2b835d68
RE
6882 L3: .long value
6883 L4:
949d79eb
RE
6884 ...
6885
6886 We fix this by performing a scan after scheduling, which notices
6887 which instructions need to have their operands fetched from the
6888 constant table and builds the table.
6889
6890 The algorithm starts by building a table of all the constants that
6891 need fixing up and all the natural barriers in the function (places
6892 where a constant table can be dropped without breaking the flow).
6893 For each fixup we note how far the pc-relative replacement will be
6894 able to reach and the offset of the instruction into the function.
6895
6896 Having built the table we then group the fixes together to form
6897 tables that are as large as possible (subject to addressing
6898 constraints) and emit each table of constants after the last
6899 barrier that is within range of all the instructions in the group.
6900 If a group does not contain a barrier, then we forcibly create one
6901 by inserting a jump instruction into the flow. Once the table has
6902 been inserted, the insns are then modified to reference the
6903 relevant entry in the pool.
6904
6354dc9b 6905 Possible enhancements to the algorithm (not implemented) are:
949d79eb 6906
d5b7b3ae 6907 1) For some processors and object formats, there may be benefit in
6908 aligning the pools to the start of cache lines; this alignment
6909 would need to be taken into account when calculating addressability
6354dc9b 6910 of a pool. */
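/* A worked example of the grouping constraint (the numbers are
   illustrative, not taken from real output): an ldr at address 0x100
   with a 4k forward range needs its constant emitted before address
   0x1100; a second ldr at 0x800 sharing the pool would allow anything
   up to 0x1800.  The pool serving both is bounded by the tightest
   constraint, so it must follow a barrier placed below 0x1100.  */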
2b835d68 6911
6912/* These typedefs are located at the start of this file, so that
6913 they can be used in the prototypes there. This comment is to
6914 remind readers of that fact so that the following structures
6915 can be understood more easily.
6916
6917 typedef struct minipool_node Mnode;
6918 typedef struct minipool_fixup Mfix; */
6919
6920struct minipool_node
6921{
6922 /* Doubly linked chain of entries. */
6923 Mnode * next;
6924 Mnode * prev;
 6925 /* The maximum offset into the code at which this entry can be placed. While
6926 pushing fixes for forward references, all entries are sorted in order
6927 of increasing max_address. */
6928 HOST_WIDE_INT max_address;
5519a4f9 6929 /* Similarly for an entry inserted for a backwards ref. */
6930 HOST_WIDE_INT min_address;
6931 /* The number of fixes referencing this entry. This can become zero
6932 if we "unpush" an entry. In this case we ignore the entry when we
6933 come to emit the code. */
6934 int refcount;
6935 /* The offset from the start of the minipool. */
6936 HOST_WIDE_INT offset;
6937 /* The value in table. */
6938 rtx value;
6939 /* The mode of value. */
6940 enum machine_mode mode;
6941 /* The size of the value. With iWMMXt enabled
 6942 sizes > 4 also imply an alignment of 8 bytes. */
6943 int fix_size;
6944};
6945
6946struct minipool_fixup
2b835d68 6947{
6948 Mfix * next;
6949 rtx insn;
6950 HOST_WIDE_INT address;
6951 rtx * loc;
6952 enum machine_mode mode;
6953 int fix_size;
6954 rtx value;
6955 Mnode * minipool;
6956 HOST_WIDE_INT forwards;
6957 HOST_WIDE_INT backwards;
6958};
2b835d68 6959
6960/* Fixes less than a word need padding out to a word boundary. */
6961#define MINIPOOL_FIX_SIZE(mode) \
6962 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
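/* Illustrative evaluations of the macro above, assuming the usual
   1-, 2-, 4- and 8-byte machine mode sizes:

     MINIPOOL_FIX_SIZE (QImode) == 4    padded up to a word
     MINIPOOL_FIX_SIZE (HImode) == 4    padded up to a word
     MINIPOOL_FIX_SIZE (SImode) == 4
     MINIPOOL_FIX_SIZE (DImode) == 8  */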
2b835d68 6963
6964static Mnode * minipool_vector_head;
6965static Mnode * minipool_vector_tail;
6966static rtx minipool_vector_label;
332072db 6967
6968/* The linked list of all minipool fixes required for this function. */
6969Mfix * minipool_fix_head;
6970Mfix * minipool_fix_tail;
6971/* The fix entry for the current minipool, once it has been placed. */
6972Mfix * minipool_barrier;
6973
6974/* Determines if INSN is the start of a jump table. Returns the end
6975 of the TABLE or NULL_RTX. */
6976static rtx
e32bac5b 6977is_jump_table (rtx insn)
2b835d68 6978{
d5b7b3ae 6979 rtx table;
da6558fd 6980
6981 if (GET_CODE (insn) == JUMP_INSN
6982 && JUMP_LABEL (insn) != NULL
6983 && ((table = next_real_insn (JUMP_LABEL (insn)))
6984 == next_real_insn (insn))
6985 && table != NULL
6986 && GET_CODE (table) == JUMP_INSN
6987 && (GET_CODE (PATTERN (table)) == ADDR_VEC
6988 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
6989 return table;
6990
6991 return NULL_RTX;
6992}
6993
6994#ifndef JUMP_TABLES_IN_TEXT_SECTION
6995#define JUMP_TABLES_IN_TEXT_SECTION 0
6996#endif
6997
d5b7b3ae 6998static HOST_WIDE_INT
e32bac5b 6999get_jump_table_size (rtx insn)
2b835d68 7000{
 7001 /* ADDR_VECs only take room if read-only data goes into the text
7002 section. */
7003 if (JUMP_TABLES_IN_TEXT_SECTION
d48bc59a 7004#if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
7005 || 1
7006#endif
7007 )
7008 {
7009 rtx body = PATTERN (insn);
7010 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 7011
7012 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
7013 }
7014
7015 return 0;
d5b7b3ae 7016}
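/* For example (illustrative counts): an ADDR_VEC in SImode with ten
   label entries occupies 4 * 10 == 40 bytes of the text section; an
   ADDR_DIFF_VEC in HImode with ten entries occupies 20.  The element
   vector is number 1 for an ADDR_DIFF_VEC because operand 0 holds the
   base label.  */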
2b835d68 7017
7018/* Move a minipool fix MP from its current location to before MAX_MP.
7019 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
093354e0 7020 constraints may need updating. */
d5b7b3ae 7021static Mnode *
7022move_minipool_fix_forward_ref (Mnode *mp, Mnode *max_mp,
7023 HOST_WIDE_INT max_address)
7024{
7025 /* This should never be true and the code below assumes these are
7026 different. */
7027 if (mp == max_mp)
7028 abort ();
7029
7030 if (max_mp == NULL)
7031 {
7032 if (max_address < mp->max_address)
7033 mp->max_address = max_address;
7034 }
7035 else
2b835d68 7036 {
7037 if (max_address > max_mp->max_address - mp->fix_size)
7038 mp->max_address = max_mp->max_address - mp->fix_size;
7039 else
7040 mp->max_address = max_address;
2b835d68 7041
7042 /* Unlink MP from its current position. Since max_mp is non-null,
7043 mp->prev must be non-null. */
7044 mp->prev->next = mp->next;
7045 if (mp->next != NULL)
7046 mp->next->prev = mp->prev;
7047 else
7048 minipool_vector_tail = mp->prev;
2b835d68 7049
7050 /* Re-insert it before MAX_MP. */
7051 mp->next = max_mp;
7052 mp->prev = max_mp->prev;
7053 max_mp->prev = mp;
7054
7055 if (mp->prev != NULL)
7056 mp->prev->next = mp;
7057 else
7058 minipool_vector_head = mp;
7059 }
2b835d68 7060
7061 /* Save the new entry. */
7062 max_mp = mp;
7063
d6a7951f 7064 /* Scan over the preceding entries and adjust their addresses as
7065 required. */
7066 while (mp->prev != NULL
7067 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
7068 {
7069 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
7070 mp = mp->prev;
7071 }
7072
d5b7b3ae 7073 return max_mp;
7074}
7075
7076/* Add a constant to the minipool for a forward reference. Returns the
7077 node added or NULL if the constant will not fit in this pool. */
7078static Mnode *
e32bac5b 7079add_minipool_forward_ref (Mfix *fix)
7080{
7081 /* If set, max_mp is the first pool_entry that has a lower
7082 constraint than the one we are trying to add. */
7083 Mnode * max_mp = NULL;
7084 HOST_WIDE_INT max_address = fix->address + fix->forwards;
7085 Mnode * mp;
7086
7087 /* If this fix's address is greater than the address of the first
7088 entry, then we can't put the fix in this pool. We subtract the
7089 size of the current fix to ensure that if the table is fully
 7090 packed we still have enough room to insert this value by shuffling
7091 the other fixes forwards. */
7092 if (minipool_vector_head &&
7093 fix->address >= minipool_vector_head->max_address - fix->fix_size)
7094 return NULL;
2b835d68 7095
7096 /* Scan the pool to see if a constant with the same value has
7097 already been added. While we are doing this, also note the
7098 location where we must insert the constant if it doesn't already
7099 exist. */
7100 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
7101 {
7102 if (GET_CODE (fix->value) == GET_CODE (mp->value)
7103 && fix->mode == mp->mode
7104 && (GET_CODE (fix->value) != CODE_LABEL
7105 || (CODE_LABEL_NUMBER (fix->value)
7106 == CODE_LABEL_NUMBER (mp->value)))
7107 && rtx_equal_p (fix->value, mp->value))
7108 {
7109 /* More than one fix references this entry. */
7110 mp->refcount++;
7111 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
7112 }
7113
7114 /* Note the insertion point if necessary. */
7115 if (max_mp == NULL
7116 && mp->max_address > max_address)
7117 max_mp = mp;
7118
 7119 /* If we are inserting an 8-byte aligned quantity and
7120 we have not already found an insertion point, then
7121 make sure that all such 8-byte aligned quantities are
7122 placed at the start of the pool. */
5848830f 7123 if (ARM_DOUBLEWORD_ALIGN
7124 && max_mp == NULL
7125 && fix->fix_size == 8
7126 && mp->fix_size != 8)
7127 {
7128 max_mp = mp;
7129 max_address = mp->max_address;
7130 }
7131 }
7132
7133 /* The value is not currently in the minipool, so we need to create
7134 a new entry for it. If MAX_MP is NULL, the entry will be put on
7135 the end of the list since the placement is less constrained than
7136 any existing entry. Otherwise, we insert the new fix before
6bc82793 7137 MAX_MP and, if necessary, adjust the constraints on the other
7138 entries. */
7139 mp = xmalloc (sizeof (* mp));
7140 mp->fix_size = fix->fix_size;
7141 mp->mode = fix->mode;
7142 mp->value = fix->value;
7143 mp->refcount = 1;
7144 /* Not yet required for a backwards ref. */
7145 mp->min_address = -65536;
7146
7147 if (max_mp == NULL)
7148 {
7149 mp->max_address = max_address;
7150 mp->next = NULL;
7151 mp->prev = minipool_vector_tail;
7152
7153 if (mp->prev == NULL)
7154 {
7155 minipool_vector_head = mp;
7156 minipool_vector_label = gen_label_rtx ();
7551cbc7 7157 }
2b835d68 7158 else
d5b7b3ae 7159 mp->prev->next = mp;
2b835d68 7160
7161 minipool_vector_tail = mp;
7162 }
7163 else
7164 {
7165 if (max_address > max_mp->max_address - mp->fix_size)
7166 mp->max_address = max_mp->max_address - mp->fix_size;
7167 else
7168 mp->max_address = max_address;
7169
7170 mp->next = max_mp;
7171 mp->prev = max_mp->prev;
7172 max_mp->prev = mp;
7173 if (mp->prev != NULL)
7174 mp->prev->next = mp;
7175 else
7176 minipool_vector_head = mp;
7177 }
7178
7179 /* Save the new entry. */
7180 max_mp = mp;
7181
d6a7951f 7182 /* Scan over the preceding entries and adjust their addresses as
7183 required. */
7184 while (mp->prev != NULL
7185 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
7186 {
7187 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
7188 mp = mp->prev;
7189 }
7190
7191 return max_mp;
7192}
7193
7194static Mnode *
7195move_minipool_fix_backward_ref (Mnode *mp, Mnode *min_mp,
7196 HOST_WIDE_INT min_address)
7197{
7198 HOST_WIDE_INT offset;
7199
7200 /* This should never be true, and the code below assumes these are
7201 different. */
7202 if (mp == min_mp)
7203 abort ();
7204
7205 if (min_mp == NULL)
2b835d68 7206 {
7207 if (min_address > mp->min_address)
7208 mp->min_address = min_address;
7209 }
7210 else
7211 {
7212 /* We will adjust this below if it is too loose. */
7213 mp->min_address = min_address;
7214
7215 /* Unlink MP from its current position. Since min_mp is non-null,
7216 mp->next must be non-null. */
7217 mp->next->prev = mp->prev;
7218 if (mp->prev != NULL)
7219 mp->prev->next = mp->next;
7220 else
7221 minipool_vector_head = mp->next;
7222
7223 /* Reinsert it after MIN_MP. */
7224 mp->prev = min_mp;
7225 mp->next = min_mp->next;
7226 min_mp->next = mp;
7227 if (mp->next != NULL)
7228 mp->next->prev = mp;
2b835d68 7229 else
7230 minipool_vector_tail = mp;
7231 }
7232
7233 min_mp = mp;
7234
7235 offset = 0;
7236 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
7237 {
7238 mp->offset = offset;
7239 if (mp->refcount > 0)
7240 offset += mp->fix_size;
7241
7242 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
7243 mp->next->min_address = mp->min_address + mp->fix_size;
7244 }
7245
7246 return min_mp;
7247}
7248
7249/* Add a constant to the minipool for a backward reference. Returns the
7250 node added or NULL if the constant will not fit in this pool.
7251
7252 Note that the code for insertion for a backwards reference can be
7253 somewhat confusing because the calculated offsets for each fix do
7254 not take into account the size of the pool (which is still under
 7255 construction). */
7256static Mnode *
e32bac5b 7257add_minipool_backward_ref (Mfix *fix)
7258{
7259 /* If set, min_mp is the last pool_entry that has a lower constraint
7260 than the one we are trying to add. */
e32bac5b 7261 Mnode *min_mp = NULL;
7262 /* This can be negative, since it is only a constraint. */
7263 HOST_WIDE_INT min_address = fix->address - fix->backwards;
e32bac5b 7264 Mnode *mp;
7265
7266 /* If we can't reach the current pool from this insn, or if we can't
7267 insert this entry at the end of the pool without pushing other
7268 fixes out of range, then we don't try. This ensures that we
7269 can't fail later on. */
7270 if (min_address >= minipool_barrier->address
7271 || (minipool_vector_tail->min_address + fix->fix_size
7272 >= minipool_barrier->address))
7273 return NULL;
7274
7275 /* Scan the pool to see if a constant with the same value has
7276 already been added. While we are doing this, also note the
7277 location where we must insert the constant if it doesn't already
7278 exist. */
7279 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
7280 {
7281 if (GET_CODE (fix->value) == GET_CODE (mp->value)
7282 && fix->mode == mp->mode
7283 && (GET_CODE (fix->value) != CODE_LABEL
7284 || (CODE_LABEL_NUMBER (fix->value)
7285 == CODE_LABEL_NUMBER (mp->value)))
7286 && rtx_equal_p (fix->value, mp->value)
7287 /* Check that there is enough slack to move this entry to the
7288 end of the table (this is conservative). */
7289 && (mp->max_address
7290 > (minipool_barrier->address
7291 + minipool_vector_tail->offset
7292 + minipool_vector_tail->fix_size)))
7293 {
7294 mp->refcount++;
7295 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
7296 }
7297
7298 if (min_mp != NULL)
7299 mp->min_address += fix->fix_size;
7300 else
7301 {
7302 /* Note the insertion point if necessary. */
7303 if (mp->min_address < min_address)
7304 {
7305 /* For now, we do not allow the insertion of 8-byte alignment
7306 requiring nodes anywhere but at the start of the pool. */
7307 if (ARM_DOUBLEWORD_ALIGN
7308 && fix->fix_size == 8 && mp->fix_size != 8)
7309 return NULL;
7310 else
7311 min_mp = mp;
7312 }
7313 else if (mp->max_address
7314 < minipool_barrier->address + mp->offset + fix->fix_size)
7315 {
7316 /* Inserting before this entry would push the fix beyond
7317 its maximum address (which can happen if we have
7318 re-located a forwards fix); force the new fix to come
7319 after it. */
7320 min_mp = mp;
7321 min_address = mp->min_address + fix->fix_size;
7322 }
 7323 /* If we are inserting an 8-byte aligned quantity and
7324 we have not already found an insertion point, then
7325 make sure that all such 8-byte aligned quantities are
7326 placed at the start of the pool. */
5848830f 7327 else if (ARM_DOUBLEWORD_ALIGN
7328 && min_mp == NULL
7329 && fix->fix_size == 8
7330 && mp->fix_size < 8)
7331 {
7332 min_mp = mp;
7333 min_address = mp->min_address + fix->fix_size;
7334 }
7335 }
7336 }
7337
7338 /* We need to create a new entry. */
7339 mp = xmalloc (sizeof (* mp));
7340 mp->fix_size = fix->fix_size;
7341 mp->mode = fix->mode;
7342 mp->value = fix->value;
7343 mp->refcount = 1;
7344 mp->max_address = minipool_barrier->address + 65536;
7345
7346 mp->min_address = min_address;
7347
7348 if (min_mp == NULL)
7349 {
7350 mp->prev = NULL;
7351 mp->next = minipool_vector_head;
7352
7353 if (mp->next == NULL)
7354 {
7355 minipool_vector_tail = mp;
7356 minipool_vector_label = gen_label_rtx ();
7357 }
7358 else
7359 mp->next->prev = mp;
7360
7361 minipool_vector_head = mp;
7362 }
7363 else
7364 {
7365 mp->next = min_mp->next;
7366 mp->prev = min_mp;
7367 min_mp->next = mp;
da6558fd 7368
7369 if (mp->next != NULL)
7370 mp->next->prev = mp;
7371 else
7372 minipool_vector_tail = mp;
7373 }
7374
7375 /* Save the new entry. */
7376 min_mp = mp;
7377
7378 if (mp->prev)
7379 mp = mp->prev;
7380 else
7381 mp->offset = 0;
7382
7383 /* Scan over the following entries and adjust their offsets. */
7384 while (mp->next != NULL)
7385 {
7386 if (mp->next->min_address < mp->min_address + mp->fix_size)
7387 mp->next->min_address = mp->min_address + mp->fix_size;
7388
7389 if (mp->refcount)
7390 mp->next->offset = mp->offset + mp->fix_size;
7391 else
7392 mp->next->offset = mp->offset;
7393
7394 mp = mp->next;
7395 }
7396
7397 return min_mp;
7398}
7399
7400static void
e32bac5b 7401assign_minipool_offsets (Mfix *barrier)
7402{
7403 HOST_WIDE_INT offset = 0;
e32bac5b 7404 Mnode *mp;
7405
7406 minipool_barrier = barrier;
7407
7408 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
7409 {
7410 mp->offset = offset;
da6558fd 7411
7412 if (mp->refcount > 0)
7413 offset += mp->fix_size;
7414 }
7415}
7416
 7417/* Output the literal table. */
7418static void
e32bac5b 7419dump_minipool (rtx scan)
d5b7b3ae 7420{
7421 Mnode * mp;
7422 Mnode * nmp;
7423 int align64 = 0;
7424
5848830f 7425 if (ARM_DOUBLEWORD_ALIGN)
5a9335ef
NC
7426 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
7427 if (mp->refcount > 0 && mp->fix_size == 8)
7428 {
7429 align64 = 1;
7430 break;
7431 }
d5b7b3ae 7432
7433 if (dump_file)
7434 fprintf (dump_file,
7435 ";; Emitting minipool after insn %u; address %ld; align %d (bytes)\n",
7436 INSN_UID (scan), (unsigned long) minipool_barrier->address, align64 ? 8 : 4);
7437
7438 scan = emit_label_after (gen_label_rtx (), scan);
5a9335ef 7439 scan = emit_insn_after (align64 ? gen_align_8 () : gen_align_4 (), scan);
7440 scan = emit_label_after (minipool_vector_label, scan);
7441
7442 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
7443 {
7444 if (mp->refcount > 0)
7445 {
c263766c 7446 if (dump_file)
d5b7b3ae 7447 {
c263766c 7448 fprintf (dump_file,
7449 ";; Offset %u, min %ld, max %ld ",
7450 (unsigned) mp->offset, (unsigned long) mp->min_address,
7451 (unsigned long) mp->max_address);
7452 arm_print_value (dump_file, mp->value);
7453 fputc ('\n', dump_file);
7454 }
7455
7456 switch (mp->fix_size)
7457 {
7458#ifdef HAVE_consttable_1
7459 case 1:
7460 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
7461 break;
7462
7463#endif
7464#ifdef HAVE_consttable_2
7465 case 2:
7466 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
7467 break;
7468
7469#endif
7470#ifdef HAVE_consttable_4
7471 case 4:
7472 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
7473 break;
7474
7475#endif
7476#ifdef HAVE_consttable_8
7477 case 8:
7478 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
7479 break;
7480
7481#endif
7482 default:
7483 abort ();
7484 break;
7485 }
7486 }
7487
7488 nmp = mp->next;
7489 free (mp);
7490 }
7491
7492 minipool_vector_head = minipool_vector_tail = NULL;
7493 scan = emit_insn_after (gen_consttable_end (), scan);
7494 scan = emit_barrier_after (scan);
7495}
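/* A sketch of the pool emitted above (the exact directives come from
   the consttable patterns in the machine description; names here are
   illustrative):

	.align	2		@ 3 when any 8-byte entry is present
   Lpool:
	.word	value0		@ offset 0
	.word	value1		@ offset 4
	...			@ zero-refcount entries are skipped  */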
7496
7497/* Return the cost of forcibly inserting a barrier after INSN. */
7498static int
e32bac5b 7499arm_barrier_cost (rtx insn)
949d79eb 7500{
7501 /* Basing the location of the pool on the loop depth is preferable,
7502 but at the moment, the basic block information seems to be
7503 corrupt by this stage of the compilation. */
7504 int base_cost = 50;
7505 rtx next = next_nonnote_insn (insn);
7506
7507 if (next != NULL && GET_CODE (next) == CODE_LABEL)
7508 base_cost -= 20;
7509
7510 switch (GET_CODE (insn))
7511 {
7512 case CODE_LABEL:
7513 /* It will always be better to place the table before the label, rather
7514 than after it. */
7515 return 50;
949d79eb 7516
7517 case INSN:
7518 case CALL_INSN:
7519 return base_cost;
7520
7521 case JUMP_INSN:
7522 return base_cost - 10;
7523
7524 default:
7525 return base_cost + 10;
7526 }
7527}
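/* Worked costs from the function above: a CODE_LABEL always costs 50;
   an INSN or CALL_INSN costs 50, or 30 when the following insn is a
   CODE_LABEL; a JUMP_INSN in the same two situations costs 40 or 20.
   Lower is cheaper, so forced barriers gravitate towards existing
   jumps and label boundaries.  */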
7528
7529/* Find the best place in the insn stream in the range
7530 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
7531 Create the barrier by inserting a jump and add a new fix entry for
7532 it. */
7533static Mfix *
e32bac5b 7534create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
7535{
7536 HOST_WIDE_INT count = 0;
7537 rtx barrier;
7538 rtx from = fix->insn;
7539 rtx selected = from;
7540 int selected_cost;
7541 HOST_WIDE_INT selected_address;
7542 Mfix * new_fix;
7543 HOST_WIDE_INT max_count = max_address - fix->address;
7544 rtx label = gen_label_rtx ();
7545
7546 selected_cost = arm_barrier_cost (from);
7547 selected_address = fix->address;
7548
7549 while (from && count < max_count)
7550 {
7551 rtx tmp;
7552 int new_cost;
7553
7554 /* This code shouldn't have been called if there was a natural barrier
7555 within range. */
7556 if (GET_CODE (from) == BARRIER)
7557 abort ();
7558
7559 /* Count the length of this insn. */
7560 count += get_attr_length (from);
7561
7562 /* If there is a jump table, add its length. */
7563 tmp = is_jump_table (from);
7564 if (tmp != NULL)
7565 {
7566 count += get_jump_table_size (tmp);
7567
7568 /* Jump tables aren't in a basic block, so base the cost on
7569 the dispatch insn. If we select this location, we will
7570 still put the pool after the table. */
7571 new_cost = arm_barrier_cost (from);
7572
7573 if (count < max_count && new_cost <= selected_cost)
7574 {
7575 selected = tmp;
7576 selected_cost = new_cost;
7577 selected_address = fix->address + count;
7578 }
7579
7580 /* Continue after the dispatch table. */
7581 from = NEXT_INSN (tmp);
7582 continue;
7583 }
7584
7585 new_cost = arm_barrier_cost (from);
7586
7587 if (count < max_count && new_cost <= selected_cost)
7588 {
7589 selected = from;
7590 selected_cost = new_cost;
7591 selected_address = fix->address + count;
7592 }
7593
7594 from = NEXT_INSN (from);
7595 }
7596
7597 /* Create a new JUMP_INSN that branches around a barrier. */
7598 from = emit_jump_insn_after (gen_jump (label), selected);
7599 JUMP_LABEL (from) = label;
7600 barrier = emit_barrier_after (from);
7601 emit_label_after (label, barrier);
7602
7603 /* Create a minipool barrier entry for the new barrier. */
c7319d87 7604 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
7605 new_fix->insn = barrier;
7606 new_fix->address = selected_address;
7607 new_fix->next = fix->next;
7608 fix->next = new_fix;
7609
7610 return new_fix;
7611}
7612
7613/* Record that there is a natural barrier in the insn stream at
7614 ADDRESS. */
949d79eb 7615static void
e32bac5b 7616push_minipool_barrier (rtx insn, HOST_WIDE_INT address)
2b835d68 7617{
c7319d87 7618 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 7619
7620 fix->insn = insn;
7621 fix->address = address;
2b835d68 7622
7623 fix->next = NULL;
7624 if (minipool_fix_head != NULL)
7625 minipool_fix_tail->next = fix;
7626 else
7627 minipool_fix_head = fix;
7628
7629 minipool_fix_tail = fix;
7630}
2b835d68 7631
7632/* Record INSN, which will need fixing up to load a value from the
7633 minipool. ADDRESS is the offset of the insn since the start of the
7634 function; LOC is a pointer to the part of the insn which requires
7635 fixing; VALUE is the constant that must be loaded, which is of type
7636 MODE. */
949d79eb 7637static void
7638push_minipool_fix (rtx insn, HOST_WIDE_INT address, rtx *loc,
7639 enum machine_mode mode, rtx value)
949d79eb 7640{
c7319d87 7641 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
7642
7643#ifdef AOF_ASSEMBLER
093354e0 7644 /* PIC symbol references need to be converted into offsets into the
949d79eb 7645 based area. */
7646 /* XXX This shouldn't be done here. */
7647 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
7648 value = aof_pic_entry (value);
7649#endif /* AOF_ASSEMBLER */
7650
7651 fix->insn = insn;
7652 fix->address = address;
7653 fix->loc = loc;
7654 fix->mode = mode;
d5b7b3ae 7655 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
949d79eb 7656 fix->value = value;
7657 fix->forwards = get_attr_pool_range (insn);
7658 fix->backwards = get_attr_neg_pool_range (insn);
7659 fix->minipool = NULL;
7660
7661 /* If an insn doesn't have a range defined for it, then it isn't
7662 expecting to be reworked by this code. Better to abort now than
7663 to generate duff assembly code. */
d5b7b3ae 7664 if (fix->forwards == 0 && fix->backwards == 0)
7665 abort ();
7666
5848830f 7667 /* With AAPCS/iWMMXt enabled, the pool is aligned to an 8-byte boundary.
7668 So there might be an empty word before the start of the pool.
7669 Hence we reduce the forward range by 4 to allow for this
7670 possibility. */
5848830f 7671 if (ARM_DOUBLEWORD_ALIGN && fix->fix_size == 8)
7672 fix->forwards -= 4;
7673
c263766c 7674 if (dump_file)
d5b7b3ae 7675 {
c263766c 7676 fprintf (dump_file,
7677 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
7678 GET_MODE_NAME (mode),
7679 INSN_UID (insn), (unsigned long) address,
7680 -1 * (long)fix->backwards, (long)fix->forwards);
7681 arm_print_value (dump_file, fix->value);
7682 fprintf (dump_file, "\n");
7683 }
7684
6354dc9b 7685 /* Add it to the chain of fixes. */
949d79eb 7686 fix->next = NULL;
d5b7b3ae 7687
7688 if (minipool_fix_head != NULL)
7689 minipool_fix_tail->next = fix;
7690 else
7691 minipool_fix_head = fix;
7692
7693 minipool_fix_tail = fix;
7694}
7695
7696/* Scan INSN and note any of its operands that need fixing.
7697 If DO_PUSHES is false we do not actually push any of the fixups
 7698 needed. The function returns TRUE if any fixups were needed/pushed.
7699 This is used by arm_memory_load_p() which needs to know about loads
7700 of constants that will be converted into minipool loads. */
f0375c66 7701static bool
e32bac5b 7702note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
949d79eb 7703{
f0375c66 7704 bool result = false;
7705 int opno;
7706
d5b7b3ae 7707 extract_insn (insn);
949d79eb 7708
5895f793 7709 if (!constrain_operands (1))
7710 fatal_insn_not_found (insn);
7711
7712 if (recog_data.n_alternatives == 0)
7713 return false;
7714
f0375c66 7715 /* Fill in recog_op_alt with information about the constraints of this insn. */
7716 preprocess_constraints ();
7717
1ccbefce 7718 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb 7719 {
6354dc9b 7720 /* Things we need to fix can only occur in inputs. */
36ab44c7 7721 if (recog_data.operand_type[opno] != OP_IN)
7722 continue;
7723
7724 /* If this alternative is a memory reference, then any mention
7725 of constants in this alternative is really to fool reload
7726 into allowing us to accept one there. We need to fix them up
7727 now so that we output the right code. */
7728 if (recog_op_alt[opno][which_alternative].memory_ok)
7729 {
1ccbefce 7730 rtx op = recog_data.operand[opno];
7731
7732 if (CONSTANT_P (op))
7733 {
7734 if (do_pushes)
7735 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
7736 recog_data.operand_mode[opno], op);
7737 result = true;
7738 }
d5b7b3ae 7739 else if (GET_CODE (op) == MEM
7740 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
7741 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
7742 {
7743 if (do_pushes)
7744 {
7745 rtx cop = avoid_constant_pool_reference (op);
7746
7747 /* Casting the address of something to a mode narrower
7748 than a word can cause avoid_constant_pool_reference()
7749 to return the pool reference itself. That's no good to
 7750 us here. Let's just hope that we can use the
7751 constant pool value directly. */
7752 if (op == cop)
c769a35d 7753 cop = get_pool_constant (XEXP (op, 0));
7754
7755 push_minipool_fix (insn, address,
7756 recog_data.operand_loc[opno],
c769a35d 7757 recog_data.operand_mode[opno], cop);
244b1afb 7758 }
7759
7760 result = true;
7761 }
949d79eb 7762 }
2b835d68 7763 }
7764
7765 return result;
7766}
7767
 7768/* GCC puts the pool in the wrong place for ARM, since we can only
7769 load addresses a limited distance around the pc. We do some
7770 special munging to move the constant pool values to the correct
7771 point in the code. */
18dbd950 7772static void
e32bac5b 7773arm_reorg (void)
7774{
7775 rtx insn;
7776 HOST_WIDE_INT address = 0;
7777 Mfix * fix;
ad076f4e 7778
949d79eb 7779 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 7780
7781 /* The first insn must always be a note, or the code below won't
7782 scan it properly. */
7783 insn = get_insns ();
7784 if (GET_CODE (insn) != NOTE)
7785 abort ();
7786
7787 /* Scan all the insns and record the operands that will need fixing. */
18dbd950 7788 for (insn = next_nonnote_insn (insn); insn; insn = next_nonnote_insn (insn))
2b835d68 7789 {
9b6b54e2 7790 if (TARGET_CIRRUS_FIX_INVALID_INSNS
f0375c66 7791 && (arm_cirrus_insn_p (insn)
9b6b54e2 7792 || GET_CODE (insn) == JUMP_INSN
f0375c66 7793 || arm_memory_load_p (insn)))
7794 cirrus_reorg (insn);
7795
949d79eb 7796 if (GET_CODE (insn) == BARRIER)
d5b7b3ae 7797 push_minipool_barrier (insn, address);
f0375c66 7798 else if (INSN_P (insn))
7799 {
7800 rtx table;
7801
f0375c66 7802 note_invalid_constants (insn, address, true);
949d79eb 7803 address += get_attr_length (insn);
d5b7b3ae 7804
7805 /* If the insn is a vector jump, add the size of the table
7806 and skip the table. */
d5b7b3ae 7807 if ((table = is_jump_table (insn)) != NULL)
2b835d68 7808 {
d5b7b3ae 7809 address += get_jump_table_size (table);
7810 insn = table;
7811 }
7812 }
7813 }
332072db 7814
7815 fix = minipool_fix_head;
7816
949d79eb 7817 /* Now scan the fixups and perform the required changes. */
d5b7b3ae 7818 while (fix)
949d79eb 7819 {
7820 Mfix * ftmp;
7821 Mfix * fdel;
7822 Mfix * last_added_fix;
7823 Mfix * last_barrier = NULL;
7824 Mfix * this_fix;
7825
7826 /* Skip any further barriers before the next fix. */
7827 while (fix && GET_CODE (fix->insn) == BARRIER)
7828 fix = fix->next;
7829
d5b7b3ae 7830 /* No more fixes. */
7831 if (fix == NULL)
7832 break;
332072db 7833
d5b7b3ae 7834 last_added_fix = NULL;
2b835d68 7835
d5b7b3ae 7836 for (ftmp = fix; ftmp; ftmp = ftmp->next)
949d79eb 7837 {
949d79eb 7838 if (GET_CODE (ftmp->insn) == BARRIER)
949d79eb 7839 {
7840 if (ftmp->address >= minipool_vector_head->max_address)
7841 break;
2b835d68 7842
d5b7b3ae 7843 last_barrier = ftmp;
2b835d68 7844 }
7845 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
7846 break;
7847
7848 last_added_fix = ftmp; /* Keep track of the last fix added. */
2b835d68 7849 }
949d79eb 7850
d5b7b3ae
RE
7851 /* If we found a barrier, drop back to that; any fixes that we
7852 could have reached but come after the barrier will now go in
7853 the next mini-pool. */
7854 if (last_barrier != NULL)
7855 {
7856 /* Reduce the refcount for those fixes that won't go into this
7857 pool after all. */
7858 for (fdel = last_barrier->next;
7859 fdel && fdel != ftmp;
7860 fdel = fdel->next)
7861 {
7862 fdel->minipool->refcount--;
7863 fdel->minipool = NULL;
7864 }
7865
7866 ftmp = last_barrier;
7867 }
7868 else
2bfa88dc 7869 {
 7870 /* ftmp is the first fix that we can't fit into this pool and
 7871 there are no natural barriers that we could use. Insert a
7872 new barrier in the code somewhere between the previous
7873 fix and this one, and arrange to jump around it. */
7874 HOST_WIDE_INT max_address;
7875
7876 /* The last item on the list of fixes must be a barrier, so
7877 we can never run off the end of the list of fixes without
7878 last_barrier being set. */
7879 if (ftmp == NULL)
7880 abort ();
7881
7882 max_address = minipool_vector_head->max_address;
7883 /* Check that there isn't another fix that is in range that
7884 we couldn't fit into this pool because the pool was
7885 already too large: we need to put the pool before such an
7886 instruction. */
7887 if (ftmp->address < max_address)
7888 max_address = ftmp->address;
7889
7890 last_barrier = create_fix_barrier (last_added_fix, max_address);
7891 }
7892
7893 assign_minipool_offsets (last_barrier);
7894
7895 while (ftmp)
7896 {
7897 if (GET_CODE (ftmp->insn) != BARRIER
7898 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
7899 == NULL))
7900 break;
2bfa88dc 7901
d5b7b3ae 7902 ftmp = ftmp->next;
2bfa88dc 7903 }
7904
7905 /* Scan over the fixes we have identified for this pool, fixing them
7906 up and adding the constants to the pool itself. */
d5b7b3ae 7907 for (this_fix = fix; this_fix && ftmp != this_fix;
7908 this_fix = this_fix->next)
7909 if (GET_CODE (this_fix->insn) != BARRIER)
7910 {
7911 rtx addr
7912 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
7913 minipool_vector_label),
d5b7b3ae 7914 this_fix->minipool->offset);
7915 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
7916 }
7917
d5b7b3ae 7918 dump_minipool (last_barrier->insn);
949d79eb 7919 fix = ftmp;
2b835d68 7920 }
4b632bf1 7921
7922 /* From now on we must synthesize any constants that we can't handle
7923 directly. This can happen if the RTL gets split during final
7924 instruction generation. */
4b632bf1 7925 after_arm_reorg = 1;
7926
7927 /* Free the minipool memory. */
7928 obstack_free (&minipool_obstack, minipool_startobj);
2b835d68 7929}
7930\f
7931/* Routines to output assembly language. */
7932
f3bb6135 7933/* If the rtx is the correct value then return the string of the number.
ff9940b0 7934 In this way we can ensure that valid double constants are generated even
6354dc9b 7935 when cross compiling. */
cd2b33d0 7936const char *
e32bac5b 7937fp_immediate_constant (rtx x)
7938{
7939 REAL_VALUE_TYPE r;
7940 int i;
7941
7942 if (!fp_consts_inited)
7943 init_fp_table ();
7944
7945 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7946 for (i = 0; i < 8; i++)
7947 if (REAL_VALUES_EQUAL (r, values_fp[i]))
7948 return strings_fp[i];
f3bb6135 7949
7950 abort ();
7951}
7952
9997d19d 7953/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
cd2b33d0 7954static const char *
e32bac5b 7955fp_const_from_val (REAL_VALUE_TYPE *r)
7956{
7957 int i;
7958
7959 if (!fp_consts_inited)
7960 init_fp_table ();
7961
7962 for (i = 0; i < 8; i++)
7963 if (REAL_VALUES_EQUAL (*r, values_fp[i]))
7964 return strings_fp[i];
7965
7966 abort ();
7967}
ff9940b0 7968
7969/* Output the operands of a LDM/STM instruction to STREAM.
7970 MASK is the ARM register set mask of which only bits 0-15 are important.
7971 REG is the base register, either the frame pointer or the stack pointer,
7972 INSTR is the possibly suffixed load or store instruction. */
d5b7b3ae 7973static void
e32bac5b 7974print_multi_reg (FILE *stream, const char *instr, int reg, int mask)
7975{
7976 int i;
7977 int not_first = FALSE;
7978
1d5473cb 7979 fputc ('\t', stream);
dd18ae56 7980 asm_fprintf (stream, instr, reg);
1d5473cb 7981 fputs (", {", stream);
62b10bbc 7982
d5b7b3ae 7983 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7984 if (mask & (1 << i))
7985 {
7986 if (not_first)
7987 fprintf (stream, ", ");
62b10bbc 7988
dd18ae56 7989 asm_fprintf (stream, "%r", i);
7990 not_first = TRUE;
7991 }
f3bb6135 7992
61f0ccff 7993 fprintf (stream, "}\n");
f3bb6135 7994}
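/* As an illustration (the mask value here is made up), a call such as

     print_multi_reg (stream, "stmfd\t%r!", SP_REGNUM, 0x4070);

   prints "stmfd sp!, {r4, r5, r6, lr}", since bits 4-6 and 14 of the
   mask are set.  */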
cce8749e 7995
9b66ebb1 7996
7997/* Output a FLDMX instruction to STREAM.
 7998 BASE is the register containing the address.
7999 REG and COUNT specify the register range.
8000 Extra registers may be added to avoid hardware bugs. */
8001
8002static void
9728c9d1 8003arm_output_fldmx (FILE * stream, unsigned int base, int reg, int count)
8004{
8005 int i;
8006
8007 /* Workaround ARM10 VFPr1 bug. */
8008 if (count == 2 && !arm_arch6)
8009 {
8010 if (reg == 15)
8011 reg--;
8012 count++;
8013 }
8014
9b66ebb1 8015 fputc ('\t', stream);
9728c9d1 8016 asm_fprintf (stream, "fldmfdx\t%r!, {", base);
9b66ebb1 8017
9728c9d1 8018 for (i = reg; i < reg + count; i++)
9b66ebb1 8019 {
9728c9d1 8020 if (i > reg)
9b66ebb1 8021 fputs (", ", stream);
9728c9d1 8022 asm_fprintf (stream, "d%d", i);
8023 }
8024 fputs ("}\n", stream);
9728c9d1 8025
8026}
8027
8028
8029/* Output the assembly for a store multiple. */
8030
8031const char *
8032vfp_output_fstmx (rtx * operands)
8033{
8034 char pattern[100];
8035 int p;
8036 int base;
8037 int i;
8038
8039 strcpy (pattern, "fstmfdx\t%m0!, {%P1");
8040 p = strlen (pattern);
8041
8042 if (GET_CODE (operands[1]) != REG)
8043 abort ();
8044
8045 base = (REGNO (operands[1]) - FIRST_VFP_REGNUM) / 2;
8046 for (i = 1; i < XVECLEN (operands[2], 0); i++)
8047 {
8048 p += sprintf (&pattern[p], ", d%d", base + i);
8049 }
8050 strcpy (&pattern[p], "}");
8051
8052 output_asm_insn (pattern, operands);
8053 return "";
8054}
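/* Sketch of the result (register numbers illustrative): if operands[1]
   is the first register of pair d8 and the PARALLEL in operands[2]
   covers three pairs, the pattern built above is
   "fstmfdx\t%m0!, {%P1, d9, d10}", i.e. a single store-multiple of
   d8-d10 with base-register writeback.  */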
8055
8056
8057/* Emit RTL to save block of VFP register pairs to the stack. Returns the
8058 number of bytes pushed. */
9b66ebb1 8059
9728c9d1 8060static int
8061vfp_emit_fstmx (int base_reg, int count)
8062{
8063 rtx par;
8064 rtx dwarf;
8065 rtx tmp, reg;
8066 int i;
8067
8068 /* Workaround ARM10 VFPr1 bug. Data corruption can occur when exactly two
8069 register pairs are stored by a store multiple insn. We avoid this
8070 by pushing an extra pair. */
8071 if (count == 2 && !arm_arch6)
8072 {
8073 if (base_reg == LAST_VFP_REGNUM - 3)
8074 base_reg -= 2;
8075 count++;
8076 }
8077
8078 /* ??? The frame layout is implementation defined. We describe
8079 standard format 1 (equivalent to a FSTMD insn and unused pad word).
8080 We really need some way of representing the whole block so that the
8081 unwinder can figure it out at runtime. */
8082 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8083 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (count + 1));
8084
8085 reg = gen_rtx_REG (DFmode, base_reg);
8086 base_reg += 2;
8087
8088 XVECEXP (par, 0, 0)
8089 = gen_rtx_SET (VOIDmode,
8090 gen_rtx_MEM (BLKmode,
8091 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8092 gen_rtx_UNSPEC (BLKmode,
8093 gen_rtvec (1, reg),
8094 UNSPEC_PUSH_MULT));
8095
8096 tmp = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8097 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8098 GEN_INT (-(count * 8 + 4))));
8099 RTX_FRAME_RELATED_P (tmp) = 1;
8100 XVECEXP (dwarf, 0, 0) = tmp;
8101
8102 tmp = gen_rtx_SET (VOIDmode,
8103 gen_rtx_MEM (DFmode, stack_pointer_rtx),
8104 reg);
8105 RTX_FRAME_RELATED_P (tmp) = 1;
8106 XVECEXP (dwarf, 0, 1) = tmp;
8107
8108 for (i = 1; i < count; i++)
8109 {
8110 reg = gen_rtx_REG (DFmode, base_reg);
8111 base_reg += 2;
8112 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8113
8114 tmp = gen_rtx_SET (VOIDmode,
8115 gen_rtx_MEM (DFmode,
8116 gen_rtx_PLUS (SImode,
8117 stack_pointer_rtx,
8118 GEN_INT (i * 8))),
8119 reg);
8120 RTX_FRAME_RELATED_P (tmp) = 1;
8121 XVECEXP (dwarf, 0, i + 1) = tmp;
8122 }
8123
8124 par = emit_insn (par);
8125 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8126 REG_NOTES (par));
8127 RTX_FRAME_RELATED_P (par) = 1;
8128
8129 return count * 8 + 4;
8130}
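/* For example (illustrative arguments): vfp_emit_fstmx with the
   register number of d8 and a count of 3 pushes d8-d10 and returns
   3 * 8 + 4 == 28; the extra word is the pad implied by the FSTMX
   store format.  On a pre-arm_arch6 core a count of exactly 2 would
   first be bumped to 3 by the erratum workaround above.  */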
8131
8132
6354dc9b 8133/* Output a 'call' insn. */
cd2b33d0 8134const char *
e32bac5b 8135output_call (rtx *operands)
cce8749e 8136{
8137 if (arm_arch5)
8138 abort (); /* Patterns should call blx <reg> directly. */
cce8749e 8139
68d560d4 8140 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
62b10bbc 8141 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 8142 {
62b10bbc 8143 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 8144 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 8145 }
62b10bbc 8146
1d5473cb 8147 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 8148
68d560d4 8149 if (TARGET_INTERWORK || arm_arch4t)
8150 output_asm_insn ("bx%?\t%0", operands);
8151 else
8152 output_asm_insn ("mov%?\t%|pc, %0", operands);
8153
8154 return "";
8155}
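/* The common pre-v5 sequence emitted above is (r0 standing in for
   whatever register holds the callee's address):

     mov	lr, pc
     mov	pc, r0		@ "bx r0" with interworking or arm_arch4t

   Reading the pc yields the current insn's address plus 8, so lr ends
   up pointing at the insn just after the branch.  */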
cce8749e 8156
6354dc9b 8157/* Output a 'call' insn that is a reference in memory. */
cd2b33d0 8158const char *
e32bac5b 8159output_call_mem (rtx *operands)
ff9940b0 8160{
68d560d4 8161 if (TARGET_INTERWORK && !arm_arch5)
8162 {
8163 output_asm_insn ("ldr%?\t%|ip, %0", operands);
8164 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
8165 output_asm_insn ("bx%?\t%|ip", operands);
8166 }
8167 else if (regno_use_in (LR_REGNUM, operands[0]))
8168 {
8169 /* LR is used in the memory address. We load the address in the
8170 first instruction. It's safe to use IP as the target of the
8171 load since the call will kill it anyway. */
8172 output_asm_insn ("ldr%?\t%|ip, %0", operands);
8173 if (arm_arch5)
8174 output_asm_insn ("blx%?%|ip", operands);
8175 else
8176 {
8177 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
8178 if (arm_arch4t)
8179 output_asm_insn ("bx%?\t%|ip", operands);
8180 else
8181 output_asm_insn ("mov%?\t%|pc, %|ip", operands);
8182 }
6ab5da80 8183 }
8184 else
8185 {
8186 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
8187 output_asm_insn ("ldr%?\t%|pc, %0", operands);
8188 }
8189
8190 return "";
8191}
8192
8193
 8194/* Output a move from arm registers to an fpa register.
8195 OPERANDS[0] is an fpa register.
ff9940b0 8196 OPERANDS[1] is the first register of an arm register pair. */
cd2b33d0 8197const char *
e32bac5b 8198output_mov_long_double_fpa_from_arm (rtx *operands)
8199{
8200 int arm_reg0 = REGNO (operands[1]);
8201 rtx ops[3];
8202
8203 if (arm_reg0 == IP_REGNUM)
8204 abort ();
f3bb6135 8205
8206 ops[0] = gen_rtx_REG (SImode, arm_reg0);
8207 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
8208 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 8209
8210 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
8211 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 8212
8213 return "";
8214}
ff9940b0 8215
3b684012 8216/* Output a move from an fpa register to arm registers.
ff9940b0 8217 OPERANDS[0] is the first register of an arm register pair.
3b684012 8218 OPERANDS[1] is an fpa register. */
cd2b33d0 8219const char *
e32bac5b 8220output_mov_long_double_arm_from_fpa (rtx *operands)
8221{
8222 int arm_reg0 = REGNO (operands[0]);
8223 rtx ops[3];
8224
8225 if (arm_reg0 == IP_REGNUM)
8226 abort ();
f3bb6135 8227
8228 ops[0] = gen_rtx_REG (SImode, arm_reg0);
8229 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
8230 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 8231
8232 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
8233 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
8234 return "";
8235}
8236
8237/* Output a move from arm registers to arm registers of a long double
8238 OPERANDS[0] is the destination.
8239 OPERANDS[1] is the source. */
cd2b33d0 8240const char *
e32bac5b 8241output_mov_long_double_arm_from_arm (rtx *operands)
ff9940b0 8242{
6354dc9b 8243 /* We have to be careful here because the two might overlap. */
8244 int dest_start = REGNO (operands[0]);
8245 int src_start = REGNO (operands[1]);
8246 rtx ops[2];
8247 int i;
8248
8249 if (dest_start < src_start)
8250 {
8251 for (i = 0; i < 3; i++)
8252 {
8253 ops[0] = gen_rtx_REG (SImode, dest_start + i);
8254 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 8255 output_asm_insn ("mov%?\t%0, %1", ops);
8256 }
8257 }
8258 else
8259 {
8260 for (i = 2; i >= 0; i--)
8261 {
8262 ops[0] = gen_rtx_REG (SImode, dest_start + i);
8263 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 8264 output_asm_insn ("mov%?\t%0, %1", ops);
8265 }
8266 }
f3bb6135 8267
8268 return "";
8269}
8270
8271
 8272/* Output a move from arm registers to an fpa register.
8273 OPERANDS[0] is an fpa register.
cce8749e 8274 OPERANDS[1] is the first register of an arm register pair. */
cd2b33d0 8275const char *
e32bac5b 8276output_mov_double_fpa_from_arm (rtx *operands)
8277{
8278 int arm_reg0 = REGNO (operands[1]);
8279 rtx ops[2];
8280
8281 if (arm_reg0 == IP_REGNUM)
8282 abort ();
8283
8284 ops[0] = gen_rtx_REG (SImode, arm_reg0);
8285 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
8286 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
8287 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
8288 return "";
8289}
cce8749e 8290
3b684012 8291/* Output a move from an fpa register to arm registers.
cce8749e 8292 OPERANDS[0] is the first register of an arm register pair.
3b684012 8293 OPERANDS[1] is an fpa register. */
cd2b33d0 8294const char *
e32bac5b 8295output_mov_double_arm_from_fpa (rtx *operands)
8296{
8297 int arm_reg0 = REGNO (operands[0]);
8298 rtx ops[2];
8299
8300 if (arm_reg0 == IP_REGNUM)
8301 abort ();
f3bb6135 8302
8303 ops[0] = gen_rtx_REG (SImode, arm_reg0);
8304 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
8305 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
8306 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
8307 return "";
8308}
8309
8310/* Output a move between double words.
8311 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
8312 or MEM<-REG and all MEMs must be offsettable addresses. */
cd2b33d0 8313const char *
e32bac5b 8314output_move_double (rtx *operands)
8315{
8316 enum rtx_code code0 = GET_CODE (operands[0]);
8317 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 8318 rtx otherops[3];
8319
8320 if (code0 == REG)
8321 {
8322 int reg0 = REGNO (operands[0]);
8323
43cffd11 8324 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 8325
8326 if (code1 == REG)
8327 {
8328 int reg1 = REGNO (operands[1]);
8329 if (reg1 == IP_REGNUM)
8330 abort ();
f3bb6135 8331
6354dc9b 8332 /* Ensure the second source is not overwritten. */
c1c2bc04 8333 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 8334 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 8335 else
6cfc7210 8336 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e 8337 }
8338 else if (code1 == CONST_VECTOR)
8339 {
8340 HOST_WIDE_INT hint = 0;
8341
8342 switch (GET_MODE (operands[1]))
8343 {
8344 case V2SImode:
8345 otherops[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 1)));
8346 operands[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 0)));
8347 break;
8348
8349 case V4HImode:
8350 if (BYTES_BIG_ENDIAN)
8351 {
8352 hint = INTVAL (CONST_VECTOR_ELT (operands[1], 2));
8353 hint <<= 16;
8354 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
8355 }
8356 else
8357 {
8358 hint = INTVAL (CONST_VECTOR_ELT (operands[1], 3));
8359 hint <<= 16;
8360 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
8361 }
8362
8363 otherops[1] = GEN_INT (hint);
8364 hint = 0;
8365
8366 if (BYTES_BIG_ENDIAN)
8367 {
8368 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
8369 hint <<= 16;
8370 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
8371 }
8372 else
8373 {
8374 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
8375 hint <<= 16;
8376 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
8377 }
8378
8379 operands[1] = GEN_INT (hint);
8380 break;
8381
8382 case V8QImode:
8383 if (BYTES_BIG_ENDIAN)
8384 {
8385 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
8386 hint <<= 8;
8387 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
8388 hint <<= 8;
8389 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
8390 hint <<= 8;
8391 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
8392 }
8393 else
8394 {
8395 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
8396 hint <<= 8;
8397 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
8398 hint <<= 8;
8399 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
8400 hint <<= 8;
8401 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
8402 }
8403
8404 otherops[1] = GEN_INT (hint);
8405 hint = 0;
8406
8407 if (BYTES_BIG_ENDIAN)
8408 {
8409 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
8410 hint <<= 8;
8411 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
8412 hint <<= 8;
8413 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
8414 hint <<= 8;
8415 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
8416 }
8417 else
8418 {
8419 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
8420 hint <<= 8;
8421 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
8422 hint <<= 8;
8423 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
8424 hint <<= 8;
8425 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
8426 }
8427
8428 operands[1] = GEN_INT (hint);
8429 break;
8430
8431 default:
8432 abort ();
8433 }
8434 output_mov_immediate (operands);
8435 output_mov_immediate (otherops);
8436 }
8437 else if (code1 == CONST_DOUBLE)
8438 {
8439 if (GET_MODE (operands[1]) == DFmode)
8440 {
b216cd4a 8441 REAL_VALUE_TYPE r;
226a5051 8442 long l[2];
226a5051 8443
8444 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
8445 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
8446 otherops[1] = GEN_INT (l[1]);
8447 operands[1] = GEN_INT (l[0]);
226a5051 8448 }
8449 else if (GET_MODE (operands[1]) != VOIDmode)
8450 abort ();
8451 else if (WORDS_BIG_ENDIAN)
8452 {
8453 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
8454 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
8455 }
8456 else
8457 {
8458 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
8459 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
8460 }
6cfc7210 8461
8462 output_mov_immediate (operands);
8463 output_mov_immediate (otherops);
8464 }
8465 else if (code1 == CONST_INT)
8466 {
8467#if HOST_BITS_PER_WIDE_INT > 32
8468 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
8469 what the upper word is. */
8470 if (WORDS_BIG_ENDIAN)
8471 {
8472 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
8473 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
8474 }
8475 else
8476 {
8477 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
8478 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
8479 }
8480#else
6354dc9b 8481 /* Sign extend the intval into the high-order word. */
8482 if (WORDS_BIG_ENDIAN)
8483 {
8484 otherops[1] = operands[1];
8485 operands[1] = (INTVAL (operands[1]) < 0
8486 ? constm1_rtx : const0_rtx);
8487 }
ff9940b0 8488 else
c1c2bc04 8489 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 8490#endif
8491 output_mov_immediate (otherops);
8492 output_mov_immediate (operands);
8493 }
8494 else if (code1 == MEM)
8495 {
ff9940b0 8496 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 8497 {
ff9940b0 8498 case REG:
9997d19d 8499 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 8500 break;
2b835d68 8501
ff9940b0 8502 case PRE_INC:
8503 if (!TARGET_LDRD)
8504 abort (); /* Should never happen now. */
8505 output_asm_insn ("ldr%?d\t%0, [%m1, #8]!", operands);
ff9940b0 8506 break;
2b835d68 8507
ff9940b0 8508 case PRE_DEC:
2b835d68 8509 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 8510 break;
2b835d68 8511
ff9940b0 8512 case POST_INC:
9997d19d 8513 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 8514 break;
2b835d68 8515
ff9940b0 8516 case POST_DEC:
8517 if (!TARGET_LDRD)
8518 abort (); /* Should never happen now. */
8519 output_asm_insn ("ldr%?d\t%0, [%m1], #-8", operands);
8520 break;
8521
8522 case PRE_MODIFY:
8523 case POST_MODIFY:
8524 otherops[0] = operands[0];
8525 otherops[1] = XEXP (XEXP (XEXP (operands[1], 0), 1), 0);
8526 otherops[2] = XEXP (XEXP (XEXP (operands[1], 0), 1), 1);
8527
8528 if (GET_CODE (XEXP (operands[1], 0)) == PRE_MODIFY)
8529 {
8530 if (reg_overlap_mentioned_p (otherops[0], otherops[2]))
8531 {
8532 /* Registers overlap so split out the increment. */
8533 output_asm_insn ("add%?\t%1, %1, %2", otherops);
8534 output_asm_insn ("ldr%?d\t%0, [%1] @split", otherops);
8535 }
8536 else
8537 output_asm_insn ("ldr%?d\t%0, [%1, %2]!", otherops);
8538 }
8539 else
8540 {
8541 /* We only allow constant increments, so this is safe. */
8542 output_asm_insn ("ldr%?d\t%0, [%1], %2", otherops);
8543 }
ff9940b0 8544 break;
8545
8546 case LABEL_REF:
8547 case CONST:
8548 output_asm_insn ("adr%?\t%0, %1", operands);
8549 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
8550 break;
8551
ff9940b0 8552 default:
8553 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
8554 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 8555 {
8556 otherops[0] = operands[0];
8557 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
8558 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
1d6e90ac 8559
8560 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
8561 {
8562 if (GET_CODE (otherops[2]) == CONST_INT)
8563 {
06bea5aa 8564 switch ((int) INTVAL (otherops[2]))
2b835d68
RE
8565 {
8566 case -8:
8567 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
8568 return "";
8569 case -4:
8570 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
8571 return "";
8572 case 4:
8573 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
8574 return "";
8575 }
8576 }
8577 if (TARGET_LDRD
8578 && (GET_CODE (otherops[2]) == REG
8579 || (GET_CODE (otherops[2]) == CONST_INT
8580 && INTVAL (otherops[2]) > -256
8581 && INTVAL (otherops[2]) < 256)))
8582 {
8583 if (reg_overlap_mentioned_p (otherops[0],
8584 otherops[2]))
8585 {
8586 /* Swap base and index registers over to
8587 avoid a conflict. */
8588 otherops[1] = XEXP (XEXP (operands[1], 0), 1);
8589 otherops[2] = XEXP (XEXP (operands[1], 0), 0);
8590
8591 }
8592 /* If both registers conflict, it will usually
8593 have been fixed by a splitter. */
8594 if (reg_overlap_mentioned_p (otherops[0],
8595 otherops[2]))
8596 {
8597 output_asm_insn ("add%?\t%1, %1, %2", otherops);
8598 output_asm_insn ("ldr%?d\t%0, [%1]",
8599 otherops);
8600 return "";
8601 }
8602 else
8603 {
8604 output_asm_insn ("ldr%?d\t%0, [%1, %2]",
8605 otherops);
8606 return "";
8607 }
8608 }
8609 if (GET_CODE (otherops[2]) == CONST_INT)
8610 {
8611 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
8612 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
8613 else
8614 output_asm_insn ("add%?\t%0, %1, %2", otherops);
8615 }
8616 else
8617 output_asm_insn ("add%?\t%0, %1, %2", otherops);
8618 }
8619 else
8620 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 8621
8622 return "ldm%?ia\t%0, %M0";
8623 }
8624 else
8625 {
a4a37b30 8626 otherops[1] = adjust_address (operands[1], SImode, 4);
8627 /* Take care of overlapping base/data reg. */
8628 if (reg_mentioned_p (operands[0], operands[1]))
8629 {
8630 output_asm_insn ("ldr%?\t%0, %1", otherops);
8631 output_asm_insn ("ldr%?\t%0, %1", operands);
8632 }
8633 else
8634 {
8635 output_asm_insn ("ldr%?\t%0, %1", operands);
8636 output_asm_insn ("ldr%?\t%0, %1", otherops);
8637 }
8638 }
8639 }
8640 }
2b835d68 8641 else
6354dc9b 8642 abort (); /* Constraints should prevent this. */
8643 }
8644 else if (code0 == MEM && code1 == REG)
8645 {
8646 if (REGNO (operands[1]) == IP_REGNUM)
8647 abort ();
2b835d68 8648
8649 switch (GET_CODE (XEXP (operands[0], 0)))
8650 {
8651 case REG:
9997d19d 8652 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 8653 break;
2b835d68 8654
ff9940b0 8655 case PRE_INC:
8656 if (!TARGET_LDRD)
8657 abort (); /* Should never happen now. */
8658 output_asm_insn ("str%?d\t%1, [%m0, #8]!", operands);
ff9940b0 8659 break;
2b835d68 8660
ff9940b0 8661 case PRE_DEC:
2b835d68 8662 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 8663 break;
2b835d68 8664
ff9940b0 8665 case POST_INC:
9997d19d 8666 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 8667 break;
2b835d68 8668
ff9940b0 8669 case POST_DEC:
8670 if (!TARGET_LDRD)
8671 abort (); /* Should never happen now. */
8672 output_asm_insn ("str%?d\t%1, [%m0], #-8", operands);
8673 break;
8674
8675 case PRE_MODIFY:
8676 case POST_MODIFY:
8677 otherops[0] = operands[1];
8678 otherops[1] = XEXP (XEXP (XEXP (operands[0], 0), 1), 0);
8679 otherops[2] = XEXP (XEXP (XEXP (operands[0], 0), 1), 1);
8680
8681 if (GET_CODE (XEXP (operands[0], 0)) == PRE_MODIFY)
8682 output_asm_insn ("str%?d\t%0, [%1, %2]!", otherops);
8683 else
8684 output_asm_insn ("str%?d\t%0, [%1], %2", otherops);
ff9940b0 8685 break;
2b835d68
RE
8686
8687 case PLUS:
fdd695fd
PB
8688 otherops[2] = XEXP (XEXP (operands[0], 0), 1);
8689 if (GET_CODE (otherops[2]) == CONST_INT)
2b835d68 8690 {
06bea5aa 8691 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
2b835d68
RE
8692 {
8693 case -8:
8694 output_asm_insn ("stm%?db\t%m0, %M1", operands);
8695 return "";
8696
8697 case -4:
8698 output_asm_insn ("stm%?da\t%m0, %M1", operands);
8699 return "";
8700
8701 case 4:
8702 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
8703 return "";
8704 }
8705 }
fdd695fd
PB
8706 if (TARGET_LDRD
8707 && (GET_CODE (otherops[2]) == REG
8708 || (GET_CODE (otherops[2]) == CONST_INT
8709 && INTVAL (otherops[2]) > -256
8710 && INTVAL (otherops[2]) < 256)))
8711 {
8712 otherops[0] = operands[1];
8713 otherops[1] = XEXP (XEXP (operands[0], 0), 0);
8714 output_asm_insn ("str%?d\t%0, [%1, %2]", otherops);
8715 return "";
8716 }
2b835d68
RE
8717 /* Fall through */
8718
ff9940b0 8719 default:
a4a37b30 8720 otherops[0] = adjust_address (operands[0], SImode, 4);
43cffd11 8721 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
8722 output_asm_insn ("str%?\t%1, %0", operands);
8723 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
8724 }
8725 }
2b835d68 8726 else
1d6e90ac
NC
8727 /* Constraints should prevent this. */
8728 abort ();
cce8749e 8729
9997d19d
RE
8730 return "";
8731}

/* Output an arbitrary MOV reg, #n.
   OPERANDS[0] is a register.  OPERANDS[1] is a const_int.  */
const char *
output_mov_immediate (rtx *operands)
{
  HOST_WIDE_INT n = INTVAL (operands[1]);

  /* Try to use one MOV.  */
  if (const_ok_for_arm (n))
    output_asm_insn ("mov%?\t%0, %1", operands);

  /* Try to use one MVN.  */
  else if (const_ok_for_arm (~n))
    {
      operands[1] = GEN_INT (~n);
      output_asm_insn ("mvn%?\t%0, %1", operands);
    }
  else
    {
      int n_ones = 0;
      int i;

      /* If all else fails, make it out of ORRs or BICs as appropriate.  */
      for (i = 0; i < 32; i++)
	if (n & 1 << i)
	  n_ones++;

      if (n_ones > 16)  /* Shorter to use MVN with BIC in this case.  */
	output_multi_immediate (operands, "mvn%?\t%0, %1",
				"bic%?\t%0, %0, %1", 1, ~n);
      else
	output_multi_immediate (operands, "mov%?\t%0, %1",
				"orr%?\t%0, %0, %1", 1, n);
    }

  return "";
}
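/* For illustration: with n = 0x00ff00ff neither n nor ~n is a valid
   ARM immediate and exactly 16 bits are set, so the MOV/ORR path
   above emits

	mov	r0, #255		@ 0x000000ff
	orr	r0, r0, #16711680	@ 0x00ff0000

   -- two instructions, one per encodable 8-bit chunk.  */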

/* Output an ADD r, s, #n where n may be too big for one instruction.
   If adding zero to one register, output nothing.  */
const char *
output_add_immediate (rtx *operands)
{
  HOST_WIDE_INT n = INTVAL (operands[2]);

  if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
    {
      if (n < 0)
	output_multi_immediate (operands,
				"sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
				-n);
      else
	output_multi_immediate (operands,
				"add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
				n);
    }

  return "";
}
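/* For illustration: an add of 257 (0x101, not encodable as a single
   rotated immediate) expands to

	add	r0, r1, #1
	add	r0, r0, #256  */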

/* Output a multiple immediate operation.
   OPERANDS is the vector of operands referred to in the output patterns.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.  */
static const char *
output_multi_immediate (rtx *operands, const char *instr1, const char *instr2,
			int immed_op, HOST_WIDE_INT n)
{
#if HOST_BITS_PER_WIDE_INT > 32
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      /* Quick and easy output.  */
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands);
    }
  else
    {
      int i;
      const char * instr = instr1;

      /* Note that n is never zero here (which would give no output).  */
      for (i = 0; i < 32; i += 2)
	{
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      instr = instr2;
	      i += 6;
	    }
	}
    }

  return "";
}
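/* The loop above steps two bits at a time and masks with (255 << i)
   because an ARM data-processing immediate is an 8-bit value rotated
   right by an even amount; each instruction emitted therefore covers
   exactly one encodable 8-bit field.  */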

/* Return the appropriate ARM instruction for the operation code.
   The returned result should not be overwritten.  OP is the rtx of the
   operation.  SHIFT_FIRST_ARG is TRUE if the first argument of the operator
   was shifted.  */
const char *
arithmetic_instr (rtx op, int shift_first_arg)
{
  switch (GET_CODE (op))
    {
    case PLUS:
      return "add";

    case MINUS:
      return shift_first_arg ? "rsb" : "sub";

    case IOR:
      return "orr";

    case XOR:
      return "eor";

    case AND:
      return "and";

    default:
      abort ();
    }
}

/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx of the shift.
   On exit, *AMOUNTP will be -1 if the shift is by a register, or the
   constant shift amount otherwise.  */
static const char *
shift_op (rtx op, HOST_WIDE_INT *amountp)
{
  const char * mnem;
  enum rtx_code code = GET_CODE (op);

  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
	 power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
	 multiplication by a power of 2 with the recognizer for a
	 shift.  >=32 is not a valid shift for "asl", so we must try and
	 output a shift that produces the correct arithmetical result.
	 Using lsr #32 is identical except for the fact that the carry bit
	 is not set correctly if we set the flags; but we never use the
	 carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
	/* Rotate is just modulo 32.  */
	*amountp &= 31;
      else if (*amountp != (*amountp & 31))
	{
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
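/* For illustration: a MULT by 8 returns "asl" with *amountp set to 3,
   while an ASHIFT by 33 is rewritten as "lsr" with *amountp set to 32;
   lsr #32 is encodable and still produces the arithmetically correct
   result of zero.  */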

/* Obtain the shift count from a power of two.  */

static HOST_WIDE_INT
int_log2 (HOST_WIDE_INT power)
{
  HOST_WIDE_INT shift = 0;

  while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
    {
      if (shift > 31)
	abort ();
      shift++;
    }

  return shift;
}

/* Output a .ascii pseudo-op, keeping track of lengths.  This is
   because /bin/as is horribly restrictive.  */
#define MAX_ASCII_LEN 51

void
output_ascii_pseudo_op (FILE *stream, const unsigned char *p, int len)
{
  int i;
  int len_so_far = 0;

  fputs ("\t.ascii\t\"", stream);

  for (i = 0; i < len; i++)
    {
      int c = p[i];

      if (len_so_far >= MAX_ASCII_LEN)
	{
	  fputs ("\"\n\t.ascii\t\"", stream);
	  len_so_far = 0;
	}

      switch (c)
	{
	case TARGET_TAB:
	  fputs ("\\t", stream);
	  len_so_far += 2;
	  break;

	case TARGET_FF:
	  fputs ("\\f", stream);
	  len_so_far += 2;
	  break;

	case TARGET_BS:
	  fputs ("\\b", stream);
	  len_so_far += 2;
	  break;

	case TARGET_CR:
	  fputs ("\\r", stream);
	  len_so_far += 2;
	  break;

	case TARGET_NEWLINE:
	  fputs ("\\n", stream);
	  /* Peek at the next character, guarding against reading
	     past the end of the buffer.  */
	  c = (i + 1 < len) ? p[i + 1] : 0;
	  if ((c >= ' ' && c <= '~')
	      || c == TARGET_TAB)
	    /* This is a good place for a line break.  */
	    len_so_far = MAX_ASCII_LEN;
	  else
	    len_so_far += 2;
	  break;

	case '\"':
	case '\\':
	  putc ('\\', stream);
	  len_so_far++;
	  /* Drop through.  */

	default:
	  if (c >= ' ' && c <= '~')
	    {
	      putc (c, stream);
	      len_so_far++;
	    }
	  else
	    {
	      fprintf (stream, "\\%03o", c);
	      len_so_far += 4;
	    }
	  break;
	}
    }

  fputs ("\"\n", stream);
}
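/* For illustration: called with p = "hi\n" and len = 4 (including the
   trailing NUL) this emits

	.ascii	"hi\n\000"

   splitting onto a new .ascii directive whenever MAX_ASCII_LEN
   characters have been written.  */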

/* Compute the register save mask for registers 0 through 12
   inclusive.  This code is used by arm_compute_save_reg_mask.  */
static unsigned long
arm_compute_save_reg0_reg12_mask (void)
{
  unsigned long func_type = arm_current_func_type ();
  unsigned int save_reg_mask = 0;
  unsigned int reg;

  if (IS_INTERRUPT (func_type))
    {
      unsigned int max_reg;
      /* Interrupt functions must not corrupt any registers,
	 even call clobbered ones.  If this is a leaf function
	 we can just examine the registers used by the RTL, but
	 otherwise we have to assume that whatever function is
	 called might clobber anything, and so we have to save
	 all the call-clobbered registers as well.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
	/* FIQ handlers have registers r8 - r12 banked, so
	   we only need to check r0 - r7.  Normal ISRs only
	   bank r14 and r15, so we must check up to r12.
	   r13 is the stack pointer which is always preserved,
	   so we do not need to consider it here.  */
	max_reg = 7;
      else
	max_reg = 12;

      for (reg = 0; reg <= max_reg; reg++)
	if (regs_ever_live[reg]
	    || (! current_function_is_leaf && call_used_regs [reg]))
	  save_reg_mask |= (1 << reg);
    }
  else
    {
      /* In the normal case we only need to save those registers
	 which are call saved and which are used by this function.  */
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs [reg])
	  save_reg_mask |= (1 << reg);

      /* Handle the frame pointer as a special case.  */
      if (! TARGET_APCS_FRAME
	  && ! frame_pointer_needed
	  && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
	  && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
	save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;

      /* If we aren't loading the PIC register,
	 don't stack it even though it may be live.  */
      if (flag_pic
	  && ! TARGET_SINGLE_PIC_BASE
	  && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
    }

  /* Save registers so the exception handler can modify them.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;

      for (i = 0; ; i++)
	{
	  reg = EH_RETURN_DATA_REGNO (i);
	  if (reg == INVALID_REGNUM)
	    break;
	  save_reg_mask |= 1 << reg;
	}
    }

  return save_reg_mask;
}

/* Compute a bit mask of which registers need to be
   saved on the stack for the current function.  */

static unsigned long
arm_compute_save_reg_mask (void)
{
  unsigned int save_reg_mask = 0;
  unsigned long func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* This should never really happen.  */
    return 0;

  /* If we are creating a stack frame, then we must save the frame pointer,
     IP (which will hold the old stack pointer), LR and the PC.  */
  if (frame_pointer_needed)
    save_reg_mask |=
      (1 << ARM_HARD_FRAME_POINTER_REGNUM)
      | (1 << IP_REGNUM)
      | (1 << LR_REGNUM)
      | (1 << PC_REGNUM);

  /* Volatile functions do not return, so there
     is no need to save any other registers.  */
  if (IS_VOLATILE (func_type))
    return save_reg_mask;

  save_reg_mask |= arm_compute_save_reg0_reg12_mask ();

  /* Decide if we need to save the link register.
     Interrupt routines have their own banked link register,
     so they never need to save it.
     Otherwise if we do not use the link register we do not need to save
     it.  If we are pushing other registers onto the stack however, we
     can save an instruction in the epilogue by pushing the link register
     now and then popping it back into the PC.  This incurs extra memory
     accesses though, so we only do it when optimizing for size, and only
     if we know that we will not need a fancy return sequence.  */
  if (regs_ever_live [LR_REGNUM]
      || (save_reg_mask
	  && optimize_size
	  && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && !current_function_calls_eh_return))
    save_reg_mask |= 1 << LR_REGNUM;

  if (cfun->machine->lr_save_eliminated)
    save_reg_mask &= ~ (1 << LR_REGNUM);

  if (TARGET_REALLY_IWMMXT
      && ((bit_count (save_reg_mask)
	   + ARM_NUM_INTS (current_function_pretend_args_size)) % 2) != 0)
    {
      unsigned int reg;

      /* The total number of registers that are going to be pushed
	 onto the stack is odd.  We need to ensure that the stack
	 is 64-bit aligned before we start to save iWMMXt registers,
	 and also before we start to create locals.  (A local variable
	 might be a double or long long which we will load/store using
	 an iWMMXt instruction).  Therefore we need to push another
	 ARM register, so that the stack will be 64-bit aligned.  We
	 try to avoid using the arg registers (r0 - r3) as they might be
	 used to pass values in a tail call.  */
      for (reg = 4; reg <= 12; reg++)
	if ((save_reg_mask & (1 << reg)) == 0)
	  break;

      if (reg <= 12)
	save_reg_mask |= (1 << reg);
      else
	{
	  cfun->machine->sibcall_blocked = 1;
	  save_reg_mask |= (1 << 3);
	}
    }

  return save_reg_mask;
}

/* Compute a bit mask of which registers need to be
   saved on the stack for the current function.  */
static unsigned long
thumb_compute_save_reg_mask (void)
{
  unsigned long mask;
  int reg;

  mask = 0;
  for (reg = 0; reg < 12; reg++)
    {
      if (regs_ever_live[reg] && !call_used_regs[reg])
	mask |= 1 << reg;
    }

  if (flag_pic && !TARGET_SINGLE_PIC_BASE)
    mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
  if (TARGET_SINGLE_PIC_BASE)
    mask &= ~(1 << arm_pic_register);

  /* lr will also be pushed if any lo regs are pushed.  */
  if (mask & 0xff || thumb_force_lr_save ())
    mask |= (1 << LR_REGNUM);

  /* Make sure we have a low work register if we need one.  */
  if (((mask & 0xff) == 0 && regs_ever_live[LAST_ARG_REGNUM])
      && ((mask & 0x0f00) || TARGET_BACKTRACE))
    mask |= 1 << LAST_LO_REGNUM;

  return mask;
}
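/* For illustration: a Thumb function whose only call-saved registers
   in use are r4 and r5 gets bits 4 and 5 plus the LR bit (lr is
   included because low registers are pushed), i.e. a prologue of
   "push {r4, r5, lr}".  */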

/* Return the number of bytes required to save VFP registers.  */
static int
arm_get_vfp_saved_size (void)
{
  unsigned int regno;
  int count;
  int saved;

  saved = 0;
  /* Space for saved VFP registers.  */
  if (TARGET_HARD_FLOAT && TARGET_VFP)
    {
      count = 0;
      for (regno = FIRST_VFP_REGNUM;
	   regno < LAST_VFP_REGNUM;
	   regno += 2)
	{
	  if ((!regs_ever_live[regno] || call_used_regs[regno])
	      && (!regs_ever_live[regno + 1] || call_used_regs[regno + 1]))
	    {
	      if (count > 0)
		{
		  /* Workaround ARM10 VFPr1 bug.  */
		  if (count == 2 && !arm_arch6)
		    count++;
		  saved += count * 8 + 4;
		}
	      count = 0;
	    }
	  else
	    count++;
	}
      if (count > 0)
	{
	  if (count == 2 && !arm_arch6)
	    count++;
	  saved += count * 8 + 4;
	}
    }
  return saved;
}
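/* For illustration: each contiguous block of live D registers costs
   count * 8 bytes plus 4 bytes for the FSTMX format word; two
   consecutive live D registers on a pre-ARMv6 core are padded to a
   count of three by the workaround above, giving 3 * 8 + 4 = 28
   bytes.  */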

/* Generate a function exit sequence.  If REALLY_RETURN is false, then do
   everything bar the final return instruction.  */
const char *
output_return_instruction (rtx operand, int really_return, int reverse)
{
  char conditional[10];
  char instr[100];
  int reg;
  unsigned long live_regs_mask;
  unsigned long func_type;
  arm_stack_offsets *offsets;

  func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      /* If this function was declared non-returning, and we have
	 found a tail call, then we have to trust that the called
	 function won't return.  */
      if (really_return)
	{
	  rtx ops[2];

	  /* Otherwise, trap an attempted return by aborting.  */
	  ops[0] = operand;
	  ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
				       : "abort");
	  assemble_external_libcall (ops[1]);
	  output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
	}

      return "";
    }

  if (current_function_calls_alloca && !really_return)
    abort ();

  sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');

  return_used_this_function = 1;

  live_regs_mask = arm_compute_save_reg_mask ();

  if (live_regs_mask)
    {
      const char * return_reg;

      /* If we do not have any special requirements for function exit
	 (eg interworking, or ISR) then we can load the return address
	 directly into the PC.  Otherwise we must load it into LR.  */
      if (really_return
	  && ! TARGET_INTERWORK)
	return_reg = reg_names[PC_REGNUM];
      else
	return_reg = reg_names[LR_REGNUM];

      if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
	{
	  /* There are three possible reasons for the IP register
	     being saved.  1) a stack frame was created, in which case
	     IP contains the old stack pointer, or 2) an ISR routine
	     corrupted it, or 3) it was saved to align the stack on
	     iWMMXt.  In case 1, restore IP into SP, otherwise just
	     restore IP.  */
	  if (frame_pointer_needed)
	    {
	      live_regs_mask &= ~ (1 << IP_REGNUM);
	      live_regs_mask |= (1 << SP_REGNUM);
	    }
	  else
	    {
	      if (! IS_INTERRUPT (func_type)
		  && ! TARGET_REALLY_IWMMXT)
		abort ();
	    }
	}

      /* On some ARM architectures it is faster to use LDR rather than
	 LDM to load a single register.  On other architectures, the
	 cost is the same.  In 26 bit mode, or for exception handlers,
	 we have to use LDM to load the PC so that the CPSR is also
	 restored.  */
      for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
	{
	  if (live_regs_mask == (unsigned int)(1 << reg))
	    break;
	}
      if (reg <= LAST_ARM_REGNUM
	  && (reg != LR_REGNUM
	      || ! really_return
	      || ! IS_INTERRUPT (func_type)))
	{
	  sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
		   (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
	}
      else
	{
	  char *p;
	  int first = 1;

	  /* Generate the load multiple instruction to restore the
	     registers.  Note we can get here, even if
	     frame_pointer_needed is true, but only if sp already
	     points to the base of the saved core registers.  */
	  if (live_regs_mask & (1 << SP_REGNUM))
	    {
	      unsigned HOST_WIDE_INT stack_adjust;

	      offsets = arm_get_frame_offsets ();
	      stack_adjust = offsets->outgoing_args - offsets->saved_regs;
	      if (stack_adjust != 0 && stack_adjust != 4)
		abort ();

	      if (stack_adjust && arm_arch5)
		sprintf (instr, "ldm%sib\t%%|sp, {", conditional);
	      else
		{
		  /* If we can't use ldmib (SA110 bug), then try to pop r3
		     instead.  */
		  if (stack_adjust)
		    live_regs_mask |= 1 << 3;
		  sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
		}
	    }
	  else
	    sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);

	  p = instr + strlen (instr);

	  for (reg = 0; reg <= SP_REGNUM; reg++)
	    if (live_regs_mask & (1 << reg))
	      {
		int l = strlen (reg_names[reg]);

		if (first)
		  first = 0;
		else
		  {
		    memcpy (p, ", ", 2);
		    p += 2;
		  }

		memcpy (p, "%|", 2);
		memcpy (p + 2, reg_names[reg], l);
		p += l + 2;
	      }

	  if (live_regs_mask & (1 << LR_REGNUM))
	    {
	      sprintf (p, "%s%%|%s}", first ? "" : ", ", return_reg);
	      /* If returning from an interrupt, restore the CPSR.  */
	      if (IS_INTERRUPT (func_type))
		strcat (p, "^");
	    }
	  else
	    strcpy (p, "}");
	}

      output_asm_insn (instr, & operand);

      /* See if we need to generate an extra instruction to
	 perform the actual function return.  */
      if (really_return
	  && func_type != ARM_FT_INTERWORKED
	  && (live_regs_mask & (1 << LR_REGNUM)) != 0)
	{
	  /* The return has already been handled
	     by loading the LR into the PC.  */
	  really_return = 0;
	}
    }

  if (really_return)
    {
      switch ((int) ARM_FUNC_TYPE (func_type))
	{
	case ARM_FT_ISR:
	case ARM_FT_FIQ:
	  sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
	  break;

	case ARM_FT_INTERWORKED:
	  sprintf (instr, "bx%s\t%%|lr", conditional);
	  break;

	case ARM_FT_EXCEPTION:
	  sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
	  break;

	default:
	  /* Use bx if it's available.  */
	  if (arm_arch5 || arm_arch4t)
	    sprintf (instr, "bx%s\t%%|lr", conditional);
	  else
	    sprintf (instr, "mov%s\t%%|pc, %%|lr", conditional);
	  break;
	}

      output_asm_insn (instr, & operand);
    }

  return "";
}
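/* For illustration: a normal (non-interworking) function that saved
   r4, r5 and lr returns through the single instruction

	ldmfd	sp!, {r4, r5, pc}

   while a function with no saved registers on an ARMv5 core exits
   with a plain "bx lr".  */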

/* Write the function name into the code section, directly preceding
   the function prologue.

   Code will be output similar to this:
     t0
	 .ascii "arm_poke_function_name", 0
	 .align
     t1
	 .word 0xff000000 + (t1 - t0)
     arm_poke_function_name
	 mov     ip, sp
	 stmfd   sp!, {fp, ip, lr, pc}
	 sub     fp, ip, #4

   When performing a stack backtrace, code can inspect the value
   of 'pc' stored at 'fp' + 0.  If the trace function then looks
   at location pc - 12 and the top 8 bits are set, then we know
   that there is a function name embedded immediately preceding this
   location, whose length is (pc[-3] & ~0xff000000).

   We assume that pc is declared as a pointer to an unsigned long.

   It is of no benefit to output the function name if we are assembling
   a leaf function.  These function types will not contain a stack
   backtrace structure, therefore it is not possible to determine the
   function name.  */
void
arm_poke_function_name (FILE *stream, const char *name)
{
  unsigned long alignlength;
  unsigned long length;
  rtx x;

  length = strlen (name) + 1;
  alignlength = ROUND_UP_WORD (length);

  ASM_OUTPUT_ASCII (stream, name, length);
  ASM_OUTPUT_ALIGN (stream, 2);
  x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
  assemble_aligned_integer (UNITS_PER_WORD, x);
}

/* Place some comments into the assembler stream
   describing the current function.  */
static void
arm_output_function_prologue (FILE *f, HOST_WIDE_INT frame_size)
{
  unsigned long func_type;

  if (!TARGET_ARM)
    {
      thumb_output_function_prologue (f, frame_size);
      return;
    }

  /* Sanity check.  */
  if (arm_ccfsm_state || arm_target_insn)
    abort ();

  func_type = arm_current_func_type ();

  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    default:
    case ARM_FT_NORMAL:
      break;
    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\t%@ Function supports interworking.\n");
      break;
    case ARM_FT_ISR:
      asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
      break;
    case ARM_FT_FIQ:
      asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
      break;
    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
      break;
    }

  if (IS_NAKED (func_type))
    asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");

  if (IS_VOLATILE (func_type))
    asm_fprintf (f, "\t%@ Volatile: function does not return.\n");

  if (IS_NESTED (func_type))
    asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");

  asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %wd\n",
	       current_function_args_size,
	       current_function_pretend_args_size, frame_size);

  asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
	       frame_pointer_needed,
	       cfun->machine->uses_anonymous_args);

  if (cfun->machine->lr_save_eliminated)
    asm_fprintf (f, "\t%@ link register save eliminated.\n");

  if (current_function_calls_eh_return)
    asm_fprintf (f, "\t@ Calls __builtin_eh_return.\n");

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
#endif

  return_used_this_function = 0;
}

const char *
arm_output_epilogue (rtx sibling)
{
  int reg;
  unsigned long saved_regs_mask;
  unsigned long func_type;
  /* Floats_offset is the offset from the "virtual" frame.  In an APCS
     frame that is $fp + 4 for a non-variadic function.  */
  int floats_offset = 0;
  rtx operands[3];
  FILE * f = asm_out_file;
  unsigned int lrm_count = 0;
  int really_return = (sibling == NULL);
  int start_reg;
  arm_stack_offsets *offsets;

  /* If we have already generated the return instruction
     then it is futile to generate anything else.  */
  if (use_return_insn (FALSE, sibling) && return_used_this_function)
    return "";

  func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* Naked functions don't have epilogues.  */
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      rtx op;

      /* A volatile function should never return.  Call abort.  */
      op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
      assemble_external_libcall (op);
      output_asm_insn ("bl\t%a0", &op);

      return "";
    }

  if (current_function_calls_eh_return
      && ! really_return)
    /* If we are throwing an exception, then we really must
       be doing a return, so we can't tail-call.  */
    abort ();

  offsets = arm_get_frame_offsets ();
  saved_regs_mask = arm_compute_save_reg_mask ();

  if (TARGET_IWMMXT)
    lrm_count = bit_count (saved_regs_mask);

  floats_offset = offsets->saved_args;
  /* Compute how far away the floats will be.  */
  for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
    if (saved_regs_mask & (1 << reg))
      floats_offset += 4;

  if (frame_pointer_needed)
    {
      /* This variable is for the Virtual Frame Pointer, not VFP regs.  */
      int vfp_offset = offsets->frame;

      if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
	{
	  for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      {
		floats_offset += 12;
		asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
			     reg, FP_REGNUM, floats_offset - vfp_offset);
	      }
	}
      else
	{
	  start_reg = LAST_FPA_REGNUM;

	  for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  floats_offset += 12;

		  /* We can't unstack more than four registers at once.  */
		  if (start_reg - reg == 3)
		    {
		      asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
				   reg, FP_REGNUM, floats_offset - vfp_offset);
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (reg != start_reg)
		    asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
				 reg + 1, start_reg - reg,
				 FP_REGNUM, floats_offset - vfp_offset);
		  start_reg = reg - 1;
		}
	    }

	  /* Just in case the last register checked also needs unstacking.  */
	  if (reg != start_reg)
	    asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
			 reg + 1, start_reg - reg,
			 FP_REGNUM, floats_offset - vfp_offset);
	}

      if (TARGET_HARD_FLOAT && TARGET_VFP)
	{
	  int saved_size;

	  /* The fldmx insn does not have base+offset addressing modes,
	     so we use IP to hold the address.  */
	  saved_size = arm_get_vfp_saved_size ();

	  if (saved_size > 0)
	    {
	      floats_offset += saved_size;
	      asm_fprintf (f, "\tsub\t%r, %r, #%d\n", IP_REGNUM,
			   FP_REGNUM, floats_offset - vfp_offset);
	    }
	  start_reg = FIRST_VFP_REGNUM;
	  for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
	    {
	      if ((!regs_ever_live[reg] || call_used_regs[reg])
		  && (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
		{
		  if (start_reg != reg)
		    arm_output_fldmx (f, IP_REGNUM,
				      (start_reg - FIRST_VFP_REGNUM) / 2,
				      (reg - start_reg) / 2);
		  start_reg = reg + 2;
		}
	    }
	  if (start_reg != reg)
	    arm_output_fldmx (f, IP_REGNUM,
			      (start_reg - FIRST_VFP_REGNUM) / 2,
			      (reg - start_reg) / 2);
	}

      if (TARGET_IWMMXT)
	{
	  /* The frame pointer is guaranteed to be non-double-word aligned.
	     This is because it is set to (old_stack_pointer - 4) and the
	     old_stack_pointer was double word aligned.  Thus the offset to
	     the iWMMXt registers to be loaded must also be non-double-word
	     sized, so that the resultant address *is* double-word aligned.
	     We can ignore floats_offset since that was already included in
	     the live_regs_mask.  */
	  lrm_count += (lrm_count % 2 ? 2 : 1);

	  for (reg = LAST_IWMMXT_REGNUM; reg >= FIRST_IWMMXT_REGNUM; reg--)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      {
		asm_fprintf (f, "\twldrd\t%r, [%r, #-%d]\n",
			     reg, FP_REGNUM, lrm_count * 4);
		lrm_count += 2;
	      }
	}

      /* saved_regs_mask should contain the IP, which at the time of stack
	 frame generation actually contains the old stack pointer.  So a
	 quick way to unwind the stack is just pop the IP register directly
	 into the stack pointer.  */
      if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
	abort ();
      saved_regs_mask &= ~ (1 << IP_REGNUM);
      saved_regs_mask |= (1 << SP_REGNUM);

      /* There are two registers left in saved_regs_mask - LR and PC.  We
	 only need to restore the LR register (the return address), but to
	 save time we can load it directly into the PC, unless we need a
	 special function exit sequence, or we are not really returning.  */
      if (really_return
	  && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && !current_function_calls_eh_return)
	/* Delete the LR from the register mask, so that the LR on
	   the stack is loaded into the PC in the register mask.  */
	saved_regs_mask &= ~ (1 << LR_REGNUM);
      else
	saved_regs_mask &= ~ (1 << PC_REGNUM);

      /* We must use SP as the base register, because SP is one of the
	 registers being restored.  If an interrupt or page fault
	 happens in the ldm instruction, the SP might or might not
	 have been restored.  That would be bad, as then SP will no
	 longer indicate the safe area of stack, and we can get stack
	 corruption.  Using SP as the base register means that it will
	 be reset correctly to the original value, should an interrupt
	 occur.  If the stack pointer already points at the right
	 place, then omit the subtraction.  */
      if (offsets->outgoing_args != (1 + (int) bit_count (saved_regs_mask))
	  || current_function_calls_alloca)
	asm_fprintf (f, "\tsub\t%r, %r, #%d\n", SP_REGNUM, FP_REGNUM,
		     4 * bit_count (saved_regs_mask));
      print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);

      if (IS_INTERRUPT (func_type))
	/* Interrupt handlers will have pushed the
	   IP onto the stack, so restore it now.  */
	print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
    }
  else
    {
      /* Restore stack pointer if necessary.  */
      if (offsets->outgoing_args != offsets->saved_regs)
	{
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = GEN_INT (offsets->outgoing_args - offsets->saved_regs);
	  output_add_immediate (operands);
	}

      if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
	{
	  for (reg = FIRST_FPA_REGNUM; reg <= LAST_FPA_REGNUM; reg++)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
			   reg, SP_REGNUM);
	}
      else
	{
	  start_reg = FIRST_FPA_REGNUM;

	  for (reg = FIRST_FPA_REGNUM; reg <= LAST_FPA_REGNUM; reg++)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  if (reg - start_reg == 3)
		    {
		      asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
				   start_reg, SP_REGNUM);
		      start_reg = reg + 1;
		    }
		}
	      else
		{
		  if (reg != start_reg)
		    asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
				 start_reg, reg - start_reg,
				 SP_REGNUM);

		  start_reg = reg + 1;
		}
	    }

	  /* Just in case the last register checked also needs unstacking.  */
	  if (reg != start_reg)
	    asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
			 start_reg, reg - start_reg, SP_REGNUM);
	}

      if (TARGET_HARD_FLOAT && TARGET_VFP)
	{
	  start_reg = FIRST_VFP_REGNUM;
	  for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
	    {
	      if ((!regs_ever_live[reg] || call_used_regs[reg])
		  && (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
		{
		  if (start_reg != reg)
		    arm_output_fldmx (f, SP_REGNUM,
				      (start_reg - FIRST_VFP_REGNUM) / 2,
				      (reg - start_reg) / 2);
		  start_reg = reg + 2;
		}
	    }
	  if (start_reg != reg)
	    arm_output_fldmx (f, SP_REGNUM,
			      (start_reg - FIRST_VFP_REGNUM) / 2,
			      (reg - start_reg) / 2);
	}
      if (TARGET_IWMMXT)
	for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
	  if (regs_ever_live[reg] && !call_used_regs[reg])
	    asm_fprintf (f, "\twldrd\t%r, [%r], #8\n", reg, SP_REGNUM);

      /* If we can, restore the LR into the PC.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && really_return
	  && current_function_pretend_args_size == 0
	  && saved_regs_mask & (1 << LR_REGNUM)
	  && !current_function_calls_eh_return)
	{
	  saved_regs_mask &= ~ (1 << LR_REGNUM);
	  saved_regs_mask |= (1 << PC_REGNUM);
	}

      /* Load the registers off the stack.  If we only have one register
	 to load use the LDR instruction - it is faster.  */
      if (saved_regs_mask == (1 << LR_REGNUM))
	{
	  asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
	}
      else if (saved_regs_mask)
	{
	  if (saved_regs_mask & (1 << SP_REGNUM))
	    /* Note - write back to the stack register is not enabled
	       (ie "ldmfd sp!...").  We know that the stack pointer is
	       in the list of registers and if we add writeback the
	       instruction becomes UNPREDICTABLE.  */
	    print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
	  else
	    print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
	}

      if (current_function_pretend_args_size)
	{
	  /* Unwind the pre-pushed regs.  */
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = GEN_INT (current_function_pretend_args_size);
	  output_add_immediate (operands);
	}
    }

  /* We may have already restored PC directly from the stack.  */
  if (!really_return || saved_regs_mask & (1 << PC_REGNUM))
    return "";

  /* Stack adjustment for exception handler.  */
  if (current_function_calls_eh_return)
    asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
		 ARM_EH_STACKADJ_REGNUM);

  /* Generate the return instruction.  */
  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    case ARM_FT_ISR:
    case ARM_FT_FIQ:
      asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
      break;

    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      break;

    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
      break;

    default:
      if (arm_arch5 || arm_arch4t)
	asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
      else
	asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      break;
    }

  return "";
}

static void
arm_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
			      HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
  arm_stack_offsets *offsets;

  if (TARGET_THUMB)
    {
      /* ??? Probably not safe to set this here, since it assumes that a
	 function will be emitted as assembly immediately after we generate
	 RTL for it.  This does not happen for inline functions.  */
      return_used_this_function = 0;
    }
  else
    {
      /* We need to take into account any stack-frame rounding.  */
      offsets = arm_get_frame_offsets ();

      if (use_return_insn (FALSE, NULL)
	  && return_used_this_function
	  && offsets->saved_regs != offsets->outgoing_args
	  && !frame_pointer_needed)
	abort ();

      /* Reset the ARM-specific per-function variables.  */
      after_arm_reorg = 0;
    }
}

/* Generate and emit an insn that we will recognize as a push_multi.
   Unfortunately, since this insn does not reflect very well the actual
   semantics of the operation, we need to annotate the insn for the benefit
   of DWARF2 frame unwind information.  */
static rtx
emit_multi_reg_push (int mask)
{
  int num_regs = 0;
  int num_dwarf_regs;
  int i, j;
  rtx par;
  rtx dwarf;
  int dwarf_par_index;
  rtx tmp, reg;

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  /* We don't record the PC in the dwarf frame information.  */
  num_dwarf_regs = num_regs;
  if (mask & (1 << PC_REGNUM))
    num_dwarf_regs--;

  /* For the body of the insn we are going to generate an UNSPEC in
     parallel with several USEs.  This allows the insn to be recognized
     by the push_multi pattern in the arm.md file.  The insn looks
     something like this:

       (parallel [
	   (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
		(unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
	   (use (reg:SI 11 fp))
	   (use (reg:SI 12 ip))
	   (use (reg:SI 14 lr))
	   (use (reg:SI 15 pc))
	])

     For the frame note however, we try to be more explicit and actually
     show each register being stored into the stack frame, plus a (single)
     decrement of the stack pointer.  We do it this way in order to be
     friendly to the stack unwinding code, which only wants to see a single
     stack decrement per instruction.  The RTL we generate for the note looks
     something like this:

      (sequence [
	   (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
	   (set (mem:SI (reg:SI sp)) (reg:SI r4))
	   (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
	   (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
	   (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
	])

      This sequence is used both by the code to support stack unwinding for
      exception handlers and the code to generate dwarf2 frame debugging.  */

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
  dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
  dwarf_par_index = 1;

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   gen_rtx_MEM (BLKmode,
					gen_rtx_PRE_DEC (BLKmode,
							 stack_pointer_rtx)),
			   gen_rtx_UNSPEC (BLKmode,
					   gen_rtvec (1, reg),
					   UNSPEC_PUSH_MULT));

	  if (i != PC_REGNUM)
	    {
	      tmp = gen_rtx_SET (VOIDmode,
				 gen_rtx_MEM (SImode, stack_pointer_rtx),
				 reg);
	      RTX_FRAME_RELATED_P (tmp) = 1;
	      XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
	      dwarf_par_index++;
	    }

	  break;
	}
    }

  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);

	  if (i != PC_REGNUM)
	    {
	      tmp = gen_rtx_SET (VOIDmode,
				 gen_rtx_MEM (SImode,
					      plus_constant (stack_pointer_rtx,
							     4 * j)),
				 reg);
	      RTX_FRAME_RELATED_P (tmp) = 1;
	      XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
	    }

	  j++;
	}
    }

  par = emit_insn (par);

  tmp = gen_rtx_SET (SImode,
		     stack_pointer_rtx,
		     gen_rtx_PLUS (SImode,
				   stack_pointer_rtx,
				   GEN_INT (-4 * num_regs)));
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 0) = tmp;

  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}
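/* For illustration: a call with a mask covering {r4, fp, ip, lr, pc}
   is matched by the push_multi pattern and assembles to
   "stmfd sp!, {r4, fp, ip, lr, pc}", while the attached
   REG_FRAME_RELATED_EXPR note describes the same effect as one
   20-byte stack decrement followed by four individual register
   stores (the PC is deliberately not recorded).  */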

static rtx
emit_sfm (int base_reg, int count)
{
  rtx par;
  rtx dwarf;
  rtx tmp, reg;
  int i;

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (count + 1));

  reg = gen_rtx_REG (XFmode, base_reg++);

  XVECEXP (par, 0, 0)
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (BLKmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   gen_rtx_UNSPEC (BLKmode,
				   gen_rtvec (1, reg),
				   UNSPEC_PUSH_MULT));
  tmp = gen_rtx_SET (VOIDmode,
		     gen_rtx_MEM (XFmode, stack_pointer_rtx), reg);
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 1) = tmp;

  for (i = 1; i < count; i++)
    {
      reg = gen_rtx_REG (XFmode, base_reg++);
      XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);

      tmp = gen_rtx_SET (VOIDmode,
			 gen_rtx_MEM (XFmode,
				      plus_constant (stack_pointer_rtx,
						     i * 12)),
			 reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (dwarf, 0, i + 1) = tmp;
    }

  tmp = gen_rtx_SET (VOIDmode,
		     stack_pointer_rtx,
		     gen_rtx_PLUS (SImode,
				   stack_pointer_rtx,
				   GEN_INT (-12 * count)));
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 0) = tmp;

  par = emit_insn (par);
  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}

/* Return true if the current function needs to save/restore LR.  */

static bool
thumb_force_lr_save (void)
{
  return !cfun->machine->lr_save_eliminated
	 && (!leaf_function_p ()
	     || thumb_far_jump_used_p ()
	     || regs_ever_live [LR_REGNUM]);
}

/* Compute the distance from register FROM to register TO.
   These can be the arg pointer (26), the soft frame pointer (25),
   the stack pointer (13) or the hard frame pointer (11).
   In thumb mode r7 is used as the soft frame pointer, if needed.
   Typical stack layout looks like this:

       old stack pointer -> |    |
			     ----
			    |    | \
			    |    |   saved arguments for
			    |    |   vararg functions
			    |    | /
			      --
   hard FP & arg pointer -> |    | \
			    |    |   stack
			    |    |   frame
			    |    | /
			      --
			    |    | \
			    |    |   call saved
			    |    |   registers
      soft frame pointer -> |    | /
			      --
			    |    | \
			    |    |   local
			    |    |   variables
			    |    | /
			      --
			    |    | \
			    |    |   outgoing
			    |    |   arguments
   current stack pointer -> |    | /
			      --

   For a given function some or all of these stack components
   may not be needed, giving rise to the possibility of
   eliminating some of the registers.

   The values returned by this function must reflect the behavior
   of arm_expand_prologue() and arm_compute_save_reg_mask().

   The sign of the number returned reflects the direction of stack
   growth, so the values are positive for all eliminations except
   from the soft frame pointer to the hard frame pointer.

   SFP may point just inside the local variables block to ensure correct
   alignment.  */


/* Calculate stack offsets.  These are used to calculate register elimination
   offsets and in prologue/epilogue code.  */

static arm_stack_offsets *
arm_get_frame_offsets (void)
{
  struct arm_stack_offsets *offsets;
  unsigned long func_type;
  int leaf;
  int saved;
  HOST_WIDE_INT frame_size;

  offsets = &cfun->machine->stack_offsets;

  /* We need to know if we are a leaf function.  Unfortunately, it
     is possible to be called after start_sequence has been called,
     which causes get_insns to return the insns for the sequence,
     not the function, which will cause leaf_function_p to return
     the incorrect result.

     To work around this, we cache the computed frame size.  This
     works because we will only be calling RTL expanders that need
     to know about leaf functions once reload has completed, and the
     frame size cannot be changed after that time, so we can safely
     use the cached value.  */

  if (reload_completed)
    return offsets;

  /* Initially this is the size of the local variables.  It will be
     translated into an offset once we have determined the size of
     preceding data.  */
  frame_size = ROUND_UP_WORD (get_frame_size ());

  leaf = leaf_function_p ();

  /* Space for variadic functions.  */
  offsets->saved_args = current_function_pretend_args_size;

  offsets->frame = offsets->saved_args + (frame_pointer_needed ? 4 : 0);

  if (TARGET_ARM)
    {
      unsigned int regno;

      saved = bit_count (arm_compute_save_reg_mask ()) * 4;

      /* We know that SP will be doubleword aligned on entry, and we must
	 preserve that condition at any subroutine call.  We also require the
	 soft frame pointer to be doubleword aligned.  */

      if (TARGET_REALLY_IWMMXT)
	{
	  /* Check for the call-saved iWMMXt registers.  */
	  for (regno = FIRST_IWMMXT_REGNUM;
	       regno <= LAST_IWMMXT_REGNUM;
	       regno++)
	    if (regs_ever_live [regno] && ! call_used_regs [regno])
	      saved += 8;
	}

      func_type = arm_current_func_type ();
      if (! IS_VOLATILE (func_type))
	{
	  /* Space for saved FPA registers.  */
	  for (regno = FIRST_FPA_REGNUM; regno <= LAST_FPA_REGNUM; regno++)
	    if (regs_ever_live[regno] && ! call_used_regs[regno])
	      saved += 12;

	  /* Space for saved VFP registers.  */
	  if (TARGET_HARD_FLOAT && TARGET_VFP)
	    saved += arm_get_vfp_saved_size ();
	}
    }
  else /* TARGET_THUMB */
    {
      saved = bit_count (thumb_compute_save_reg_mask ()) * 4;
      if (TARGET_BACKTRACE)
	saved += 16;
    }

  /* Saved registers include the stack frame.  */
  offsets->saved_regs = offsets->saved_args + saved;
  offsets->soft_frame = offsets->saved_regs;
  /* A leaf function does not need any stack alignment if it has nothing
     on the stack.  */
  if (leaf && frame_size == 0)
    {
      offsets->outgoing_args = offsets->soft_frame;
      return offsets;
    }

  /* Ensure SFP has the correct alignment.  */
  if (ARM_DOUBLEWORD_ALIGN
      && (offsets->soft_frame & 7))
    offsets->soft_frame += 4;

  offsets->outgoing_args = offsets->soft_frame + frame_size
			   + current_function_outgoing_args_size;

  if (ARM_DOUBLEWORD_ALIGN)
    {
      /* Ensure SP remains doubleword aligned.  */
      if (offsets->outgoing_args & 7)
	offsets->outgoing_args += 4;
      if (offsets->outgoing_args & 7)
	abort ();
    }

  return offsets;
}
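/* For illustration: an ARM function that saves {r4, lr}, has 16 bytes
   of local variables and no pretend or outgoing arguments gets
   saved_args = 0, saved_regs = 8, soft_frame = 8 (already 8-byte
   aligned) and outgoing_args = 24, which also satisfies the final
   doubleword-alignment check.  */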

/* Calculate the relative offsets for the different stack pointers.  Positive
   offsets are in the direction of stack growth.  */

HOST_WIDE_INT
arm_compute_initial_elimination_offset (unsigned int from, unsigned int to)
{
  arm_stack_offsets *offsets;

  offsets = arm_get_frame_offsets ();

  /* OK, now we have enough information to compute the distances.
     There must be an entry in these switch tables for each pair
     of registers in ELIMINABLE_REGS, even if some of the entries
     seem to be redundant or useless.  */
  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
	{
	case THUMB_HARD_FRAME_POINTER_REGNUM:
	  return 0;

	case FRAME_POINTER_REGNUM:
	  /* This is the reverse of the soft frame pointer
	     to hard frame pointer elimination below.  */
	  return offsets->soft_frame - offsets->saved_args;

	case ARM_HARD_FRAME_POINTER_REGNUM:
	  /* If there is no stack frame then the hard
	     frame pointer and the arg pointer coincide.  */
	  if (offsets->frame == offsets->saved_regs)
	    return 0;
	  /* FIXME:  Not sure about this.  Maybe we should always return 0 ?  */
	  return (frame_pointer_needed
		  && cfun->static_chain_decl != NULL
		  && ! cfun->machine->uses_anonymous_args) ? 4 : 0;

	case STACK_POINTER_REGNUM:
	  /* If nothing has been pushed on the stack at all
	     then this will return -4.  This *is* correct!  */
	  return offsets->outgoing_args - (offsets->saved_args + 4);

	default:
	  abort ();
	}
      break;

    case FRAME_POINTER_REGNUM:
      switch (to)
	{
	case THUMB_HARD_FRAME_POINTER_REGNUM:
	  return 0;

	case ARM_HARD_FRAME_POINTER_REGNUM:
	  /* The hard frame pointer points to the top entry in the
	     stack frame.  The soft frame pointer to the bottom entry
	     in the stack frame.  If there is no stack frame at all,
	     then they are identical.  */
	  return offsets->frame - offsets->soft_frame;

	case STACK_POINTER_REGNUM:
	  return offsets->outgoing_args - offsets->soft_frame;

	default:
	  abort ();
	}
      break;

    default:
      /* You cannot eliminate from the stack pointer.
	 In theory you could eliminate from the hard frame
	 pointer to the stack pointer, but this will never
	 happen, since if a stack frame is not needed the
	 hard frame pointer will never be used.  */
      abort ();
    }
}
10393
0977774b 10394
6d3d9133 10395/* Generate the prologue instructions for entry into an ARM function. */
e2c671ba 10396void
e32bac5b 10397arm_expand_prologue (void)
e2c671ba
RE
10398{
10399 int reg;
6d3d9133 10400 rtx amount;
2c849145 10401 rtx insn;
68dfd979 10402 rtx ip_rtx;
6d3d9133
NC
10403 unsigned long live_regs_mask;
10404 unsigned long func_type;
68dfd979 10405 int fp_offset = 0;
095bb276 10406 int saved_pretend_args = 0;
5848830f 10407 int saved_regs = 0;
095bb276 10408 unsigned int args_to_push;
5848830f 10409 arm_stack_offsets *offsets;
d3236b4d 10410
6d3d9133 10411 func_type = arm_current_func_type ();
e2c671ba 10412
31fdb4d5 10413 /* Naked functions don't have prologues. */
6d3d9133 10414 if (IS_NAKED (func_type))
31fdb4d5
DE
10415 return;
10416
095bb276
NC
10417 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
10418 args_to_push = current_function_pretend_args_size;
10419
6d3d9133
NC
 10420 /* Compute which registers we will have to save onto the stack. */
10421 live_regs_mask = arm_compute_save_reg_mask ();
e2c671ba 10422
68dfd979 10423 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
d3236b4d 10424
e2c671ba
RE
10425 if (frame_pointer_needed)
10426 {
7b8b8ade
NC
10427 if (IS_INTERRUPT (func_type))
10428 {
10429 /* Interrupt functions must not corrupt any registers.
 10430 Creating a frame pointer, however, corrupts the IP
10431 register, so we must push it first. */
10432 insn = emit_multi_reg_push (1 << IP_REGNUM);
121308d4
NC
10433
10434 /* Do not set RTX_FRAME_RELATED_P on this insn.
10435 The dwarf stack unwinding code only wants to see one
10436 stack decrement per function, and this is not it. If
10437 this instruction is labeled as being part of the frame
10438 creation sequence then dwarf2out_frame_debug_expr will
10439 abort when it encounters the assignment of IP to FP
10440 later on, since the use of SP here establishes SP as
10441 the CFA register and not IP.
10442
10443 Anyway this instruction is not really part of the stack
10444 frame creation although it is part of the prologue. */
7b8b8ade
NC
10445 }
10446 else if (IS_NESTED (func_type))
68dfd979
NC
10447 {
 10448 /* The static chain register is the same as the IP register,
 10449 which is used as a scratch register during stack frame creation.
 10450 To get around this we need to find somewhere to store IP
 10451 whilst the frame is being created. We try the following
 10452 places in order:
10453
6d3d9133 10454 1. The last argument register.
68dfd979
NC
10455 2. A slot on the stack above the frame. (This only
10456 works if the function is not a varargs function).
095bb276
NC
10457 3. Register r3, after pushing the argument registers
10458 onto the stack.
6d3d9133 10459
34ce3d7b
JM
10460 Note - we only need to tell the dwarf2 backend about the SP
10461 adjustment in the second variant; the static chain register
10462 doesn't need to be unwound, as it doesn't contain a value
10463 inherited from the caller. */
d3236b4d 10464
68dfd979
NC
10465 if (regs_ever_live[3] == 0)
10466 {
10467 insn = gen_rtx_REG (SImode, 3);
10468 insn = gen_rtx_SET (SImode, insn, ip_rtx);
d3236b4d 10469 insn = emit_insn (insn);
68dfd979 10470 }
095bb276 10471 else if (args_to_push == 0)
68dfd979 10472 {
34ce3d7b 10473 rtx dwarf;
68dfd979
NC
10474 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
10475 insn = gen_rtx_MEM (SImode, insn);
10476 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
10477 insn = emit_insn (insn);
34ce3d7b 10478
68dfd979 10479 fp_offset = 4;
34ce3d7b
JM
10480
10481 /* Just tell the dwarf backend that we adjusted SP. */
10482 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
10483 gen_rtx_PLUS (SImode, stack_pointer_rtx,
10484 GEN_INT (-fp_offset)));
10485 RTX_FRAME_RELATED_P (insn) = 1;
10486 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10487 dwarf, REG_NOTES (insn));
68dfd979
NC
10488 }
10489 else
095bb276
NC
10490 {
10491 /* Store the args on the stack. */
3cb66fd7 10492 if (cfun->machine->uses_anonymous_args)
095bb276
NC
10493 insn = emit_multi_reg_push
10494 ((0xf0 >> (args_to_push / 4)) & 0xf);
10495 else
10496 insn = emit_insn
10497 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10498 GEN_INT (- args_to_push)));
10499
10500 RTX_FRAME_RELATED_P (insn) = 1;
10501
10502 saved_pretend_args = 1;
10503 fp_offset = args_to_push;
10504 args_to_push = 0;
10505
10506 /* Now reuse r3 to preserve IP. */
10507 insn = gen_rtx_REG (SImode, 3);
10508 insn = gen_rtx_SET (SImode, insn, ip_rtx);
10509 (void) emit_insn (insn);
10510 }
68dfd979
NC
10511 }
10512
68dfd979
NC
10513 if (fp_offset)
10514 {
10515 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
10516 insn = gen_rtx_SET (SImode, ip_rtx, insn);
10517 }
10518 else
10519 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
10520
6d3d9133 10521 insn = emit_insn (insn);
8e56560e 10522 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
10523 }
10524
095bb276 10525 if (args_to_push)
e2c671ba 10526 {
6d3d9133 10527 /* Push the argument registers, or reserve space for them. */
3cb66fd7 10528 if (cfun->machine->uses_anonymous_args)
2c849145 10529 insn = emit_multi_reg_push
095bb276 10530 ((0xf0 >> (args_to_push / 4)) & 0xf);
e2c671ba 10531 else
2c849145
JM
10532 insn = emit_insn
10533 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
095bb276 10534 GEN_INT (- args_to_push)));
2c849145 10535 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
10536 }
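   /* A worked example of the push mask above (illustrative): with
      args_to_push == 8, i.e. two words of pretend args, the expression
      (0xf0 >> (8 / 4)) & 0xf == 0x3c & 0xf == 0xc, selecting r2 and
      r3, the last two of the four argument registers r0-r3.  */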
10537
06bea5aa
NC
10538 /* If this is an interrupt service routine, and the link register
10539 is going to be pushed, and we are not creating a stack frame,
10540 (which would involve an extra push of IP and a pop in the epilogue)
10541 subtracting four from LR now will mean that the function return
10542 can be done with a single instruction. */
3a7731fd 10543 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
06bea5aa
NC
10544 && (live_regs_mask & (1 << LR_REGNUM)) != 0
10545 && ! frame_pointer_needed)
10546 emit_insn (gen_rtx_SET (SImode,
10547 gen_rtx_REG (SImode, LR_REGNUM),
10548 gen_rtx_PLUS (SImode,
10549 gen_rtx_REG (SImode, LR_REGNUM),
10550 GEN_INT (-4))));
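   /* Illustrative effect of the LR adjustment above (assuming a classic
      ARM exception return sequence): without it the epilogue would need
	 ldmfd	sp!, {..., lr}
	 subs	pc, lr, #4
      whereas with LR pre-decremented it can return in one instruction:
	 ldmfd	sp!, {..., pc}^  */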
3a7731fd 10551
e2c671ba
RE
10552 if (live_regs_mask)
10553 {
2c849145 10554 insn = emit_multi_reg_push (live_regs_mask);
5848830f 10555 saved_regs += bit_count (live_regs_mask) * 4;
2c849145 10556 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba 10557 }
d5b7b3ae 10558
5a9335ef 10559 if (TARGET_IWMMXT)
01d4c813 10560 for (reg = LAST_IWMMXT_REGNUM; reg >= FIRST_IWMMXT_REGNUM; reg--)
5a9335ef
NC
10561 if (regs_ever_live[reg] && ! call_used_regs [reg])
10562 {
10563 insn = gen_rtx_PRE_DEC (V2SImode, stack_pointer_rtx);
10564 insn = gen_rtx_MEM (V2SImode, insn);
10565 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
10566 gen_rtx_REG (V2SImode, reg)));
10567 RTX_FRAME_RELATED_P (insn) = 1;
5848830f 10568 saved_regs += 8;
5a9335ef
NC
10569 }
10570
6d3d9133 10571 if (! IS_VOLATILE (func_type))
b111229a 10572 {
9b66ebb1
PB
10573 int start_reg;
10574
29ad9694
RE
10575 /* Save any floating point call-saved registers used by this
10576 function. */
10577 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
b111229a 10578 {
9b66ebb1 10579 for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
5895f793 10580 if (regs_ever_live[reg] && !call_used_regs[reg])
2c849145
JM
10581 {
10582 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
10583 insn = gen_rtx_MEM (XFmode, insn);
10584 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
10585 gen_rtx_REG (XFmode, reg)));
10586 RTX_FRAME_RELATED_P (insn) = 1;
5848830f 10587 saved_regs += 12;
2c849145 10588 }
b111229a
RE
10589 }
10590 else
10591 {
9b66ebb1 10592 start_reg = LAST_FPA_REGNUM;
b111229a 10593
9b66ebb1 10594 for (reg = LAST_FPA_REGNUM; reg >= FIRST_FPA_REGNUM; reg--)
b111229a 10595 {
5895f793 10596 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
10597 {
10598 if (start_reg - reg == 3)
10599 {
2c849145
JM
10600 insn = emit_sfm (reg, 4);
10601 RTX_FRAME_RELATED_P (insn) = 1;
4b763d77 10602 saved_regs += 48;
b111229a
RE
10603 start_reg = reg - 1;
10604 }
10605 }
10606 else
10607 {
10608 if (start_reg != reg)
2c849145
JM
10609 {
10610 insn = emit_sfm (reg + 1, start_reg - reg);
10611 RTX_FRAME_RELATED_P (insn) = 1;
7aebacee 10612 saved_regs += (start_reg - reg) * 12;
2c849145 10613 }
b111229a
RE
10614 start_reg = reg - 1;
10615 }
10616 }
10617
10618 if (start_reg != reg)
2c849145
JM
10619 {
10620 insn = emit_sfm (reg + 1, start_reg - reg);
7aebacee 10621 saved_regs += (start_reg - reg) * 12;
2c849145
JM
10622 RTX_FRAME_RELATED_P (insn) = 1;
10623 }
b111229a 10624 }
9b66ebb1
PB
10625 if (TARGET_HARD_FLOAT && TARGET_VFP)
10626 {
10627 start_reg = FIRST_VFP_REGNUM;
10628
10629 for (reg = FIRST_VFP_REGNUM; reg < LAST_VFP_REGNUM; reg += 2)
10630 {
10631 if ((!regs_ever_live[reg] || call_used_regs[reg])
10632 && (!regs_ever_live[reg + 1] || call_used_regs[reg + 1]))
10633 {
10634 if (start_reg != reg)
9728c9d1
PB
10635 saved_regs += vfp_emit_fstmx (start_reg,
10636 (reg - start_reg) / 2);
9b66ebb1
PB
10637 start_reg = reg + 2;
10638 }
10639 }
10640 if (start_reg != reg)
9728c9d1
PB
10641 saved_regs += vfp_emit_fstmx (start_reg,
10642 (reg - start_reg) / 2);
9b66ebb1 10643 }
b111229a 10644 }
e2c671ba
RE
10645
10646 if (frame_pointer_needed)
2c849145 10647 {
6d3d9133 10648 /* Create the new frame pointer. */
095bb276 10649 insn = GEN_INT (-(4 + args_to_push + fp_offset));
68dfd979 10650 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
2c849145 10651 RTX_FRAME_RELATED_P (insn) = 1;
68dfd979 10652
6d3d9133 10653 if (IS_NESTED (func_type))
68dfd979
NC
10654 {
10655 /* Recover the static chain register. */
095bb276
NC
10656 if (regs_ever_live [3] == 0
10657 || saved_pretend_args)
1d6e90ac 10658 insn = gen_rtx_REG (SImode, 3);
68dfd979
NC
10659 else /* if (current_function_pretend_args_size == 0) */
10660 {
29ad9694
RE
10661 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx,
10662 GEN_INT (4));
68dfd979 10663 insn = gen_rtx_MEM (SImode, insn);
68dfd979 10664 }
1d6e90ac 10665
c14a3a45
NC
10666 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
10667 /* Add a USE to stop propagate_one_insn() from barfing. */
6bacc7b0 10668 emit_insn (gen_prologue_use (ip_rtx));
68dfd979 10669 }
2c849145 10670 }
e2c671ba 10671
5848830f
PB
10672 offsets = arm_get_frame_offsets ();
10673 if (offsets->outgoing_args != offsets->saved_args + saved_regs)
e2c671ba 10674 {
745b9093
JM
10675 /* This add can produce multiple insns for a large constant, so we
10676 need to get tricky. */
10677 rtx last = get_last_insn ();
5848830f
PB
10678
10679 amount = GEN_INT (offsets->saved_args + saved_regs
10680 - offsets->outgoing_args);
10681
2c849145
JM
10682 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10683 amount));
745b9093
JM
10684 do
10685 {
10686 last = last ? NEXT_INSN (last) : get_insns ();
10687 RTX_FRAME_RELATED_P (last) = 1;
10688 }
10689 while (last != insn);
e04c2d6c
RE
10690
10691 /* If the frame pointer is needed, emit a special barrier that
10692 will prevent the scheduler from moving stores to the frame
10693 before the stack adjustment. */
10694 if (frame_pointer_needed)
3894f59e
RE
10695 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
10696 hard_frame_pointer_rtx));
e2c671ba
RE
10697 }
10698
876f13b0
PB
10699
10700 if (flag_pic)
10701 arm_load_pic_register ();
10702
e2c671ba 10703 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
10704 the call to mcount. Similarly if the user has requested no
 10705 scheduling in the prologue. */
70f4f91c 10706 if (current_function_profile || TARGET_NO_SCHED_PRO)
e2c671ba 10707 emit_insn (gen_blockage ());
6f7ebcbb
NC
10708
10709 /* If the link register is being kept alive, with the return address in it,
10710 then make sure that it does not get reused by the ce2 pass. */
10711 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
10712 {
6bacc7b0 10713 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
6f7ebcbb
NC
10714 cfun->machine->lr_save_eliminated = 1;
10715 }
e2c671ba 10716}
cce8749e 10717\f
9997d19d
RE
 10718 /* If CODE is 'd', then X is a condition operand and the instruction
 10719 should only be executed if the condition is true.
ddd5a7c1 10720 If CODE is 'D', then X is a condition operand and the instruction
9997d19d
RE
10721 should only be executed if the condition is false: however, if the mode
10722 of the comparison is CCFPEmode, then always execute the instruction -- we
10723 do this because in these circumstances !GE does not necessarily imply LT;
10724 in these cases the instruction pattern will take care to make sure that
10725 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 10726 doing this instruction unconditionally.
9997d19d
RE
10727 If CODE is 'N' then X is a floating point operand that must be negated
10728 before output.
10729 If CODE is 'B' then output a bitwise inverted value of X (a const int).
10730 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
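/* For example (illustrative): with CODE 'B' and X the constant 5, the
   output is ~5 sign-extended to a HOST_WIDE_INT, i.e. -6.  */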
9997d19d 10731void
e32bac5b 10732arm_print_operand (FILE *stream, rtx x, int code)
9997d19d
RE
10733{
10734 switch (code)
10735 {
10736 case '@':
f3139301 10737 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
10738 return;
10739
d5b7b3ae
RE
10740 case '_':
10741 fputs (user_label_prefix, stream);
10742 return;
10743
9997d19d 10744 case '|':
f3139301 10745 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
10746 return;
10747
10748 case '?':
10749 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
cca0a211
RE
10750 {
10751 if (TARGET_THUMB || current_insn_predicate != NULL)
10752 abort ();
10753
10754 fputs (arm_condition_codes[arm_current_cc], stream);
10755 }
10756 else if (current_insn_predicate)
10757 {
10758 enum arm_cond_code code;
10759
10760 if (TARGET_THUMB)
10761 abort ();
10762
10763 code = get_arm_condition_code (current_insn_predicate);
10764 fputs (arm_condition_codes[code], stream);
10765 }
9997d19d
RE
10766 return;
10767
10768 case 'N':
10769 {
10770 REAL_VALUE_TYPE r;
10771 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10772 r = REAL_VALUE_NEGATE (r);
10773 fprintf (stream, "%s", fp_const_from_val (&r));
10774 }
10775 return;
10776
10777 case 'B':
10778 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
10779 {
10780 HOST_WIDE_INT val;
5895f793 10781 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 10782 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 10783 }
9997d19d
RE
10784 else
10785 {
10786 putc ('~', stream);
10787 output_addr_const (stream, x);
10788 }
10789 return;
10790
10791 case 'i':
10792 fprintf (stream, "%s", arithmetic_instr (x, 1));
10793 return;
10794
9b6b54e2
NC
10795 /* Truncate Cirrus shift counts. */
10796 case 's':
10797 if (GET_CODE (x) == CONST_INT)
10798 {
10799 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
10800 return;
10801 }
10802 arm_print_operand (stream, x, 0);
10803 return;
10804
9997d19d
RE
10805 case 'I':
10806 fprintf (stream, "%s", arithmetic_instr (x, 0));
10807 return;
10808
10809 case 'S':
10810 {
10811 HOST_WIDE_INT val;
5895f793 10812 const char * shift = shift_op (x, &val);
9997d19d 10813
e2c671ba
RE
10814 if (shift)
10815 {
5895f793 10816 fprintf (stream, ", %s ", shift_op (x, &val));
e2c671ba
RE
10817 if (val == -1)
10818 arm_print_operand (stream, XEXP (x, 1), 0);
10819 else
4a0a75dd 10820 fprintf (stream, "#" HOST_WIDE_INT_PRINT_DEC, val);
e2c671ba 10821 }
9997d19d
RE
10822 }
10823 return;
10824
d5b7b3ae
RE
10825 /* An explanation of the 'Q', 'R' and 'H' register operands:
10826
10827 In a pair of registers containing a DI or DF value the 'Q'
10828 operand returns the register number of the register containing
093354e0 10829 the least significant part of the value. The 'R' operand returns
d5b7b3ae
RE
10830 the register number of the register containing the most
10831 significant part of the value.
10832
10833 The 'H' operand returns the higher of the two register numbers.
 10834 On a target where WORDS_BIG_ENDIAN is true the 'H' operand is the
093354e0 10835 same as the 'Q' operand, since the most significant part of the
d5b7b3ae
RE
10836 value is held in the lower number register. The reverse is true
10837 on systems where WORDS_BIG_ENDIAN is false.
10838
10839 The purpose of these operands is to distinguish between cases
10840 where the endian-ness of the values is important (for example
10841 when they are added together), and cases where the endian-ness
10842 is irrelevant, but the order of register operations is important.
10843 For example when loading a value from memory into a register
10844 pair, the endian-ness does not matter. Provided that the value
10845 from the lower memory address is put into the lower numbered
10846 register, and the value from the higher address is put into the
10847 higher numbered register, the load will work regardless of whether
10848 the value being loaded is big-wordian or little-wordian. The
10849 order of the two register loads can matter however, if the address
10850 of the memory location is actually held in one of the registers
10851 being overwritten by the load. */
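    /* For a DImode value held in r0/r1 (illustrative): on a
       little-endian target %Q prints r0, %R prints r1 and %H prints
       r1; with WORDS_BIG_ENDIAN, %Q prints r1 and %R prints r0,
       while %H is r1 in either case.  */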
c1c2bc04 10852 case 'Q':
d5b7b3ae 10853 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 10854 abort ();
d5b7b3ae 10855 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
10856 return;
10857
9997d19d 10858 case 'R':
d5b7b3ae 10859 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 10860 abort ();
d5b7b3ae
RE
10861 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
10862 return;
10863
10864 case 'H':
10865 if (REGNO (x) > LAST_ARM_REGNUM)
10866 abort ();
10867 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
10868 return;
10869
10870 case 'm':
d5b7b3ae
RE
10871 asm_fprintf (stream, "%r",
10872 GET_CODE (XEXP (x, 0)) == REG
10873 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
10874 return;
10875
10876 case 'M':
dd18ae56 10877 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae 10878 REGNO (x),
e9d7b180 10879 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
10880 return;
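    /* E.g. (illustrative) a DImode value in r4 prints as "{r4-r5}",
       since ARM_NUM_REGS (DImode) == 2.  */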
10881
10882 case 'd':
64e92a26
RE
10883 /* CONST_TRUE_RTX means always -- that's the default. */
10884 if (x == const_true_rtx)
d5b7b3ae
RE
10885 return;
10886
defc0463
RE
10887 fputs (arm_condition_codes[get_arm_condition_code (x)],
10888 stream);
9997d19d
RE
10889 return;
10890
10891 case 'D':
64e92a26
RE
 10892 /* CONST_TRUE_RTX means not always -- i.e. never. We shouldn't ever
10893 want to do that. */
10894 if (x == const_true_rtx)
10895 abort ();
d5b7b3ae 10896
defc0463
RE
10897 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
10898 (get_arm_condition_code (x))],
10899 stream);
9997d19d
RE
10900 return;
10901
9b6b54e2
NC
10902 /* Cirrus registers can be accessed in a variety of ways:
10903 single floating point (f)
10904 double floating point (d)
10905 32bit integer (fx)
10906 64bit integer (dx). */
10907 case 'W': /* Cirrus register in F mode. */
10908 case 'X': /* Cirrus register in D mode. */
10909 case 'Y': /* Cirrus register in FX mode. */
10910 case 'Z': /* Cirrus register in DX mode. */
10911 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
10912 abort ();
10913
10914 fprintf (stream, "mv%s%s",
10915 code == 'W' ? "f"
10916 : code == 'X' ? "d"
10917 : code == 'Y' ? "fx" : "dx", reg_names[REGNO (x)] + 2);
10918
10919 return;
10920
10921 /* Print cirrus register in the mode specified by the register's mode. */
10922 case 'V':
10923 {
10924 int mode = GET_MODE (x);
10925
10926 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
10927 abort ();
10928
10929 fprintf (stream, "mv%s%s",
10930 mode == DFmode ? "d"
10931 : mode == SImode ? "fx"
10932 : mode == DImode ? "dx"
10933 : "f", reg_names[REGNO (x)] + 2);
10934
10935 return;
10936 }
10937
5a9335ef
NC
10938 case 'U':
10939 if (GET_CODE (x) != REG
10940 || REGNO (x) < FIRST_IWMMXT_GR_REGNUM
10941 || REGNO (x) > LAST_IWMMXT_GR_REGNUM)
10942 /* Bad value for wCG register number. */
10943 abort ();
10944 else
10945 fprintf (stream, "%d", REGNO (x) - FIRST_IWMMXT_GR_REGNUM);
10946 return;
10947
10948 /* Print an iWMMXt control register name. */
10949 case 'w':
10950 if (GET_CODE (x) != CONST_INT
10951 || INTVAL (x) < 0
10952 || INTVAL (x) >= 16)
10953 /* Bad value for wC register number. */
10954 abort ();
10955 else
10956 {
10957 static const char * wc_reg_names [16] =
10958 {
10959 "wCID", "wCon", "wCSSF", "wCASF",
10960 "wC4", "wC5", "wC6", "wC7",
10961 "wCGR0", "wCGR1", "wCGR2", "wCGR3",
10962 "wC12", "wC13", "wC14", "wC15"
10963 };
10964
 10965 fputs (wc_reg_names [INTVAL (x)], stream);
10966 }
10967 return;
10968
9b66ebb1
PB
10969 /* Print a VFP double precision register name. */
10970 case 'P':
10971 {
10972 int mode = GET_MODE (x);
10973 int num;
10974
10975 if (mode != DImode && mode != DFmode)
10976 abort ();
10977
10978 if (GET_CODE (x) != REG
10979 || !IS_VFP_REGNUM (REGNO (x)))
10980 abort ();
10981
 10982 num = REGNO (x) - FIRST_VFP_REGNUM;
10983 if (num & 1)
10984 abort ();
10985
10986 fprintf (stream, "d%d", num >> 1);
10987 }
10988 return;
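    /* Illustrative: VFP double-precision registers overlay pairs of
       single-precision ones, d<n> == {s<2n>, s<2n+1>}, so a DFmode
       value starting at FIRST_VFP_REGNUM + 6 prints as "d3".  */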
10989
9997d19d
RE
10990 default:
10991 if (x == 0)
10992 abort ();
10993
10994 if (GET_CODE (x) == REG)
d5b7b3ae 10995 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
10996 else if (GET_CODE (x) == MEM)
10997 {
10998 output_memory_reference_mode = GET_MODE (x);
10999 output_address (XEXP (x, 0));
11000 }
11001 else if (GET_CODE (x) == CONST_DOUBLE)
11002 fprintf (stream, "#%s", fp_immediate_constant (x));
11003 else if (GET_CODE (x) == NEG)
6354dc9b 11004 abort (); /* This should never happen now. */
9997d19d
RE
11005 else
11006 {
11007 fputc ('#', stream);
11008 output_addr_const (stream, x);
11009 }
11010 }
11011}
cce8749e 11012\f
301d03af
RS
11013#ifndef AOF_ASSEMBLER
11014/* Target hook for assembling integer objects. The ARM version needs to
11015 handle word-sized values specially. */
301d03af 11016static bool
e32bac5b 11017arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af
RS
11018{
11019 if (size == UNITS_PER_WORD && aligned_p)
11020 {
11021 fputs ("\t.word\t", asm_out_file);
11022 output_addr_const (asm_out_file, x);
11023
11024 /* Mark symbols as position independent. We only do this in the
d6b4baa4 11025 .text segment, not in the .data segment. */
301d03af
RS
11026 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
11027 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
11028 {
e26053d1 11029 if (GET_CODE (x) == SYMBOL_REF
14f583b8 11030 && (CONSTANT_POOL_ADDRESS_P (x)
94428622 11031 || SYMBOL_REF_LOCAL_P (x)))
301d03af
RS
11032 fputs ("(GOTOFF)", asm_out_file);
11033 else if (GET_CODE (x) == LABEL_REF)
11034 fputs ("(GOTOFF)", asm_out_file);
11035 else
11036 fputs ("(GOT)", asm_out_file);
11037 }
11038 fputc ('\n', asm_out_file);
11039 return true;
11040 }
1d6e90ac 11041
5a9335ef
NC
11042 if (VECTOR_MODE_SUPPORTED_P (GET_MODE (x)))
11043 {
11044 int i, units;
11045
11046 if (GET_CODE (x) != CONST_VECTOR)
11047 abort ();
11048
11049 units = CONST_VECTOR_NUNITS (x);
11050
11051 switch (GET_MODE (x))
11052 {
11053 case V2SImode: size = 4; break;
11054 case V4HImode: size = 2; break;
11055 case V8QImode: size = 1; break;
11056 default:
11057 abort ();
11058 }
11059
11060 for (i = 0; i < units; i++)
11061 {
11062 rtx elt;
11063
11064 elt = CONST_VECTOR_ELT (x, i);
11065 assemble_integer
11066 (elt, size, i == 0 ? BIGGEST_ALIGNMENT : size * BITS_PER_UNIT, 1);
11067 }
11068
11069 return true;
11070 }
11071
301d03af
RS
11072 return default_assemble_integer (x, size, aligned_p);
11073}
11074#endif
11075\f
cce8749e
CH
11076/* A finite state machine takes care of noticing whether or not instructions
 11077 can be conditionally executed, thus decreasing execution time and code
11078 size by deleting branch instructions. The fsm is controlled by
11079 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
11080
11081/* The state of the fsm controlling condition codes are:
11082 0: normal, do nothing special
11083 1: make ASM_OUTPUT_OPCODE not output this instruction
11084 2: make ASM_OUTPUT_OPCODE not output this instruction
11085 3: make instructions conditional
11086 4: make instructions conditional
11087
11088 State transitions (state->state by whom under condition):
11089 0 -> 1 final_prescan_insn if the `target' is a label
11090 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
11091 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
11092 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
4977bab6 11093 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
cce8749e
CH
11094 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
11095 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
11096 (the target insn is arm_target_insn).
11097
ff9940b0
RE
11098 If the jump clobbers the conditions then we use states 2 and 4.
11099
11100 A similar thing can be done with conditional return insns.
11101
cce8749e
CH
11102 XXX In case the `target' is an unconditional branch, this conditionalising
11103 of the instructions always reduces code size, but not always execution
11104 time. But then, I want to reduce the code size to somewhere near what
11105 /bin/cc produces. */
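/* An illustrative example of the transformation this enables: a short
   forward branch such as
	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:
   can be replaced by predicating the skipped insn and deleting the
   branch:
	cmp	r0, #0
	addne	r1, r1, #1  */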
11106
cce8749e
CH
11107/* Returns the index of the ARM condition code string in
11108 `arm_condition_codes'. COMPARISON should be an rtx like
11109 `(eq (...) (...))'. */
84ed5e79 11110static enum arm_cond_code
e32bac5b 11111get_arm_condition_code (rtx comparison)
cce8749e 11112{
5165176d 11113 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
1d6e90ac
NC
11114 int code;
11115 enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
11116
11117 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 11118 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
11119 XEXP (comparison, 1));
11120
11121 switch (mode)
cce8749e 11122 {
84ed5e79
RE
11123 case CC_DNEmode: code = ARM_NE; goto dominance;
11124 case CC_DEQmode: code = ARM_EQ; goto dominance;
11125 case CC_DGEmode: code = ARM_GE; goto dominance;
11126 case CC_DGTmode: code = ARM_GT; goto dominance;
11127 case CC_DLEmode: code = ARM_LE; goto dominance;
11128 case CC_DLTmode: code = ARM_LT; goto dominance;
11129 case CC_DGEUmode: code = ARM_CS; goto dominance;
11130 case CC_DGTUmode: code = ARM_HI; goto dominance;
11131 case CC_DLEUmode: code = ARM_LS; goto dominance;
11132 case CC_DLTUmode: code = ARM_CC;
11133
11134 dominance:
11135 if (comp_code != EQ && comp_code != NE)
11136 abort ();
11137
11138 if (comp_code == EQ)
11139 return ARM_INVERSE_CONDITION_CODE (code);
11140 return code;
11141
5165176d 11142 case CC_NOOVmode:
84ed5e79 11143 switch (comp_code)
5165176d 11144 {
84ed5e79
RE
11145 case NE: return ARM_NE;
11146 case EQ: return ARM_EQ;
11147 case GE: return ARM_PL;
11148 case LT: return ARM_MI;
5165176d
RE
11149 default: abort ();
11150 }
11151
11152 case CC_Zmode:
84ed5e79 11153 switch (comp_code)
5165176d 11154 {
84ed5e79
RE
11155 case NE: return ARM_NE;
11156 case EQ: return ARM_EQ;
5165176d
RE
11157 default: abort ();
11158 }
11159
defc0463
RE
11160 case CC_Nmode:
11161 switch (comp_code)
11162 {
11163 case NE: return ARM_MI;
11164 case EQ: return ARM_PL;
11165 default: abort ();
11166 }
11167
5165176d 11168 case CCFPEmode:
e45b72c4
RE
11169 case CCFPmode:
11170 /* These encodings assume that AC=1 in the FPA system control
11171 byte. This allows us to handle all cases except UNEQ and
11172 LTGT. */
84ed5e79
RE
11173 switch (comp_code)
11174 {
11175 case GE: return ARM_GE;
11176 case GT: return ARM_GT;
11177 case LE: return ARM_LS;
11178 case LT: return ARM_MI;
e45b72c4
RE
11179 case NE: return ARM_NE;
11180 case EQ: return ARM_EQ;
11181 case ORDERED: return ARM_VC;
11182 case UNORDERED: return ARM_VS;
11183 case UNLT: return ARM_LT;
11184 case UNLE: return ARM_LE;
11185 case UNGT: return ARM_HI;
11186 case UNGE: return ARM_PL;
11187 /* UNEQ and LTGT do not have a representation. */
11188 case UNEQ: /* Fall through. */
11189 case LTGT: /* Fall through. */
84ed5e79
RE
11190 default: abort ();
11191 }
11192
11193 case CC_SWPmode:
11194 switch (comp_code)
11195 {
11196 case NE: return ARM_NE;
11197 case EQ: return ARM_EQ;
11198 case GE: return ARM_LE;
11199 case GT: return ARM_LT;
11200 case LE: return ARM_GE;
11201 case LT: return ARM_GT;
11202 case GEU: return ARM_LS;
11203 case GTU: return ARM_CC;
11204 case LEU: return ARM_CS;
11205 case LTU: return ARM_HI;
11206 default: abort ();
11207 }
11208
bd9c7e23
RE
11209 case CC_Cmode:
11210 switch (comp_code)
11211 {
11212 case LTU: return ARM_CS;
11213 case GEU: return ARM_CC;
11214 default: abort ();
11215 }
11216
5165176d 11217 case CCmode:
84ed5e79 11218 switch (comp_code)
5165176d 11219 {
84ed5e79
RE
11220 case NE: return ARM_NE;
11221 case EQ: return ARM_EQ;
11222 case GE: return ARM_GE;
11223 case GT: return ARM_GT;
11224 case LE: return ARM_LE;
11225 case LT: return ARM_LT;
11226 case GEU: return ARM_CS;
11227 case GTU: return ARM_HI;
11228 case LEU: return ARM_LS;
11229 case LTU: return ARM_CC;
5165176d
RE
11230 default: abort ();
11231 }
11232
cce8749e
CH
11233 default: abort ();
11234 }
84ed5e79
RE
11235
11236 abort ();
f3bb6135 11237}
cce8749e 11238
cce8749e 11239void
e32bac5b 11240arm_final_prescan_insn (rtx insn)
cce8749e
CH
11241{
11242 /* BODY will hold the body of INSN. */
1d6e90ac 11243 rtx body = PATTERN (insn);
cce8749e
CH
11244
11245 /* This will be 1 if trying to repeat the trick, and things need to be
11246 reversed if it appears to fail. */
11247 int reverse = 0;
11248
ff9940b0
RE
 11249 /* JUMP_CLOBBERS being one implies that the conditions are clobbered
 11250 if a branch is taken, even if the rtl suggests otherwise. It also
11251 means that we have to grub around within the jump expression to find
11252 out what the conditions are when the jump isn't taken. */
11253 int jump_clobbers = 0;
11254
6354dc9b 11255 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
11256 int seeking_return = 0;
11257
cce8749e
CH
11258 /* START_INSN will hold the insn from where we start looking. This is the
11259 first insn after the following code_label if REVERSE is true. */
11260 rtx start_insn = insn;
11261
11262 /* If in state 4, check if the target branch is reached, in order to
11263 change back to state 0. */
11264 if (arm_ccfsm_state == 4)
11265 {
11266 if (insn == arm_target_insn)
f5a1b0d2
NC
11267 {
11268 arm_target_insn = NULL;
11269 arm_ccfsm_state = 0;
11270 }
cce8749e
CH
11271 return;
11272 }
11273
11274 /* If in state 3, it is possible to repeat the trick, if this insn is an
11275 unconditional branch to a label, and immediately following this branch
11276 is the previous target label which is only used once, and the label this
11277 branch jumps to is not too far off. */
11278 if (arm_ccfsm_state == 3)
11279 {
11280 if (simplejump_p (insn))
11281 {
11282 start_insn = next_nonnote_insn (start_insn);
11283 if (GET_CODE (start_insn) == BARRIER)
11284 {
11285 /* XXX Isn't this always a barrier? */
11286 start_insn = next_nonnote_insn (start_insn);
11287 }
11288 if (GET_CODE (start_insn) == CODE_LABEL
11289 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
11290 && LABEL_NUSES (start_insn) == 1)
11291 reverse = TRUE;
11292 else
11293 return;
11294 }
ff9940b0
RE
11295 else if (GET_CODE (body) == RETURN)
11296 {
11297 start_insn = next_nonnote_insn (start_insn);
11298 if (GET_CODE (start_insn) == BARRIER)
11299 start_insn = next_nonnote_insn (start_insn);
11300 if (GET_CODE (start_insn) == CODE_LABEL
11301 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
11302 && LABEL_NUSES (start_insn) == 1)
11303 {
11304 reverse = TRUE;
11305 seeking_return = 1;
11306 }
11307 else
11308 return;
11309 }
cce8749e
CH
11310 else
11311 return;
11312 }
11313
11314 if (arm_ccfsm_state != 0 && !reverse)
11315 abort ();
11316 if (GET_CODE (insn) != JUMP_INSN)
11317 return;
11318
ddd5a7c1 11319 /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
 11320 the jump should always come first. */
11321 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
11322 body = XVECEXP (body, 0, 0);
11323
cce8749e
CH
11324 if (reverse
11325 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
11326 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
11327 {
bd9c7e23
RE
11328 int insns_skipped;
11329 int fail = FALSE, succeed = FALSE;
cce8749e
CH
11330 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
11331 int then_not_else = TRUE;
ff9940b0 11332 rtx this_insn = start_insn, label = 0;
cce8749e 11333
e45b72c4
RE
11334 /* If the jump cannot be done with one instruction, we cannot
11335 conditionally execute the instruction in the inverse case. */
ff9940b0 11336 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40 11337 {
5bbe2d40
RE
11338 jump_clobbers = 1;
11339 return;
11340 }
ff9940b0 11341
cce8749e
CH
11342 /* Register the insn jumped to. */
11343 if (reverse)
ff9940b0
RE
11344 {
11345 if (!seeking_return)
11346 label = XEXP (SET_SRC (body), 0);
11347 }
cce8749e
CH
11348 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
11349 label = XEXP (XEXP (SET_SRC (body), 1), 0);
11350 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
11351 {
11352 label = XEXP (XEXP (SET_SRC (body), 2), 0);
11353 then_not_else = FALSE;
11354 }
ff9940b0
RE
11355 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
11356 seeking_return = 1;
11357 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
11358 {
11359 seeking_return = 1;
11360 then_not_else = FALSE;
11361 }
cce8749e
CH
11362 else
11363 abort ();
11364
11365 /* See how many insns this branch skips, and what kind of insns. If all
11366 insns are okay, and the label or unconditional branch to the same
11367 label is not too far away, succeed. */
11368 for (insns_skipped = 0;
b36ba79f 11369 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
11370 {
11371 rtx scanbody;
11372
11373 this_insn = next_nonnote_insn (this_insn);
11374 if (!this_insn)
11375 break;
11376
cce8749e
CH
11377 switch (GET_CODE (this_insn))
11378 {
11379 case CODE_LABEL:
11380 /* Succeed if it is the target label, otherwise fail since
11381 control falls in from somewhere else. */
11382 if (this_insn == label)
11383 {
ff9940b0
RE
11384 if (jump_clobbers)
11385 {
11386 arm_ccfsm_state = 2;
11387 this_insn = next_nonnote_insn (this_insn);
11388 }
11389 else
11390 arm_ccfsm_state = 1;
cce8749e
CH
11391 succeed = TRUE;
11392 }
11393 else
11394 fail = TRUE;
11395 break;
11396
ff9940b0 11397 case BARRIER:
cce8749e 11398 /* Succeed if the following insn is the target label.
ff9940b0
RE
11399 Otherwise fail.
11400 If return insns are used then the last insn in a function
6354dc9b 11401 will be a barrier. */
cce8749e 11402 this_insn = next_nonnote_insn (this_insn);
ff9940b0 11403 if (this_insn && this_insn == label)
cce8749e 11404 {
ff9940b0
RE
11405 if (jump_clobbers)
11406 {
11407 arm_ccfsm_state = 2;
11408 this_insn = next_nonnote_insn (this_insn);
11409 }
11410 else
11411 arm_ccfsm_state = 1;
cce8749e
CH
11412 succeed = TRUE;
11413 }
11414 else
11415 fail = TRUE;
11416 break;
11417
ff9940b0 11418 case CALL_INSN:
68d560d4
RE
11419 /* The AAPCS says that conditional calls should not be
11420 used since they make interworking inefficient (the
11421 linker can't transform BL<cond> into BLX). That's
11422 only a problem if the machine has BLX. */
11423 if (arm_arch5)
11424 {
11425 fail = TRUE;
11426 break;
11427 }
11428
61f0ccff
RE
11429 /* Succeed if the following insn is the target label, or
11430 if the following two insns are a barrier and the
11431 target label. */
11432 this_insn = next_nonnote_insn (this_insn);
11433 if (this_insn && GET_CODE (this_insn) == BARRIER)
11434 this_insn = next_nonnote_insn (this_insn);
bd9c7e23 11435
61f0ccff
RE
11436 if (this_insn && this_insn == label
11437 && insns_skipped < max_insns_skipped)
11438 {
11439 if (jump_clobbers)
bd9c7e23 11440 {
61f0ccff
RE
11441 arm_ccfsm_state = 2;
11442 this_insn = next_nonnote_insn (this_insn);
bd9c7e23
RE
11443 }
11444 else
61f0ccff
RE
11445 arm_ccfsm_state = 1;
11446 succeed = TRUE;
bd9c7e23 11447 }
61f0ccff
RE
11448 else
11449 fail = TRUE;
ff9940b0 11450 break;
2b835d68 11451
cce8749e
CH
11452 case JUMP_INSN:
11453 /* If this is an unconditional branch to the same label, succeed.
11454 If it is to another label, do nothing. If it is conditional,
11455 fail. */
e32bac5b
RE
11456 /* XXX Probably, the tests for SET and the PC are
11457 unnecessary. */
cce8749e 11458
ed4c4348 11459 scanbody = PATTERN (this_insn);
ff9940b0
RE
11460 if (GET_CODE (scanbody) == SET
11461 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
11462 {
11463 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
11464 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
11465 {
11466 arm_ccfsm_state = 2;
11467 succeed = TRUE;
11468 }
11469 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
11470 fail = TRUE;
11471 }
b36ba79f
RE
11472 /* Fail if a conditional return is undesirable (eg on a
11473 StrongARM), but still allow this if optimizing for size. */
11474 else if (GET_CODE (scanbody) == RETURN
a72d4945 11475 && !use_return_insn (TRUE, NULL)
5895f793 11476 && !optimize_size)
b36ba79f 11477 fail = TRUE;
ff9940b0
RE
11478 else if (GET_CODE (scanbody) == RETURN
11479 && seeking_return)
11480 {
11481 arm_ccfsm_state = 2;
11482 succeed = TRUE;
11483 }
11484 else if (GET_CODE (scanbody) == PARALLEL)
11485 {
11486 switch (get_attr_conds (this_insn))
11487 {
11488 case CONDS_NOCOND:
11489 break;
11490 default:
11491 fail = TRUE;
11492 break;
11493 }
11494 }
4e67550b
RE
11495 else
11496 fail = TRUE; /* Unrecognized jump (eg epilogue). */
11497
cce8749e
CH
11498 break;
11499
11500 case INSN:
ff9940b0
RE
11501 /* Instructions using or affecting the condition codes make it
11502 fail. */
ed4c4348 11503 scanbody = PATTERN (this_insn);
5895f793
RE
11504 if (!(GET_CODE (scanbody) == SET
11505 || GET_CODE (scanbody) == PARALLEL)
74641843 11506 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e 11507 fail = TRUE;
9b6b54e2
NC
11508
 11509 /* A conditional Cirrus instruction must be followed by
 11510 a non-Cirrus instruction. However, since we
 11511 conditionalize instructions in this function, and since
 11512 by the time we get here we cannot add instructions
 11513 (nops), because shorten_branches() has already been
 11514 called, we disable conditionalizing Cirrus
 11515 instructions to be safe. */
11516 if (GET_CODE (scanbody) != USE
11517 && GET_CODE (scanbody) != CLOBBER
f0375c66 11518 && get_attr_cirrus (this_insn) != CIRRUS_NOT)
9b6b54e2 11519 fail = TRUE;
cce8749e
CH
11520 break;
11521
11522 default:
11523 break;
11524 }
11525 }
11526 if (succeed)
11527 {
ff9940b0 11528 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 11529 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
11530 else if (seeking_return || arm_ccfsm_state == 2)
11531 {
11532 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
11533 {
11534 this_insn = next_nonnote_insn (this_insn);
11535 if (this_insn && (GET_CODE (this_insn) == BARRIER
11536 || GET_CODE (this_insn) == CODE_LABEL))
11537 abort ();
11538 }
11539 if (!this_insn)
11540 {
d6b4baa4 11541 /* Oh dear! We ran off the end... give up. */
df4ae160 11542 recog (PATTERN (insn), insn, NULL);
ff9940b0 11543 arm_ccfsm_state = 0;
abaa26e5 11544 arm_target_insn = NULL;
ff9940b0
RE
11545 return;
11546 }
11547 arm_target_insn = this_insn;
11548 }
cce8749e
CH
11549 else
11550 abort ();
ff9940b0
RE
11551 if (jump_clobbers)
11552 {
11553 if (reverse)
11554 abort ();
11555 arm_current_cc =
11556 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
11557 0), 0), 1));
11558 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
11559 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
11560 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
11561 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
11562 }
11563 else
11564 {
11565 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
11566 what it was. */
11567 if (!reverse)
11568 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
11569 0));
11570 }
cce8749e 11571
cce8749e
CH
11572 if (reverse || then_not_else)
11573 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
11574 }
d5b7b3ae 11575
1ccbefce 11576 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 11577 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 11578 across this call; since the insn has been recognized already we
b020fd92 11579 call recog directly). */
df4ae160 11580 recog (PATTERN (insn), insn, NULL);
cce8749e 11581 }
f3bb6135 11582}
cce8749e 11583
4b02997f
NC
11584/* Returns true if REGNO is a valid register
 11585 for holding a quantity of type MODE. */
4b02997f 11586int
e32bac5b 11587arm_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
4b02997f
NC
11588{
11589 if (GET_MODE_CLASS (mode) == MODE_CC)
9b66ebb1 11590 return regno == CC_REGNUM || regno == VFPCC_REGNUM;
4b02997f
NC
11591
11592 if (TARGET_THUMB)
11593 /* For the Thumb we only allow values bigger than SImode in
11594 registers 0 - 6, so that there is always a second low
11595 register available to hold the upper part of the value.
 11596 We probably ought to ensure that the register is the
11597 start of an even numbered register pair. */
e9d7b180 11598 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
4b02997f 11599
9b6b54e2
NC
11600 if (IS_CIRRUS_REGNUM (regno))
11601 /* We have outlawed SI values in Cirrus registers because they
11602 reside in the lower 32 bits, but SF values reside in the
11603 upper 32 bits. This causes gcc all sorts of grief. We can't
11604 even split the registers into pairs because Cirrus SI values
11605 get sign extended to 64bits-- aldyh. */
11606 return (GET_MODE_CLASS (mode) == MODE_FLOAT) || (mode == DImode);
11607
9b66ebb1
PB
11608 if (IS_VFP_REGNUM (regno))
11609 {
11610 if (mode == SFmode || mode == SImode)
11611 return TRUE;
11612
11613 /* DFmode values are only valid in even register pairs. */
11614 if (mode == DFmode)
11615 return ((regno - FIRST_VFP_REGNUM) & 1) == 0;
11616 return FALSE;
11617 }
11618
5a9335ef
NC
11619 if (IS_IWMMXT_GR_REGNUM (regno))
11620 return mode == SImode;
11621
11622 if (IS_IWMMXT_REGNUM (regno))
11623 return VALID_IWMMXT_REG_MODE (mode);
11624
fdd695fd
PB
11625 /* We allow any value to be stored in the general registers.
11626 Restrict doubleword quantities to even register pairs so that we can
11627 use ldrd. */
4b02997f 11628 if (regno <= LAST_ARM_REGNUM)
fdd695fd 11629 return !(TARGET_LDRD && GET_MODE_SIZE (mode) > 4 && (regno & 1) != 0);
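    /* Illustrative: with TARGET_LDRD a DImode value is accepted in r0
       or r2 but rejected in r1, since ldrd/strd require an even/odd
       register pair.  */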
4b02997f
NC
11630
11631 if ( regno == FRAME_POINTER_REGNUM
11632 || regno == ARG_POINTER_REGNUM)
11633 /* We only allow integers in the fake hard registers. */
11634 return GET_MODE_CLASS (mode) == MODE_INT;
11635
3b684012 11636 /* The only registers left are the FPA registers
4b02997f
NC
11637 which we only allow to hold FP values. */
11638 return GET_MODE_CLASS (mode) == MODE_FLOAT
9b66ebb1
PB
11639 && regno >= FIRST_FPA_REGNUM
11640 && regno <= LAST_FPA_REGNUM;
4b02997f
NC
11641}
11642
d5b7b3ae 11643int
e32bac5b 11644arm_regno_class (int regno)
d5b7b3ae
RE
11645{
11646 if (TARGET_THUMB)
11647 {
11648 if (regno == STACK_POINTER_REGNUM)
11649 return STACK_REG;
11650 if (regno == CC_REGNUM)
11651 return CC_REG;
11652 if (regno < 8)
11653 return LO_REGS;
11654 return HI_REGS;
11655 }
11656
11657 if ( regno <= LAST_ARM_REGNUM
11658 || regno == FRAME_POINTER_REGNUM
11659 || regno == ARG_POINTER_REGNUM)
11660 return GENERAL_REGS;
11661
9b66ebb1 11662 if (regno == CC_REGNUM || regno == VFPCC_REGNUM)
d5b7b3ae
RE
11663 return NO_REGS;
11664
9b6b54e2
NC
11665 if (IS_CIRRUS_REGNUM (regno))
11666 return CIRRUS_REGS;
11667
9b66ebb1
PB
11668 if (IS_VFP_REGNUM (regno))
11669 return VFP_REGS;
11670
5a9335ef
NC
11671 if (IS_IWMMXT_REGNUM (regno))
11672 return IWMMXT_REGS;
11673
e99faaaa
ILT
11674 if (IS_IWMMXT_GR_REGNUM (regno))
11675 return IWMMXT_GR_REGS;
11676
3b684012 11677 return FPA_REGS;
d5b7b3ae
RE
11678}
11679
11680/* Handle a special case when computing the offset
11681 of an argument from the frame pointer. */
11682int
e32bac5b 11683arm_debugger_arg_offset (int value, rtx addr)
d5b7b3ae
RE
11684{
11685 rtx insn;
11686
11687 /* We are only interested if dbxout_parms() failed to compute the offset. */
11688 if (value != 0)
11689 return 0;
11690
11691 /* We can only cope with the case where the address is held in a register. */
11692 if (GET_CODE (addr) != REG)
11693 return 0;
11694
11695 /* If we are using the frame pointer to point at the argument, then
11696 an offset of 0 is correct. */
cd2b33d0 11697 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
11698 return 0;
11699
11700 /* If we are using the stack pointer to point at the
11701 argument, then an offset of 0 is correct. */
5895f793 11702 if ((TARGET_THUMB || !frame_pointer_needed)
d5b7b3ae
RE
11703 && REGNO (addr) == SP_REGNUM)
11704 return 0;
11705
11706 /* Oh dear. The argument is pointed to by a register rather
11707 than being held in a register, or being stored at a known
11708 offset from the frame pointer. Since GDB only understands
11709 those two kinds of argument we must translate the address
11710 held in the register into an offset from the frame pointer.
11711 We do this by searching through the insns for the function
11712 looking to see where this register gets its value. If the
4912a07c 11713 register is initialized from the frame pointer plus an offset
d5b7b3ae
RE
11714 then we are in luck and we can continue, otherwise we give up.
11715
11716 This code is exercised by producing debugging information
11717 for a function with arguments like this:
11718
11719 double func (double a, double b, int c, double d) {return d;}
11720
11721 Without this code the stab for parameter 'd' will be set to
11722 an offset of 0 from the frame pointer, rather than 8. */
11723
11724 /* The if() statement says:
11725
11726 If the insn is a normal instruction
11727 and if the insn is setting the value in a register
11728 and if the register being set is the register holding the address of the argument
 11729 and if the address is computed by an addition
11730 that involves adding to a register
11731 which is the frame pointer
11732 a constant integer
11733
d6b4baa4 11734 then... */
d5b7b3ae
RE
11735
11736 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11737 {
11738 if ( GET_CODE (insn) == INSN
11739 && GET_CODE (PATTERN (insn)) == SET
11740 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
11741 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
11742 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 11743 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
11744 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
11745 )
11746 {
11747 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
11748
11749 break;
11750 }
11751 }
11752
11753 if (value == 0)
11754 {
11755 debug_rtx (addr);
c725bd79 11756 warning ("unable to compute real location of stacked parameter");
d5b7b3ae
RE
11757 value = 8; /* XXX magic hack */
11758 }
11759
11760 return value;
11761}
d5b7b3ae 11762\f
5a9335ef
NC
11763#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
11764 do \
11765 { \
11766 if ((MASK) & insn_flags) \
6e34d3a3
JM
11767 lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
11768 BUILT_IN_MD, NULL, NULL_TREE); \
5a9335ef
NC
11769 } \
11770 while (0)
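/* Illustrative use of def_mbuiltin (this particular call follows the
   pattern of the tables below, but is an assumption for illustration):

     def_mbuiltin (FL_IWMMXT, "__builtin_arm_setwcx",
		   void_ftype_int_int, ARM_BUILTIN_SETWCX);

   The builtin is only registered when FL_IWMMXT is set in insn_flags.  */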
11771
11772struct builtin_description
11773{
11774 const unsigned int mask;
11775 const enum insn_code icode;
11776 const char * const name;
11777 const enum arm_builtins code;
11778 const enum rtx_code comparison;
11779 const unsigned int flag;
11780};
11781
11782static const struct builtin_description bdesc_2arg[] =
11783{
11784#define IWMMXT_BUILTIN(code, string, builtin) \
11785 { FL_IWMMXT, CODE_FOR_##code, "__builtin_arm_" string, \
11786 ARM_BUILTIN_##builtin, 0, 0 },
11787
11788 IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
11789 IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
11790 IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
11791 IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
11792 IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
11793 IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
11794 IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
11795 IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
11796 IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
11797 IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
11798 IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
11799 IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
11800 IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
11801 IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
11802 IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
11803 IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
11804 IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
11805 IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
11806 IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
f07a6b21
BE
11807 IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsm", WMULSM)
11808 IWMMXT_BUILTIN (umulv4hi3_highpart, "wmulum", WMULUM)
5a9335ef
NC
11809 IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
11810 IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
11811 IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
11812 IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
11813 IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
11814 IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
11815 IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
11816 IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
11817 IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
11818 IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
11819 IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
11820 IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
11821 IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
11822 IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
11823 IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
11824 IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
11825 IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
11826 IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
11827 IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
11828 IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
11829 IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
11830 IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
11831 IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
11832 IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
11833 IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
11834 IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
11835 IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
11836 IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
11837 IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
11838 IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
11839 IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
11840 IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
11841 IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
11842 IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
11843 IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
11844 IWMMXT_BUILTIN (iwmmxt_wmadds, "wmadds", WMADDS)
11845 IWMMXT_BUILTIN (iwmmxt_wmaddu, "wmaddu", WMADDU)
11846
11847#define IWMMXT_BUILTIN2(code, builtin) \
11848 { FL_IWMMXT, CODE_FOR_##code, NULL, ARM_BUILTIN_##builtin, 0, 0 },
11849
11850 IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
11851 IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
11852 IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
11853 IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
11854 IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
11855 IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
11856 IWMMXT_BUILTIN2 (ashlv4hi3_di, WSLLH)
11857 IWMMXT_BUILTIN2 (ashlv4hi3, WSLLHI)
11858 IWMMXT_BUILTIN2 (ashlv2si3_di, WSLLW)
11859 IWMMXT_BUILTIN2 (ashlv2si3, WSLLWI)
11860 IWMMXT_BUILTIN2 (ashldi3_di, WSLLD)
11861 IWMMXT_BUILTIN2 (ashldi3_iwmmxt, WSLLDI)
11862 IWMMXT_BUILTIN2 (lshrv4hi3_di, WSRLH)
11863 IWMMXT_BUILTIN2 (lshrv4hi3, WSRLHI)
11864 IWMMXT_BUILTIN2 (lshrv2si3_di, WSRLW)
11865 IWMMXT_BUILTIN2 (lshrv2si3, WSRLWI)
11866 IWMMXT_BUILTIN2 (lshrdi3_di, WSRLD)
9b66ebb1 11867 IWMMXT_BUILTIN2 (lshrdi3_iwmmxt, WSRLDI)
5a9335ef
NC
11868 IWMMXT_BUILTIN2 (ashrv4hi3_di, WSRAH)
11869 IWMMXT_BUILTIN2 (ashrv4hi3, WSRAHI)
11870 IWMMXT_BUILTIN2 (ashrv2si3_di, WSRAW)
11871 IWMMXT_BUILTIN2 (ashrv2si3, WSRAWI)
11872 IWMMXT_BUILTIN2 (ashrdi3_di, WSRAD)
9b66ebb1 11873 IWMMXT_BUILTIN2 (ashrdi3_iwmmxt, WSRADI)
5a9335ef
NC
11874 IWMMXT_BUILTIN2 (rorv4hi3_di, WRORH)
11875 IWMMXT_BUILTIN2 (rorv4hi3, WRORHI)
11876 IWMMXT_BUILTIN2 (rorv2si3_di, WRORW)
11877 IWMMXT_BUILTIN2 (rorv2si3, WRORWI)
11878 IWMMXT_BUILTIN2 (rordi3_di, WRORD)
11879 IWMMXT_BUILTIN2 (rordi3, WRORDI)
11880 IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
11881 IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)
11882};
11883
11884static const struct builtin_description bdesc_1arg[] =
11885{
11886 IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
11887 IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
11888 IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
11889 IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
  IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
  IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
  IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
  IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
  IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
  IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
  IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
  IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
};

/* Set up all the iWMMXt builtins.  This is
   not called if TARGET_IWMMXT is zero.  */

static void
arm_init_iwmmxt_builtins (void)
{
  const struct builtin_description * d;
  size_t i;
  tree endlink = void_list_node;

  tree V2SI_type_node = build_vector_type_for_mode (intSI_type_node, V2SImode);
  tree V4HI_type_node = build_vector_type_for_mode (intHI_type_node, V4HImode);
  tree V8QI_type_node = build_vector_type_for_mode (intQI_type_node, V8QImode);

  tree int_ftype_int
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));
  tree v8qi_ftype_v8qi_v8qi_int
    = build_function_type (V8QI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, V8QI_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree v4hi_ftype_v4hi_int
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree v2si_ftype_v2si_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree v2si_ftype_di_di
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, long_long_integer_type_node,
				      tree_cons (NULL_TREE, long_long_integer_type_node,
						 endlink)));
  tree di_ftype_di_int
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, long_long_integer_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree di_ftype_di_int_int
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, long_long_integer_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree int_ftype_v8qi
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      endlink));
  tree int_ftype_v4hi
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      endlink));
  tree int_ftype_v2si
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink));
  tree int_ftype_v8qi_int
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree int_ftype_v4hi_int
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree int_ftype_v2si_int
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree v8qi_ftype_v8qi_int_int
    = build_function_type (V8QI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree v4hi_ftype_v4hi_int_int
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  tree v2si_ftype_v2si_int_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));
  /* Miscellaneous.  */
  tree v8qi_ftype_v4hi_v4hi
    = build_function_type (V8QI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 endlink)));
  tree v4hi_ftype_v2si_v2si
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)));
  tree v2si_ftype_v4hi_v4hi
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 endlink)));
  tree v2si_ftype_v8qi_v8qi
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, V8QI_type_node,
						 endlink)));
  tree v4hi_ftype_v4hi_di
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE,
						 long_long_integer_type_node,
						 endlink)));
  tree v2si_ftype_v2si_di
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE,
						 long_long_integer_type_node,
						 endlink)));
  tree void_ftype_int_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));
  tree di_ftype_void
    = build_function_type (long_long_unsigned_type_node, endlink);
  tree di_ftype_v8qi
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      endlink));
  tree di_ftype_v4hi
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      endlink));
  tree di_ftype_v2si
    = build_function_type (long_long_integer_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink));
  tree v2si_ftype_v4hi
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      endlink));
  tree v4hi_ftype_v8qi
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      endlink));

  tree di_ftype_di_v4hi_v4hi
    = build_function_type (long_long_unsigned_type_node,
			   tree_cons (NULL_TREE,
				      long_long_unsigned_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 tree_cons (NULL_TREE,
							    V4HI_type_node,
							    endlink))));

  tree di_ftype_v4hi_v4hi
    = build_function_type (long_long_unsigned_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 endlink)));

  /* Normal vector binops.  */
  tree v8qi_ftype_v8qi_v8qi
    = build_function_type (V8QI_type_node,
			   tree_cons (NULL_TREE, V8QI_type_node,
				      tree_cons (NULL_TREE, V8QI_type_node,
						 endlink)));
  tree v4hi_ftype_v4hi_v4hi
    = build_function_type (V4HI_type_node,
			   tree_cons (NULL_TREE, V4HI_type_node,
				      tree_cons (NULL_TREE, V4HI_type_node,
						 endlink)));
  tree v2si_ftype_v2si_v2si
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)));
  tree di_ftype_di_di
    = build_function_type (long_long_unsigned_type_node,
			   tree_cons (NULL_TREE, long_long_unsigned_type_node,
				      tree_cons (NULL_TREE,
						 long_long_unsigned_type_node,
						 endlink)));

  /* Add all builtins that are more or less simple operations on two
     operands.  */
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      /* Use one of the operands; the target can have a different mode for
	 mask-generating compares.  */
      enum machine_mode mode;
      tree type;

      if (d->name == 0)
	continue;

      mode = insn_data[d->icode].operand[1].mode;

      switch (mode)
	{
	case V8QImode:
	  type = v8qi_ftype_v8qi_v8qi;
	  break;
	case V4HImode:
	  type = v4hi_ftype_v4hi_v4hi;
	  break;
	case V2SImode:
	  type = v2si_ftype_v2si_v2si;
	  break;
	case DImode:
	  type = di_ftype_di_di;
	  break;

	default:
	  abort ();
	}

      def_mbuiltin (d->mask, d->name, type, d->code);
    }

  /* Add the remaining iWMMXt insns with somewhat more complicated types.  */
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wzero", di_ftype_void, ARM_BUILTIN_WZERO);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_setwcx", void_ftype_int_int, ARM_BUILTIN_SETWCX);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_getwcx", int_ftype_int, ARM_BUILTIN_GETWCX);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSLLH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllw", v2si_ftype_v2si_di, ARM_BUILTIN_WSLLW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslld", di_ftype_di_di, ARM_BUILTIN_WSLLD);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSLLHI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSLLWI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslldi", di_ftype_di_int, ARM_BUILTIN_WSLLDI);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRLH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRLW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrld", di_ftype_di_di, ARM_BUILTIN_WSRLD);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRLHI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRLWI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrldi", di_ftype_di_int, ARM_BUILTIN_WSRLDI);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrah", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRAH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsraw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRAW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrad", di_ftype_di_di, ARM_BUILTIN_WSRAD);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrahi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRAHI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrawi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRAWI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsradi", di_ftype_di_int, ARM_BUILTIN_WSRADI);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WRORH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorw", v2si_ftype_v2si_di, ARM_BUILTIN_WRORW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrord", di_ftype_di_di, ARM_BUILTIN_WRORD);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WRORHI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorwi", v2si_ftype_v2si_int, ARM_BUILTIN_WRORWI);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrordi", di_ftype_di_int, ARM_BUILTIN_WRORDI);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wshufh", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSHUFH);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadb", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadh", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadbz", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADBZ);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadhz", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADHZ);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsb", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMSB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMSH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMSW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmub", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMUB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMUH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMUW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrb", v8qi_ftype_v8qi_int_int, ARM_BUILTIN_TINSRB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrh", v4hi_ftype_v4hi_int_int, ARM_BUILTIN_TINSRH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrw", v2si_ftype_v2si_int_int, ARM_BUILTIN_TINSRW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccb", di_ftype_v8qi, ARM_BUILTIN_WACCB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wacch", di_ftype_v4hi, ARM_BUILTIN_WACCH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccw", di_ftype_v2si, ARM_BUILTIN_WACCW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskb", int_ftype_v8qi, ARM_BUILTIN_TMOVMSKB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskh", int_ftype_v4hi, ARM_BUILTIN_TMOVMSKH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskw", int_ftype_v2si, ARM_BUILTIN_TMOVMSKW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhss", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHSS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhus", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHUS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwus", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWUS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwss", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWSS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdus", v2si_ftype_di_di, ARM_BUILTIN_WPACKDUS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdss", v2si_ftype_di_di, ARM_BUILTIN_WPACKDSS);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHUB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHUH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHUW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHSB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHSH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHSW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELUB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELUH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELUW);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELSB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELSH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELSW);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacs", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACS);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacsz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACSZ);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacu", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACU);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacuz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACUZ);

  def_mbuiltin (FL_IWMMXT, "__builtin_arm_walign", v8qi_ftype_v8qi_v8qi_int, ARM_BUILTIN_WALIGN);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmia", di_ftype_di_int_int, ARM_BUILTIN_TMIA);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiaph", di_ftype_di_int_int, ARM_BUILTIN_TMIAPH);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabb", di_ftype_di_int_int, ARM_BUILTIN_TMIABB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabt", di_ftype_di_int_int, ARM_BUILTIN_TMIABT);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatb", di_ftype_di_int_int, ARM_BUILTIN_TMIATB);
  def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatt", di_ftype_di_int_int, ARM_BUILTIN_TMIATT);
}
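
/* Illustration (not part of the compiler itself): once the builtins
   above are registered, user code compiled with iWMMXt enabled can
   call them directly.  A minimal, hypothetical example:

	long long
	example (void)
	{
	  long long w = __builtin_arm_wzero ();
	  __builtin_arm_setwcx (0, 0);
	  return w + __builtin_arm_getwcx (0);
	}

   wzero has type di_ftype_void, setwcx void_ftype_int_int and getwcx
   int_ftype_int, matching the def_mbuiltin calls above; the precise
   meaning of the setwcx/getwcx arguments follows the tmcr/tmrc
   instructions they expand to (see arm_expand_builtin below).  */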

static void
arm_init_builtins (void)
{
  if (TARGET_REALLY_IWMMXT)
    arm_init_iwmmxt_builtins ();
}

/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.  */

static rtx
safe_vector_operand (rtx x, enum machine_mode mode)
{
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (mode);

  emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
			       : gen_rtx_SUBREG (DImode, x, 0)));
  return x;
}
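
/* For example, if a source error made expand_expr return const0_rtx
   for a V4HImode operand, the code above substitutes a fresh V4HImode
   register cleared through its DImode view, so that expansion of the
   containing builtin can continue without crashing.  */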

/* Subroutine of arm_expand_builtin to take care of binop insns.  */

static rtx
arm_expand_binop_builtin (enum insn_code icode,
			  tree arglist, rtx target)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* If the insn wants input operands in modes different from
     the result, abort.  */
  if (GET_MODE (op0) != mode0 || GET_MODE (op1) != mode1)
    abort ();

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}

/* Subroutine of arm_expand_builtin to take care of unop insns.  */

static rtx
arm_expand_unop_builtin (enum insn_code icode,
			 tree arglist, rtx target, int do_load)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);
  if (do_load)
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
  else
    {
      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
    }

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
arm_expand_builtin (tree exp,
		    rtx target,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  const struct builtin_description * d;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  tree arg1;
  tree arg2;
  rtx op0;
  rtx op1;
  rtx op2;
  rtx pat;
  int fcode = DECL_FUNCTION_CODE (fndecl);
  size_t i;
  enum machine_mode tmode;
  enum machine_mode mode0;
  enum machine_mode mode1;
  enum machine_mode mode2;

  switch (fcode)
    {
    case ARM_BUILTIN_TEXTRMSB:
    case ARM_BUILTIN_TEXTRMUB:
    case ARM_BUILTIN_TEXTRMSH:
    case ARM_BUILTIN_TEXTRMUH:
    case ARM_BUILTIN_TEXTRMSW:
    case ARM_BUILTIN_TEXTRMUW:
      icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
	       : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
	       : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
	       : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
	       : CODE_FOR_iwmmxt_textrmw);

      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;
      mode1 = insn_data[icode].operand[2].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
	{
	  /* @@@ better error message */
	  error ("selector must be an immediate");
	  return gen_reg_rtx (tmode);
	}
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0, op1);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ARM_BUILTIN_TINSRB:
    case ARM_BUILTIN_TINSRH:
    case ARM_BUILTIN_TINSRW:
      icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
	       : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
	       : CODE_FOR_iwmmxt_tinsrw);
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;
      mode1 = insn_data[icode].operand[2].mode;
      mode2 = insn_data[icode].operand[3].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
	op1 = copy_to_mode_reg (mode1, op1);
      if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
	{
	  /* @@@ better error message */
	  error ("selector must be an immediate");
	  return const0_rtx;
	}
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0, op1, op2);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ARM_BUILTIN_SETWCX:
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = force_reg (SImode, expand_expr (arg0, NULL_RTX, VOIDmode, 0));
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      emit_insn (gen_iwmmxt_tmcr (op1, op0));
      return 0;

    case ARM_BUILTIN_GETWCX:
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      target = gen_reg_rtx (SImode);
      emit_insn (gen_iwmmxt_tmrc (target, op0));
      return target;

    case ARM_BUILTIN_WSHUFH:
      icode = CODE_FOR_iwmmxt_wshufh;
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode1 = insn_data[icode].operand[1].mode;
      mode2 = insn_data[icode].operand[2].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
	op0 = copy_to_mode_reg (mode1, op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
	{
	  /* @@@ better error message */
	  error ("mask must be an immediate");
	  return const0_rtx;
	}
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0, op1);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ARM_BUILTIN_WSADB:
      return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadb, arglist, target);
    case ARM_BUILTIN_WSADH:
      return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadh, arglist, target);
    case ARM_BUILTIN_WSADBZ:
      return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, arglist, target);
    case ARM_BUILTIN_WSADHZ:
      return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, arglist, target);

      /* Several three-argument builtins.  */
    case ARM_BUILTIN_WMACS:
    case ARM_BUILTIN_WMACU:
    case ARM_BUILTIN_WALIGN:
    case ARM_BUILTIN_TMIA:
    case ARM_BUILTIN_TMIAPH:
    case ARM_BUILTIN_TMIATT:
    case ARM_BUILTIN_TMIATB:
    case ARM_BUILTIN_TMIABT:
    case ARM_BUILTIN_TMIABB:
      icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
	       : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
	       : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
	       : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
	       : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
	       : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
	       : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
	       : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
	       : CODE_FOR_iwmmxt_walign);
      arg0 = TREE_VALUE (arglist);
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
      op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;
      mode1 = insn_data[icode].operand[2].mode;
      mode2 = insn_data[icode].operand[3].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
	op1 = copy_to_mode_reg (mode1, op1);
      if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
	op2 = copy_to_mode_reg (mode2, op2);
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0, op1, op2);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ARM_BUILTIN_WZERO:
      target = gen_reg_rtx (DImode);
      emit_insn (gen_iwmmxt_clrdi (target));
      return target;

    default:
      break;
    }

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == (const enum arm_builtins) fcode)
      return arm_expand_binop_builtin (d->icode, arglist, target);

  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == (const enum arm_builtins) fcode)
      return arm_expand_unop_builtin (d->icode, arglist, target, 0);

  /* @@@ Should really do something sensible here.  */
  return NULL_RTX;
}
\f
/* Recursively search through all of the blocks in a function
   checking to see if any of the variables created in that
   function match the RTX called 'orig'.  If they do then
   replace them with the RTX called 'new'.  */
static void
replace_symbols_in_block (tree block, rtx orig, rtx new)
{
  for (; block; block = BLOCK_CHAIN (block))
    {
      tree sym;

      if (!TREE_USED (block))
	continue;

      for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
	{
	  if (  (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
	      || DECL_IGNORED_P (sym)
	      || TREE_CODE (sym) != VAR_DECL
	      || DECL_EXTERNAL (sym)
	      || !rtx_equal_p (DECL_RTL (sym), orig))
	    continue;

	  SET_DECL_RTL (sym, new);
	}

      replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
    }
}

/* Return the number (counting from 0) of
   the least significant set bit in MASK.  */

inline static int
number_of_first_bit_set (int mask)
{
  int bit;

  for (bit = 0;
       (mask & (1 << bit)) == 0;
       ++bit)
    continue;

  return bit;
}
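
/* For instance, number_of_first_bit_set (0x0c) is 2: binary 1100 has
   its least significant set bit at position 2.  Note that MASK must
   be nonzero, or the loop above will never terminate.  */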

/* Generate code to return from a Thumb function.
   If 'reg_containing_return_addr' is -1, then the return address is
   actually on the stack, at the stack pointer.  */
static void
thumb_exit (FILE *f, int reg_containing_return_addr)
{
  unsigned regs_available_for_popping;
  unsigned regs_to_pop;
  int pops_needed;
  unsigned available;
  unsigned required;
  int mode;
  int size;
  int restore_a4 = FALSE;

  /* Compute the registers we need to pop.  */
  regs_to_pop = 0;
  pops_needed = 0;

  if (reg_containing_return_addr == -1)
    {
      regs_to_pop |= 1 << LR_REGNUM;
      ++pops_needed;
    }

  if (TARGET_BACKTRACE)
    {
      /* Restore the (ARM) frame pointer and stack pointer.  */
      regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
      pops_needed += 2;
    }

  /* If there is nothing to pop then just emit the BX instruction and
     return.  */
  if (pops_needed == 0)
    {
      if (current_function_calls_eh_return)
	asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, ARM_EH_STACKADJ_REGNUM);

      asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
      return;
    }
  /* Otherwise if we are not supporting interworking, we have not created
     a backtrace structure, the function was not entered in ARM mode and
     we are not returning via __builtin_eh_return, then just pop the return
     address straight into the PC.  */
  else if (!TARGET_INTERWORK
	   && !TARGET_BACKTRACE
	   && !is_called_in_ARM_mode (current_function_decl)
	   && !current_function_calls_eh_return)
    {
      asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
      return;
    }

  /* Find out how many of the (return) argument registers we can corrupt.  */
  regs_available_for_popping = 0;

  /* If returning via __builtin_eh_return, the bottom three registers
     all contain information needed for the return.  */
  if (current_function_calls_eh_return)
    size = 12;
  else
    {
      /* We can deduce the registers used from the function's return
	 value.  This is more reliable than examining regs_ever_live[]
	 because that will be set if the register is ever used in the
	 function, not just if the register is used to hold a return
	 value.  */

      if (current_function_return_rtx != 0)
	mode = GET_MODE (current_function_return_rtx);
      else
	mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      if (size == 0)
	{
	  /* In a void function we can use any argument register.
	     In a function that returns a structure on the stack
	     we can use the second and third argument registers.  */
	  if (mode == VOIDmode)
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (1))
	      | (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	  else
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	}
      else if (size <= 4)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (2))
	  | (1 << ARG_REGISTER (3));
      else if (size <= 8)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (3));
    }

  /* Match registers to be popped with registers into which we pop them.  */
  for (available = regs_available_for_popping,
       required  = regs_to_pop;
       required != 0 && available != 0;
       available &= ~(available & - available),
       required  &= ~(required  & - required))
    -- pops_needed;

  /* If we have any popping registers left over, remove them.  */
  if (available > 0)
    regs_available_for_popping &= ~available;

  /* Otherwise if we need another popping register we can use
     the fourth argument register.  */
  else if (pops_needed)
    {
      /* If we have not found any free argument registers and
	 reg a4 contains the return address, we must move it.  */
      if (regs_available_for_popping == 0
	  && reg_containing_return_addr == LAST_ARG_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}
      else if (size > 12)
	{
	  /* Register a4 is being used to hold part of the return value,
	     but we have dire need of a free, low register.  */
	  restore_a4 = TRUE;

	  asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
	}

      if (reg_containing_return_addr != LAST_ARG_REGNUM)
	{
	  /* The fourth argument register is available.  */
	  regs_available_for_popping |= 1 << LAST_ARG_REGNUM;

	  --pops_needed;
	}
    }

  /* Pop as many registers as we can.  */
  thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
		 regs_available_for_popping);

  /* Process the registers we popped.  */
  if (reg_containing_return_addr == -1)
    {
      /* The return address was popped into the lowest numbered register.  */
      regs_to_pop &= ~(1 << LR_REGNUM);

      reg_containing_return_addr =
	number_of_first_bit_set (regs_available_for_popping);

      /* Remove this register from the mask of available registers, so that
	 the return address will not be corrupted by further pops.  */
      regs_available_for_popping &= ~(1 << reg_containing_return_addr);
    }

  /* If we popped other registers then handle them here.  */
  if (regs_available_for_popping)
    {
      int frame_pointer;

      /* Work out which register currently contains the frame pointer.  */
      frame_pointer = number_of_first_bit_set (regs_available_for_popping);

      /* Move it into the correct place.  */
      asm_fprintf (f, "\tmov\t%r, %r\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);

      /* (Temporarily) remove it from the mask of popped registers.  */
      regs_available_for_popping &= ~(1 << frame_pointer);
      regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);

      if (regs_available_for_popping)
	{
	  int stack_pointer;

	  /* We popped the stack pointer as well; find the register that
	     contains it.  */
	  stack_pointer = number_of_first_bit_set (regs_available_for_popping);

	  /* Move it into the stack register.  */
	  asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);

	  /* At this point we have popped all necessary registers, so
	     do not worry about restoring regs_available_for_popping
	     to its correct value:

	     assert (pops_needed == 0)
	     assert (regs_available_for_popping == (1 << frame_pointer))
	     assert (regs_to_pop == (1 << STACK_POINTER))  */
	}
      else
	{
	  /* Since we have just moved the popped value into the frame
	     pointer, the popping register is available for reuse, and
	     we know that we still have the stack pointer left to pop.  */
	  regs_available_for_popping |= (1 << frame_pointer);
	}
    }

  /* If we still have registers left on the stack, but we no longer have
     any registers into which we can pop them, then we must move the return
     address into the link register and make available the register that
     contained it.  */
  if (regs_available_for_popping == 0 && pops_needed > 0)
    {
      regs_available_for_popping |= 1 << reg_containing_return_addr;

      asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
		   reg_containing_return_addr);

      reg_containing_return_addr = LR_REGNUM;
    }

  /* If we have registers left on the stack then pop some more.
     We know that at most we will want to pop FP and SP.  */
  if (pops_needed > 0)
    {
      int popped_into;
      int move_to;

      thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
		     regs_available_for_popping);

      /* We have popped either FP or SP.
	 Move whichever one it is into the correct register.  */
      popped_into = number_of_first_bit_set (regs_available_for_popping);
      move_to     = number_of_first_bit_set (regs_to_pop);

      asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);

      regs_to_pop &= ~(1 << move_to);

      --pops_needed;
    }

  /* If we still have not popped everything then we must have only
     had one register available to us and we are now popping the SP.  */
  if (pops_needed > 0)
    {
      int popped_into;

      thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
		     regs_available_for_popping);

      popped_into = number_of_first_bit_set (regs_available_for_popping);

      asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
      /*
	assert (regs_to_pop == (1 << STACK_POINTER))
	assert (pops_needed == 1)
      */
    }

  /* If necessary restore the a4 register.  */
  if (restore_a4)
    {
      if (reg_containing_return_addr != LR_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }

  if (current_function_calls_eh_return)
    asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, ARM_EH_STACKADJ_REGNUM);

  /* Return to caller.  */
  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
}
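
/* Illustration of the simple paths above: when nothing needs popping,
   thumb_exit emits a lone "bx" through the register holding the
   return address; without interworking, a backtrace structure or
   __builtin_eh_return it can instead emit just

	pop	{pc}

   All other cases shuffle values through the argument registers as
   coded above.  */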

/* Emit code to push or pop registers to or from the stack.  F is the
   assembly file.  MASK is the registers to push or pop.  PUSH is
   nonzero if we should push, and zero if we should pop.  For debugging
   output, if pushing, adjust CFA_OFFSET by the amount of space added
   to the stack.  REAL_REGS should have the same number of bits set as
   MASK, and will be used instead (in the same order) to describe which
   registers were saved - this is used to mark the save slots when we
   push high registers after moving them to low registers.  */
static void
thumb_pushpop (FILE *f, int mask, int push, int *cfa_offset, int real_regs)
{
  int regno;
  int lo_mask = mask & 0xFF;
  int pushed_words = 0;

  if (lo_mask == 0 && !push && (mask & (1 << PC_REGNUM)))
    {
      /* Special case.  Do not generate a POP PC statement here;
	 do it in thumb_exit ().  */
      thumb_exit (f, -1);
      return;
    }

  fprintf (f, "\t%s\t{", push ? "push" : "pop");

  /* Look at the low registers first.  */
  for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
    {
      if (lo_mask & 1)
	{
	  asm_fprintf (f, "%r", regno);

	  if ((lo_mask & ~1) != 0)
	    fprintf (f, ", ");

	  pushed_words++;
	}
    }

  if (push && (mask & (1 << LR_REGNUM)))
    {
      /* Catch pushing the LR.  */
      if (mask & 0xFF)
	fprintf (f, ", ");

      asm_fprintf (f, "%r", LR_REGNUM);

      pushed_words++;
    }
  else if (!push && (mask & (1 << PC_REGNUM)))
    {
      /* Catch popping the PC.  */
      if (TARGET_INTERWORK || TARGET_BACKTRACE
	  || current_function_calls_eh_return)
	{
	  /* The PC is never popped directly; instead it is popped
	     into r3 and then BX is used.  */
	  fprintf (f, "}\n");

	  thumb_exit (f, -1);

	  return;
	}
      else
	{
	  if (mask & 0xFF)
	    fprintf (f, ", ");

	  asm_fprintf (f, "%r", PC_REGNUM);
	}
    }

  fprintf (f, "}\n");

  if (push && pushed_words && dwarf2out_do_frame ())
    {
      char *l = dwarf2out_cfi_label ();
      int pushed_mask = real_regs;

      *cfa_offset += pushed_words * 4;
      dwarf2out_def_cfa (l, SP_REGNUM, *cfa_offset);

      pushed_words = 0;
      pushed_mask = real_regs;
      for (regno = 0; regno <= 14; regno++, pushed_mask >>= 1)
	{
	  if (pushed_mask & 1)
	    dwarf2out_reg_save (l, regno, 4 * pushed_words++ - *cfa_offset);
	}
    }
}
\f
void
thumb_final_prescan_insn (rtx insn)
{
  if (flag_print_asm_name)
    asm_fprintf (asm_out_file, "%@ 0x%04x\n",
		 INSN_ADDRESSES (INSN_UID (insn)));
}

int
thumb_shiftable_const (unsigned HOST_WIDE_INT val)
{
  unsigned HOST_WIDE_INT mask = 0xff;
  int i;

  if (val == 0) /* XXX */
    return 0;

  for (i = 0; i < 25; i++)
    if ((val & (mask << i)) == val)
      return 1;

  return 0;
}
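
/* For example, thumb_shiftable_const (0x00ff0000) returns 1 because
   0x00ff0000 == 0xff << 16, whereas thumb_shiftable_const (0x101)
   returns 0: its set bits span nine positions, which cannot fit in
   the eight-bit window the loop slides over the value.  */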

/* Returns nonzero if the current function contains,
   or might contain, a far jump.  */
static int
thumb_far_jump_used_p (void)
{
  rtx insn;

  /* This test is only important for leaf functions.  */
  /* assert (!leaf_function_p ()); */

  /* If we have already decided that far jumps may be used,
     do not bother checking again, and always return true even if
     it turns out that they are not being used.  Once we have made
     the decision that far jumps are present (and that hence the link
     register will be pushed onto the stack) we cannot go back on it.  */
  if (cfun->machine->far_jump_used)
    return 1;

  /* If this function is not being called from the prologue/epilogue
     generation code then it must be being called from the
     INITIAL_ELIMINATION_OFFSET macro.  */
  if (!(ARM_DOUBLEWORD_ALIGN || reload_completed))
    {
      /* In this case we know that we are being asked about the elimination
	 of the arg pointer register.  If that register is not being used,
	 then there are no arguments on the stack, and we do not have to
	 worry that a far jump might force the prologue to push the link
	 register, changing the stack offsets.  In this case we can just
	 return false, since the presence of far jumps in the function will
	 not affect stack offsets.

	 If the arg pointer is live (or if it was live, but has now been
	 eliminated and so set to dead) then we do have to test to see if
	 the function might contain a far jump.  This test can lead to some
	 false negatives, since before reload is completed, the length of
	 branch instructions is not known, so gcc defaults to returning their
	 longest length, which in turn sets the far jump attribute to true.

	 A false negative will not result in bad code being generated, but it
	 will result in a needless push and pop of the link register.  We
	 hope that this does not occur too often.

	 If we need doubleword stack alignment this could affect the other
	 elimination offsets so we can't risk getting it wrong.  */
      if (regs_ever_live [ARG_POINTER_REGNUM])
	cfun->machine->arg_pointer_live = 1;
      else if (!cfun->machine->arg_pointer_live)
	return 0;
    }

  /* Check to see if the function contains a branch
     insn with the far jump attribute set.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN
	  /* Ignore tablejump patterns.  */
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
	  && get_attr_far_jump (insn) == FAR_JUMP_YES)
	{
	  /* Record the fact that we have decided that
	     the function does use far jumps.  */
	  cfun->machine->far_jump_used = 1;
	  return 1;
	}
    }

  return 0;
}

/* Return nonzero if FUNC must be entered in ARM mode.  */
int
is_called_in_ARM_mode (tree func)
{
  if (TREE_CODE (func) != FUNCTION_DECL)
    abort ();

  /* Ignore the problem of functions whose address is taken.  */
  if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
    return TRUE;

#ifdef ARM_PE
  return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
#else
  return FALSE;
#endif
}

/* The bits which aren't usefully expanded as rtl.  */
const char *
thumb_unexpanded_epilogue (void)
{
  int regno;
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int had_to_push_lr;
  int size;
  int mode;

  if (return_used_this_function)
    return "";

  if (IS_NAKED (arm_current_func_type ()))
    return "";

  live_regs_mask = thumb_compute_save_reg_mask ();
  high_regs_pushed = bit_count (live_regs_mask & 0x0f00);

  /* We can deduce the registers used from the function's return value.
     This is more reliable than examining regs_ever_live[] because that
     will be set if the register is ever used in the function, not just if
     the register is used to hold a return value.  */

  if (current_function_return_rtx != 0)
    mode = GET_MODE (current_function_return_rtx);
  else
    mode = DECL_MODE (DECL_RESULT (current_function_decl));

  size = GET_MODE_SIZE (mode);

  /* The prologue may have pushed some high registers to use as
     work registers.  E.g. the testsuite file:
     gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
     compiles to produce:
	push	{r4, r5, r6, r7, lr}
	mov	r7, r9
	mov	r6, r8
	push	{r6, r7}
     as part of the prologue.  We have to undo that pushing here.  */

  if (high_regs_pushed)
    {
      int mask = live_regs_mask & 0xff;
      int next_hi_reg;

      /* The available low registers depend on the size of the value we are
	 returning.  */
      if (size <= 12)
	mask |= 1 << 3;
      if (size <= 8)
	mask |= 1 << 2;

      if (mask == 0)
	/* Oh dear!  We have no low registers into which we can pop
	   high registers!  */
	internal_error
	  ("no low registers available for popping high registers");

      for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
	if (live_regs_mask & (1 << next_hi_reg))
	  break;

      while (high_regs_pushed)
	{
	  /* Find lo register(s) into which the high register(s) can
	     be popped.  */
	  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
	    {
	      if (mask & (1 << regno))
		high_regs_pushed--;
	      if (high_regs_pushed == 0)
		break;
	    }

	  mask &= (2 << regno) - 1;	/* A noop if regno == 8.  */

	  /* Pop the values into the low register(s).  */
	  thumb_pushpop (asm_out_file, mask, 0, NULL, mask);

	  /* Move the value(s) into the high registers.  */
	  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
			       regno);

		  for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
		    if (live_regs_mask & (1 << next_hi_reg))
		      break;
		}
	    }
	}
      live_regs_mask &= ~0x0f00;
    }

  had_to_push_lr = (live_regs_mask & (1 << LR_REGNUM)) != 0;
  live_regs_mask &= 0xff;

  if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
    {
      /* Pop the return address into the PC.  */
      if (had_to_push_lr)
	live_regs_mask |= 1 << PC_REGNUM;

      /* Either no argument registers were pushed or a backtrace
	 structure was created which includes an adjusted stack
	 pointer, so just pop everything.  */
      if (live_regs_mask)
	thumb_pushpop (asm_out_file, live_regs_mask, FALSE, NULL,
		       live_regs_mask);

      /* We have either just popped the return address into the
	 PC or it was kept in LR for the entire function.  */
      if (!had_to_push_lr)
	thumb_exit (asm_out_file, LR_REGNUM);
    }
  else
    {
      /* Pop everything but the return address.  */
      if (live_regs_mask)
	thumb_pushpop (asm_out_file, live_regs_mask, FALSE, NULL,
		       live_regs_mask);

      if (had_to_push_lr)
	{
	  if (size > 12)
	    {
	      /* We have no free low regs, so save one.  */
	      asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", IP_REGNUM,
			   LAST_ARG_REGNUM);
	    }

	  /* Get the return address into a temporary register.  */
	  thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0, NULL,
			 1 << LAST_ARG_REGNUM);

	  if (size > 12)
	    {
	      /* Move the return address to lr.  */
	      asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", LR_REGNUM,
			   LAST_ARG_REGNUM);
	      /* Restore the low register.  */
	      asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", LAST_ARG_REGNUM,
			   IP_REGNUM);
	      regno = LR_REGNUM;
	    }
	  else
	    regno = LAST_ARG_REGNUM;
	}
      else
	regno = LR_REGNUM;

      /* Remove the argument registers that were pushed onto the stack.  */
      asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
		   SP_REGNUM, SP_REGNUM,
		   current_function_pretend_args_size);

      thumb_exit (asm_out_file, regno);
    }

  return "";
}

/* Functions to save and restore machine-specific function data.  */
static struct machine_function *
arm_init_machine_status (void)
{
  struct machine_function *machine;
  machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));

#if ARM_FT_UNKNOWN != 0
  machine->func_type = ARM_FT_UNKNOWN;
#endif
  return machine;
}

/* Return an RTX indicating where the return address to the
   calling function can be found.  */
rtx
arm_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
{
  if (count != 0)
    return NULL_RTX;

  return get_hard_reg_initial_val (Pmode, LR_REGNUM);
}

/* Do anything needed before RTL is emitted for each function.  */
void
arm_init_expanders (void)
{
  /* Arrange to initialize and mark the machine per-function status.  */
  init_machine_status = arm_init_machine_status;

  /* This is to stop the combine pass optimizing away the alignment
     adjustment of va_arg.  */
  /* ??? It is claimed that this should not be necessary.  */
  if (cfun)
    mark_reg_pointer (arg_pointer_rtx, PARM_BOUNDARY);
}


/* Like arm_compute_initial_elimination_offset.  Simpler because
   THUMB_HARD_FRAME_POINTER isn't actually the ABI specified frame pointer.  */

HOST_WIDE_INT
thumb_compute_initial_elimination_offset (unsigned int from, unsigned int to)
{
  arm_stack_offsets *offsets;

  offsets = arm_get_frame_offsets ();

  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
	{
	case STACK_POINTER_REGNUM:
	  return offsets->outgoing_args - offsets->saved_args;

	case FRAME_POINTER_REGNUM:
	  return offsets->soft_frame - offsets->saved_args;

	case THUMB_HARD_FRAME_POINTER_REGNUM:
	case ARM_HARD_FRAME_POINTER_REGNUM:
	  return offsets->saved_regs - offsets->saved_args;

	default:
	  abort ();
	}
      break;

    case FRAME_POINTER_REGNUM:
      switch (to)
	{
	case STACK_POINTER_REGNUM:
	  return offsets->outgoing_args - offsets->soft_frame;

	case THUMB_HARD_FRAME_POINTER_REGNUM:
	case ARM_HARD_FRAME_POINTER_REGNUM:
	  return offsets->saved_regs - offsets->soft_frame;

	default:
	  abort ();
	}
      break;

    default:
      abort ();
    }
}
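
/* A worked example (the numbers are hypothetical): with offsets of
   saved_args == 0, saved_regs == 24, soft_frame == 24 and
   outgoing_args == 40, eliminating ARG_POINTER_REGNUM to
   STACK_POINTER_REGNUM yields 40, to FRAME_POINTER_REGNUM yields 24,
   and eliminating FRAME_POINTER_REGNUM to STACK_POINTER_REGNUM
   yields 16.  */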

/* Generate the rest of a function's prologue.  */
void
thumb_expand_prologue (void)
{
  rtx insn, dwarf;

  HOST_WIDE_INT amount;
  arm_stack_offsets *offsets;
  unsigned long func_type;
  int regno;
  unsigned long live_regs_mask;

  func_type = arm_current_func_type ();

  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;

  if (IS_INTERRUPT (func_type))
    {
      error ("interrupt Service Routines cannot be coded in Thumb mode");
      return;
    }

  /* Load the pic register before setting the frame pointer, so we can use r7
     as a temporary work register.  */
  if (flag_pic)
    arm_load_pic_register ();

  offsets = arm_get_frame_offsets ();

  if (frame_pointer_needed)
    {
      insn = emit_insn (gen_movsi (hard_frame_pointer_rtx,
				   stack_pointer_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  live_regs_mask = thumb_compute_save_reg_mask ();
  amount = offsets->outgoing_args - offsets->saved_regs;
  if (amount)
    {
      if (amount < 512)
	{
	  insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
					GEN_INT (- amount)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  rtx reg;

	  /* The stack decrement is too big for an immediate value in a single
	     insn.  In theory we could issue multiple subtracts, but after
	     three of them it becomes more space efficient to place the full
	     value in the constant pool and load into a register.  (Also the
	     ARM debugger really likes to see only one stack decrement per
	     function).  So instead we look for a scratch register into which
	     we can load the decrement, and then we subtract this from the
	     stack pointer.  Unfortunately on the thumb the only available
	     scratch registers are the argument registers, and we cannot use
	     these as they may hold arguments to the function.  Instead we
	     attempt to locate a call preserved register which is used by this
	     function.  If we can find one, then we know that it will have
	     been pushed at the start of the prologue and so we can corrupt
	     it now.  */
	  for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
	    if (live_regs_mask & (1 << regno)
		&& !(frame_pointer_needed
		     && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
	      break;

	  if (regno > LAST_LO_REGNUM) /* Very unlikely.  */
	    {
	      rtx spare = gen_rtx_REG (SImode, IP_REGNUM);

	      /* Choose an arbitrary, non-argument low register.  */
	      reg = gen_rtx_REG (SImode, LAST_LO_REGNUM);

	      /* Save it by copying it into a high, scratch register.  */
	      emit_insn (gen_movsi (spare, reg));
	      /* Add a USE to stop propagate_one_insn() from barfing.  */
	      emit_insn (gen_prologue_use (spare));

	      /* Decrement the stack.  */
	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
					    stack_pointer_rtx, reg));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      dwarf = gen_rtx_SET (SImode, stack_pointer_rtx,
				   plus_constant (stack_pointer_rtx,
						  -amount));
	      RTX_FRAME_RELATED_P (dwarf) = 1;
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				     REG_NOTES (insn));

	      /* Restore the low register's original value.  */
	      emit_insn (gen_movsi (reg, spare));

	      /* Emit a USE of the restored scratch register, so that flow
		 analysis will not consider the restore redundant.  The
		 register won't be used again in this function and isn't
		 restored by the epilogue.  */
	      emit_insn (gen_prologue_use (reg));
	    }
	  else
	    {
	      reg = gen_rtx_REG (SImode, regno);

	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));

	      insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
					    stack_pointer_rtx, reg));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      dwarf = gen_rtx_SET (SImode, stack_pointer_rtx,
				   plus_constant (stack_pointer_rtx,
						  -amount));
	      RTX_FRAME_RELATED_P (dwarf) = 1;
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				     REG_NOTES (insn));
	    }
	}
      /* If the frame pointer is needed, emit a special barrier that
	 will prevent the scheduler from moving stores to the frame
	 before the stack adjustment.  */
      if (frame_pointer_needed)
	emit_insn (gen_stack_tie (stack_pointer_rtx,
				  hard_frame_pointer_rtx));
    }

  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());

  cfun->machine->lr_save_eliminated = !thumb_force_lr_save ();
  if (live_regs_mask & 0xff)
    cfun->machine->lr_save_eliminated = 0;

  /* If the link register is being kept alive, with the return address in it,
     then make sure that it does not get reused by the ce2 pass.  */
  if (cfun->machine->lr_save_eliminated)
    emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
}
13487
57934c39 13488
d5b7b3ae 13489void
e32bac5b 13490thumb_expand_epilogue (void)
d5b7b3ae 13491{
5848830f
PB
13492 HOST_WIDE_INT amount;
13493 arm_stack_offsets *offsets;
defc0463
RE
13494 int regno;
13495
6d3d9133
NC
13496 /* Naked functions don't have prologues. */
13497 if (IS_NAKED (arm_current_func_type ()))
d5b7b3ae
RE
13498 return;
13499
5848830f
PB
13500 offsets = arm_get_frame_offsets ();
13501 amount = offsets->outgoing_args - offsets->saved_regs;
13502
d5b7b3ae
RE
13503 if (frame_pointer_needed)
13504 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
13505 else if (amount)
13506 {
d5b7b3ae
RE
13507 if (amount < 512)
13508 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
13509 GEN_INT (amount)));
13510 else
13511 {
13512 /* r3 is always free in the epilogue. */
f1c25d3b 13513 rtx reg = gen_rtx_REG (SImode, LAST_ARG_REGNUM);
d5b7b3ae
RE
13514
13515 emit_insn (gen_movsi (reg, GEN_INT (amount)));
13516 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
13517 }
13518 }
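
  /* A sketch of the two cases above: a 16-byte adjustment becomes a
     single "add sp, #16", while a larger one first loads the amount
     into r3 (from the constant pool if need be) and then emits
     "add sp, sp, r3".  r3 can be used here because the argument
     registers are dead by the time the epilogue runs.  */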

  /* Emit a USE (stack_pointer_rtx), so that
     the stack adjustment will not be deleted.  */
  emit_insn (gen_prologue_use (stack_pointer_rtx));

  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());

  /* Emit a clobber for each register that will be restored in the
     epilogue, so that flow2 will get register lifetimes correct.  */
  for (regno = 0; regno < 13; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno])
      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, regno)));

  if (! regs_ever_live[LR_REGNUM])
    emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
}

static void
thumb_output_function_prologue (FILE *f, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  int live_regs_mask = 0;
  int l_mask;
  int high_regs_pushed = 0;
  int cfa_offset = 0;
  int regno;

  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;

      if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
        abort ();
      if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
        abort ();
      name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);

      /* Generate code sequence to switch us into Thumb mode.  */
      /* The .code 32 directive has already been emitted by
         ASM_DECLARE_FUNCTION_NAME.  */
      asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
      asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);

      /* Generate a label, so that the debugger will notice the
         change in instruction sets.  This label is also used by
         the assembler to bypass the ARM code when this function
         is called from a Thumb encoded function elsewhere in the
         same file.  Hence the definition of STUB_NAME here must
         agree with the definition in gas/config/tc-arm.c.  */

#define STUB_NAME ".real_start_of"

      fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
      if (arm_dllexport_name_p (name))
        name = arm_strip_name_encoding (name);
#endif
      asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
      fprintf (f, "\t.thumb_func\n");
      asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
    }
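
  /* A sketch of the interworking stub emitted above for a function
     "foo" (modulo the user-label prefix added by %U):

        foo:                            @ assembled as ARM code
                orr     ip, pc, #1
                bx      ip              @ switch to Thumb
                .code   16
                .globl  .real_start_of_foo
                .thumb_func
        .real_start_of_foo:             @ the Thumb entry point  */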

  if (current_function_pretend_args_size)
    {
      if (cfun->machine->uses_anonymous_args)
        {
          int num_pushes;

          fprintf (f, "\tpush\t{");

          num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);

          for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
               regno <= LAST_ARG_REGNUM;
               regno++)
            asm_fprintf (f, "%r%s", regno,
                         regno == LAST_ARG_REGNUM ? "" : ", ");

          fprintf (f, "}\n");
        }
      else
        asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
                     SP_REGNUM, SP_REGNUM,
                     current_function_pretend_args_size);

      /* We don't need to record the stores for unwinding (would it
         help the debugger any if we did?), but record the change in
         the stack pointer.  */
      if (dwarf2out_do_frame ())
        {
          char *l = dwarf2out_cfi_label ();
          cfa_offset = cfa_offset + current_function_pretend_args_size;
          dwarf2out_def_cfa (l, SP_REGNUM, cfa_offset);
        }
    }

  live_regs_mask = thumb_compute_save_reg_mask ();
  /* Just low regs and lr.  */
  l_mask = live_regs_mask & 0x40ff;

  if (TARGET_BACKTRACE)
    {
      int offset;
      int work_register;

      /* We have been asked to create a stack backtrace structure.
         The code looks like this:

         0   .align 2
         0   func:
         0     sub   SP, #16         Reserve space for 4 registers.
         2     push  {R7}            Push low registers.
         4     add   R7, SP, #20     Get the stack pointer before the push.
         6     str   R7, [SP, #8]    Store the stack pointer (before reserving the space).
         8     mov   R7, PC          Get hold of the start of this code plus 12.
         10    str   R7, [SP, #16]   Store it.
         12    mov   R7, FP          Get hold of the current frame pointer.
         14    str   R7, [SP, #4]    Store it.
         16    mov   R7, LR          Get hold of the current return address.
         18    str   R7, [SP, #12]   Store it.
         20    add   R7, SP, #16     Point at the start of the backtrace structure.
         22    mov   FP, R7          Put this value into the frame pointer.  */

      work_register = thumb_find_work_register (live_regs_mask);

      asm_fprintf
        (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
         SP_REGNUM, SP_REGNUM);

      if (dwarf2out_do_frame ())
        {
          char *l = dwarf2out_cfi_label ();
          cfa_offset = cfa_offset + 16;
          dwarf2out_def_cfa (l, SP_REGNUM, cfa_offset);
        }

      if (l_mask)
        {
          thumb_pushpop (f, l_mask, 1, &cfa_offset, l_mask);
          offset = bit_count (l_mask);
        }
      else
        offset = 0;

      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
                   offset + 16 + current_function_pretend_args_size);

      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                   offset + 4);

      /* Make sure that the instruction fetching the PC is in the right place
         to calculate "start of backtrace creation code + 12".  */
      if (l_mask)
        {
          asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
          asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                       offset + 12);
          asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
                       ARM_HARD_FRAME_POINTER_REGNUM);
          asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                       offset);
        }
      else
        {
          asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
                       ARM_HARD_FRAME_POINTER_REGNUM);
          asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                       offset);
          asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
          asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                       offset + 12);
        }

      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                   offset + 8);
      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
                   offset + 12);
      asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
                   ARM_HARD_FRAME_POINTER_REGNUM, work_register);
    }
  else if (l_mask)
    thumb_pushpop (f, l_mask, 1, &cfa_offset, l_mask);

  high_regs_pushed = bit_count (live_regs_mask & 0x0f00);

  if (high_regs_pushed)
    {
      int pushable_regs = 0;
      int next_hi_reg;

      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
        if (live_regs_mask & (1 << next_hi_reg))
          break;

      pushable_regs = l_mask & 0xff;

      if (pushable_regs == 0)
        pushable_regs = 1 << thumb_find_work_register (live_regs_mask);

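      /* As a sketch: if r8 and r9 are the live high registers and r2
         and r3 are the available low registers, the loop below emits
         "mov r3, r9", "mov r2, r8" and then a single "push {r2, r3}",
         recording r8 and r9 as the registers really saved.  */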
      while (high_regs_pushed > 0)
        {
          int real_regs_mask = 0;

          for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
            {
              if (pushable_regs & (1 << regno))
                {
                  asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);

                  high_regs_pushed--;
                  real_regs_mask |= (1 << next_hi_reg);

                  if (high_regs_pushed)
                    {
                      for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
                           next_hi_reg--)
                        if (live_regs_mask & (1 << next_hi_reg))
                          break;
                    }
                  else
                    {
                      pushable_regs &= ~((1 << regno) - 1);
                      break;
                    }
                }
            }

          thumb_pushpop (f, pushable_regs, 1, &cfa_offset, real_regs_mask);
        }
    }
}

/* Handle the case of a double word load into a low register from
   a computed memory address.  The computed address may involve a
   register which is overwritten by the load.  */
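/* For instance (a sketch), for "ldr r0, [r0]" of a doubleword the low
   word is loaded last, so that the address in r0 survives the first
   load:

        ldr     r1, [r0, #4]    @ high word first
        ldr     r0, [r0]        @ low word clobbers the base  */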
const char *
thumb_load_double_from_address (rtx *operands)
{
  rtx addr;
  rtx base;
  rtx offset;
  rtx arg1;
  rtx arg2;

  if (GET_CODE (operands[0]) != REG)
    abort ();

  if (GET_CODE (operands[1]) != MEM)
    abort ();

  /* Get the memory address.  */
  addr = XEXP (operands[1], 0);

  /* Work out how the memory address is computed.  */
  switch (GET_CODE (addr))
    {
    case REG:
      operands[2] = gen_rtx_MEM (SImode,
                                 plus_constant (XEXP (operands[1], 0), 4));

      if (REGNO (operands[0]) == REGNO (addr))
        {
          output_asm_insn ("ldr\t%H0, %2", operands);
          output_asm_insn ("ldr\t%0, %1", operands);
        }
      else
        {
          output_asm_insn ("ldr\t%0, %1", operands);
          output_asm_insn ("ldr\t%H0, %2", operands);
        }
      break;

    case CONST:
      /* Compute <address> + 4 for the high order load.  */
      operands[2] = gen_rtx_MEM (SImode,
                                 plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
        base = arg2, offset = arg1;
      else
        base = arg1, offset = arg2;

      if (GET_CODE (base) != REG)
        abort ();

      /* Catch the case of <address> = <reg> + <reg>.  */
      if (GET_CODE (offset) == REG)
        {
          int reg_offset = REGNO (offset);
          int reg_base = REGNO (base);
          int reg_dest = REGNO (operands[0]);

          /* Add the base and offset registers together into the
             higher destination register.  */
          asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
                       reg_dest + 1, reg_base, reg_offset);

          /* Load the lower destination register from the address in
             the higher destination register.  */
          asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
                       reg_dest, reg_dest + 1);

          /* Load the higher destination register from its own address
             plus 4.  */
          asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
                       reg_dest + 1, reg_dest + 1);
        }
      else
        {
          /* Compute <address> + 4 for the high order load.  */
          operands[2] = gen_rtx_MEM (SImode,
                                     plus_constant (XEXP (operands[1], 0), 4));

          /* If the computed address is held in the low order register
             then load the high order register first, otherwise always
             load the low order register first.  */
          if (REGNO (operands[0]) == REGNO (base))
            {
              output_asm_insn ("ldr\t%H0, %2", operands);
              output_asm_insn ("ldr\t%0, %1", operands);
            }
          else
            {
              output_asm_insn ("ldr\t%0, %1", operands);
              output_asm_insn ("ldr\t%H0, %2", operands);
            }
        }
      break;

    case LABEL_REF:
      /* With no registers to worry about we can just load the value
         directly.  */
      operands[2] = gen_rtx_MEM (SImode,
                                 plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
      break;

    default:
      abort ();
      break;
    }

  return "";
}

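/* Output an ldmia/stmia pair that copies N (2 or 3) words.  The
   register operands are sorted into ascending order first: ldmia and
   stmia encode their register list as a bitmask and always transfer
   the lowest-numbered register to or from the lowest address, so the
   operand order must match the memory order.  */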
const char *
thumb_output_move_mem_multiple (int n, rtx *operands)
{
  rtx tmp;

  switch (n)
    {
    case 2:
      if (REGNO (operands[4]) > REGNO (operands[5]))
        {
          tmp = operands[4];
          operands[4] = operands[5];
          operands[5] = tmp;
        }
      output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
      break;

    case 3:
      if (REGNO (operands[4]) > REGNO (operands[5]))
        {
          tmp = operands[4];
          operands[4] = operands[5];
          operands[5] = tmp;
        }
      if (REGNO (operands[5]) > REGNO (operands[6]))
        {
          tmp = operands[5];
          operands[5] = operands[6];
          operands[6] = tmp;
        }
      if (REGNO (operands[4]) > REGNO (operands[5]))
        {
          tmp = operands[4];
          operands[4] = operands[5];
          operands[5] = tmp;
        }

      output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
      break;

    default:
      abort ();
    }

  return "";
}

/* Routines for generating rtl.  */
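/* As a sketch of the expansion below: a 23-byte copy becomes one
   12-byte ldmia/stmia block, one 8-byte block, then a halfword and a
   byte copy for the remaining 3 bytes.  */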
void
thumb_expand_movmemqi (rtx *operands)
{
  rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
  rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
  HOST_WIDE_INT len = INTVAL (operands[2]);
  HOST_WIDE_INT offset = 0;

  while (len >= 12)
    {
      emit_insn (gen_movmem12b (out, in, out, in));
      len -= 12;
    }

  if (len >= 8)
    {
      emit_insn (gen_movmem8b (out, in, out, in));
      len -= 8;
    }

  if (len >= 4)
    {
      rtx reg = gen_reg_rtx (SImode);
      emit_insn (gen_movsi (reg, gen_rtx_MEM (SImode, in)));
      emit_insn (gen_movsi (gen_rtx_MEM (SImode, out), reg));
      len -= 4;
      offset += 4;
    }

  if (len >= 2)
    {
      rtx reg = gen_reg_rtx (HImode);
      emit_insn (gen_movhi (reg, gen_rtx_MEM (HImode,
                                              plus_constant (in, offset))));
      emit_insn (gen_movhi (gen_rtx_MEM (HImode, plus_constant (out, offset)),
                            reg));
      len -= 2;
      offset += 2;
    }

  if (len)
    {
      rtx reg = gen_reg_rtx (QImode);
      emit_insn (gen_movqi (reg, gen_rtx_MEM (QImode,
                                              plus_constant (in, offset))));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (out, offset)),
                            reg));
    }
}

int
thumb_cmp_operand (rtx op, enum machine_mode mode)
{
  return ((GET_CODE (op) == CONST_INT
           && INTVAL (op) < 256
           && INTVAL (op) >= 0)
          || s_register_operand (op, mode));
}

int
thumb_cmpneg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT
          && INTVAL (op) < 0
          && INTVAL (op) > -256);
}

/* Return TRUE if a result can be stored in OP without clobbering the
   condition code register.  Prior to reload we only accept a
   register.  After reload we have to be able to handle memory as
   well, since a pseudo may not get a hard reg and reload cannot
   handle output-reloads on jump insns.

   We could possibly handle mem before reload as well, but that might
   complicate things with the need to handle increment
   side-effects.  */

int
thumb_cbrch_target_operand (rtx op, enum machine_mode mode)
{
  return (s_register_operand (op, mode)
          || ((reload_in_progress || reload_completed)
              && memory_operand (op, mode)));
}

/* Handle storing a half-word to memory during reload.  */
void
thumb_reload_out_hi (rtx *operands)
{
  emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
}

/* Handle reading a half-word from memory during reload.  */
void
thumb_reload_in_hi (rtx *operands ATTRIBUTE_UNUSED)
{
  abort ();
}

/* Return the length of a function name prefix
   that starts with the character C.  */
static int
arm_get_strip_length (int c)
{
  switch (c)
    {
    ARM_NAME_ENCODING_LENGTHS
    default: return 0;
    }
}

/* Return a pointer to a function's name with any
   and all prefix encodings stripped from it.  */
const char *
arm_strip_name_encoding (const char *name)
{
  int skip;

  while ((skip = arm_get_strip_length (* name)))
    name += skip;

  return name;
}

/* If there is a '*' anywhere in the name's prefix, then
   emit the stripped name verbatim, otherwise prepend an
   underscore if leading underscores are being used.  */
void
arm_asm_output_labelref (FILE *stream, const char *name)
{
  int skip;
  int verbatim = 0;

  while ((skip = arm_get_strip_length (* name)))
    {
      verbatim |= (*name == '*');
      name += skip;
    }

  if (verbatim)
    fputs (name, stream);
  else
    asm_fprintf (stream, "%U%s", name);
}

rtx aof_pic_label;

#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

struct pic_chain
{
  struct pic_chain * next;
  const char * symname;
};

static struct pic_chain * aof_pic_chain = NULL;

rtx
aof_pic_entry (rtx x)
{
  struct pic_chain ** chainp;
  int offset;

  if (aof_pic_label == NULL_RTX)
    {
      aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
    }

  for (offset = 0, chainp = &aof_pic_chain; *chainp;
       offset += 4, chainp = &(*chainp)->next)
    if ((*chainp)->symname == XSTR (x, 0))
      return plus_constant (aof_pic_label, offset);

  *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*chainp)->next = NULL;
  (*chainp)->symname = XSTR (x, 0);
  return plus_constant (aof_pic_label, offset);
}

void
aof_dump_pic_table (FILE *f)
{
  struct pic_chain * chain;

  if (aof_pic_chain == NULL)
    return;

  asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
               PIC_OFFSET_TABLE_REGNUM,
               PIC_OFFSET_TABLE_REGNUM);
  fputs ("|x$adcons|\n", f);

  for (chain = aof_pic_chain; chain; chain = chain->next)
    {
      fputs ("\tDCD\t", f);
      assemble_name (f, chain->symname);
      fputs ("\n", f);
    }
}

int arm_text_section_count = 1;

char *
aof_text_section (void)
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
           arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}

static int arm_data_section_count = 1;

char *
aof_data_section (void)
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}

/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

struct import
{
  struct import * next;
  const char * name;
};

static struct import * imports_list = NULL;

void
aof_add_import (const char *name)
{
  struct import * new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}

void
aof_delete_import (const char *name)
{
  struct import ** old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
        {
          *old = (*old)->next;
          return;
        }
    }
}

int arm_main_function = 0;

static void
aof_dump_imports (FILE *f)
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}

static void
aof_globalize_label (FILE *stream, const char *name)
{
  default_globalize_label (stream, name);
  if (! strcmp (name, "main"))
    arm_main_function = 1;
}

static void
aof_file_start (void)
{
  fputs ("__r0\tRN\t0\n", asm_out_file);
  fputs ("__a1\tRN\t0\n", asm_out_file);
  fputs ("__a2\tRN\t1\n", asm_out_file);
  fputs ("__a3\tRN\t2\n", asm_out_file);
  fputs ("__a4\tRN\t3\n", asm_out_file);
  fputs ("__v1\tRN\t4\n", asm_out_file);
  fputs ("__v2\tRN\t5\n", asm_out_file);
  fputs ("__v3\tRN\t6\n", asm_out_file);
  fputs ("__v4\tRN\t7\n", asm_out_file);
  fputs ("__v5\tRN\t8\n", asm_out_file);
  fputs ("__v6\tRN\t9\n", asm_out_file);
  fputs ("__sl\tRN\t10\n", asm_out_file);
  fputs ("__fp\tRN\t11\n", asm_out_file);
  fputs ("__ip\tRN\t12\n", asm_out_file);
  fputs ("__sp\tRN\t13\n", asm_out_file);
  fputs ("__lr\tRN\t14\n", asm_out_file);
  fputs ("__pc\tRN\t15\n", asm_out_file);
  fputs ("__f0\tFN\t0\n", asm_out_file);
  fputs ("__f1\tFN\t1\n", asm_out_file);
  fputs ("__f2\tFN\t2\n", asm_out_file);
  fputs ("__f3\tFN\t3\n", asm_out_file);
  fputs ("__f4\tFN\t4\n", asm_out_file);
  fputs ("__f5\tFN\t5\n", asm_out_file);
  fputs ("__f6\tFN\t6\n", asm_out_file);
  fputs ("__f7\tFN\t7\n", asm_out_file);
  text_section ();
}

static void
aof_file_end (void)
{
  if (flag_pic)
    aof_dump_pic_table (asm_out_file);
  aof_dump_imports (asm_out_file);
  fputs ("\tEND\n", asm_out_file);
}
#endif /* AOF_ASSEMBLER */

#ifdef OBJECT_FORMAT_ELF
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the default elf version only in the prefix character
   used before the section type.  */

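/* A sketch of the output for a writable, initialized data section
   ".mydata":

        .section        .mydata,"aw",%progbits

   where the default ELF hook would emit "@progbits"; '@' starts a
   comment in ARM assembler syntax, hence the '%'.  */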
static void
arm_elf_asm_named_section (const char *name, unsigned int flags)
{
  char flagchars[10], *f = flagchars;

  if (! named_section_first_declaration (name))
    {
      fprintf (asm_out_file, "\t.section\t%s\n", name);
      return;
    }

  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MERGE)
    *f++ = 'M';
  if (flags & SECTION_STRINGS)
    *f++ = 'S';
  if (flags & SECTION_TLS)
    *f++ = 'T';
  *f = '\0';

  fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);

  if (!(flags & SECTION_NOTYPE))
    {
      const char *type;

      if (flags & SECTION_BSS)
        type = "nobits";
      else
        type = "progbits";

      fprintf (asm_out_file, ",%%%s", type);

      if (flags & SECTION_ENTSIZE)
        fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
    }

  putc ('\n', asm_out_file);
}
#endif

#ifndef ARM_PE
/* Symbols in the text segment can be accessed without indirecting via the
   constant pool; it may take an extra binary operation, but this is still
   faster than indirecting via memory.  Don't do this when not optimizing,
   since we won't be calculating all of the offsets necessary to do this
   simplification.  */

static void
arm_encode_section_info (tree decl, rtx rtl, int first)
{
  /* This doesn't work with AOF syntax, since the string table may be in
     a different AREA.  */
#ifndef AOF_ASSEMBLER
  if (optimize > 0 && TREE_CONSTANT (decl))
    SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
#endif

  /* If we are referencing a function that is weak then encode a long call
     flag in the function name, otherwise if the function is static or
     known to be defined in this file then encode a short call flag.  */
  if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
    {
      if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
        arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
      else if (! TREE_PUBLIC (decl))
        arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
    }
}
#endif /* !ARM_PE */

static void
arm_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
{
  if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
      && !strcmp (prefix, "L"))
    {
      arm_ccfsm_state = 0;
      arm_target_insn = NULL;
    }
  default_internal_label (stream, prefix, labelno);
}

/* Output code to add DELTA to the first argument, and then jump
   to FUNCTION.  Used for C++ multiple inheritance.  */
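/* A sketch of the ARM-mode output for DELTA == 4 when "this" is in r0
   (the Thumb case is similar but indirects through r12):

        add     r0, r0, #4
        b       function                @ "(PLT)" appended if NEED_PLT_RELOC  */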
static void
arm_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
                     HOST_WIDE_INT delta,
                     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                     tree function)
{
  static int thunk_label = 0;
  char label[256];
  int mi_delta = delta;
  const char *const mi_op = mi_delta < 0 ? "sub" : "add";
  int shift = 0;
  int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function)
                    ? 1 : 0);
  if (mi_delta < 0)
    mi_delta = - mi_delta;
  if (TARGET_THUMB)
    {
      int labelno = thunk_label++;
      ASM_GENERATE_INTERNAL_LABEL (label, "LTHUMBFUNC", labelno);
      fputs ("\tldr\tr12, ", file);
      assemble_name (file, label);
      fputc ('\n', file);
    }
  while (mi_delta != 0)
    {
      if ((mi_delta & (3 << shift)) == 0)
        shift += 2;
      else
        {
          asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
                       mi_op, this_regno, this_regno,
                       mi_delta & (0xff << shift));
          mi_delta &= ~(0xff << shift);
          shift += 8;
        }
    }
  if (TARGET_THUMB)
    {
      fprintf (file, "\tbx\tr12\n");
      ASM_OUTPUT_ALIGN (file, 2);
      assemble_name (file, label);
      fputs (":\n", file);
      assemble_integer (XEXP (DECL_RTL (function), 0), 4, BITS_PER_WORD, 1);
    }
  else
    {
      fputs ("\tb\t", file);
      assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
      if (NEED_PLT_RELOC)
        fputs ("(PLT)", file);
      fputc ('\n', file);
    }
}

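/* Output a CONST_VECTOR as a single hexadecimal constant, most
   significant element first.  A sketch: the V4HImode vector
   {1, 2, 3, 4} (element 0 first) is emitted as 0x0004000300020001.  */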
int
arm_emit_vector_const (FILE *file, rtx x)
{
  int i;
  const char * pattern;

  if (GET_CODE (x) != CONST_VECTOR)
    abort ();

  switch (GET_MODE (x))
    {
    case V2SImode: pattern = "%08x"; break;
    case V4HImode: pattern = "%04x"; break;
    case V8QImode: pattern = "%02x"; break;
    default:       abort ();
    }

  fprintf (file, "0x");
  for (i = CONST_VECTOR_NUNITS (x); i--;)
    {
      rtx element;

      element = CONST_VECTOR_ELT (x, i);
      fprintf (file, pattern, INTVAL (element));
    }

  return 1;
}

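/* Output a load of an iWMMXt GR register.  When the offset is within
   wldrw's range a single "wldrw" suffices; otherwise, a sketch of the
   expansion below, with rN and wcgrM standing for the registers
   involved:

        str     rN, [sp, #-4]!          @ spill the base register
        ldr     rN, <address>           @ do the load with an ARM ldr
        tmcr    wcgrM, rN               @ move the result into the GR reg
        ldr     rN, [sp], #4            @ restore the base register  */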
const char *
arm_output_load_gr (rtx *operands)
{
  rtx reg;
  rtx offset;
  rtx wcgr;
  rtx sum;

  if (GET_CODE (operands [1]) != MEM
      || GET_CODE (sum = XEXP (operands [1], 0)) != PLUS
      || GET_CODE (reg = XEXP (sum, 0)) != REG
      || GET_CODE (offset = XEXP (sum, 1)) != CONST_INT
      || ((INTVAL (offset) < 1024) && (INTVAL (offset) > -1024)))
    return "wldrw%?\t%0, %1";

  /* Fix up an out-of-range load of a GR register.  */
  output_asm_insn ("str%?\t%0, [sp, #-4]!\t@ Start of GR load expansion", & reg);
  wcgr = operands[0];
  operands[0] = reg;
  output_asm_insn ("ldr%?\t%0, %1", operands);

  operands[0] = wcgr;
  operands[1] = reg;
  output_asm_insn ("tmcr%?\t%0, %1", operands);
  output_asm_insn ("ldr%?\t%0, [sp], #4\t@ End of GR load expansion", & reg);

  return "";
}

static rtx
arm_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
#if 0
  /* FIXME: The ARM backend has special code to handle structure
     returns, and will reserve its own hidden first argument.  So
     if this macro is enabled a *second* hidden argument will be
     reserved, which will break binary compatibility with old
     toolchains and also thunk handling.  One day this should be
     fixed.  */
  return 0;
#else
  /* Register in which the address to store a structure value
     is passed to a function.  */
  return gen_rtx_REG (Pmode, ARG_REGISTER (1));
#endif
}

/* Worker function for TARGET_SETUP_INCOMING_VARARGS.

   On the ARM, PRETEND_SIZE is set in order to have the prologue push the last
   named arg and all anonymous args onto the stack.
   XXX I know the prologue shouldn't be pushing registers, but it is faster
   that way.  */

static void
arm_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
                            enum machine_mode mode ATTRIBUTE_UNUSED,
                            tree type ATTRIBUTE_UNUSED,
                            int *pretend_size,
                            int second_time ATTRIBUTE_UNUSED)
{
  cfun->machine->uses_anonymous_args = 1;
  if (cum->nregs < NUM_ARG_REGS)
    *pretend_size = (NUM_ARG_REGS - cum->nregs) * UNITS_PER_WORD;
}

/* Return nonzero if the CONSUMER instruction (a store) does not need
   PRODUCER's value to calculate the address.  */

int
arm_no_early_store_addr_dep (rtx producer, rtx consumer)
{
  rtx value = PATTERN (producer);
  rtx addr = PATTERN (consumer);

  if (GET_CODE (value) == COND_EXEC)
    value = COND_EXEC_CODE (value);
  if (GET_CODE (value) == PARALLEL)
    value = XVECEXP (value, 0, 0);
  value = XEXP (value, 0);
  if (GET_CODE (addr) == COND_EXEC)
    addr = COND_EXEC_CODE (addr);
  if (GET_CODE (addr) == PARALLEL)
    addr = XVECEXP (addr, 0, 0);
  addr = XEXP (addr, 0);

  return !reg_overlap_mentioned_p (value, addr);
}

/* Return nonzero if the CONSUMER instruction (an ALU op) does not
   have an early register shift value or amount dependency on the
   result of PRODUCER.  */

int
arm_no_early_alu_shift_dep (rtx producer, rtx consumer)
{
  rtx value = PATTERN (producer);
  rtx op = PATTERN (consumer);
  rtx early_op;

  if (GET_CODE (value) == COND_EXEC)
    value = COND_EXEC_CODE (value);
  if (GET_CODE (value) == PARALLEL)
    value = XVECEXP (value, 0, 0);
  value = XEXP (value, 0);
  if (GET_CODE (op) == COND_EXEC)
    op = COND_EXEC_CODE (op);
  if (GET_CODE (op) == PARALLEL)
    op = XVECEXP (op, 0, 0);
  op = XEXP (op, 1);

  early_op = XEXP (op, 0);
  /* This is either an actual independent shift, or a shift applied to
     the first operand of another operation.  We want the whole shift
     operation.  */
  if (GET_CODE (early_op) == REG)
    early_op = op;

  return !reg_overlap_mentioned_p (value, early_op);
}

/* Return nonzero if the CONSUMER instruction (an ALU op) does not
   have an early register shift value dependency on the result of
   PRODUCER.  */

int
arm_no_early_alu_shift_value_dep (rtx producer, rtx consumer)
{
  rtx value = PATTERN (producer);
  rtx op = PATTERN (consumer);
  rtx early_op;

  if (GET_CODE (value) == COND_EXEC)
    value = COND_EXEC_CODE (value);
  if (GET_CODE (value) == PARALLEL)
    value = XVECEXP (value, 0, 0);
  value = XEXP (value, 0);
  if (GET_CODE (op) == COND_EXEC)
    op = COND_EXEC_CODE (op);
  if (GET_CODE (op) == PARALLEL)
    op = XVECEXP (op, 0, 0);
  op = XEXP (op, 1);

  early_op = XEXP (op, 0);

  /* This is either an actual independent shift, or a shift applied to
     the first operand of another operation.  We want the value being
     shifted, in either case.  */
  if (GET_CODE (early_op) != REG)
    early_op = XEXP (early_op, 0);

  return !reg_overlap_mentioned_p (value, early_op);
}

/* Return nonzero if the CONSUMER (a mul or mac op) does not
   have an early register mult dependency on the result of
   PRODUCER.  */

int
arm_no_early_mul_dep (rtx producer, rtx consumer)
{
  rtx value = PATTERN (producer);
  rtx op = PATTERN (consumer);

  if (GET_CODE (value) == COND_EXEC)
    value = COND_EXEC_CODE (value);
  if (GET_CODE (value) == PARALLEL)
    value = XVECEXP (value, 0, 0);
  value = XEXP (value, 0);
  if (GET_CODE (op) == COND_EXEC)
    op = COND_EXEC_CODE (op);
  if (GET_CODE (op) == PARALLEL)
    op = XVECEXP (op, 0, 0);
  op = XEXP (op, 1);

  return (GET_CODE (op) == PLUS
          && !reg_overlap_mentioned_p (value, XEXP (op, 0)));
}

/* We can't rely on the caller doing the proper promotion when
   using APCS or ATPCS.  */

static bool
arm_promote_prototypes (tree t ATTRIBUTE_UNUSED)
{
  return !TARGET_AAPCS_BASED;
}

/* AAPCS based ABIs use short enums by default.  */

static bool
arm_default_short_enums (void)
{
  return TARGET_AAPCS_BASED;
}

/* AAPCS requires that anonymous bitfields affect structure alignment.  */

static bool
arm_align_anon_bitfield (void)
{
  return TARGET_AAPCS_BASED;
}

/* The generic C++ ABI says 64-bit (long long).  The EABI says 32-bit.  */

static tree
arm_cxx_guard_type (void)
{
  return TARGET_AAPCS_BASED ? integer_type_node : long_long_integer_type_node;
}

/* The EABI says to test the least significant bit of a guard variable.  */

static bool
arm_cxx_guard_mask_bit (void)
{
  return TARGET_AAPCS_BASED;
}

/* The EABI specifies that all array cookies are 8 bytes long.  */

static tree
arm_get_cookie_size (tree type)
{
  tree size;

  if (!TARGET_AAPCS_BASED)
    return default_cxx_get_cookie_size (type);

  size = build_int_cst (sizetype, 8, 0);
  return size;
}

/* The EABI says that array cookies should also contain the element size.  */

static bool
arm_cookie_has_size (void)
{
  return TARGET_AAPCS_BASED;
}

/* The EABI says constructors and destructors should return a pointer to
   the object constructed/destroyed.  */

static bool
arm_cxx_cdtor_returns_this (void)
{
  return TARGET_AAPCS_BASED;
}

void
arm_set_return_address (rtx source, rtx scratch)
{
  arm_stack_offsets *offsets;
  HOST_WIDE_INT delta;
  rtx addr;
  unsigned long saved_regs;

  saved_regs = arm_compute_save_reg_mask ();

  if ((saved_regs & (1 << LR_REGNUM)) == 0)
    emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
  else
    {
      if (frame_pointer_needed)
        addr = plus_constant (hard_frame_pointer_rtx, -4);
      else
        {
          /* LR will be the first saved register.  */
          offsets = arm_get_frame_offsets ();
          delta = offsets->outgoing_args - (offsets->frame + 4);

          if (delta >= 4096)
            {
              emit_insn (gen_addsi3 (scratch, stack_pointer_rtx,
                                     GEN_INT (delta & ~4095)));
              addr = scratch;
              delta &= 4095;
            }
          else
            addr = stack_pointer_rtx;

          addr = plus_constant (addr, delta);
        }
      emit_move_insn (gen_rtx_MEM (Pmode, addr), source);
    }
}

void
thumb_set_return_address (rtx source, rtx scratch)
{
  arm_stack_offsets *offsets;
  HOST_WIDE_INT delta;
  int reg;
  rtx addr;
  unsigned long mask;

  emit_insn (gen_rtx_USE (VOIDmode, source));

  mask = thumb_compute_save_reg_mask ();
  if (mask & (1 << LR_REGNUM))
    {
      offsets = arm_get_frame_offsets ();

      /* Find the saved regs.  */
      if (frame_pointer_needed)
        {
          delta = offsets->soft_frame - offsets->saved_args;
          reg = THUMB_HARD_FRAME_POINTER_REGNUM;
        }
      else
        {
          delta = offsets->outgoing_args - offsets->saved_args;
          reg = SP_REGNUM;
        }
      /* Allow for the stack frame.  */
      if (TARGET_BACKTRACE)
        delta -= 16;
      /* The link register is always the first saved register.  */
      delta -= 4;

      /* Construct the address.  */
      addr = gen_rtx_REG (SImode, reg);
      if ((reg != SP_REGNUM && delta >= 128)
          || delta >= 1024)
        {
          emit_insn (gen_movsi (scratch, GEN_INT (delta)));
          emit_insn (gen_addsi3 (scratch, scratch, stack_pointer_rtx));
          addr = scratch;
        }
      else
        addr = plus_constant (addr, delta);

      emit_move_insn (gen_rtx_MEM (Pmode, addr), source);
    }
  else
    emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
}