]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/arm/arm.c
Applied arm-elf contribution from Philip Blundell and merged with Catherine
[thirdparty/gcc.git] / gcc / config / arm / arm.c
CommitLineData
cce8749e 1/* Output routines for GCC for ARM/RISCiX.
e5e809f4 2 Copyright (C) 1991, 93, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
cce8749e 3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
956d6950 4 and Martin Simmons (@harleqn.co.uk).
ff9940b0 5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
cce8749e
CH
6
7This file is part of GNU CC.
8
9GNU CC is free software; you can redistribute it and/or modify
10it under the terms of the GNU General Public License as published by
11the Free Software Foundation; either version 2, or (at your option)
12any later version.
13
14GNU CC is distributed in the hope that it will be useful,
15but WITHOUT ANY WARRANTY; without even the implied warranty of
16MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17GNU General Public License for more details.
18
19You should have received a copy of the GNU General Public License
20along with GNU CC; see the file COPYING. If not, write to
8fb289e7
RK
21the Free Software Foundation, 59 Temple Place - Suite 330,
22Boston, MA 02111-1307, USA. */
ff9940b0 23
56636818 24#include "config.h"
cce8749e 25#include <stdio.h>
f3bb6135 26#include <string.h>
cce8749e
CH
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "output.h"
35#include "insn-attr.h"
36#include "flags.h"
af48348a 37#include "reload.h"
e2c671ba 38#include "tree.h"
bee06f3d 39#include "expr.h"
ad076f4e 40#include "toplev.h"
cce8749e
CH
41
42/* The maximum number of insns skipped which will be conditionalised if
43 possible. */
44#define MAX_INSNS_SKIPPED 5
45
46/* Some function declarations. */
cce8749e 47extern FILE *asm_out_file;
cce8749e 48
18af7313
RE
49static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
50static char *output_multi_immediate PROTO ((rtx *, char *, char *, int,
51 HOST_WIDE_INT));
2b835d68
RE
52static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
53 HOST_WIDE_INT, rtx, rtx, int, int));
18af7313
RE
54static int arm_naked_function_p PROTO ((tree));
55static void init_fpa_table PROTO ((void));
56static enum machine_mode select_dominance_cc_mode PROTO ((enum rtx_code, rtx,
57 rtx, HOST_WIDE_INT));
332072db 58static HOST_WIDE_INT add_constant PROTO ((rtx, enum machine_mode, int *));
18af7313
RE
59static void dump_table PROTO ((rtx));
60static int fixit PROTO ((rtx, enum machine_mode, int));
61static rtx find_barrier PROTO ((rtx, int));
62static int broken_move PROTO ((rtx));
63static char *fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
64static int eliminate_lr2ip PROTO ((rtx *));
65static char *shift_op PROTO ((rtx, HOST_WIDE_INT *));
66static int pattern_really_clobbers_lr PROTO ((rtx));
67static int function_really_clobbers_lr PROTO ((rtx));
68static void emit_multi_reg_push PROTO ((int));
b111229a 69static void emit_sfm PROTO ((int, int));
18af7313 70static enum arm_cond_code get_arm_condition_code PROTO ((rtx));
f3bb6135 71
ff9940b0
RE
72/* Define the information needed to generate branch insns. This is
73 stored from the compare operation. */
74
75rtx arm_compare_op0, arm_compare_op1;
76int arm_compare_fp;
77
78/* What type of cpu are we compiling for? */
ff9940b0
RE
79enum processor_type arm_cpu;
80
b111229a 81/* What type of floating point are we tuning for? */
bee06f3d
RE
82enum floating_point_type arm_fpu;
83
b111229a
RE
84/* What type of floating point instructions are available? */
85enum floating_point_type arm_fpu_arch;
86
2b835d68
RE
87/* What program mode is the cpu running in? 26-bit mode or 32-bit mode */
88enum prog_mode_type arm_prgmode;
89
b111229a
RE
90/* Set by the -mfp=... option */
91char *target_fp_name = NULL;
2b835d68 92
b355a481
NC
93/* Used to parse -mstructure_size_boundary command line option. */
94char * structure_size_string = NULL;
95int arm_structure_size_boundary = 32; /* Used to be 8 */
96
2b835d68
RE
97/* Nonzero if this is an "M" variant of the processor. */
98int arm_fast_multiply = 0;
99
32de079a 100/* Nonzero if this chip supports the ARM Architecture 4 extensions */
2b835d68
RE
101int arm_arch4 = 0;
102
b111229a
RE
103/* Set to the features we should tune the code for (multiply speed etc). */
104int tune_flags = 0;
105
cce8749e
CH
106/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
107 must report the mode of the memory reference from PRINT_OPERAND to
108 PRINT_OPERAND_ADDRESS. */
f3bb6135 109enum machine_mode output_memory_reference_mode;
cce8749e
CH
110
111/* Nonzero if the prologue must setup `fp'. */
112int current_function_anonymous_args;
113
32de079a
RE
114/* The register number to be used for the PIC offset register. */
115int arm_pic_register = 9;
116
cce8749e
CH
117/* Location counter of .text segment. */
118int arm_text_location = 0;
119
ff9940b0
RE
120/* Set to one if we think that lr is only saved because of subroutine calls,
121 but all of these can be `put after' return insns */
122int lr_save_eliminated;
123
ff9940b0
RE
124/* Set to 1 when a return insn is output, this means that the epilogue
125 is not needed. */
126
127static int return_used_this_function;
128
2b835d68
RE
129static int arm_constant_limit = 3;
130
cce8749e
CH
131/* For an explanation of these variables, see final_prescan_insn below. */
132int arm_ccfsm_state;
84ed5e79 133enum arm_cond_code arm_current_cc;
cce8749e
CH
134rtx arm_target_insn;
135int arm_target_label;
9997d19d
RE
136
137/* The condition codes of the ARM, and the inverse function. */
138char *arm_condition_codes[] =
139{
140 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
141 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
142};
143
84ed5e79 144static enum arm_cond_code get_arm_condition_code ();
2b835d68
RE
145
146\f
147/* Initialization code */
148
b111229a 149struct arm_cpu_select arm_select[4] =
bd9c7e23
RE
150{
151 /* switch name, tune arch */
152 { (char *)0, "--with-cpu=", 1, 1 },
153 { (char *)0, "-mcpu=", 1, 1 },
b111229a 154 { (char *)0, "-march=", 0, 1 },
bd9c7e23
RE
155 { (char *)0, "-mtune=", 1, 0 },
156};
157
2b835d68
RE
158#define FL_CO_PROC 0x01 /* Has external co-processor bus */
159#define FL_FAST_MULT 0x02 /* Fast multiply */
160#define FL_MODE26 0x04 /* 26-bit mode support */
161#define FL_MODE32 0x08 /* 32-bit mode support */
162#define FL_ARCH4 0x10 /* Architecture rel 4 */
163#define FL_THUMB 0x20 /* Thumb aware */
32de079a 164
2b835d68
RE
165struct processors
166{
167 char *name;
168 enum processor_type type;
169 unsigned int flags;
170};
171
172/* Not all of these give usefully different compilation alternatives,
173 but there is no simple way of generalizing them. */
174static struct processors all_procs[] =
175{
176 {"arm2", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
177 {"arm250", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
178 {"arm3", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
179 {"arm6", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
2b835d68
RE
180 {"arm600", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
181 {"arm610", PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
2b835d68 182 {"arm7", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
956d6950 183 /* arm7m doesn't exist on its own, only in conjunction with D, (and I), but
32de079a
RE
184 those don't alter the code, so it is sometimes known as the arm7m */
185 {"arm7m", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
186 | FL_MODE26)},
2b835d68
RE
187 {"arm7dm", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
188 | FL_MODE26)},
189 {"arm7dmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
190 | FL_MODE26)},
191 {"arm700", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
2b835d68 192 {"arm710", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
bd9c7e23 193 {"arm7100", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
2b835d68 194 {"arm7500", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
32de079a
RE
195 /* Doesn't really have an external co-proc, but does have embedded fpu */
196 {"arm7500fe", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
2b835d68
RE
197 {"arm7tdmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
198 | FL_ARCH4 | FL_THUMB)},
32de079a
RE
199 {"arm8", PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
200 | FL_ARCH4)},
201 {"arm810", PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
202 | FL_ARCH4)},
203 {"strongarm", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
204 | FL_ARCH4)},
205 {"strongarm110", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
206 | FL_ARCH4)},
b111229a
RE
207 {"armv2", PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
208 {"armv2a", PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
209 {"armv3", PROCESSOR_NONE, FL_CO_PROC | FL_MODE32 | FL_MODE26},
210 {"armv3m", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
211 | FL_MODE26)},
212 {"armv4", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
213 | FL_MODE26 | FL_ARCH4)},
214 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
215 implementations that support it, so we will leave it out for now. */
216 {"armv4t", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
217 | FL_ARCH4)},
2b835d68
RE
218 {NULL, 0, 0}
219};
220
221/* Fix up any incompatible options that the user has specified.
222 This has now turned into a maze. */
223void
224arm_override_options ()
225{
226 int arm_thumb_aware = 0;
bd9c7e23 227 int flags = 0;
ed4c4348 228 unsigned i;
25b1c156
NC
229 struct arm_cpu_select * ptr;
230 static struct cpu_default
231 {
232 int cpu;
233 char * name;
234 }
235 cpu_defaults[] =
236 {
32de079a
RE
237 { TARGET_CPU_arm2, "arm2" },
238 { TARGET_CPU_arm6, "arm6" },
239 { TARGET_CPU_arm610, "arm610" },
240 { TARGET_CPU_arm7dm, "arm7dm" },
241 { TARGET_CPU_arm7500fe, "arm7500fe" },
242 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
243 { TARGET_CPU_arm8, "arm8" },
244 { TARGET_CPU_arm810, "arm810" },
245 { TARGET_CPU_strongarm, "strongarm" },
246 { 0, 0 }
247 };
248 struct cpu_default *def;
249
250 /* Set the default. */
251 for (def = &cpu_defaults[0]; def->name; ++def)
252 if (def->cpu == TARGET_CPU_DEFAULT)
253 break;
254 if (! def->name)
255 abort ();
bd9c7e23 256
32de079a 257 arm_select[0].string = def->name;
bd9c7e23
RE
258
259 for (i = 0; i < sizeof (arm_select) / sizeof (arm_select[0]); i++)
260 {
261 ptr = &arm_select[i];
262 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
263 {
264 struct processors *sel;
265
266 for (sel = all_procs; sel->name != NULL; sel++)
267 if (! strcmp (ptr->string, sel->name))
268 {
b111229a
RE
269 /* -march= is the only flag that can take an architecture
270 type, so if we match when the tune bit is set, the
271 option was invalid. */
bd9c7e23 272 if (ptr->set_tune_p)
b111229a
RE
273 {
274 if (sel->type == PROCESSOR_NONE)
275 continue; /* Its an architecture, not a cpu */
276
277 arm_cpu = sel->type;
278 tune_flags = sel->flags;
279 }
bd9c7e23
RE
280
281 if (ptr->set_arch_p)
282 flags = sel->flags;
b111229a 283
bd9c7e23
RE
284 break;
285 }
286
287 if (sel->name == NULL)
288 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
289 }
290 }
2b835d68
RE
291
292 if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
293 warning ("-g with -fomit-frame-pointer may not give sensible debugging");
294
295 if (TARGET_POKE_FUNCTION_NAME)
296 target_flags |= ARM_FLAG_APCS_FRAME;
297
298 if (TARGET_6)
32de079a 299 warning ("Option '-m6' deprecated. Use: '-mapcs-32' or -mcpu=<proc>");
2b835d68
RE
300
301 if (TARGET_3)
32de079a 302 warning ("Option '-m3' deprecated. Use: '-mapcs-26' or -mcpu=<proc>");
2b835d68 303
2b835d68
RE
304 if (TARGET_APCS_REENT && flag_pic)
305 fatal ("-fpic and -mapcs-reent are incompatible");
306
307 if (TARGET_APCS_REENT)
32de079a
RE
308 warning ("APCS reentrant code not supported.");
309
310 /* If stack checking is disabled, we can use r10 as the PIC register,
311 which keeps r9 available. */
312 if (flag_pic && ! TARGET_APCS_STACK)
313 arm_pic_register = 10;
2b835d68 314
32de079a
RE
315 /* Well, I'm about to have a go, but pic is NOT going to be compatible
316 with APCS reentrancy, since that requires too much support in the
317 assembler and linker, and the ARMASM assembler seems to lack some
318 required directives. */
2b835d68 319 if (flag_pic)
b4b68717 320 warning ("Position independent code not supported");
2b835d68
RE
321
322 if (TARGET_APCS_FLOAT)
323 warning ("Passing floating point arguments in fp regs not yet supported");
324
325 if (TARGET_APCS_STACK && ! TARGET_APCS)
326 {
327 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
328 target_flags |= ARM_FLAG_APCS_FRAME;
329 }
330
b111229a 331 /* Default is to tune for an FPA */
2b835d68
RE
332 arm_fpu = FP_HARD;
333
bd9c7e23
RE
334 /* Default value for floating point code... if no co-processor
335 bus, then schedule for emulated floating point. Otherwise,
b111229a
RE
336 assume the user has an FPA.
337 Note: this does not prevent use of floating point instructions,
338 -msoft-float does that. */
ad076f4e 339 if ((tune_flags & FL_CO_PROC) == 0)
bd9c7e23 340 arm_fpu = FP_SOFT3;
b111229a 341
bd9c7e23
RE
342 arm_fast_multiply = (flags & FL_FAST_MULT) != 0;
343 arm_arch4 = (flags & FL_ARCH4) != 0;
344 arm_thumb_aware = (flags & FL_THUMB) != 0;
2b835d68 345
b111229a 346 if (target_fp_name)
2b835d68 347 {
b111229a
RE
348 if (strcmp (target_fp_name, "2") == 0)
349 arm_fpu_arch = FP_SOFT2;
350 else if (strcmp (target_fp_name, "3") == 0)
351 arm_fpu_arch = FP_HARD;
2b835d68 352 else
b111229a
RE
353 fatal ("Invalid floating point emulation option: -mfpe=%s",
354 target_fp_name);
2b835d68 355 }
b111229a
RE
356 else
357 arm_fpu_arch = FP_DEFAULT;
2b835d68
RE
358
359 if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
360 {
361 warning ("This processor variant does not support Thumb interworking");
362 target_flags &= ~ARM_FLAG_THUMB;
363 }
364
365 if (TARGET_FPE && arm_fpu != FP_HARD)
366 arm_fpu = FP_SOFT2;
367
368 /* For arm2/3 there is no need to do any scheduling if there is only
369 a floating point emulator, or we are doing software floating-point. */
370 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
371 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
372
373 arm_prog_mode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
b355a481
NC
374
375 if (structure_size_string != NULL)
376 {
377 int size = strtol (structure_size_string, NULL, 0);
378
379 if (size == 8 || size == 32)
380 arm_structure_size_boundary = size;
381 else
382 warning ("Structure size boundary can only be set to 8 or 32");
383 }
2b835d68 384}
cce8749e 385\f
32de079a 386
ff9940b0
RE
387/* Return 1 if it is possible to return using a single instruction */
388
389int
390use_return_insn ()
391{
392 int regno;
393
394 if (!reload_completed ||current_function_pretend_args_size
395 || current_function_anonymous_args
56636818
JL
396 || ((get_frame_size () + current_function_outgoing_args_size != 0)
397 && !(TARGET_APCS || frame_pointer_needed)))
ff9940b0
RE
398 return 0;
399
b111229a
RE
400 /* Can't be done if interworking with Thumb, and any registers have been
401 stacked */
402 if (TARGET_THUMB_INTERWORK)
403 for (regno = 0; regno < 16; regno++)
404 if (regs_ever_live[regno] && ! call_used_regs[regno])
405 return 0;
406
ff9940b0
RE
407 /* Can't be done if any of the FPU regs are pushed, since this also
408 requires an insn */
b111229a
RE
409 for (regno = 16; regno < 24; regno++)
410 if (regs_ever_live[regno] && ! call_used_regs[regno])
ff9940b0
RE
411 return 0;
412
31fdb4d5
DE
413 /* If a function is naked, don't use the "return" insn. */
414 if (arm_naked_function_p (current_function_decl))
415 return 0;
416
ff9940b0
RE
417 return 1;
418}
419
cce8749e
CH
420/* Return TRUE if int I is a valid immediate ARM constant. */
421
422int
423const_ok_for_arm (i)
ff9940b0 424 HOST_WIDE_INT i;
cce8749e 425{
ed4c4348 426 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
cce8749e 427
56636818
JL
428 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
429 be all zero, or all one. */
430 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
431 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
ed4c4348
RE
432 != ((~(unsigned HOST_WIDE_INT) 0)
433 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
56636818
JL
434 return FALSE;
435
e2c671ba
RE
436 /* Fast return for 0 and powers of 2 */
437 if ((i & (i - 1)) == 0)
438 return TRUE;
439
cce8749e
CH
440 do
441 {
abaa26e5 442 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
f3bb6135 443 return TRUE;
abaa26e5
RE
444 mask =
445 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
446 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
ed4c4348 447 } while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
cce8749e 448
f3bb6135
RE
449 return FALSE;
450}
cce8749e 451
e2c671ba
RE
452/* Return true if I is a valid constant for the operation CODE. */
453int
454const_ok_for_op (i, code, mode)
455 HOST_WIDE_INT i;
456 enum rtx_code code;
457 enum machine_mode mode;
458{
459 if (const_ok_for_arm (i))
460 return 1;
461
462 switch (code)
463 {
464 case PLUS:
465 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
466
467 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
468 case XOR:
469 case IOR:
470 return 0;
471
472 case AND:
473 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
474
475 default:
476 abort ();
477 }
478}
479
480/* Emit a sequence of insns to handle a large constant.
481 CODE is the code of the operation required, it can be any of SET, PLUS,
482 IOR, AND, XOR, MINUS;
483 MODE is the mode in which the operation is being performed;
484 VAL is the integer to operate on;
485 SOURCE is the other operand (a register, or a null-pointer for SET);
486 SUBTARGETS means it is safe to create scratch registers if that will
2b835d68
RE
487 either produce a simpler sequence, or we will want to cse the values.
488 Return value is the number of insns emitted. */
e2c671ba
RE
489
490int
491arm_split_constant (code, mode, val, target, source, subtargets)
492 enum rtx_code code;
493 enum machine_mode mode;
494 HOST_WIDE_INT val;
495 rtx target;
496 rtx source;
497 int subtargets;
2b835d68
RE
498{
499 if (subtargets || code == SET
500 || (GET_CODE (target) == REG && GET_CODE (source) == REG
501 && REGNO (target) != REGNO (source)))
502 {
2b835d68
RE
503 if (arm_gen_constant (code, mode, val, target, source, 1, 0)
504 > arm_constant_limit + (code != SET))
505 {
506 if (code == SET)
507 {
508 /* Currently SET is the only monadic value for CODE, all
509 the rest are diadic. */
510 emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
511 return 1;
512 }
513 else
514 {
515 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
516
517 emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
518 /* For MINUS, the value is subtracted from, since we never
519 have subtraction of a constant. */
520 if (code == MINUS)
521 emit_insn (gen_rtx (SET, VOIDmode, target,
522 gen_rtx (code, mode, temp, source)));
523 else
524 emit_insn (gen_rtx (SET, VOIDmode, target,
525 gen_rtx (code, mode, source, temp)));
526 return 2;
527 }
528 }
529 }
530
531 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
532}
533
534/* As above, but extra parameter GENERATE which, if clear, suppresses
535 RTL generation. */
536int
537arm_gen_constant (code, mode, val, target, source, subtargets, generate)
538 enum rtx_code code;
539 enum machine_mode mode;
540 HOST_WIDE_INT val;
541 rtx target;
542 rtx source;
543 int subtargets;
544 int generate;
e2c671ba 545{
e2c671ba
RE
546 int can_invert = 0;
547 int can_negate = 0;
548 int can_negate_initial = 0;
549 int can_shift = 0;
550 int i;
551 int num_bits_set = 0;
552 int set_sign_bit_copies = 0;
553 int clear_sign_bit_copies = 0;
554 int clear_zero_bit_copies = 0;
555 int set_zero_bit_copies = 0;
556 int insns = 0;
e2c671ba
RE
557 unsigned HOST_WIDE_INT temp1, temp2;
558 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
559
560 /* find out which operations are safe for a given CODE. Also do a quick
561 check for degenerate cases; these can occur when DImode operations
562 are split. */
563 switch (code)
564 {
565 case SET:
566 can_invert = 1;
567 can_shift = 1;
568 can_negate = 1;
569 break;
570
571 case PLUS:
572 can_negate = 1;
573 can_negate_initial = 1;
574 break;
575
576 case IOR:
577 if (remainder == 0xffffffff)
578 {
2b835d68
RE
579 if (generate)
580 emit_insn (gen_rtx (SET, VOIDmode, target,
581 GEN_INT (ARM_SIGN_EXTEND (val))));
e2c671ba
RE
582 return 1;
583 }
584 if (remainder == 0)
585 {
586 if (reload_completed && rtx_equal_p (target, source))
587 return 0;
2b835d68
RE
588 if (generate)
589 emit_insn (gen_rtx (SET, VOIDmode, target, source));
e2c671ba
RE
590 return 1;
591 }
592 break;
593
594 case AND:
595 if (remainder == 0)
596 {
2b835d68
RE
597 if (generate)
598 emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
e2c671ba
RE
599 return 1;
600 }
601 if (remainder == 0xffffffff)
602 {
603 if (reload_completed && rtx_equal_p (target, source))
604 return 0;
2b835d68
RE
605 if (generate)
606 emit_insn (gen_rtx (SET, VOIDmode, target, source));
e2c671ba
RE
607 return 1;
608 }
609 can_invert = 1;
610 break;
611
612 case XOR:
613 if (remainder == 0)
614 {
615 if (reload_completed && rtx_equal_p (target, source))
616 return 0;
2b835d68
RE
617 if (generate)
618 emit_insn (gen_rtx (SET, VOIDmode, target, source));
e2c671ba
RE
619 return 1;
620 }
621 if (remainder == 0xffffffff)
622 {
2b835d68
RE
623 if (generate)
624 emit_insn (gen_rtx (SET, VOIDmode, target,
625 gen_rtx (NOT, mode, source)));
e2c671ba
RE
626 return 1;
627 }
628
629 /* We don't know how to handle this yet below. */
630 abort ();
631
632 case MINUS:
633 /* We treat MINUS as (val - source), since (source - val) is always
634 passed as (source + (-val)). */
635 if (remainder == 0)
636 {
2b835d68
RE
637 if (generate)
638 emit_insn (gen_rtx (SET, VOIDmode, target,
639 gen_rtx (NEG, mode, source)));
e2c671ba
RE
640 return 1;
641 }
642 if (const_ok_for_arm (val))
643 {
2b835d68
RE
644 if (generate)
645 emit_insn (gen_rtx (SET, VOIDmode, target,
646 gen_rtx (MINUS, mode, GEN_INT (val), source)));
e2c671ba
RE
647 return 1;
648 }
649 can_negate = 1;
650
651 break;
652
653 default:
654 abort ();
655 }
656
657 /* If we can do it in one insn get out quickly */
658 if (const_ok_for_arm (val)
659 || (can_negate_initial && const_ok_for_arm (-val))
660 || (can_invert && const_ok_for_arm (~val)))
661 {
2b835d68
RE
662 if (generate)
663 emit_insn (gen_rtx (SET, VOIDmode, target,
664 (source ? gen_rtx (code, mode, source,
665 GEN_INT (val))
666 : GEN_INT (val))));
e2c671ba
RE
667 return 1;
668 }
669
670
671 /* Calculate a few attributes that may be useful for specific
672 optimizations. */
673
674 for (i = 31; i >= 0; i--)
675 {
676 if ((remainder & (1 << i)) == 0)
677 clear_sign_bit_copies++;
678 else
679 break;
680 }
681
682 for (i = 31; i >= 0; i--)
683 {
684 if ((remainder & (1 << i)) != 0)
685 set_sign_bit_copies++;
686 else
687 break;
688 }
689
690 for (i = 0; i <= 31; i++)
691 {
692 if ((remainder & (1 << i)) == 0)
693 clear_zero_bit_copies++;
694 else
695 break;
696 }
697
698 for (i = 0; i <= 31; i++)
699 {
700 if ((remainder & (1 << i)) != 0)
701 set_zero_bit_copies++;
702 else
703 break;
704 }
705
706 switch (code)
707 {
708 case SET:
709 /* See if we can do this by sign_extending a constant that is known
710 to be negative. This is a good, way of doing it, since the shift
711 may well merge into a subsequent insn. */
712 if (set_sign_bit_copies > 1)
713 {
714 if (const_ok_for_arm
715 (temp1 = ARM_SIGN_EXTEND (remainder
716 << (set_sign_bit_copies - 1))))
717 {
2b835d68
RE
718 if (generate)
719 {
d499463f 720 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68
RE
721 emit_insn (gen_rtx (SET, VOIDmode, new_src,
722 GEN_INT (temp1)));
723 emit_insn (gen_ashrsi3 (target, new_src,
724 GEN_INT (set_sign_bit_copies - 1)));
725 }
e2c671ba
RE
726 return 2;
727 }
728 /* For an inverted constant, we will need to set the low bits,
729 these will be shifted out of harm's way. */
730 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
731 if (const_ok_for_arm (~temp1))
732 {
2b835d68
RE
733 if (generate)
734 {
d499463f 735 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68
RE
736 emit_insn (gen_rtx (SET, VOIDmode, new_src,
737 GEN_INT (temp1)));
738 emit_insn (gen_ashrsi3 (target, new_src,
739 GEN_INT (set_sign_bit_copies - 1)));
740 }
e2c671ba
RE
741 return 2;
742 }
743 }
744
745 /* See if we can generate this by setting the bottom (or the top)
746 16 bits, and then shifting these into the other half of the
747 word. We only look for the simplest cases, to do more would cost
748 too much. Be careful, however, not to generate this when the
749 alternative would take fewer insns. */
750 if (val & 0xffff0000)
751 {
752 temp1 = remainder & 0xffff0000;
753 temp2 = remainder & 0x0000ffff;
754
755 /* Overlaps outside this range are best done using other methods. */
756 for (i = 9; i < 24; i++)
757 {
758 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
759 && ! const_ok_for_arm (temp2))
760 {
d499463f
RE
761 rtx new_src = (subtargets
762 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
763 : target);
764 insns = arm_gen_constant (code, mode, temp2, new_src,
2b835d68 765 source, subtargets, generate);
e2c671ba 766 source = new_src;
2b835d68
RE
767 if (generate)
768 emit_insn (gen_rtx (SET, VOIDmode, target,
769 gen_rtx (IOR, mode,
770 gen_rtx (ASHIFT, mode, source,
771 GEN_INT (i)),
772 source)));
e2c671ba
RE
773 return insns + 1;
774 }
775 }
776
777 /* Don't duplicate cases already considered. */
778 for (i = 17; i < 24; i++)
779 {
780 if (((temp1 | (temp1 >> i)) == remainder)
781 && ! const_ok_for_arm (temp1))
782 {
d499463f
RE
783 rtx new_src = (subtargets
784 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
785 : target);
786 insns = arm_gen_constant (code, mode, temp1, new_src,
2b835d68 787 source, subtargets, generate);
e2c671ba 788 source = new_src;
2b835d68
RE
789 if (generate)
790 emit_insn (gen_rtx (SET, VOIDmode, target,
791 gen_rtx (IOR, mode,
792 gen_rtx (LSHIFTRT, mode,
793 source, GEN_INT (i)),
794 source)));
e2c671ba
RE
795 return insns + 1;
796 }
797 }
798 }
799 break;
800
801 case IOR:
802 case XOR:
7b64da89
RE
803 /* If we have IOR or XOR, and the constant can be loaded in a
804 single instruction, and we can find a temporary to put it in,
e2c671ba
RE
805 then this can be done in two instructions instead of 3-4. */
806 if (subtargets
d499463f 807 /* TARGET can't be NULL if SUBTARGETS is 0 */
e2c671ba
RE
808 || (reload_completed && ! reg_mentioned_p (target, source)))
809 {
810 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
811 {
2b835d68
RE
812 if (generate)
813 {
814 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
e2c671ba 815
7b64da89 816 emit_insn (gen_rtx (SET, VOIDmode, sub, GEN_INT (val)));
2b835d68
RE
817 emit_insn (gen_rtx (SET, VOIDmode, target,
818 gen_rtx (code, mode, source, sub)));
819 }
e2c671ba
RE
820 return 2;
821 }
822 }
823
824 if (code == XOR)
825 break;
826
827 if (set_sign_bit_copies > 8
828 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
829 {
2b835d68
RE
830 if (generate)
831 {
832 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
833 rtx shift = GEN_INT (set_sign_bit_copies);
834
835 emit_insn (gen_rtx (SET, VOIDmode, sub,
836 gen_rtx (NOT, mode,
837 gen_rtx (ASHIFT, mode, source,
838 shift))));
839 emit_insn (gen_rtx (SET, VOIDmode, target,
840 gen_rtx (NOT, mode,
841 gen_rtx (LSHIFTRT, mode, sub,
842 shift))));
843 }
e2c671ba
RE
844 return 2;
845 }
846
847 if (set_zero_bit_copies > 8
848 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
849 {
2b835d68
RE
850 if (generate)
851 {
852 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
853 rtx shift = GEN_INT (set_zero_bit_copies);
854
855 emit_insn (gen_rtx (SET, VOIDmode, sub,
856 gen_rtx (NOT, mode,
857 gen_rtx (LSHIFTRT, mode, source,
858 shift))));
859 emit_insn (gen_rtx (SET, VOIDmode, target,
860 gen_rtx (NOT, mode,
861 gen_rtx (ASHIFT, mode, sub,
862 shift))));
863 }
e2c671ba
RE
864 return 2;
865 }
866
867 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
868 {
2b835d68
RE
869 if (generate)
870 {
871 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
872 emit_insn (gen_rtx (SET, VOIDmode, sub,
873 gen_rtx (NOT, mode, source)));
874 source = sub;
875 if (subtargets)
876 sub = gen_reg_rtx (mode);
877 emit_insn (gen_rtx (SET, VOIDmode, sub,
878 gen_rtx (AND, mode, source,
879 GEN_INT (temp1))));
880 emit_insn (gen_rtx (SET, VOIDmode, target,
881 gen_rtx (NOT, mode, sub)));
882 }
e2c671ba
RE
883 return 3;
884 }
885 break;
886
887 case AND:
888 /* See if two shifts will do 2 or more insn's worth of work. */
889 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
890 {
891 HOST_WIDE_INT shift_mask = ((0xffffffff
892 << (32 - clear_sign_bit_copies))
893 & 0xffffffff);
e2c671ba
RE
894
895 if ((remainder | shift_mask) != 0xffffffff)
896 {
2b835d68
RE
897 if (generate)
898 {
d499463f 899 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68 900 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
901 new_src, source, subtargets, 1);
902 source = new_src;
2b835d68
RE
903 }
904 else
d499463f
RE
905 {
906 rtx targ = subtargets ? NULL_RTX : target;
907 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
908 targ, source, subtargets, 0);
909 }
2b835d68
RE
910 }
911
912 if (generate)
913 {
d499463f
RE
914 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
915 rtx shift = GEN_INT (clear_sign_bit_copies);
916
917 emit_insn (gen_ashlsi3 (new_src, source, shift));
918 emit_insn (gen_lshrsi3 (target, new_src, shift));
e2c671ba
RE
919 }
920
e2c671ba
RE
921 return insns + 2;
922 }
923
924 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
925 {
926 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
e2c671ba
RE
927
928 if ((remainder | shift_mask) != 0xffffffff)
929 {
2b835d68
RE
930 if (generate)
931 {
d499463f
RE
932 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
933
2b835d68 934 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
935 new_src, source, subtargets, 1);
936 source = new_src;
2b835d68
RE
937 }
938 else
d499463f
RE
939 {
940 rtx targ = subtargets ? NULL_RTX : target;
941
942 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
943 targ, source, subtargets, 0);
944 }
2b835d68
RE
945 }
946
947 if (generate)
948 {
d499463f
RE
949 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
950 rtx shift = GEN_INT (clear_zero_bit_copies);
951
952 emit_insn (gen_lshrsi3 (new_src, source, shift));
953 emit_insn (gen_ashlsi3 (target, new_src, shift));
e2c671ba
RE
954 }
955
e2c671ba
RE
956 return insns + 2;
957 }
958
959 break;
960
961 default:
962 break;
963 }
964
965 for (i = 0; i < 32; i++)
966 if (remainder & (1 << i))
967 num_bits_set++;
968
969 if (code == AND || (can_invert && num_bits_set > 16))
970 remainder = (~remainder) & 0xffffffff;
971 else if (code == PLUS && num_bits_set > 16)
972 remainder = (-remainder) & 0xffffffff;
973 else
974 {
975 can_invert = 0;
976 can_negate = 0;
977 }
978
979 /* Now try and find a way of doing the job in either two or three
980 instructions.
981 We start by looking for the largest block of zeros that are aligned on
982 a 2-bit boundary, we then fill up the temps, wrapping around to the
983 top of the word when we drop off the bottom.
984 In the worst case this code should produce no more than four insns. */
985 {
986 int best_start = 0;
987 int best_consecutive_zeros = 0;
988
989 for (i = 0; i < 32; i += 2)
990 {
991 int consecutive_zeros = 0;
992
993 if (! (remainder & (3 << i)))
994 {
995 while ((i < 32) && ! (remainder & (3 << i)))
996 {
997 consecutive_zeros += 2;
998 i += 2;
999 }
1000 if (consecutive_zeros > best_consecutive_zeros)
1001 {
1002 best_consecutive_zeros = consecutive_zeros;
1003 best_start = i - consecutive_zeros;
1004 }
1005 i -= 2;
1006 }
1007 }
1008
1009 /* Now start emitting the insns, starting with the one with the highest
1010 bit set: we do this so that the smallest number will be emitted last;
1011 this is more likely to be combinable with addressing insns. */
1012 i = best_start;
1013 do
1014 {
1015 int end;
1016
1017 if (i <= 0)
1018 i += 32;
1019 if (remainder & (3 << (i - 2)))
1020 {
1021 end = i - 8;
1022 if (end < 0)
1023 end += 32;
1024 temp1 = remainder & ((0x0ff << end)
1025 | ((i < end) ? (0xff >> (32 - end)) : 0));
1026 remainder &= ~temp1;
1027
d499463f 1028 if (generate)
e2c671ba 1029 {
d499463f
RE
1030 rtx new_src;
1031
1032 if (code == SET)
2b835d68
RE
1033 emit_insn (gen_rtx (SET, VOIDmode,
1034 new_src = (subtargets
1035 ? gen_reg_rtx (mode)
1036 : target),
1037 GEN_INT (can_invert ? ~temp1 : temp1)));
d499463f 1038 else if (code == MINUS)
2b835d68
RE
1039 emit_insn (gen_rtx (SET, VOIDmode,
1040 new_src = (subtargets
1041 ? gen_reg_rtx (mode)
1042 : target),
1043 gen_rtx (code, mode, GEN_INT (temp1),
1044 source)));
d499463f 1045 else
2b835d68
RE
1046 emit_insn (gen_rtx (SET, VOIDmode,
1047 new_src = (remainder
1048 ? (subtargets
1049 ? gen_reg_rtx (mode)
1050 : target)
1051 : target),
1052 gen_rtx (code, mode, source,
1053 GEN_INT (can_invert ? ~temp1
1054 : (can_negate
1055 ? -temp1
1056 : temp1)))));
d499463f 1057 source = new_src;
e2c671ba
RE
1058 }
1059
d499463f
RE
1060 if (code == SET)
1061 {
1062 can_invert = 0;
1063 code = PLUS;
1064 }
1065 else if (code == MINUS)
1066 code = PLUS;
1067
e2c671ba 1068 insns++;
e2c671ba
RE
1069 i -= 6;
1070 }
1071 i -= 2;
1072 } while (remainder);
1073 }
1074 return insns;
1075}
1076
bd9c7e23
RE
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  CODE is the comparison; *OP1 is the
   constant operand and may be replaced with an adjacent constant.
   Returns the (possibly flipped) comparison code to use.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx *op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      /* Equality tests gain nothing from adjusting the constant.  */
      return code;

    case GT:
    case LE:
      /* x > c is x >= c+1 (and x <= c is x < c+1), valid unless c is
	 the most positive value; only do it when the new constant (or
	 its negation) is a legal ARM immediate.  */
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
		- 1)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i+1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      /* x >= c is x > c-1, valid unless c is the most negative value.  */
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
	{
	  *op1 = GEN_INT (i-1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      /* Unsigned: x > c is x >= c+1, valid unless c is all ones.  */
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      /* Unsigned: x >= c is x > c-1, valid unless c is zero.  */
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      /* Only plain integer comparisons are expected here.  */
      abort ();
    }

  return code;
}
1140
1141
2b835d68
RE
1142/* Handle aggregates that are not laid out in a BLKmode element.
1143 This is a sub-element of RETURN_IN_MEMORY. */
1144int
1145arm_return_in_memory (type)
1146 tree type;
1147{
1148 if (TREE_CODE (type) == RECORD_TYPE)
1149 {
1150 tree field;
1151
1152 /* For a struct, we can return in a register if every element was a
1153 bit-field. */
1154 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1155 if (TREE_CODE (field) != FIELD_DECL
1156 || ! DECL_BIT_FIELD_TYPE (field))
1157 return 1;
1158
1159 return 0;
1160 }
1161 else if (TREE_CODE (type) == UNION_TYPE)
1162 {
1163 tree field;
1164
1165 /* Unions can be returned in registers if every element is
1166 integral, or can be returned in an integer register. */
1167 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1168 {
1169 if (TREE_CODE (field) != FIELD_DECL
1170 || (AGGREGATE_TYPE_P (TREE_TYPE (field))
1171 && RETURN_IN_MEMORY (TREE_TYPE (field)))
1172 || FLOAT_TYPE_P (TREE_TYPE (field)))
1173 return 1;
1174 }
1175 return 0;
1176 }
1177 /* XXX Not sure what should be done for other aggregates, so put them in
1178 memory. */
1179 return 1;
1180}
1181
32de079a
RE
1182int
1183legitimate_pic_operand_p (x)
1184 rtx x;
1185{
1186 if (CONSTANT_P (x) && flag_pic
1187 && (GET_CODE (x) == SYMBOL_REF
1188 || (GET_CODE (x) == CONST
1189 && GET_CODE (XEXP (x, 0)) == PLUS
1190 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1191 return 0;
1192
1193 return 1;
1194}
1195
/* Convert ORIG (a SYMBOL_REF, CONST expression, or LABEL_REF) into a
   form that is a legitimate address under PIC.  MODE is the mode of the
   reference being legitimized; REG, if non-zero, is a register that may
   be used as scratch (when zero a fresh pseudo is allocated, which is
   only possible before reload).  Returns the legitimized address.  */
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  /* New pseudos cannot be created once reload has started.  */
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.
	 */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      /* Load the symbol's GOT offset, then load the address itself from
	 the GOT slot at pic-register + offset.  */
      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx (MEM, Pmode,
			 gen_rtx (PLUS, Pmode, pic_offset_table_rtx, address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop.  */
      REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, orig,
				  REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* Already legitimized: (const (plus pic-register ...)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize each half of the sum separately; only reuse REG
	     for the offset when the base did not consume it.  */
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  */
	  GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  /* Offset out of range for a legitimate index: force it into
	     a register (only possible before reload).  */
	  if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant_for_output (base, INTVAL (offset));
	}

      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  /* Multi-word integer accesses cannot use reg+reg addressing,
	     so perform the addition explicitly.  */
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx (PLUS, Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    current_function_uses_pic_offset_table = 1;

  return orig;
}
1299
1300static rtx pic_rtx;
1301
1302int
1303is_pic(x)
1304 rtx x;
1305{
1306 if (x == pic_rtx)
1307 return 1;
1308 return 0;
1309}
1310
/* Emit, at the start of the current function, the code that loads the
   PIC register with the address of the global offset table.  Does
   nothing when the function makes no use of the PIC register; aborts
   if that register is used without -fpic/-fPIC in effect.  */
void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0)
    return;

  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx (SYMBOL_REF, Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* The PC contains 'dot'+8, but the label L1 is on the next
     instruction, so the offset is only 'dot'+4.  */
  pic_tmp = gen_rtx (CONST, VOIDmode,
		     gen_rtx (PLUS, Pmode,
			      gen_rtx (LABEL_REF, VOIDmode, l1),
			      GEN_INT (4)));
  pic_tmp2 = gen_rtx (CONST, VOIDmode,
		      gen_rtx (PLUS, Pmode,
			       global_offset_table,
			       pc_rtx));

  /* pic_rtx = &GOT relative to the code that loads it; remembered so
     that is_pic can recognize it later.  */
  pic_rtx = gen_rtx (CONST, Pmode,
		     gen_rtx (MINUS, Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_jump_insn (gen_pic_add_dot_plus_eight(l1, pic_offset_table_rtx));
  emit_label (l1);

  /* Splice the whole setup sequence in ahead of the existing insns.  */
  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx (USE, VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
1355
e2c671ba
RE
/* Nonzero if X is a REG, or a SUBREG of a REG.  */
#define REG_OR_SUBREG_REG(X) \
  (GET_CODE (X) == REG \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

/* The underlying register of X: X itself when it is a REG, otherwise
   the register inside the SUBREG.  X must satisfy REG_OR_SUBREG_REG.  */
#define REG_OR_SUBREG_RTX(X) \
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

/* Nonzero if X is one of the frame-related pointer registers.  */
#define ARM_FRAME_RTX(X) \
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
   || (X) == arg_pointer_rtx)
1366
/* Return an estimate of the cost (in internal units) of RTL expression
   X, whose top-level code is CODE, appearing inside an expression with
   code OUTER_CODE.  Larger values mean more expensive.  */
int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      /* No hardware divide; always expensive.  */
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      /* Shifts: penalize non-register operands; DImode shifts are
	 multi-insn.  */
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
		       && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      /* const - reg, or reg - (shift/power-of-two-mult expression):
	 a single data-processing insn.  */
      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			      (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all */
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  /* Cost a multiply by a constant by simulating the number of
	     Booth steps the multiplier needs for this value.  */
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;
	  /* Tune as appropriate */
	  int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      /* Truncation of a widening multiply: cheap when the fast multiply
	 instructions exist.  */
      if (arm_fast_multiply && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      /* Conditional branches are much more expensive than conditional
	 execution.  */
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  break;
	}
      abort ();

    default:
      return 99;
    }
}
32de079a
RE
1597
/* Adjust the scheduling cost COST of the dependence between INSN and
   the earlier insn DEP, linked by LINK.  Returns the adjusted cost.
   A load scheduled directly after a store normally keeps its full
   cost, but is reduced to 1 when the load is from an area assumed to
   be cached.  */
int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store, there is no conflict if the load reads
	 from a cached area.  Assume that loads from the stack, and from the
	 constant pool are cached, and that others will miss.  This is a
	 hack.  */

/* debug_rtx (insn);
      debug_rtx (dep);
      debug_rtx (link);
      fprintf (stderr, "costs %d\n", cost); */

      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (hard_frame_pointer_rtx,
			      XEXP (SET_SRC (i_pat), 0)))
	{
/* fprintf (stderr, "***** Now 1\n"); */
	  return 1;
	}
    }

  return cost;
}
1635
ff9940b0
RE
/* This code has been fixed for cross compilation.  */

/* Nonzero once values_fpa below has been filled in.  */
static int fpa_consts_inited = 0;

/* Decimal spellings of the eight floating-point constants usable as
   FPA immediate operands.  */
char *strings_fpa[8] = {
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

/* The same eight constants in target floating-point form; filled in
   lazily by init_fpa_table.  */
static REAL_VALUE_TYPE values_fpa[8];
1646
1647static void
1648init_fpa_table ()
1649{
1650 int i;
1651 REAL_VALUE_TYPE r;
1652
1653 for (i = 0; i < 8; i++)
1654 {
1655 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
1656 values_fpa[i] = r;
1657 }
f3bb6135 1658
ff9940b0
RE
1659 fpa_consts_inited = 1;
1660}
1661
cce8749e
CH
1662/* Return TRUE if rtx X is a valid immediate FPU constant. */
1663
1664int
1665const_double_rtx_ok_for_fpu (x)
1666 rtx x;
1667{
ff9940b0
RE
1668 REAL_VALUE_TYPE r;
1669 int i;
1670
1671 if (!fpa_consts_inited)
1672 init_fpa_table ();
1673
1674 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1675 if (REAL_VALUE_MINUS_ZERO (r))
1676 return 0;
f3bb6135 1677
ff9940b0
RE
1678 for (i = 0; i < 8; i++)
1679 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1680 return 1;
f3bb6135 1681
ff9940b0 1682 return 0;
f3bb6135 1683}
ff9940b0
RE
1684
1685/* Return TRUE if rtx X is a valid immediate FPU constant. */
1686
1687int
1688neg_const_double_rtx_ok_for_fpu (x)
1689 rtx x;
1690{
1691 REAL_VALUE_TYPE r;
1692 int i;
1693
1694 if (!fpa_consts_inited)
1695 init_fpa_table ();
1696
1697 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1698 r = REAL_VALUE_NEGATE (r);
1699 if (REAL_VALUE_MINUS_ZERO (r))
1700 return 0;
f3bb6135 1701
ff9940b0
RE
1702 for (i = 0; i < 8; i++)
1703 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1704 return 1;
f3bb6135 1705
ff9940b0 1706 return 0;
f3bb6135 1707}
cce8749e
CH
1708\f
1709/* Predicates for `match_operand' and `match_operator'. */
1710
ff9940b0 1711/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
1712 (SUBREG (MEM)...).
1713
1714 This function exists because at the time it was put in it led to better
1715 code. SUBREG(MEM) always needs a reload in the places where
1716 s_register_operand is used, and this seemed to lead to excessive
1717 reloading. */
ff9940b0
RE
1718
1719int
1720s_register_operand (op, mode)
1721 register rtx op;
1722 enum machine_mode mode;
1723{
1724 if (GET_MODE (op) != mode && mode != VOIDmode)
1725 return 0;
1726
1727 if (GET_CODE (op) == SUBREG)
f3bb6135 1728 op = SUBREG_REG (op);
ff9940b0
RE
1729
1730 /* We don't consider registers whose class is NO_REGS
1731 to be a register operand. */
1732 return (GET_CODE (op) == REG
1733 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1734 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1735}
1736
e2c671ba
RE
1737/* Only accept reg, subreg(reg), const_int. */
1738
1739int
1740reg_or_int_operand (op, mode)
1741 register rtx op;
1742 enum machine_mode mode;
1743{
1744 if (GET_CODE (op) == CONST_INT)
1745 return 1;
1746
1747 if (GET_MODE (op) != mode && mode != VOIDmode)
1748 return 0;
1749
1750 if (GET_CODE (op) == SUBREG)
1751 op = SUBREG_REG (op);
1752
1753 /* We don't consider registers whose class is NO_REGS
1754 to be a register operand. */
1755 return (GET_CODE (op) == REG
1756 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1757 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1758}
1759
ff9940b0
RE
1760/* Return 1 if OP is an item in memory, given that we are in reload. */
1761
1762int
1763reload_memory_operand (op, mode)
1764 rtx op;
1765 enum machine_mode mode;
1766{
1767 int regno = true_regnum (op);
1768
1769 return (! CONSTANT_P (op)
1770 && (regno == -1
1771 || (GET_CODE (op) == REG
1772 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1773}
1774
4d818c85
RE
1775/* Return 1 if OP is a valid memory address, but not valid for a signed byte
1776 memory access (architecture V4) */
1777int
1778bad_signed_byte_operand (op, mode)
1779 rtx op;
1780 enum machine_mode mode;
1781{
1782 if (! memory_operand (op, mode) || GET_CODE (op) != MEM)
1783 return 0;
1784
1785 op = XEXP (op, 0);
1786
1787 /* A sum of anything more complex than reg + reg or reg + const is bad */
1788 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
9c8cc54f
RE
1789 && (! s_register_operand (XEXP (op, 0), VOIDmode)
1790 || (! s_register_operand (XEXP (op, 1), VOIDmode)
1791 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
1792 return 1;
1793
1794 /* Big constants are also bad */
1795 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
1796 && (INTVAL (XEXP (op, 1)) > 0xff
1797 || -INTVAL (XEXP (op, 1)) > 0xff))
1798 return 1;
1799
1800 /* Everything else is good, or can will automatically be made so. */
1801 return 0;
1802}
1803
cce8749e
CH
1804/* Return TRUE for valid operands for the rhs of an ARM instruction. */
1805
1806int
1807arm_rhs_operand (op, mode)
1808 rtx op;
1809 enum machine_mode mode;
1810{
ff9940b0 1811 return (s_register_operand (op, mode)
cce8749e 1812 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 1813}
cce8749e 1814
ff9940b0
RE
1815/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
1816 */
1817
1818int
1819arm_rhsm_operand (op, mode)
1820 rtx op;
1821 enum machine_mode mode;
1822{
1823 return (s_register_operand (op, mode)
1824 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
1825 || memory_operand (op, mode));
f3bb6135 1826}
ff9940b0
RE
1827
1828/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
1829 constant that is valid when negated. */
1830
1831int
1832arm_add_operand (op, mode)
1833 rtx op;
1834 enum machine_mode mode;
1835{
1836 return (s_register_operand (op, mode)
1837 || (GET_CODE (op) == CONST_INT
1838 && (const_ok_for_arm (INTVAL (op))
1839 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 1840}
ff9940b0
RE
1841
1842int
1843arm_not_operand (op, mode)
1844 rtx op;
1845 enum machine_mode mode;
1846{
1847 return (s_register_operand (op, mode)
1848 || (GET_CODE (op) == CONST_INT
1849 && (const_ok_for_arm (INTVAL (op))
1850 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 1851}
ff9940b0 1852
5165176d
RE
1853/* Return TRUE if the operand is a memory reference which contains an
1854 offsettable address. */
1855int
1856offsettable_memory_operand (op, mode)
1857 register rtx op;
1858 enum machine_mode mode;
1859{
1860 if (mode == VOIDmode)
1861 mode = GET_MODE (op);
1862
1863 return (mode == GET_MODE (op)
1864 && GET_CODE (op) == MEM
1865 && offsettable_address_p (reload_completed | reload_in_progress,
1866 mode, XEXP (op, 0)));
1867}
1868
/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */
int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* The address must be a (possibly SUBREG'd) register, or such a
     register plus a constant; in each arm of the test REG is set to
     the base register, whose recorded pointer alignment must be at
     least 4 (word aligned).  */
  return ((GET_CODE (reg = op) == REG
	   || (GET_CODE (op) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (op)) == REG)
	   || (GET_CODE (op) == PLUS
	       && GET_CODE (XEXP (op, 1)) == CONST_INT
	       && (GET_CODE (reg = XEXP (op, 0)) == REG
		   || (GET_CODE (XEXP (op, 0)) == SUBREG
		       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
	  && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
}
1896
b111229a
RE
1897/* Similar to s_register_operand, but does not allow hard integer
1898 registers. */
1899int
1900f_register_operand (op, mode)
1901 register rtx op;
1902 enum machine_mode mode;
1903{
1904 if (GET_MODE (op) != mode && mode != VOIDmode)
1905 return 0;
1906
1907 if (GET_CODE (op) == SUBREG)
1908 op = SUBREG_REG (op);
1909
1910 /* We don't consider registers whose class is NO_REGS
1911 to be a register operand. */
1912 return (GET_CODE (op) == REG
1913 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1914 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
1915}
1916
cce8749e
CH
1917/* Return TRUE for valid operands for the rhs of an FPU instruction. */
1918
1919int
1920fpu_rhs_operand (op, mode)
1921 rtx op;
1922 enum machine_mode mode;
1923{
ff9940b0 1924 if (s_register_operand (op, mode))
f3bb6135 1925 return TRUE;
cce8749e
CH
1926 else if (GET_CODE (op) == CONST_DOUBLE)
1927 return (const_double_rtx_ok_for_fpu (op));
f3bb6135
RE
1928
1929 return FALSE;
1930}
cce8749e 1931
ff9940b0
RE
1932int
1933fpu_add_operand (op, mode)
1934 rtx op;
1935 enum machine_mode mode;
1936{
1937 if (s_register_operand (op, mode))
f3bb6135 1938 return TRUE;
ff9940b0 1939 else if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
1940 return (const_double_rtx_ok_for_fpu (op)
1941 || neg_const_double_rtx_ok_for_fpu (op));
1942
1943 return FALSE;
ff9940b0
RE
1944}
1945
cce8749e
CH
1946/* Return nonzero if OP is a constant power of two. */
1947
1948int
1949power_of_two_operand (op, mode)
1950 rtx op;
1951 enum machine_mode mode;
1952{
1953 if (GET_CODE (op) == CONST_INT)
1954 {
f3bb6135
RE
1955 HOST_WIDE_INT value = INTVAL(op);
1956 return value != 0 && (value & (value - 1)) == 0;
cce8749e 1957 }
f3bb6135
RE
1958 return FALSE;
1959}
cce8749e
CH
1960
1961/* Return TRUE for a valid operand of a DImode operation.
ff9940b0
RE
1962 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1963 Note that this disallows MEM(REG+REG), but allows
1964 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
1965
1966int
1967di_operand (op, mode)
1968 rtx op;
1969 enum machine_mode mode;
1970{
ff9940b0 1971 if (s_register_operand (op, mode))
f3bb6135 1972 return TRUE;
cce8749e
CH
1973
1974 switch (GET_CODE (op))
1975 {
1976 case CONST_DOUBLE:
1977 case CONST_INT:
f3bb6135
RE
1978 return TRUE;
1979
cce8749e 1980 case MEM:
f3bb6135
RE
1981 return memory_address_p (DImode, XEXP (op, 0));
1982
cce8749e 1983 default:
f3bb6135 1984 return FALSE;
cce8749e 1985 }
f3bb6135 1986}
cce8749e 1987
f3139301
DE
1988/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
1989 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1990 Note that this disallows MEM(REG+REG), but allows
1991 MEM(PRE/POST_INC/DEC(REG)). */
1992
1993int
1994soft_df_operand (op, mode)
1995 rtx op;
1996 enum machine_mode mode;
1997{
1998 if (s_register_operand (op, mode))
1999 return TRUE;
2000
2001 switch (GET_CODE (op))
2002 {
2003 case CONST_DOUBLE:
2004 return TRUE;
2005
2006 case MEM:
2007 return memory_address_p (DFmode, XEXP (op, 0));
2008
2009 default:
2010 return FALSE;
2011 }
2012}
2013
cce8749e
CH
2014/* Return TRUE for valid index operands. */
2015
2016int
2017index_operand (op, mode)
2018 rtx op;
2019 enum machine_mode mode;
2020{
ff9940b0
RE
2021 return (s_register_operand(op, mode)
2022 || (immediate_operand (op, mode)
2023 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
f3bb6135 2024}
cce8749e 2025
ff9940b0
RE
2026/* Return TRUE for valid shifts by a constant. This also accepts any
2027 power of two on the (somewhat overly relaxed) assumption that the
2028 shift operator in this case was a mult. */
2029
2030int
2031const_shift_operand (op, mode)
2032 rtx op;
2033 enum machine_mode mode;
2034{
2035 return (power_of_two_operand (op, mode)
2036 || (immediate_operand (op, mode)
2037 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
f3bb6135 2038}
ff9940b0 2039
cce8749e
CH
2040/* Return TRUE for arithmetic operators which can be combined with a multiply
2041 (shift). */
2042
2043int
2044shiftable_operator (x, mode)
2045 rtx x;
2046 enum machine_mode mode;
2047{
2048 if (GET_MODE (x) != mode)
2049 return FALSE;
2050 else
2051 {
2052 enum rtx_code code = GET_CODE (x);
2053
2054 return (code == PLUS || code == MINUS
2055 || code == IOR || code == XOR || code == AND);
2056 }
f3bb6135 2057}
cce8749e
CH
2058
2059/* Return TRUE for shift operators. */
2060
2061int
2062shift_operator (x, mode)
2063 rtx x;
2064 enum machine_mode mode;
2065{
2066 if (GET_MODE (x) != mode)
2067 return FALSE;
2068 else
2069 {
2070 enum rtx_code code = GET_CODE (x);
2071
ff9940b0
RE
2072 if (code == MULT)
2073 return power_of_two_operand (XEXP (x, 1));
f3bb6135 2074
e2c671ba
RE
2075 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2076 || code == ROTATERT);
cce8749e 2077 }
f3bb6135 2078}
ff9940b0
RE
2079
2080int equality_operator (x, mode)
f3bb6135
RE
2081 rtx x;
2082 enum machine_mode mode;
ff9940b0 2083{
f3bb6135 2084 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
2085}
2086
2087/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
2088
2089int
2090minmax_operator (x, mode)
2091 rtx x;
2092 enum machine_mode mode;
2093{
2094 enum rtx_code code = GET_CODE (x);
2095
2096 if (GET_MODE (x) != mode)
2097 return FALSE;
f3bb6135 2098
ff9940b0 2099 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 2100}
ff9940b0
RE
2101
/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  /* Hard register 24 is the condition code register.  */
  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
5bbe2d40
RE
2124
/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */

int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  /* Only the CC modes created for dominance comparisons qualify.  */
  if (mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  /* Hard register 24 is the condition code register.  */
  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
2153
2b835d68
RE
2154/* Return TRUE if X references a SYMBOL_REF. */
2155int
2156symbol_mentioned_p (x)
2157 rtx x;
2158{
2159 register char *fmt;
2160 register int i;
2161
2162 if (GET_CODE (x) == SYMBOL_REF)
2163 return 1;
2164
2165 fmt = GET_RTX_FORMAT (GET_CODE (x));
2166 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2167 {
2168 if (fmt[i] == 'E')
2169 {
2170 register int j;
2171
2172 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2173 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2174 return 1;
2175 }
2176 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2177 return 1;
2178 }
2179
2180 return 0;
2181}
2182
2183/* Return TRUE if X references a LABEL_REF. */
2184int
2185label_mentioned_p (x)
2186 rtx x;
2187{
2188 register char *fmt;
2189 register int i;
2190
2191 if (GET_CODE (x) == LABEL_REF)
2192 return 1;
2193
2194 fmt = GET_RTX_FORMAT (GET_CODE (x));
2195 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2196 {
2197 if (fmt[i] == 'E')
2198 {
2199 register int j;
2200
2201 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2202 if (label_mentioned_p (XVECEXP (x, i, j)))
2203 return 1;
2204 }
2205 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2206 return 1;
2207 }
2208
2209 return 0;
2210}
2211
ff9940b0
RE
2212enum rtx_code
2213minmax_code (x)
f3bb6135 2214 rtx x;
ff9940b0
RE
2215{
2216 enum rtx_code code = GET_CODE (x);
2217
2218 if (code == SMAX)
2219 return GE;
f3bb6135 2220 else if (code == SMIN)
ff9940b0 2221 return LE;
f3bb6135 2222 else if (code == UMIN)
ff9940b0 2223 return LEU;
f3bb6135 2224 else if (code == UMAX)
ff9940b0 2225 return GEU;
f3bb6135 2226
ff9940b0
RE
2227 abort ();
2228}
2229
/* Return 1 if memory locations are adjacent */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  /* Both addresses must be either a plain register or
     (plus reg const_int); anything else is rejected.  */
  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      /* Extract base register and constant offset (0 for a bare reg).  */
      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = REGNO (XEXP (XEXP (a, 0), 0));
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = REGNO (XEXP (XEXP (b, 0), 0));
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = REGNO (XEXP (b, 0));
      /* Adjacent means same base and offsets exactly one word (4 bytes)
	 apart, in either order.  */
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
2264
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested. */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully: the first SET must add a constant
	 of (count - 2) * 4 to the base register, and the last element
	 must be a CLOBBER of that same register.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	  != REGNO (SET_DEST (elt)))
	return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below. */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  /* Remaining elements must load consecutive registers from
     consecutive word offsets off the same source address.  */
  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i - base
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
2333
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested. */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully: the first SET must add a constant
	 of (count - 2) * 4 to the base register, and the last element
	 must be a CLOBBER of that same register.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	  != REGNO (SET_DEST (elt)))
	return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below. */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  /* Remaining elements must store consecutive registers to
     consecutive word offsets off the same destination address.  */
  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i - base
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
e2c671ba 2402
84ed5e79
RE
/* Analyse OPERANDS (NOPS register loads followed by their NOPS memory
   operands) to see if they can be combined into one load-multiple.
   On success REGS, BASE and LOAD_OFFSET (when BASE is non-null) are
   filled in, and the return value selects the addressing mode:
   1 = ldmia, 2 = ldmib, 3 = ldmda, 4 = ldmdb, 5 = ldmia after an
   add/sub to set up the base.  Returns 0 if no combination is
   possible.  */
int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx *operands;
     int nops;
     int *regs;
     int *base;
     HOST_WIDE_INT *load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required. */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets. */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself. */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg(operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway. */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* Accept (reg), (subreg (reg)), or (plus reg const_int) —
	 the embedded assignments set REG and OFFSET as a side effect.  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO(reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != REGNO (reg))
		/* Not addressed from the same base register. */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, or if it overwrites the
	     base register but isn't the last insn in the list, then
	     we can't do this. */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
	      || (i != nops - 1 && unsorted_regs[i] == base_reg))
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address. */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent. */

  for (i = 1; i < nops; i++)
    {
      int j;

      /* Selection sort: find the next-larger register number.  */
      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once. */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn. */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
	  || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}
2545
/* Emit the assembler for a combined load-multiple covering OPERANDS
   (NOPS loads).  Uses load_multiple_sequence to pick the addressing
   mode; mode 5 first emits an add/sub to form the base address.
   Returns "" since the instructions are output directly.  */
char *
emit_ldm_seq (operands, nops)
     rtx *operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      /* Base needs adjusting first; put the result in the first
	 destination register, then ldmia from there.  */
      if (offset >= 0)
	sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) offset);
      else
	sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
2605
/* Analyse OPERANDS (NOPS register stores followed by their NOPS memory
   operands) to see if they can be combined into one store-multiple.
   On success REGS, BASE and LOAD_OFFSET (when BASE is non-null) are
   filled in, and the return value selects the addressing mode:
   1 = stmia, 2 = stmib, 3 = stmda, 4 = stmdb.  Returns 0 if no
   combination is possible (unlike the load case, no mode-5 base
   adjustment is attempted for stores).  */
int
store_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx *operands;
     int nops;
     int *regs;
     int *base;
     HOST_WIDE_INT *load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required. */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets. */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself. */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg(operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway. */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* Accept (reg), (subreg (reg)), or (plus reg const_int) —
	 the embedded assignments set REG and OFFSET as a side effect.  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO(reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != REGNO (reg))
		/* Not addressed from the same base register. */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, then we can't do this. */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address. */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent. */

  for (i = 1; i < nops; i++)
    {
      int j;

      /* Selection sort: find the next-larger register number.  */
      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once. */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}
2742
/* Emit the assembler for a combined store-multiple covering OPERANDS
   (NOPS stores).  Uses store_multiple_sequence to pick the addressing
   mode.  Returns "" since the instructions are output directly.  */
char *
emit_stm_seq (operands, nops)
     rtx *operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "stm%?ia\t");
      break;

    case 2:
      strcpy (buf, "stm%?ib\t");
      break;

    case 3:
      strcpy (buf, "stm%?da\t");
      break;

    case 4:
      strcpy (buf, "stm%?db\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole stm");

  output_asm_insn (buf, operands);
  return "";
}
2788
e2c671ba
RE
2789int
2790multi_register_push (op, mode)
0a81f500
RE
2791 rtx op;
2792 enum machine_mode mode;
e2c671ba
RE
2793{
2794 if (GET_CODE (op) != PARALLEL
2795 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2796 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
2797 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
2798 return 0;
2799
2800 return 1;
2801}
2802
ff9940b0 2803\f
f3bb6135
RE
2804/* Routines for use with attributes */
2805
31fdb4d5
DE
2806/* Return nonzero if ATTR is a valid attribute for DECL.
2807 ATTRIBUTES are any existing attributes and ARGS are the arguments
2808 supplied with ATTR.
2809
2810 Supported attributes:
2811
2812 naked: don't output any prologue or epilogue code, the user is assumed
2813 to do the right thing. */
2814
2815int
2816arm_valid_machine_decl_attribute (decl, attributes, attr, args)
2817 tree decl;
2818 tree attributes;
2819 tree attr;
2820 tree args;
2821{
2822 if (args != NULL_TREE)
2823 return 0;
2824
2825 if (is_attribute_p ("naked", attr))
2826 return TREE_CODE (decl) == FUNCTION_DECL;
2827 return 0;
2828}
2829
2830/* Return non-zero if FUNC is a naked function. */
2831
2832static int
2833arm_naked_function_p (func)
2834 tree func;
2835{
2836 tree a;
2837
2838 if (TREE_CODE (func) != FUNCTION_DECL)
2839 abort ();
2840
2841 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
2842 return a != NULL_TREE;
2843}
f3bb6135 2844\f
ff9940b0
RE
2845/* Routines for use in generating RTL */
2846
f3bb6135 2847rtx
56636818
JL
2848arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
2849 in_struct_p)
ff9940b0
RE
2850 int base_regno;
2851 int count;
2852 rtx from;
2853 int up;
2854 int write_back;
56636818
JL
2855 int unchanging_p;
2856 int in_struct_p;
ff9940b0
RE
2857{
2858 int i = 0, j;
2859 rtx result;
2860 int sign = up ? 1 : -1;
56636818 2861 rtx mem;
ff9940b0
RE
2862
2863 result = gen_rtx (PARALLEL, VOIDmode,
2864 rtvec_alloc (count + (write_back ? 2 : 0)));
2865 if (write_back)
f3bb6135 2866 {
ff9940b0 2867 XVECEXP (result, 0, 0)
f3bb6135
RE
2868 = gen_rtx (SET, GET_MODE (from), from,
2869 plus_constant (from, count * 4 * sign));
ff9940b0
RE
2870 i = 1;
2871 count++;
f3bb6135
RE
2872 }
2873
ff9940b0 2874 for (j = 0; i < count; i++, j++)
f3bb6135 2875 {
56636818
JL
2876 mem = gen_rtx (MEM, SImode, plus_constant (from, j * 4 * sign));
2877 RTX_UNCHANGING_P (mem) = unchanging_p;
2878 MEM_IN_STRUCT_P (mem) = in_struct_p;
2879
2880 XVECEXP (result, 0, i) = gen_rtx (SET, VOIDmode,
2881 gen_rtx (REG, SImode, base_regno + j),
2882 mem);
f3bb6135
RE
2883 }
2884
ff9940b0
RE
2885 if (write_back)
2886 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
2887
2888 return result;
2889}
2890
f3bb6135 2891rtx
56636818
JL
2892arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
2893 in_struct_p)
ff9940b0
RE
2894 int base_regno;
2895 int count;
2896 rtx to;
2897 int up;
2898 int write_back;
56636818
JL
2899 int unchanging_p;
2900 int in_struct_p;
ff9940b0
RE
2901{
2902 int i = 0, j;
2903 rtx result;
2904 int sign = up ? 1 : -1;
56636818 2905 rtx mem;
ff9940b0
RE
2906
2907 result = gen_rtx (PARALLEL, VOIDmode,
2908 rtvec_alloc (count + (write_back ? 2 : 0)));
2909 if (write_back)
f3bb6135 2910 {
ff9940b0 2911 XVECEXP (result, 0, 0)
f3bb6135
RE
2912 = gen_rtx (SET, GET_MODE (to), to,
2913 plus_constant (to, count * 4 * sign));
ff9940b0
RE
2914 i = 1;
2915 count++;
f3bb6135
RE
2916 }
2917
ff9940b0 2918 for (j = 0; i < count; i++, j++)
f3bb6135 2919 {
56636818
JL
2920 mem = gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign));
2921 RTX_UNCHANGING_P (mem) = unchanging_p;
2922 MEM_IN_STRUCT_P (mem) = in_struct_p;
2923
2924 XVECEXP (result, 0, i) = gen_rtx (SET, VOIDmode, mem,
2925 gen_rtx (REG, SImode, base_regno + j));
f3bb6135
RE
2926 }
2927
ff9940b0
RE
2928 if (write_back)
2929 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
2930
2931 return result;
2932}
2933
880e2516
RE
/* Expand a block copy (movstrqi).  OPERANDS are: 0 = destination MEM,
   1 = source MEM, 2 = byte count (CONST_INT <= 64), 3 = alignment
   (CONST_INT, must be a multiple of 4).  Copies whole words with
   load/store-multiple, then any trailing bytes one at a time.
   Returns 1 on success, 0 if the operands don't fit these limits.  */
int
arm_gen_movstrqi (operands)
     rtx *operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  /* Preserve the memory attributes of the original MEMs so that the
     MEMs generated below carry the same aliasing information.  */
  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);

  /* Copy up to four words per iteration through registers r0..r3.  */
  for (i = 0; in_words_to_go >= 2; i+=4)
    {
      if (in_words_to_go > 4)
	emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
					  src_unchanging_p, src_in_struct_p));
      else
	emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
					  FALSE, src_unchanging_p,
					  src_in_struct_p));

      if (out_words_to_go)
	{
	  if (out_words_to_go > 4)
	    emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
					       dst_unchanging_p,
					       dst_in_struct_p));
	  else if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, out_words_to_go,
					       dst, TRUE,
					       (last_bytes == 0
						? FALSE : TRUE),
					       dst_unchanging_p,
					       dst_in_struct_p));
	  else
	    {
	      /* A single word: plain store, advancing DST by hand only
		 if trailing bytes remain to be written.  */
	      mem = gen_rtx (MEM, SImode, dst);
	      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	      emit_move_insn (mem, gen_rtx (REG, SImode, 0));
	      if (last_bytes != 0)
		emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
  if (out_words_to_go)
    {
      rtx sreg;

      mem = gen_rtx (MEM, SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx (MEM, SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check */
	abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
	abort ();

      /* Load the final partial word into PART_BYTES_REG.  */
      mem = gen_rtx (MEM, SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      if (part_bytes_reg == NULL)
	abort ();

      /* The bytes we want are in the top end of the word */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      /* Store bytes from the high end downwards, shifting the word
	 right by 8 between stores.  */
      while (last_bytes)
	{
	  mem = gen_rtx (MEM, QImode, plus_constant (dst, last_bytes - 1));
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  emit_move_insn (mem, gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}

    }
  else
    {
      /* Little-endian: store the low byte, advance DST, shift right.  */
      while (last_bytes)
	{
	  if (part_bytes_reg == NULL)
	    abort ();

	  mem = gen_rtx (MEM, QImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  emit_move_insn (mem, gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (dst, dst, const1_rtx));
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }

  return 1;
}
3092
5165176d
RE
/* Generate a memory reference for a half word, such that it will be loaded
   into the top 16 bits of the word.  We can assume that the address is
   known to be alignable and of the form reg, or plus (reg, const). */
rtx
gen_rotated_half_load (memref)
     rtx memref;
{
  HOST_WIDE_INT offset = 0;
  rtx base = XEXP (memref, 0);

  /* Split the address into base register and constant offset.  */
  if (GET_CODE (base) == PLUS)
    {
      offset = INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* If we aren't allowed to generate unaligned addresses, then fail. */
  if (TARGET_SHORT_BY_BYTES
      && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
    return NULL;

  /* Load the word containing the halfword (offset rounded down).  */
  base = gen_rtx (MEM, SImode, plus_constant (base, offset & ~2));

  /* If the wanted halfword already lands in the top 16 bits, return the
     plain word load; otherwise rotate it into place.  */
  if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
    return base;

  return gen_rtx (ROTATE, SImode, base, GEN_INT (16));
}
3121
84ed5e79
RE
/* Select the CC mode for a dominance expression combining comparisons
   X and Y.  COND_OR is nonzero when either condition may be true (the
   IF_THEN_ELSE false arm was const1_rtx), zero when both must be true.
   Returns plain CCmode when the combination cannot be handled.  */
static enum machine_mode
select_dominance_cc_mode (op, x, y, cond_or)
     enum rtx_code op;
     rtx x;
     rtx y;
     HOST_WIDE_INT cond_or;
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Currently we will probably get the wrong result if the individual
     comparisons are not simple.  This also ensures that it is safe to
     reverse a comparison if necessary. */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
	  != CCmode))
    return CCmode;

  if (cond_or)
    cond1 = reverse_condition (cond1);

  /* If the comparisons are not equal, and one doesn't dominate the other,
     then we can't do this. */
  if (cond1 != cond2
      && ! comparison_dominates_p (cond1, cond2)
      && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
    return CCmode;

  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  /* COND1 now dominates COND2; pick the CC_D* mode for the pair.  */
  switch (cond1)
    {
    case EQ:
      if (cond2 == EQ || ! cond_or)
	return CC_DEQmode;

      switch (cond2)
	{
	case LE: return CC_DLEmode;
	case LEU: return CC_DLEUmode;
	case GE: return CC_DGEmode;
	case GEU: return CC_DGEUmode;
	default: break;
	}

      break;

    case LT:
      if (cond2 == LT || ! cond_or)
	return CC_DLTmode;
      if (cond2 == LE)
	return CC_DLEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GT:
      if (cond2 == GT || ! cond_or)
	return CC_DGTmode;
      if (cond2 == GE)
	return CC_DGEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case LTU:
      if (cond2 == LTU || ! cond_or)
	return CC_DLTUmode;
      if (cond2 == LEU)
	return CC_DLEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GTU:
      if (cond2 == GTU || ! cond_or)
	return CC_DGTUmode;
      if (cond2 == GEU)
	return CC_DGEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    /* The remaining cases only occur when both comparisons are the
       same. */
    case NE:
      return CC_DNEmode;

    case LE:
      return CC_DLEmode;

    case GE:
      return CC_DGEmode;

    case LEU:
      return CC_DLEUmode;

    case GEU:
      return CC_DGEUmode;

    default:
      break;
    }

  abort ();
}
3234
/* Return the CC mode to use for comparison OP on operands X and Y.
   Special modes are chosen for FP compares, swapped shift compares,
   shifted byte loads, flag-setting arithmetic, dominance expressions,
   QImode equality and carry-only compares; CCmode otherwise.  */
enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise. */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;

  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler. */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
	  || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities). */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
	  || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* An operation that sets the condition codes as a side-effect, the
     V flag is not set correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead. */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	  || GET_CODE (x) == AND || GET_CODE (x) == IOR
	  || GET_CODE (x) == XOR || GET_CODE (x) == MULT
	  || GET_CODE (x) == NOT || GET_CODE (x) == NEG
	  || GET_CODE (x) == LSHIFTRT
	  || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done. */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
	  || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (op, XEXP (x, 0), XEXP (x, 1),
				     INTVAL (XEXP (x, 2)));

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  /* Unsigned compare against an addend of the compared value: only the
     carry flag is meaningful.  */
  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
3307
ff9940b0
RE
3308/* X and Y are two things to compare using CODE. Emit the compare insn and
3309 return the rtx for register 0 in the proper mode. FP means this is a
3310 floating point compare: I don't think that it is needed on the arm. */
3311
3312rtx
3313gen_compare_reg (code, x, y, fp)
3314 enum rtx_code code;
3315 rtx x, y;
ed4c4348 3316 int fp;
ff9940b0
RE
3317{
3318 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
3319 rtx cc_reg = gen_rtx (REG, mode, 24);
3320
3321 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
3322 gen_rtx (COMPARE, mode, x, y)));
3323
3324 return cc_reg;
3325}
3326
0a81f500
RE
3327void
3328arm_reload_in_hi (operands)
3329 rtx *operands;
3330{
3331 rtx base = find_replacement (&XEXP (operands[1], 0));
3332
3333 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
e5e809f4
JL
3334 /* Handle the case where the address is too complex to be offset by 1. */
3335 if (GET_CODE (base) == MINUS
3336 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
3337 {
3338 rtx base_plus = gen_rtx (REG, SImode, REGNO (operands[0]));
3339
3340 emit_insn (gen_rtx (SET, VOIDmode, base_plus, base));
3341 base = base_plus;
3342 }
3343
0a81f500
RE
3344 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
3345 gen_rtx (MEM, QImode,
3346 plus_constant (base, 1))));
3347 if (BYTES_BIG_ENDIAN)
3348 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3349 operands[0], 0),
3350 gen_rtx (IOR, SImode,
3351 gen_rtx (ASHIFT, SImode,
3352 gen_rtx (SUBREG, SImode,
3353 operands[0], 0),
3354 GEN_INT (8)),
3355 operands[2])));
3356 else
3357 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3358 operands[0], 0),
3359 gen_rtx (IOR, SImode,
3360 gen_rtx (ASHIFT, SImode,
3361 operands[2],
3362 GEN_INT (8)),
3363 gen_rtx (SUBREG, SImode, operands[0], 0))));
3364}
3365
f3bb6135 3366void
af48348a 3367arm_reload_out_hi (operands)
f3bb6135 3368 rtx *operands;
af48348a
RK
3369{
3370 rtx base = find_replacement (&XEXP (operands[0], 0));
3371
b5cc037f
RE
3372 if (BYTES_BIG_ENDIAN)
3373 {
3374 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3375 gen_rtx (SUBREG, QImode, operands[1], 0)));
3376 emit_insn (gen_lshrsi3 (operands[2],
3377 gen_rtx (SUBREG, SImode, operands[1], 0),
3378 GEN_INT (8)));
3379 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3380 gen_rtx (SUBREG, QImode, operands[2], 0)));
3381 }
3382 else
3383 {
3384 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3385 gen_rtx (SUBREG, QImode, operands[1], 0)));
3386 emit_insn (gen_lshrsi3 (operands[2],
3387 gen_rtx (SUBREG, SImode, operands[1], 0),
3388 GEN_INT (8)));
3389 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3390 gen_rtx (SUBREG, QImode, operands[2], 0)));
3391 }
af48348a 3392}
2b835d68
RE
3393\f
3394/* Routines for manipulation of the constant pool. */
3395/* This is unashamedly hacked from the version in sh.c, since the problem is
3396 extremely similar. */
3397
3398/* Arm instructions cannot load a large constant into a register,
3399 constants have to come from a pc relative load. The reference of a pc
3400 relative load instruction must be less than 1k in front of the instruction.
3401 This means that we often have to dump a constant inside a function, and
3402 generate code to branch around it.
3403
3404 It is important to minimize this, since the branches will slow things
3405 down and make things bigger.
3406
3407 Worst case code looks like:
3408
3409 ldr rn, L1
3410 b L2
3411 align
3412 L1: .long value
3413 L2:
3414 ..
3415
3416 ldr rn, L3
3417 b L4
3418 align
3419 L3: .long value
3420 L4:
3421 ..
3422
3423 We fix this by performing a scan before scheduling, which notices which
3424 instructions need to have their operands fetched from the constant table
3425 and builds the table.
3426
3427
3428 The algorithm is:
3429
3430 scan, find an instruction which needs a pcrel move. Look forward, find the
3431 last barrier which is within MAX_COUNT bytes of the requirement.
3432 If there isn't one, make one. Process all the instructions between
3433 the find and the barrier.
3434
3435 In the above example, we can tell that L3 is within 1k of L1, so
3436 the first move can be shrunk from the 2 insn+constant sequence into
3437 just 1 insn, and the constant moved to L3 to make:
3438
3439 ldr rn, L1
3440 ..
3441 ldr rn, L3
3442 b L4
3443 align
3444 L1: .long value
3445 L3: .long value
3446 L4:
3447
3448 Then the second move becomes the target for the shortening process.
3449
3450 */
3451
3452typedef struct
3453{
3454 rtx value; /* Value in table */
3455 HOST_WIDE_INT next_offset;
3456 enum machine_mode mode; /* Mode of value */
3457} pool_node;
3458
3459/* The maximum number of constants that can fit into one pool, since
3460 the pc relative range is 0...1020 bytes and constants are at least 4
3461 bytes long */
3462
3463#define MAX_POOL_SIZE (1020/4)
3464static pool_node pool_vector[MAX_POOL_SIZE];
3465static int pool_size;
3466static rtx pool_vector_label;
3467
332072db
RE
3468/* Add a constant to the pool and return its offset within the current
3469 pool.
3470
3471 X is the rtx we want to replace. MODE is its mode. On return,
3472 ADDRESS_ONLY will be non-zero if we really want the address of such
3473 a constant, not the constant itself. */
2b835d68 3474static HOST_WIDE_INT
332072db 3475add_constant (x, mode, address_only)
2b835d68
RE
3476 rtx x;
3477 enum machine_mode mode;
da6558fd 3478 int * address_only;
2b835d68
RE
3479{
3480 int i;
2b835d68
RE
3481 HOST_WIDE_INT offset;
3482
da6558fd
NC
3483 * address_only = 0;
3484
2b835d68
RE
3485 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
3486 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3487 x = get_pool_constant (XEXP (x, 0));
332072db
RE
3488 else if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P(x))
3489 {
3490 *address_only = 1;
3491 x = get_pool_constant (x);
3492 }
2b835d68
RE
3493#ifndef AOF_ASSEMBLER
3494 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
3495 x = XVECEXP (x, 0, 0);
3496#endif
3497
32de079a
RE
3498#ifdef AOF_ASSEMBLER
3499 /* PIC Symbol references need to be converted into offsets into the
3500 based area. */
3501 if (flag_pic && GET_CODE (x) == SYMBOL_REF)
3502 x = aof_pic_entry (x);
3503#endif /* AOF_ASSEMBLER */
3504
2b835d68
RE
3505 /* First see if we've already got it */
3506 for (i = 0; i < pool_size; i++)
3507 {
3508 if (GET_CODE (x) == pool_vector[i].value->code
3509 && mode == pool_vector[i].mode)
3510 {
3511 if (GET_CODE (x) == CODE_LABEL)
3512 {
3513 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
3514 continue;
3515 }
3516 if (rtx_equal_p (x, pool_vector[i].value))
3517 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
3518 }
3519 }
3520
3521 /* Need a new one */
3522 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
3523 offset = 0;
3524 if (pool_size == 0)
3525 pool_vector_label = gen_label_rtx ();
3526 else
3527 pool_vector[pool_size].next_offset
3528 += (offset = pool_vector[pool_size - 1].next_offset);
3529
3530 pool_vector[pool_size].value = x;
3531 pool_vector[pool_size].mode = mode;
3532 pool_size++;
3533 return offset;
3534}
3535
3536/* Output the literal table */
3537static void
3538dump_table (scan)
3539 rtx scan;
3540{
3541 int i;
3542
3543 scan = emit_label_after (gen_label_rtx (), scan);
3544 scan = emit_insn_after (gen_align_4 (), scan);
3545 scan = emit_label_after (pool_vector_label, scan);
3546
3547 for (i = 0; i < pool_size; i++)
3548 {
3549 pool_node *p = pool_vector + i;
3550
3551 switch (GET_MODE_SIZE (p->mode))
3552 {
3553 case 4:
3554 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
3555 break;
3556
3557 case 8:
3558 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
3559 break;
3560
3561 default:
3562 abort ();
3563 break;
3564 }
3565 }
3566
3567 scan = emit_insn_after (gen_consttable_end (), scan);
3568 scan = emit_barrier_after (scan);
3569 pool_size = 0;
3570}
3571
3572/* Non zero if the src operand needs to be fixed up */
3573static int
3574fixit (src, mode, destreg)
3575 rtx src;
3576 enum machine_mode mode;
3577 int destreg;
3578{
3579 if (CONSTANT_P (src))
3580 {
3581 if (GET_CODE (src) == CONST_INT)
3582 return (! const_ok_for_arm (INTVAL (src))
3583 && ! const_ok_for_arm (~INTVAL (src)));
3584 if (GET_CODE (src) == CONST_DOUBLE)
3585 return (GET_MODE (src) == VOIDmode
3586 || destreg < 16
3587 || (! const_double_rtx_ok_for_fpu (src)
3588 && ! neg_const_double_rtx_ok_for_fpu (src)));
3589 return symbol_mentioned_p (src);
3590 }
3591#ifndef AOF_ASSEMBLER
3592 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
3593 return 1;
3594#endif
3595 else
3596 return (mode == SImode && GET_CODE (src) == MEM
3597 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
3598 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
3599}
3600
3601/* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
3602static rtx
3603find_barrier (from, max_count)
3604 rtx from;
3605 int max_count;
3606{
3607 int count = 0;
3608 rtx found_barrier = 0;
e5e809f4 3609 rtx last = from;
2b835d68
RE
3610
3611 while (from && count < max_count)
3612 {
7551cbc7 3613 rtx tmp;
da6558fd 3614
2b835d68 3615 if (GET_CODE (from) == BARRIER)
7551cbc7 3616 found_barrier = from;
2b835d68
RE
3617
3618 /* Count the length of this insn */
3619 if (GET_CODE (from) == INSN
3620 && GET_CODE (PATTERN (from)) == SET
3621 && CONSTANT_P (SET_SRC (PATTERN (from)))
3622 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
d499463f 3623 count += 8;
7551cbc7
RE
3624 /* Handle table jumps as a single entity. */
3625 else if (GET_CODE (from) == JUMP_INSN
3626 && JUMP_LABEL (from) != 0
3627 && ((tmp = next_real_insn (JUMP_LABEL (from)))
3628 == next_real_insn (from))
3629 && tmp != NULL
3630 && GET_CODE (tmp) == JUMP_INSN
3631 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
3632 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
3633 {
3634 int elt = GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC ? 1 : 0;
3635 count += (get_attr_length (from)
3636 + GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (tmp), elt));
3637 /* Continue after the dispatch table. */
3638 last = from;
3639 from = NEXT_INSN (tmp);
3640 continue;
3641 }
2b835d68
RE
3642 else
3643 count += get_attr_length (from);
3644
e5e809f4 3645 last = from;
2b835d68
RE
3646 from = NEXT_INSN (from);
3647 }
3648
da6558fd 3649 if (! found_barrier)
2b835d68
RE
3650 {
3651 /* We didn't find a barrier in time to
da6558fd 3652 dump our stuff, so we'll make one. */
2b835d68 3653 rtx label = gen_label_rtx ();
da6558fd 3654
2b835d68 3655 if (from)
e5e809f4 3656 from = PREV_INSN (last);
2b835d68
RE
3657 else
3658 from = get_last_insn ();
da6558fd
NC
3659
3660 /* Walk back to be just before any jump. */
2b835d68 3661 while (GET_CODE (from) == JUMP_INSN
25b1c156 3662 || GET_CODE (from) == NOTE
2b835d68
RE
3663 || GET_CODE (from) == CODE_LABEL)
3664 from = PREV_INSN (from);
da6558fd 3665
2b835d68
RE
3666 from = emit_jump_insn_after (gen_jump (label), from);
3667 JUMP_LABEL (from) = label;
3668 found_barrier = emit_barrier_after (from);
3669 emit_label_after (label, found_barrier);
2b835d68
RE
3670 }
3671
3672 return found_barrier;
3673}
3674
3675/* Non zero if the insn is a move instruction which needs to be fixed. */
3676static int
3677broken_move (insn)
3678 rtx insn;
3679{
3680 if (!INSN_DELETED_P (insn)
3681 && GET_CODE (insn) == INSN
3682 && GET_CODE (PATTERN (insn)) == SET)
3683 {
3684 rtx pat = PATTERN (insn);
3685 rtx src = SET_SRC (pat);
3686 rtx dst = SET_DEST (pat);
3687 int destreg;
3688 enum machine_mode mode = GET_MODE (dst);
ad076f4e 3689
2b835d68
RE
3690 if (dst == pc_rtx)
3691 return 0;
3692
3693 if (GET_CODE (dst) == REG)
3694 destreg = REGNO (dst);
3695 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
3696 destreg = REGNO (SUBREG_REG (dst));
ad076f4e
RE
3697 else
3698 return 0;
2b835d68
RE
3699
3700 return fixit (src, mode, destreg);
3701 }
3702 return 0;
3703}
3704
3705void
3706arm_reorg (first)
3707 rtx first;
3708{
3709 rtx insn;
3710 int count_size;
2b835d68
RE
3711
3712#if 0
3713 /* The ldr instruction can work with up to a 4k offset, and most constants
3714 will be loaded with one of these instructions; however, the adr
3715 instruction and the ldf instructions only work with a 1k offset. This
3716 code needs to be rewritten to use the 4k offset when possible, and to
3717 adjust when a 1k offset is needed. For now we just use a 1k offset
3718 from the start. */
3719 count_size = 4000;
3720
3721 /* Floating point operands can't work further than 1024 bytes from the
3722 PC, so to make things simple we restrict all loads for such functions.
3723 */
3724 if (TARGET_HARD_FLOAT)
ad076f4e
RE
3725 {
3726 int regno;
3727
3728 for (regno = 16; regno < 24; regno++)
3729 if (regs_ever_live[regno])
3730 {
3731 count_size = 1000;
3732 break;
3733 }
3734 }
2b835d68
RE
3735#else
3736 count_size = 1000;
3737#endif /* 0 */
3738
3739 for (insn = first; insn; insn = NEXT_INSN (insn))
3740 {
3741 if (broken_move (insn))
3742 {
3743 /* This is a broken move instruction, scan ahead looking for
3744 a barrier to stick the constant table behind */
3745 rtx scan;
3746 rtx barrier = find_barrier (insn, count_size);
3747
3748 /* Now find all the moves between the points and modify them */
3749 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
3750 {
3751 if (broken_move (scan))
3752 {
3753 /* This is a broken move instruction, add it to the pool */
3754 rtx pat = PATTERN (scan);
3755 rtx src = SET_SRC (pat);
3756 rtx dst = SET_DEST (pat);
3757 enum machine_mode mode = GET_MODE (dst);
3758 HOST_WIDE_INT offset;
3759 rtx newinsn = scan;
3760 rtx newsrc;
3761 rtx addr;
3762 int scratch;
332072db 3763 int address_only;
2b835d68
RE
3764
3765 /* If this is an HImode constant load, convert it into
3766 an SImode constant load. Since the register is always
3767 32 bits this is safe. We have to do this, since the
3768 load pc-relative instruction only does a 32-bit load. */
3769 if (mode == HImode)
3770 {
3771 mode = SImode;
3772 if (GET_CODE (dst) != REG)
3773 abort ();
3774 PUT_MODE (dst, SImode);
3775 }
3776
332072db 3777 offset = add_constant (src, mode, &address_only);
2b835d68
RE
3778 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
3779 pool_vector_label),
3780 offset);
3781
332072db
RE
3782 /* If we only want the address of the pool entry, or
3783 for wide moves to integer regs we need to split
3784 the address calculation off into a separate insn.
3785 If necessary, the load can then be done with a
3786 load-multiple. This is safe, since we have
3787 already noted the length of such insns to be 8,
3788 and we are immediately over-writing the scratch
3789 we have grabbed with the final result. */
3790 if ((address_only || GET_MODE_SIZE (mode) > 4)
2b835d68
RE
3791 && (scratch = REGNO (dst)) < 16)
3792 {
332072db
RE
3793 rtx reg;
3794
3795 if (mode == SImode)
3796 reg = dst;
3797 else
3798 reg = gen_rtx (REG, SImode, scratch);
3799
2b835d68
RE
3800 newinsn = emit_insn_after (gen_movaddr (reg, addr),
3801 newinsn);
3802 addr = reg;
3803 }
3804
332072db
RE
3805 if (! address_only)
3806 {
3807 newsrc = gen_rtx (MEM, mode, addr);
3808
3809 /* XXX Fixme -- I think the following is bogus. */
3810 /* Build a jump insn wrapper around the move instead
3811 of an ordinary insn, because we want to have room for
3812 the target label rtx in fld[7], which an ordinary
3813 insn doesn't have. */
3814 newinsn = emit_jump_insn_after
3815 (gen_rtx (SET, VOIDmode, dst, newsrc), newinsn);
3816 JUMP_LABEL (newinsn) = pool_vector_label;
3817
3818 /* But it's still an ordinary insn */
3819 PUT_CODE (newinsn, INSN);
3820 }
2b835d68
RE
3821
3822 /* Kill old insn */
3823 delete_insn (scan);
3824 scan = newinsn;
3825 }
3826 }
3827 dump_table (barrier);
3828 insn = scan;
3829 }
3830 }
3831}
3832
cce8749e
CH
3833\f
3834/* Routines to output assembly language. */
3835
f3bb6135 3836/* If the rtx is the correct value then return the string of the number.
ff9940b0
RE
3837 In this way we can ensure that valid double constants are generated even
3838 when cross compiling. */
3839char *
3840fp_immediate_constant (x)
b5cc037f 3841 rtx x;
ff9940b0
RE
3842{
3843 REAL_VALUE_TYPE r;
3844 int i;
3845
3846 if (!fpa_consts_inited)
3847 init_fpa_table ();
3848
3849 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3850 for (i = 0; i < 8; i++)
3851 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3852 return strings_fpa[i];
f3bb6135 3853
ff9940b0
RE
3854 abort ();
3855}
3856
9997d19d
RE
3857/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
3858static char *
3859fp_const_from_val (r)
3860 REAL_VALUE_TYPE *r;
3861{
3862 int i;
3863
3864 if (! fpa_consts_inited)
3865 init_fpa_table ();
3866
3867 for (i = 0; i < 8; i++)
3868 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
3869 return strings_fpa[i];
3870
3871 abort ();
3872}
ff9940b0 3873
cce8749e
CH
3874/* Output the operands of a LDM/STM instruction to STREAM.
3875 MASK is the ARM register set mask of which only bits 0-15 are important.
3876 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
3877 must follow the register list. */
3878
3879void
3880print_multi_reg (stream, instr, mask, hat)
3881 FILE *stream;
3882 char *instr;
3883 int mask, hat;
3884{
3885 int i;
3886 int not_first = FALSE;
3887
1d5473cb 3888 fputc ('\t', stream);
f3139301 3889 fprintf (stream, instr, REGISTER_PREFIX);
1d5473cb 3890 fputs (", {", stream);
cce8749e
CH
3891 for (i = 0; i < 16; i++)
3892 if (mask & (1 << i))
3893 {
3894 if (not_first)
3895 fprintf (stream, ", ");
f3139301 3896 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
cce8749e
CH
3897 not_first = TRUE;
3898 }
f3bb6135 3899
cce8749e 3900 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 3901}
cce8749e
CH
3902
3903/* Output a 'call' insn. */
3904
3905char *
3906output_call (operands)
f3bb6135 3907 rtx *operands;
cce8749e 3908{
cce8749e
CH
3909 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
3910
3911 if (REGNO (operands[0]) == 14)
3912 {
3913 operands[0] = gen_rtx (REG, SImode, 12);
1d5473cb 3914 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 3915 }
1d5473cb 3916 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd
NC
3917
3918 if (TARGET_THUMB_INTERWORK)
3919 output_asm_insn ("bx%?\t%0", operands);
3920 else
3921 output_asm_insn ("mov%?\t%|pc, %0", operands);
3922
f3bb6135
RE
3923 return "";
3924}
cce8749e 3925
ff9940b0
RE
3926static int
3927eliminate_lr2ip (x)
f3bb6135 3928 rtx *x;
ff9940b0
RE
3929{
3930 int something_changed = 0;
3931 rtx x0 = *x;
3932 int code = GET_CODE (x0);
3933 register int i, j;
3934 register char *fmt;
3935
3936 switch (code)
3937 {
3938 case REG:
3939 if (REGNO (x0) == 14)
3940 {
3941 *x = gen_rtx (REG, SImode, 12);
3942 return 1;
3943 }
3944 return 0;
3945 default:
3946 /* Scan through the sub-elements and change any references there */
3947 fmt = GET_RTX_FORMAT (code);
3948 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3949 if (fmt[i] == 'e')
3950 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
3951 else if (fmt[i] == 'E')
3952 for (j = 0; j < XVECLEN (x0, i); j++)
3953 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
3954 return something_changed;
3955 }
3956}
3957
3958/* Output a 'call' insn that is a reference in memory. */
3959
3960char *
3961output_call_mem (operands)
f3bb6135 3962 rtx *operands;
ff9940b0
RE
3963{
3964 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
3965 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
3966 */
3967 if (eliminate_lr2ip (&operands[0]))
1d5473cb 3968 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 3969
da6558fd
NC
3970 if (TARGET_THUMB_INTERWORK)
3971 {
3972 output_asm_insn ("ldr%?\t%|ip, %0", operands);
3973 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3974 output_asm_insn ("bx%?\t%|ip", operands);
3975 }
3976 else
3977 {
3978 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3979 output_asm_insn ("ldr%?\t%|pc, %0", operands);
3980 }
3981
f3bb6135
RE
3982 return "";
3983}
ff9940b0
RE
3984
3985
3986/* Output a move from arm registers to an fpu registers.
3987 OPERANDS[0] is an fpu register.
3988 OPERANDS[1] is the first registers of an arm register pair. */
3989
3990char *
3991output_mov_long_double_fpu_from_arm (operands)
f3bb6135 3992 rtx *operands;
ff9940b0
RE
3993{
3994 int arm_reg0 = REGNO (operands[1]);
3995 rtx ops[3];
3996
3997 if (arm_reg0 == 12)
3998 abort();
f3bb6135 3999
ff9940b0
RE
4000 ops[0] = gen_rtx (REG, SImode, arm_reg0);
4001 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
4002 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
4003
1d5473cb
RE
4004 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
4005 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
f3bb6135
RE
4006 return "";
4007}
ff9940b0
RE
4008
4009/* Output a move from an fpu register to arm registers.
4010 OPERANDS[0] is the first registers of an arm register pair.
4011 OPERANDS[1] is an fpu register. */
4012
4013char *
4014output_mov_long_double_arm_from_fpu (operands)
f3bb6135 4015 rtx *operands;
ff9940b0
RE
4016{
4017 int arm_reg0 = REGNO (operands[0]);
4018 rtx ops[3];
4019
4020 if (arm_reg0 == 12)
4021 abort();
f3bb6135 4022
ff9940b0
RE
4023 ops[0] = gen_rtx (REG, SImode, arm_reg0);
4024 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
4025 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
4026
1d5473cb
RE
4027 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
4028 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
4029 return "";
4030}
ff9940b0
RE
4031
4032/* Output a move from arm registers to arm registers of a long double
4033 OPERANDS[0] is the destination.
4034 OPERANDS[1] is the source. */
4035char *
4036output_mov_long_double_arm_from_arm (operands)
f3bb6135 4037 rtx *operands;
ff9940b0
RE
4038{
4039 /* We have to be careful here because the two might overlap */
4040 int dest_start = REGNO (operands[0]);
4041 int src_start = REGNO (operands[1]);
4042 rtx ops[2];
4043 int i;
4044
4045 if (dest_start < src_start)
4046 {
4047 for (i = 0; i < 3; i++)
4048 {
4049 ops[0] = gen_rtx (REG, SImode, dest_start + i);
4050 ops[1] = gen_rtx (REG, SImode, src_start + i);
9997d19d 4051 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
4052 }
4053 }
4054 else
4055 {
4056 for (i = 2; i >= 0; i--)
4057 {
4058 ops[0] = gen_rtx (REG, SImode, dest_start + i);
4059 ops[1] = gen_rtx (REG, SImode, src_start + i);
9997d19d 4060 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
4061 }
4062 }
f3bb6135 4063
ff9940b0
RE
4064 return "";
4065}
4066
4067
cce8749e
CH
4068/* Output a move from arm registers to an fpu registers.
4069 OPERANDS[0] is an fpu register.
4070 OPERANDS[1] is the first registers of an arm register pair. */
4071
4072char *
4073output_mov_double_fpu_from_arm (operands)
f3bb6135 4074 rtx *operands;
cce8749e
CH
4075{
4076 int arm_reg0 = REGNO (operands[1]);
4077 rtx ops[2];
4078
4079 if (arm_reg0 == 12)
4080 abort();
4081 ops[0] = gen_rtx (REG, SImode, arm_reg0);
4082 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1d5473cb
RE
4083 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
4084 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
4085 return "";
4086}
cce8749e
CH
4087
4088/* Output a move from an fpu register to arm registers.
4089 OPERANDS[0] is the first registers of an arm register pair.
4090 OPERANDS[1] is an fpu register. */
4091
4092char *
4093output_mov_double_arm_from_fpu (operands)
f3bb6135 4094 rtx *operands;
cce8749e
CH
4095{
4096 int arm_reg0 = REGNO (operands[0]);
4097 rtx ops[2];
4098
4099 if (arm_reg0 == 12)
4100 abort();
f3bb6135 4101
cce8749e
CH
4102 ops[0] = gen_rtx (REG, SImode, arm_reg0);
4103 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1d5473cb
RE
4104 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
4105 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
4106 return "";
4107}
cce8749e
CH
4108
4109/* Output a move between double words.
4110 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
4111 or MEM<-REG and all MEMs must be offsettable addresses. */
4112
4113char *
4114output_move_double (operands)
f3bb6135 4115 rtx *operands;
cce8749e
CH
4116{
4117 enum rtx_code code0 = GET_CODE (operands[0]);
4118 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 4119 rtx otherops[3];
cce8749e
CH
4120
4121 if (code0 == REG)
4122 {
4123 int reg0 = REGNO (operands[0]);
4124
4125 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
4126 if (code1 == REG)
4127 {
4128 int reg1 = REGNO (operands[1]);
4129 if (reg1 == 12)
4130 abort();
f3bb6135 4131
cce8749e 4132 /* Ensure the second source is not overwritten */
c1c2bc04
RE
4133 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
4134 output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 4135 else
c1c2bc04 4136 output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
4137 }
4138 else if (code1 == CONST_DOUBLE)
4139 {
226a5051
RE
4140 if (GET_MODE (operands[1]) == DFmode)
4141 {
4142 long l[2];
4143 union real_extract u;
4144
4145 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
4146 sizeof (u));
4147 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
4148 otherops[1] = GEN_INT(l[1]);
4149 operands[1] = GEN_INT(l[0]);
4150 }
c1c2bc04
RE
4151 else if (GET_MODE (operands[1]) != VOIDmode)
4152 abort ();
4153 else if (WORDS_BIG_ENDIAN)
4154 {
4155
4156 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4157 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4158 }
226a5051
RE
4159 else
4160 {
c1c2bc04 4161
226a5051
RE
4162 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4163 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4164 }
c1c2bc04
RE
4165 output_mov_immediate (operands);
4166 output_mov_immediate (otherops);
cce8749e
CH
4167 }
4168 else if (code1 == CONST_INT)
4169 {
56636818
JL
4170#if HOST_BITS_PER_WIDE_INT > 32
4171 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
4172 what the upper word is. */
4173 if (WORDS_BIG_ENDIAN)
4174 {
4175 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4176 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4177 }
4178 else
4179 {
4180 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4181 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4182 }
4183#else
4184 /* Sign extend the intval into the high-order word */
c1c2bc04
RE
4185 if (WORDS_BIG_ENDIAN)
4186 {
4187 otherops[1] = operands[1];
4188 operands[1] = (INTVAL (operands[1]) < 0
4189 ? constm1_rtx : const0_rtx);
4190 }
ff9940b0 4191 else
c1c2bc04 4192 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 4193#endif
c1c2bc04
RE
4194 output_mov_immediate (otherops);
4195 output_mov_immediate (operands);
cce8749e
CH
4196 }
4197 else if (code1 == MEM)
4198 {
ff9940b0 4199 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 4200 {
ff9940b0 4201 case REG:
9997d19d 4202 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 4203 break;
2b835d68 4204
ff9940b0 4205 case PRE_INC:
2b835d68 4206 abort (); /* Should never happen now */
ff9940b0 4207 break;
2b835d68 4208
ff9940b0 4209 case PRE_DEC:
2b835d68 4210 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 4211 break;
2b835d68 4212
ff9940b0 4213 case POST_INC:
9997d19d 4214 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 4215 break;
2b835d68 4216
ff9940b0 4217 case POST_DEC:
2b835d68 4218 abort (); /* Should never happen now */
ff9940b0 4219 break;
2b835d68
RE
4220
4221 case LABEL_REF:
4222 case CONST:
4223 output_asm_insn ("adr%?\t%0, %1", operands);
4224 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
4225 break;
4226
ff9940b0 4227 default:
2b835d68 4228 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
cce8749e 4229 {
2b835d68
RE
4230 otherops[0] = operands[0];
4231 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
4232 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
4233 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
4234 {
4235 if (GET_CODE (otherops[2]) == CONST_INT)
4236 {
4237 switch (INTVAL (otherops[2]))
4238 {
4239 case -8:
4240 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
4241 return "";
4242 case -4:
4243 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
4244 return "";
4245 case 4:
4246 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
4247 return "";
4248 }
4249 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
4250 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
4251 else
4252 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4253 }
4254 else
4255 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4256 }
4257 else
4258 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
4259 return "ldm%?ia\t%0, %M0";
4260 }
4261 else
4262 {
4263 otherops[1] = adj_offsettable_operand (operands[1], 4);
4264 /* Take care of overlapping base/data reg. */
4265 if (reg_mentioned_p (operands[0], operands[1]))
4266 {
4267 output_asm_insn ("ldr%?\t%0, %1", otherops);
4268 output_asm_insn ("ldr%?\t%0, %1", operands);
4269 }
4270 else
4271 {
4272 output_asm_insn ("ldr%?\t%0, %1", operands);
4273 output_asm_insn ("ldr%?\t%0, %1", otherops);
4274 }
cce8749e
CH
4275 }
4276 }
4277 }
2b835d68
RE
4278 else
4279 abort(); /* Constraints should prevent this */
cce8749e
CH
4280 }
4281 else if (code0 == MEM && code1 == REG)
4282 {
4283 if (REGNO (operands[1]) == 12)
4284 abort();
2b835d68 4285
ff9940b0
RE
4286 switch (GET_CODE (XEXP (operands[0], 0)))
4287 {
4288 case REG:
9997d19d 4289 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 4290 break;
2b835d68 4291
ff9940b0 4292 case PRE_INC:
2b835d68 4293 abort (); /* Should never happen now */
ff9940b0 4294 break;
2b835d68 4295
ff9940b0 4296 case PRE_DEC:
2b835d68 4297 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 4298 break;
2b835d68 4299
ff9940b0 4300 case POST_INC:
9997d19d 4301 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 4302 break;
2b835d68 4303
ff9940b0 4304 case POST_DEC:
2b835d68 4305 abort (); /* Should never happen now */
ff9940b0 4306 break;
2b835d68
RE
4307
4308 case PLUS:
4309 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
4310 {
4311 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
4312 {
4313 case -8:
4314 output_asm_insn ("stm%?db\t%m0, %M1", operands);
4315 return "";
4316
4317 case -4:
4318 output_asm_insn ("stm%?da\t%m0, %M1", operands);
4319 return "";
4320
4321 case 4:
4322 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
4323 return "";
4324 }
4325 }
4326 /* Fall through */
4327
ff9940b0 4328 default:
cce8749e
CH
4329 otherops[0] = adj_offsettable_operand (operands[0], 4);
4330 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
9997d19d
RE
4331 output_asm_insn ("str%?\t%1, %0", operands);
4332 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
4333 }
4334 }
2b835d68
RE
4335 else
4336 abort(); /* Constraints should prevent this */
cce8749e 4337
9997d19d
RE
4338 return "";
4339}
cce8749e
CH
4340
4341
4342/* Output an arbitrary MOV reg, #n.
4343 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
4344
4345char *
4346output_mov_immediate (operands)
f3bb6135 4347 rtx *operands;
cce8749e 4348{
f3bb6135 4349 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
4350 int n_ones = 0;
4351 int i;
4352
4353 /* Try to use one MOV */
cce8749e 4354 if (const_ok_for_arm (n))
f3bb6135 4355 {
9997d19d 4356 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
4357 return "";
4358 }
cce8749e
CH
4359
4360 /* Try to use one MVN */
f3bb6135 4361 if (const_ok_for_arm (~n))
cce8749e 4362 {
f3bb6135 4363 operands[1] = GEN_INT (~n);
9997d19d 4364 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 4365 return "";
cce8749e
CH
4366 }
4367
4368 /* If all else fails, make it out of ORRs or BICs as appropriate. */
4369
4370 for (i=0; i < 32; i++)
4371 if (n & 1 << i)
4372 n_ones++;
4373
4374 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
9997d19d
RE
4375 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
4376 ~n);
cce8749e 4377 else
9997d19d
RE
4378 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
4379 n);
f3bb6135
RE
4380
4381 return "";
4382}
cce8749e
CH
4383
4384
4385/* Output an ADD r, s, #n where n may be too big for one instruction. If
4386 adding zero to one register, output nothing. */
4387
4388char *
4389output_add_immediate (operands)
f3bb6135 4390 rtx *operands;
cce8749e 4391{
f3bb6135 4392 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
4393
4394 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
4395 {
4396 if (n < 0)
4397 output_multi_immediate (operands,
9997d19d
RE
4398 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
4399 -n);
cce8749e
CH
4400 else
4401 output_multi_immediate (operands,
9997d19d
RE
4402 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
4403 n);
cce8749e 4404 }
f3bb6135
RE
4405
4406 return "";
4407}
cce8749e 4408
cce8749e
CH
/* Output a multiple immediate operation.
   OPERANDS is the vector of operands referred to in the output patterns.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.  */

static char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx *operands;
     char *instr1, *instr2;
     int immed_op;
     HOST_WIDE_INT n;
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* ARM immediates are 32-bit; drop any host-side sign extension.  */
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands); /* Quick and easy output */
    }
  else
    {
      int i;
      char *instr = instr1;

      /* Note that n is never zero here (which would give no output) */
      /* ARM data-processing immediates are an 8-bit value rotated by an
	 even amount, so emit one instruction per byte-sized chunk of set
	 bits, aligned to an even bit position.  */
      for (i = 0; i < 32; i += 2)
	{
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      /* Subsequent chunks combine into the first result.  */
	      instr = instr2;
	      i += 6;	/* Skip the rest of this 8-bit chunk.  */
	    }
	}
    }
  return "";
}
cce8749e
CH
4451
4452
4453/* Return the appropriate ARM instruction for the operation code.
4454 The returned result should not be overwritten. OP is the rtx of the
4455 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
4456 was shifted. */
4457
4458char *
4459arithmetic_instr (op, shift_first_arg)
4460 rtx op;
f3bb6135 4461 int shift_first_arg;
cce8749e 4462{
9997d19d 4463 switch (GET_CODE (op))
cce8749e
CH
4464 {
4465 case PLUS:
f3bb6135
RE
4466 return "add";
4467
cce8749e 4468 case MINUS:
f3bb6135
RE
4469 return shift_first_arg ? "rsb" : "sub";
4470
cce8749e 4471 case IOR:
f3bb6135
RE
4472 return "orr";
4473
cce8749e 4474 case XOR:
f3bb6135
RE
4475 return "eor";
4476
cce8749e 4477 case AND:
f3bb6135
RE
4478 return "and";
4479
cce8749e 4480 default:
f3bb6135 4481 abort ();
cce8749e 4482 }
f3bb6135 4483}
cce8749e
CH
4484
4485
/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx code of the shift.
   On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
   shift.  Returns NULL for a shift of zero (a no-op).  */

static char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  char *mnem;
  enum rtx_code code = GET_CODE (op);

  /* Record the shift amount: -1 for a register shift, else the constant.  */
  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
	 power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
	 multiplication by a power of 2 with the recognizer for a
	 shift.  >=32 is not a valid shift for "asl", so we must try and
	 output a shift that produces the correct arithmetical result.
	 Using lsr #32 is identical except for the fact that the carry bit
	 is not set correctly if we set the flags; but we never use the
	 carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
	*amountp &= 31;		/* Rotate is just modulo 32 */
      else if (*amountp != (*amountp & 31))
	{
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* Shifts of 0 are no-ops. */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
cce8749e
CH
4563
4564
4565/* Obtain the shift from the POWER of two. */
4566
18af7313 4567static HOST_WIDE_INT
cce8749e 4568int_log2 (power)
f3bb6135 4569 HOST_WIDE_INT power;
cce8749e 4570{
f3bb6135 4571 HOST_WIDE_INT shift = 0;
cce8749e 4572
2b835d68 4573 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
cce8749e
CH
4574 {
4575 if (shift > 31)
f3bb6135 4576 abort ();
cce8749e
CH
4577 shift++;
4578 }
f3bb6135
RE
4579
4580 return shift;
4581}
cce8749e 4582
cce8749e
CH
/* Output a .ascii pseudo-op for the LEN bytes at P, keeping track of
   lengths and splitting long runs across multiple directives.  This is
   because /bin/as is horribly restrictive.  */

void
output_ascii_pseudo_op (stream, p, len)
     FILE *stream;
     unsigned char *p;
     int len;
{
  int idx;
  int line_len = 1000;		/* Forces a directive on the first byte.  */
  int line_chars = 0;

  for (idx = 0; idx < len; idx++)
    {
      int ch = p[idx];

      /* Start a fresh .ascii directive once the current one gets long,
	 closing the previous string first if one is open.  */
      if (line_len > 50)
	{
	  if (line_chars)
	    fputs ("\"\n", stream);
	  fputs ("\t.ascii\t\"", stream);
	  line_len = 0;
	  line_chars = 0;
	}

      /* Quote and backslash must be escaped inside the string.  */
      if (ch == '\"' || ch == '\\')
	{
	  putc ('\\', stream);
	  line_len++;
	}

      if (ch >= ' ' && ch < 0177)
	{
	  putc (ch, stream);
	  line_len++;
	}
      else
	{
	  /* Non-printing bytes come out as 3-digit octal escapes.  */
	  fprintf (stream, "\\%03o", ch);
	  line_len += 4;
	}

      line_chars++;
    }

  fputs ("\"\n", stream);
}
cce8749e 4631\f
ff9940b0
RE
4632
/* Try to determine whether a pattern really clobbers the link register.
   This information is useful when peepholing, so that lr need not be pushed
   if we combine a call followed by a return.
   NOTE: This code does not check for side-effect expressions in a SET_SRC:
   such a check should not be needed because these only update an existing
   value within a register; the register must still be set elsewhere within
   the function.  Returns nonzero if X may clobber r14 (lr).  */

static int
pattern_really_clobbers_lr (x)
     rtx x;
{
  int i;

  switch (GET_CODE (x))
    {
    case SET:
      switch (GET_CODE (SET_DEST (x)))
	{
	case REG:
	  /* A direct store to r14 clobbers lr.  */
	  return REGNO (SET_DEST (x)) == 14;

	case SUBREG:
	  if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
	    return REGNO (XEXP (SET_DEST (x), 0)) == 14;

	  /* A subreg of a spilled (MEM) destination cannot touch lr.  */
	  if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
	    return 0;
	  abort ();

	default:
	  return 0;
	}

    case PARALLEL:
      /* Clobbered if any element of the parallel clobbers lr.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
	  return 1;
      return 0;

    case CLOBBER:
      switch (GET_CODE (XEXP (x, 0)))
	{
	case REG:
	  return REGNO (XEXP (x, 0)) == 14;

	case SUBREG:
	  if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
	    return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
	  abort ();

	default:
	  return 0;
	}

    case UNSPEC:
      /* Unknown semantics: assume the worst.  */
      return 1;

    default:
      return 0;
    }
}
4695
/* Scan the insn chain starting at FIRST and return nonzero if any insn
   may clobber the link register, so the prologue must save it.  Calls
   are treated specially: a call immediately followed by a return (with
   at most an intervening USE of the call's result) can be peepholed
   into a tail call and does not count as clobbering lr.  */

static int
function_really_clobbers_lr (first)
     rtx first;
{
  rtx insn, next;

  for (insn = first; insn; insn = next_nonnote_insn (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	case NOTE:
	case CODE_LABEL:
	case JUMP_INSN:		/* Jump insns only change the PC (and conds) */
	case INLINE_HEADER:
	  break;

	case INSN:
	  if (pattern_really_clobbers_lr (PATTERN (insn)))
	    return 1;
	  break;

	case CALL_INSN:
	  /* Don't yet know how to handle those calls that are not to a
	     SYMBOL_REF */
	  if (GET_CODE (PATTERN (insn)) != PARALLEL)
	    abort ();

	  /* The call may be bare (CALL) or store its result (SET).  In
	     either case only direct calls to a SYMBOL_REF are safe.  */
	  switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
	    {
	    case CALL:
	      if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    case SET:
	      if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
							  0, 0)), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    default:	/* Don't recognize it, be safe */
	      return 1;
	    }

	  /* A call can be made (by peepholing) not to clobber lr iff it is
	     followed by a return.  There may, however, be a use insn iff
	     we are returning the result of the call.
	     If we run off the end of the insn chain, then that means the
	     call was at the end of the function.  Unfortunately we don't
	     have a return insn for the peephole to recognize, so we
	     must reject this.  (Can this be fixed by adding our own insn?) */
	  if ((next = next_nonnote_insn (insn)) == NULL)
	    return 1;

	  /* No need to worry about lr if the call never returns */
	  if (GET_CODE (next) == BARRIER)
	    break;

	  /* Skip over a USE of the call's result register, if present.  */
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
	      && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	      && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
		  == REGNO (XEXP (PATTERN (next), 0))))
	    if ((next = next_nonnote_insn (next)) == NULL)
	      return 1;

	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == RETURN)
	    break;
	  return 1;

	default:
	  abort ();
	}
    }

  /* We have reached the end of the chain so lr was _not_ clobbered */
  return 0;
}
4777
/* Generate the assembler for a function return and output it.
   OPERAND is the condition-code operand for a conditional return;
   REALLY_RETURN is zero when only the register restore is wanted (a
   tail call follows); REVERSE means use the inverse of the condition.
   Always returns "" -- the actual text is emitted via output_asm_insn.  */

char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  char instr[100];
  int reg, live_regs = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  return_used_this_function = 1;

  if (volatile_func)
    {
      rtx ops[2];
      /* If this function was declared non-returning, and we have found a tail
	 call, then we have to trust that the called function won't return. */
      if (! really_return)
	return "";

      /* Otherwise, trap an attempted return by aborting. */
      ops[0] = operand;
      ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
      assemble_external_libcall (ops[1]);
      output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
      return "";
    }

  if (current_function_calls_alloca && ! really_return)
    abort();

  /* Count the callee-saved integer registers that were pushed.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs++;

  if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
    live_regs++;

  if (frame_pointer_needed)
    live_regs += 4;

  if (live_regs)
    {
      if (lr_save_eliminated || ! regs_ever_live[14])
	live_regs++;

      /* Pop relative to the frame pointer when one was set up,
	 otherwise pop from the stack pointer with writeback.  */
      if (frame_pointer_needed)
	strcpy (instr,
		reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
      else
	strcpy (instr,
		reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");

      /* Build the register list; live_regs counts down to place the
	 separating commas correctly.  */
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs[reg])
	  {
	    strcat (instr, "%|");
	    strcat (instr, reg_names[reg]);
	    if (--live_regs)
	      strcat (instr, ", ");
	  }

      if (frame_pointer_needed)
	{
	  /* Also restore fp (r11) and sp (r13); the final register is
	     lr when interworking or not really returning (the bx below
	     or the caller handles the jump), else pc for a direct
	     return.  */
	  strcat (instr, "%|");
	  strcat (instr, reg_names[11]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, reg_names[13]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, TARGET_THUMB_INTERWORK || (! really_return)
		  ? reg_names[14] : reg_names[15] );
	}
      else
	{
	  strcat (instr, "%|");
	  /* For interworking returns pop into ip (r12) and bx via it.  */
	  if (TARGET_THUMB_INTERWORK && really_return)
	    strcat (instr, reg_names[12]);
	  else
	    strcat (instr, really_return ? reg_names[15] : reg_names[14]);
	}
      /* "}^" form is used for non-APCS-32 real returns.  */
      strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
      output_asm_insn (instr, &operand);

      /* Interworking return: the pop above left the return address in
	 lr (frame-pointer case) or ip, so finish with a bx.  */
      if (TARGET_THUMB_INTERWORK && really_return)
	{
	  strcpy (instr, "bx%?");
	  strcat (instr, reverse ? "%D0" : "%d0");
	  strcat (instr, "\t%|");
	  strcat (instr, frame_pointer_needed ? "lr" : "ip");

	  output_asm_insn (instr, & operand);
	}
    }
  else if (really_return)
    {
      /* Nothing to pop: return directly through lr.  */
      if (TARGET_THUMB_INTERWORK)
	sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
      else
	sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
		 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");

      output_asm_insn (instr, & operand);
    }

  return "";
}
4887
e82ea128
DE
4888/* Return nonzero if optimizing and the current function is volatile.
4889 Such functions never return, and many memory cycles can be saved
4890 by not storing register values that will never be needed again.
4891 This optimization was added to speed up context switching in a
4892 kernel application. */
a0b2ce4c 4893
e2c671ba
RE
4894int
4895arm_volatile_func ()
4896{
4897 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
4898}
4899
ff9940b0
RE
4900/* The amount of stack adjustment that happens here, in output_return and in
4901 output_epilogue must be exactly the same as was calculated during reload,
4902 or things will point to the wrong place. The only time we can safely
4903 ignore this constraint is when a function has no arguments on the stack,
4904 no stack frame requirement and no live registers execpt for `lr'. If we
4905 can guarantee that by making all function calls into tail calls and that
4906 lr is not clobbered in any other way, then there is no need to push lr
4907 onto the stack. */
4908
/* Emit the assembly-comment header for the function prologue into F and
   decide (via lr_save_eliminated) whether the link register needs to be
   saved.  FRAME_SIZE is the size of the local frame.  The actual
   register pushes are generated by arm_expand_prologue.  */

void
output_func_prologue (f, frame_size)
     FILE *f;
     int frame_size;
{
  int reg, live_regs_mask = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  /* Nonzero if we must stuff some register arguments onto the stack as if
     they were passed there.  */
  int store_arg_regs = 0;

  if (arm_ccfsm_state || arm_target_insn)
    abort ();		/* Sanity check */

  /* Naked functions get no compiler-generated prologue.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  return_used_this_function = 0;
  lr_save_eliminated = 0;

  fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
	   ASM_COMMENT_START, current_function_args_size,
	   current_function_pretend_args_size, frame_size);
  fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
	   ASM_COMMENT_START, frame_pointer_needed,
	   current_function_anonymous_args);

  if (volatile_func)
    fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* Collect the callee-saved integer registers actually used.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs_mask |= (1 << reg);

  if (frame_pointer_needed)
    live_regs_mask |= 0xD800;	/* fp (r11), ip (r12), lr (r14), pc (r15).  */
  else if (regs_ever_live[14])
    {
      /* lr is live; its save can be dropped only when no insn in the
	 function actually clobbers it and no args are on the stack.  */
      if (! current_function_args_size
	  && ! function_really_clobbers_lr (get_insns ()))
	lr_save_eliminated = 1;
      else
	live_regs_mask |= 0x4000;
    }

  if (live_regs_mask)
    {
      /* if a di mode load/store multiple is used, and the base register
	 is r3, then r4 can become an ever live register without lr
	 doing so,  in this case we need to push lr as well, or we
	 will fail to get a proper return. */

      live_regs_mask |= 0x4000;
      lr_save_eliminated = 0;

    }

  if (lr_save_eliminated)
    fprintf (f,"\t%s I don't think this function clobbers lr\n",
	     ASM_COMMENT_START);

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    fprintf (f, "\tmov\t%sip, %s%s\n", REGISTER_PREFIX, REGISTER_PREFIX,
	     reg_names[PIC_OFFSET_TABLE_REGNUM]);
#endif
}
cce8749e
CH
4981
4982
4983void
f3bb6135 4984output_func_epilogue (f, frame_size)
cce8749e
CH
4985 FILE *f;
4986 int frame_size;
4987{
b111229a
RE
4988 int reg, live_regs_mask = 0;
4989 /* If we need this then it will always be at least this much */
4990 int floats_offset = 12;
cce8749e 4991 rtx operands[3];
e2c671ba
RE
4992 int volatile_func = (optimize > 0
4993 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 4994
ff9940b0 4995 if (use_return_insn() && return_used_this_function)
cce8749e 4996 {
56636818
JL
4997 if ((frame_size + current_function_outgoing_args_size) != 0
4998 && !(frame_pointer_needed || TARGET_APCS))
4999 abort ();
f3bb6135 5000 goto epilogue_done;
cce8749e 5001 }
cce8749e 5002
31fdb4d5
DE
5003 /* Naked functions don't have epilogues. */
5004 if (arm_naked_function_p (current_function_decl))
5005 goto epilogue_done;
5006
e2c671ba
RE
5007 /* A volatile function should never return. Call abort. */
5008 if (volatile_func)
5009 {
5010 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
2b835d68 5011 assemble_external_libcall (op);
e2c671ba 5012 output_asm_insn ("bl\t%a0", &op);
e2c671ba
RE
5013 goto epilogue_done;
5014 }
5015
f3bb6135
RE
5016 for (reg = 0; reg <= 10; reg++)
5017 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 5018 {
ff9940b0
RE
5019 live_regs_mask |= (1 << reg);
5020 floats_offset += 4;
cce8749e
CH
5021 }
5022
ff9940b0 5023 if (frame_pointer_needed)
cce8749e 5024 {
b111229a
RE
5025 if (arm_fpu_arch == FP_SOFT2)
5026 {
5027 for (reg = 23; reg > 15; reg--)
5028 if (regs_ever_live[reg] && ! call_used_regs[reg])
5029 {
5030 floats_offset += 12;
5031 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
5032 reg_names[reg], REGISTER_PREFIX, floats_offset);
5033 }
5034 }
5035 else
5036 {
5037 int start_reg = 23;
5038
5039 for (reg = 23; reg > 15; reg--)
5040 {
5041 if (regs_ever_live[reg] && ! call_used_regs[reg])
5042 {
5043 floats_offset += 12;
5044 /* We can't unstack more than four registers at once */
5045 if (start_reg - reg == 3)
5046 {
5047 fprintf (f, "\tlfm\t%s%s, 4, [%sfp, #-%d]\n",
5048 REGISTER_PREFIX, reg_names[reg],
5049 REGISTER_PREFIX, floats_offset);
5050 start_reg = reg - 1;
5051 }
5052 }
5053 else
5054 {
5055 if (reg != start_reg)
5056 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
5057 REGISTER_PREFIX, reg_names[reg + 1],
5058 start_reg - reg, REGISTER_PREFIX, floats_offset);
ff9940b0 5059
b111229a
RE
5060 start_reg = reg - 1;
5061 }
5062 }
5063
5064 /* Just in case the last register checked also needs unstacking. */
5065 if (reg != start_reg)
5066 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
5067 REGISTER_PREFIX, reg_names[reg + 1],
5068 start_reg - reg, REGISTER_PREFIX, floats_offset);
5069 }
da6558fd 5070
b111229a
RE
5071 if (TARGET_THUMB_INTERWORK)
5072 {
5073 live_regs_mask |= 0x6800;
5074 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask, FALSE);
5075 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5076 }
5077 else
5078 {
5079 live_regs_mask |= 0xA800;
5080 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
5081 TARGET_APCS_32 ? FALSE : TRUE);
5082 }
cce8749e
CH
5083 }
5084 else
5085 {
d2288d8d 5086 /* Restore stack pointer if necessary. */
56636818 5087 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
5088 {
5089 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
5090 operands[2] = GEN_INT (frame_size
5091 + current_function_outgoing_args_size);
d2288d8d
TG
5092 output_add_immediate (operands);
5093 }
5094
b111229a
RE
5095 if (arm_fpu_arch == FP_SOFT2)
5096 {
5097 for (reg = 16; reg < 24; reg++)
5098 if (regs_ever_live[reg] && ! call_used_regs[reg])
5099 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
5100 reg_names[reg], REGISTER_PREFIX);
5101 }
5102 else
5103 {
5104 int start_reg = 16;
5105
5106 for (reg = 16; reg < 24; reg++)
5107 {
5108 if (regs_ever_live[reg] && ! call_used_regs[reg])
5109 {
5110 if (reg - start_reg == 3)
5111 {
5112 fprintf (f, "\tlfmfd\t%s%s, 4, [%ssp]!\n",
5113 REGISTER_PREFIX, reg_names[start_reg],
5114 REGISTER_PREFIX);
5115 start_reg = reg + 1;
5116 }
5117 }
5118 else
5119 {
5120 if (reg != start_reg)
5121 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
5122 REGISTER_PREFIX, reg_names[start_reg],
5123 reg - start_reg, REGISTER_PREFIX);
5124
5125 start_reg = reg + 1;
5126 }
5127 }
5128
5129 /* Just in case the last register checked also needs unstacking. */
5130 if (reg != start_reg)
5131 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
5132 REGISTER_PREFIX, reg_names[start_reg],
5133 reg - start_reg, REGISTER_PREFIX);
5134 }
5135
cce8749e
CH
5136 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
5137 {
b111229a
RE
5138 if (TARGET_THUMB_INTERWORK)
5139 {
5140 if (! lr_save_eliminated)
5141 print_multi_reg(f, "ldmfd\t%ssp!", live_regs_mask | 0x4000,
5142 FALSE);
5143
5144 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5145 }
5146 else if (lr_save_eliminated)
32de079a
RE
5147 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5148 : "\tmovs\t%spc, %slr\n"),
5149 REGISTER_PREFIX, REGISTER_PREFIX, f);
5150 else
5151 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
5152 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
5153 }
5154 else
5155 {
ff9940b0 5156 if (live_regs_mask || regs_ever_live[14])
cce8749e 5157 {
32de079a
RE
5158 /* Restore the integer regs, and the return address into lr */
5159 if (! lr_save_eliminated)
5160 live_regs_mask |= 0x4000;
5161
5162 if (live_regs_mask != 0)
32de079a 5163 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
cce8749e 5164 }
b111229a 5165
cce8749e
CH
5166 if (current_function_pretend_args_size)
5167 {
32de079a 5168 /* Unwind the pre-pushed regs */
cce8749e 5169 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 5170 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
5171 output_add_immediate (operands);
5172 }
32de079a 5173 /* And finally, go home */
b111229a
RE
5174 if (TARGET_THUMB_INTERWORK)
5175 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
25b1c156
NC
5176 else if (TARGET_APCS_32)
5177 fprintf (f, "\tmov\t%spc, %slr\n", REGISTER_PREFIX, REGISTER_PREFIX );
b111229a 5178 else
25b1c156 5179 fprintf (f, "\tmovs\t%spc, %slr\n", REGISTER_PREFIX, REGISTER_PREFIX );
cce8749e
CH
5180 }
5181 }
f3bb6135 5182
32de079a 5183epilogue_done:
f3bb6135 5184
cce8749e 5185 current_function_anonymous_args = 0;
f3bb6135 5186}
e2c671ba
RE
5187
/* Emit RTL for a push of the registers in MASK (bit N set => push rN),
   represented as a PARALLEL: the first element is a SET of pre-
   decremented stack memory from an UNSPEC of the lowest register; the
   remaining registers appear as USE elements.  */

static void
emit_multi_reg_push (mask)
     int mask;
{
  int num_regs = 0;
  int i, j;
  rtx par;

  /* Count the registers being pushed.  */
  for (i = 0; i < 16; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));

  /* The lowest-numbered register carries the store itself.  */
  for (i = 0; i < 16; i++)
    {
      if (mask & (1 << i))
	{
	  XVECEXP (par, 0, 0)
	    = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
					       gen_rtx (PRE_DEC, BLKmode,
							stack_pointer_rtx)),
		       gen_rtx (UNSPEC, BLKmode,
				gen_rtvec (1, gen_rtx (REG, SImode, i)),
				2));
	  break;
	}
    }

  /* The remaining registers are recorded as USEs; i continues from the
     register found above.  */
  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
	{
	  XVECEXP (par, 0, j)
	    = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
	  j++;
	}
    }

  emit_insn (par);
}
5232
/* Emit RTL for a store-multiple of COUNT FP (XFmode) registers starting
   at BASE_REG, pushed below the stack pointer.  Uses the same PARALLEL
   shape as emit_multi_reg_push: one SET (via an UNSPEC) plus USEs.  */

static void
emit_sfm (base_reg, count)
     int base_reg;
     int count;
{
  rtx par;
  int i;

  par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (count));

  /* First register carries the store to pre-decremented stack memory.  */
  XVECEXP (par, 0, 0) = gen_rtx (SET, VOIDmode,
				 gen_rtx (MEM, BLKmode,
					  gen_rtx (PRE_DEC, BLKmode,
						   stack_pointer_rtx)),
				 gen_rtx (UNSPEC, BLKmode,
					  gen_rtvec (1, gen_rtx (REG, XFmode,
								 base_reg++)),
					  2));
  /* The rest of the group are USEs.  */
  for (i = 1; i < count; i++)
    XVECEXP (par, 0, i) = gen_rtx (USE, VOIDmode,
				   gen_rtx (REG, XFmode, base_reg++));

  emit_insn (par);
}
5257
/* Generate RTL for the prologue of the current function: push the
   pretend args, the live call-saved integer registers (plus lr), save
   the FP registers, set up the frame pointer and allocate the local
   frame.  */

void
arm_expand_prologue ()
{
  int reg;
  rtx amount = GEN_INT (-(get_frame_size ()
			  + current_function_outgoing_args_size));
  int live_regs_mask = 0;
  int store_arg_regs = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  /* Naked functions don't have prologues.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* Volatile (non-returning) functions never restore registers, so
     there is no point saving them either.  */
  if (! volatile_func)
    for (reg = 0; reg <= 10; reg++)
      if (regs_ever_live[reg] && ! call_used_regs[reg])
	live_regs_mask |= 1 << reg;

  if (! volatile_func && regs_ever_live[14])
    live_regs_mask |= 0x4000;	/* lr (r14).  */

  if (frame_pointer_needed)
    {
      live_regs_mask |= 0xD800;	/* fp (r11), ip (r12), lr (r14), pc (r15).  */
      /* Preserve the incoming sp in ip; fp is derived from it below.  */
      emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
			    stack_pointer_rtx));
    }

  if (current_function_pretend_args_size)
    {
      /* Either push the tail of the argument registers, or just make
	 room for them on the stack.  */
      if (store_arg_regs)
	emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
			     & 0xf);
      else
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (-current_function_pretend_args_size)));
    }

  if (live_regs_mask)
    {
      /* If we have to push any regs, then we must push lr as well, or
	 we won't get a proper return.  */
      live_regs_mask |= 0x4000;
      emit_multi_reg_push (live_regs_mask);
    }

  /* For now the integer regs are still pushed in output_func_epilogue ().  */

  if (! volatile_func)
    {
      if (arm_fpu_arch == FP_SOFT2)
	{
	  /* Save each live FP register with an individual store.  */
	  for (reg = 23; reg > 15; reg--)
	    if (regs_ever_live[reg] && ! call_used_regs[reg])
	      emit_insn (gen_rtx (SET, VOIDmode,
				  gen_rtx (MEM, XFmode,
					   gen_rtx (PRE_DEC, XFmode,
						    stack_pointer_rtx)),
				  gen_rtx (REG, XFmode, reg)));
	}
      else
	{
	  int start_reg = 23;

	  /* Batch consecutive live FP registers into sfm groups of at
	     most four.  */
	  for (reg = 23; reg > 15; reg--)
	    {
	      if (regs_ever_live[reg] && ! call_used_regs[reg])
		{
		  if (start_reg - reg == 3)
		    {
		      emit_sfm (reg, 4);
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (start_reg != reg)
		    emit_sfm (reg + 1, start_reg - reg);
		  start_reg = reg - 1;
		}
	    }

	  /* Flush any group left open by the loop.  */
	  if (start_reg != reg)
	    emit_sfm (reg + 1, start_reg - reg);
	}
    }

  /* fp = ip - (4 + pretend args): point fp at the saved registers.  */
  if (frame_pointer_needed)
    emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
			   (GEN_INT
			    (-(4 + current_function_pretend_args_size)))));

  if (amount != const0_rtx)
    {
      /* Allocate the local frame, then mark stack memory clobbered.  */
      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
      emit_insn (gen_rtx (CLOBBER, VOIDmode,
			  gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag || profile_block_flag)
    emit_insn (gen_blockage ());
}
5367
cce8749e 5368\f
9997d19d
RE
/* Output routine used via the PRINT_OPERAND macro: print operand X to
   STREAM, modified by the single-letter operand code CODE.

   If CODE is 'd', then X is a condition operand and the instruction
   should only be executed if the condition is true.
   If CODE is 'D', then X is a condition operand and the instruction
   should only be executed if the condition is false: however, if the mode
   of the comparison is CCFPEmode, then always execute the instruction -- we
   do this because in these circumstances !GE does not necessarily imply LT;
   in these cases the instruction pattern will take care to make sure that
   an instruction containing %d will follow, thereby undoing the effects of
   doing this instruction unconditionally.
   If CODE is 'N' then X is a floating point operand that must be negated
   before output.
   If CODE is 'B' then output a bitwise inverted value of X (a const int).
   If X is a REG and CODE is `M', output a ldm/stm style multi-reg.
   The remaining codes are documented on their cases below.  */

void
arm_print_operand (stream, x, code)
     FILE *stream;
     rtx x;
     int code;
{
  switch (code)
    {
    case '@':
      /* Start-of-comment sequence for the target assembler.  */
      fputs (ASM_COMMENT_START, stream);
      return;

    case '|':
      /* The register-name prefix, if any.  */
      fputs (REGISTER_PREFIX, stream);
      return;

    case '?':
      /* The current condition code, but only while the conditional
	 execution state machine is actually conditionalising insns
	 (states 3 and 4 -- see final_prescan_insn below).  */
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
	fputs (arm_condition_codes[arm_current_cc], stream);
      return;

    case 'N':
      /* A floating point constant, negated before output.  */
      {
	REAL_VALUE_TYPE r;
	REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	r = REAL_VALUE_NEGATE (r);
	fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':
      /* The bitwise inverse of X.  For a CONST_INT we can compute it
	 here; otherwise emit a literal `~' and let the assembler do it.  */
      if (GET_CODE (x) == CONST_INT)
	fprintf (stream,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
		 "%d",
#else
		 "%ld",
#endif
		 ARM_SIGN_EXTEND (~ INTVAL (x)));
      else
	{
	  putc ('~', stream);
	  output_addr_const (stream, x);
	}
      return;

    case 'i':
      /* The arithmetic instruction mnemonic for rtx code X (shifted
	 form -- see arithmetic_instr).  */
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 'I':
      /* As 'i', but the non-shifted form.  */
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':
      /* The shift part of an operand, e.g. `, lsl #3'.  Nothing is
	 printed if X carries no shift.  When VAL comes back as -1 the
	 shift amount is itself a register (operand 1 of X), otherwise
	 it is the immediate amount VAL.  */
      {
	HOST_WIDE_INT val;
	char *shift = shift_op (x, &val);

	if (shift)
	  {
	    fprintf (stream, ", %s ", shift_op (x, &val));
	    if (val == -1)
	      arm_print_operand (stream, XEXP (x, 1), 0);
	    else
	      fprintf (stream,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
		       "#%d",
#else
		       "#%ld",
#endif
		       val);
	  }
      }
      return;

    case 'Q':
      /* The register holding the least significant word of a multiword
	 value (REGNO itself, or REGNO + 1 when words are big-endian).  */
      if (REGNO (x) > 15)
	abort ();
      fputs (REGISTER_PREFIX, stream);
      fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
      return;

    case 'R':
      /* The register holding the most significant word -- the mirror
	 image of 'Q'.  */
      if (REGNO (x) > 15)
	abort ();
      fputs (REGISTER_PREFIX, stream);
      fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
      return;

    case 'm':
      /* The base register of memory operand X: either the address
	 itself when it is a plain REG, or the first operand of the
	 address expression (e.g. a pre/post-modified base).  */
      fputs (REGISTER_PREFIX, stream);
      if (GET_CODE (XEXP (x, 0)) == REG)
	fputs (reg_names[REGNO (XEXP (x, 0))], stream);
      else
	fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
      return;

    case 'M':
      /* An ldm/stm style register range `{rN-rM}' covering as many
	 SImode words as the mode of X occupies, starting at REGNO (x).  */
      fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
	       REGISTER_PREFIX, reg_names[REGNO (x) - 1
					  + ((GET_MODE_SIZE (GET_MODE (x))
					      + GET_MODE_SIZE (SImode) - 1)
					     / GET_MODE_SIZE (SImode))]);
      return;

    case 'd':
      /* The condition for comparison X; nothing if X is null.  */
      if (x)
        fputs (arm_condition_codes[get_arm_condition_code (x)],
	       stream);
      return;

    case 'D':
      /* The inverse condition for comparison X; nothing if X is null
	 (the pattern passes null for the CCFPEmode case described in
	 the header comment).  */
      if (x)
	fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
				   (get_arm_condition_code (x))],
	       stream);
      return;

    default:
      /* No modifier: print X in the way its rtx code suggests.  */
      if (x == 0)
	abort ();

      if (GET_CODE (x) == REG)
	{
	  fputs (REGISTER_PREFIX, stream);
	  fputs (reg_names[REGNO (x)], stream);
	}
      else if (GET_CODE (x) == MEM)
	{
	  /* output_address needs the mode communicated via this global.  */
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	}
      else if (GET_CODE (x) == CONST_DOUBLE)
	fprintf (stream, "#%s", fp_immediate_constant (x));
      else if (GET_CODE (x) == NEG)
	abort (); /* This should never happen now. */
      else
	{
	  fputc ('#', stream);
	  output_addr_const (stream, x);
	}
    }
}
5527
cce8749e
CH
5528\f
5529/* A finite state machine takes care of noticing whether or not instructions
5530 can be conditionally executed, and thus decrease execution time and code
5531 size by deleting branch instructions. The fsm is controlled by
5532 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
5533
5534/* The state of the fsm controlling condition codes are:
5535 0: normal, do nothing special
5536 1: make ASM_OUTPUT_OPCODE not output this instruction
5537 2: make ASM_OUTPUT_OPCODE not output this instruction
5538 3: make instructions conditional
5539 4: make instructions conditional
5540
5541 State transitions (state->state by whom under condition):
5542 0 -> 1 final_prescan_insn if the `target' is a label
5543 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
5544 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
5545 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
5546 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
5547 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
5548 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
5549 (the target insn is arm_target_insn).
5550
ff9940b0
RE
5551 If the jump clobbers the conditions then we use states 2 and 4.
5552
5553 A similar thing can be done with conditional return insns.
5554
cce8749e
CH
5555 XXX In case the `target' is an unconditional branch, this conditionalising
5556 of the instructions always reduces code size, but not always execution
5557 time. But then, I want to reduce the code size to somewhere near what
5558 /bin/cc produces. */
5559
cce8749e
CH
/* Returns the index of the ARM condition code string in
   `arm_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  If the mode of COMPARISON's first operand is
   not a CC mode, the appropriate CC mode is first derived with
   SELECT_CC_MODE.  Aborts on any comparison/mode combination that
   cannot be expressed as a single ARM condition.  */

static enum arm_cond_code
get_arm_condition_code (comparison)
     rtx comparison;
{
  enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
  register int code;
  register enum rtx_code comp_code = GET_CODE (comparison);

  if (GET_MODE_CLASS (mode) != MODE_CC)
    mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
			   XEXP (comparison, 1));

  switch (mode)
    {
      /* The CC_Dxx "dominance" modes encode a fixed underlying
	 condition in the mode itself; the comparison may only test the
	 combined result for NE (condition holds) or EQ (inverse).  */
    case CC_DNEmode: code = ARM_NE; goto dominance;
    case CC_DEQmode: code = ARM_EQ; goto dominance;
    case CC_DGEmode: code = ARM_GE; goto dominance;
    case CC_DGTmode: code = ARM_GT; goto dominance;
    case CC_DLEmode: code = ARM_LE; goto dominance;
    case CC_DLTmode: code = ARM_LT; goto dominance;
    case CC_DGEUmode: code = ARM_CS; goto dominance;
    case CC_DGTUmode: code = ARM_HI; goto dominance;
    case CC_DLEUmode: code = ARM_LS; goto dominance;
    case CC_DLTUmode: code = ARM_CC;

    dominance:
      if (comp_code != EQ && comp_code != NE)
	abort ();

      if (comp_code == EQ)
	return ARM_INVERSE_CONDITION_CODE (code);
      return code;

      /* Overflow flag is not valid here, so signed GE/LT reduce to
	 plain sign tests (PL/MI).  */
    case CC_NOOVmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_PL;
	case LT: return ARM_MI;
	default: abort ();
	}

    case CC_Zmode:
    case CCFPmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	default: abort ();
	}

      /* FP comparison that traps on unordered: note LE -> LS and
	 LT -> MI rather than LE/LT, so that the test also fails for
	 unordered results (see the CCFPEmode discussion above
	 arm_print_operand).  */
    case CCFPEmode:
      switch (comp_code)
	{
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LS;
	case LT: return ARM_MI;
	default: abort ();
	}

      /* Comparison performed with operands swapped: every ordered
	 test is mirrored (GE <-> LE, GTU <-> LTU, etc.).  */
    case CC_SWPmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_LE;
	case GT: return ARM_LT;
	case LE: return ARM_GE;
	case LT: return ARM_GT;
	case GEU: return ARM_LS;
	case GTU: return ARM_CC;
	case LEU: return ARM_CS;
	case LTU: return ARM_HI;
	default: abort ();
	}

      /* Only the carry flag is valid.  */
    case CC_Cmode:
      switch (comp_code)
	{
	case LTU: return ARM_CS;
	case GEU: return ARM_CC;
	default: abort ();
	}

    case CCmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LE;
	case LT: return ARM_LT;
	case GEU: return ARM_CS;
	case GTU: return ARM_HI;
	case LEU: return ARM_LS;
	case LTU: return ARM_CC;
	default: abort ();
	}

    default: abort ();
    }

  /* Not reached.  */
  abort ();
}
cce8749e
CH
5671
5672
/* Drive the conditional-execution state machine described in the big
   comment above: called by final for every INSN just before it is
   output.  When INSN is a suitable conditional branch (or, in state 3,
   an unconditional branch/return that lets us repeat the trick), decide
   whether the insns it skips can instead be conditionally executed, and
   set arm_ccfsm_state / arm_current_cc / arm_target_label /
   arm_target_insn accordingly for ASM_OUTPUT_OPCODE and the '%?'
   operand code to act on.  OPVEC and NOPERANDS are part of the final
   interface but are not used here.  */

void
final_prescan_insn (insn, opvec, noperands)
     rtx insn;
     rtx *opvec;
     int noperands;
{
  /* BODY will hold the body of INSN. */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick (state 3), and things
     need to be reversed if it appears to fail. */
  int reverse = 0;

  /* Nonzero means that the conditions are clobbered if the branch is
     taken, even if the rtl suggests otherwise; we then have to grub
     around within the jump expression to find out what the conditions
     are when the jump isn't taken.
     NOTE(review): the only place this is set is immediately before an
     early `return' below, so the jump_clobbers paths further down are
     currently unreachable -- kept for when that early return is fixed.  */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another one. */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true. */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0. */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
	{
	  arm_target_insn = NULL;
	  arm_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off. */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* XXX Isn't this always a barrier? */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
        {
	  /* Same repeat-the-trick test, but for a return insn rather
	     than a simple jump.  */
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
        }
      else
	return;
    }

  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes;
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

#if 0
  /* If this is a conditional return then we don't want to know */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
          || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;
#endif

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped;
      int fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
	{
	  /* The code below is wrong for these, and I haven't time to
	     fix it now.  So we just do the safe thing and return.  This
	     whole function needs re-writing anyway. */
	  jump_clobbers = 1;
	  return;
	}

      /* Register the insn jumped to. */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
        {
	  seeking_return = 1;
	  then_not_else = FALSE;
        }
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed. */
      for (insns_skipped = 0;
	   !fail && !succeed && insns_skipped++ < MAX_INSNS_SKIPPED;)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else. */
	      if (this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier. */
	      this_insn = next_nonnote_insn (this_insn);
	      if (this_insn && this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case CALL_INSN:
	      /* If using 32-bit addresses the cc is not preserved over
		 calls; otherwise a call is harmless and scanning just
		 continues past it.  */
	      if (TARGET_APCS_32)
		{
		  /* Succeed if the following insn is the target label,
		     or if the following two insns are a barrier and
		     the target label. */
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && GET_CODE (this_insn) == BARRIER)
		    this_insn = next_nonnote_insn (this_insn);

		  if (this_insn && this_insn == label
		      && insns_skipped < MAX_INSNS_SKIPPED)
		    {
		      if (jump_clobbers)
			{
			  arm_ccfsm_state = 2;
			  this_insn = next_nonnote_insn (this_insn);
			}
		      else
			arm_ccfsm_state = 1;
		      succeed = TRUE;
		    }
		  else
		    fail = TRUE;
		}
	      break;

	    case JUMP_INSN:
      	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail. */
	      /* XXX Probably, the tests for SET and the PC are unnecessary. */

	      scanbody = PATTERN (this_insn);
	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arm_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
	        {
		  arm_ccfsm_state = 2;
		  succeed = TRUE;
	        }
	      else if (GET_CODE (scanbody) == PARALLEL)
	        {
		  /* A parallel jump is only acceptable when it leaves
		     the condition codes alone.  */
		  switch (get_attr_conds (this_insn))
		    {
		    case CONDS_NOCOND:
		      break;
		    default:
		      fail = TRUE;
		      break;
		    }
		}
	      break;

	    case INSN:
	      /* Instructions using or affecting the condition codes make it
		 fail. */
	      scanbody = PATTERN (this_insn);
	      if ((GET_CODE (scanbody) == SET
		   || GET_CODE (scanbody) == PARALLEL)
		  && get_attr_conds (this_insn) != CONDS_NOCOND)
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}
      if (succeed)
	{
	  /* Record where the conditionalised region ends: a label number
	     for states 1/3, or the terminating insn for states 2/4.  */
	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
	    arm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arm_ccfsm_state == 2)
	    {
	      /* Skip over any USE insns (e.g. for the return value)
		 before the real end of the region.  */
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
	        {
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
	        }
	      if (!this_insn)
	        {
		  /* Oh, dear! we ran off the end.. give up */
		  recog (PATTERN (insn), insn, NULL_PTR);
		  arm_ccfsm_state = 0;
		  arm_target_insn = NULL;
		  return;
	        }
	      arm_target_insn = this_insn;
	    }
	  else
	    abort ();
	  if (jump_clobbers)
	    {
	      /* Currently unreachable -- see the note where jump_clobbers
		 is declared.  Dig the condition out of the jump's
		 condition expression, inverting as needed.  */
	      if (reverse)
		abort ();
	      arm_current_cc =
		get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
							    0), 0), 1));
	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }
	  else
	    {
	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
		 what it was. */
	      if (!reverse)
		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
							       0));
	    }

	  if (reverse || then_not_else)
	    arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	}
      /* Restore recog_operand (getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call; since the insn has been recognized already we
	 call recog direct). */
      recog (PATTERN (insn), insn, NULL_PTR);
    }
}
cce8749e 6002
2b835d68
RE
6003#ifdef AOF_ASSEMBLER
6004/* Special functions only needed when producing AOF syntax assembler. */
6005
32de079a
RE
/* SYMBOL_REF for the base of the PIC address-constant pool
   (`x$adcons'); created lazily by aof_pic_entry.  */
rtx aof_pic_label = NULL_RTX;

/* One entry per distinct symbol placed in the PIC constant pool.  An
   entry's offset within the pool is implied by its list position
   (4 bytes per entry -- see aof_pic_entry).  */
struct pic_chain
{
  struct pic_chain *next;	/* Next pool entry.  */
  char *symname;		/* Symbol name; entries are matched by
				   pointer identity, not strcmp.  */
};

/* Head of the pool entry list, dumped by aof_dump_pic_table.  */
static struct pic_chain *aof_pic_chain = NULL;
6014
6015rtx
6016aof_pic_entry (x)
6017 rtx x;
6018{
6019 struct pic_chain **chainp;
6020 int offset;
6021
6022 if (aof_pic_label == NULL_RTX)
6023 {
6024 /* This needs to persist throughout the compilation. */
6025 end_temporary_allocation ();
6026 aof_pic_label = gen_rtx (SYMBOL_REF, Pmode, "x$adcons");
6027 resume_temporary_allocation ();
6028 }
6029
6030 for (offset = 0, chainp = &aof_pic_chain; *chainp;
6031 offset += 4, chainp = &(*chainp)->next)
6032 if ((*chainp)->symname == XSTR (x, 0))
6033 return plus_constant (aof_pic_label, offset);
6034
6035 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
6036 (*chainp)->next = NULL;
6037 (*chainp)->symname = XSTR (x, 0);
6038 return plus_constant (aof_pic_label, offset);
6039}
6040
6041void
6042aof_dump_pic_table (f)
6043 FILE *f;
6044{
6045 struct pic_chain *chain;
6046
6047 if (aof_pic_chain == NULL)
6048 return;
6049
6050 fprintf (f, "\tAREA |%s$$adcons|, BASED %s%s\n",
6051 reg_names[PIC_OFFSET_TABLE_REGNUM], REGISTER_PREFIX,
6052 reg_names[PIC_OFFSET_TABLE_REGNUM]);
6053 fputs ("|x$adcons|\n", f);
6054
6055 for (chain = aof_pic_chain; chain; chain = chain->next)
6056 {
6057 fputs ("\tDCD\t", f);
6058 assemble_name (f, chain->symname);
6059 fputs ("\n", f);
6060 }
6061}
6062
2b835d68
RE
/* Sequence number used by aof_text_section to give each code AREA a
   unique name.  */
int arm_text_section_count = 1;
6064
6065char *
84ed5e79 6066aof_text_section ()
2b835d68
RE
6067{
6068 static char buf[100];
2b835d68
RE
6069 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
6070 arm_text_section_count++);
6071 if (flag_pic)
6072 strcat (buf, ", PIC, REENTRANT");
6073 return buf;
6074}
6075
/* Sequence number used by aof_data_section to give each data AREA a
   unique name.  */
static int arm_data_section_count = 1;
6077
6078char *
6079aof_data_section ()
6080{
6081 static char buf[100];
6082 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
6083 return buf;
6084}
6085
/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

/* One pending IMPORT declaration.  */
struct import
{
  struct import *next;		/* Next pending import.  */
  char *name;			/* Symbol name; entries are matched by
				   pointer identity, not strcmp.  */
};

/* Head of the pending-import list, maintained by aof_add_import /
   aof_delete_import and drained by aof_dump_imports.  */
static struct import *imports_list = NULL;
6103
6104void
6105aof_add_import (name)
6106 char *name;
6107{
6108 struct import *new;
6109
6110 for (new = imports_list; new; new = new->next)
6111 if (new->name == name)
6112 return;
6113
6114 new = (struct import *) xmalloc (sizeof (struct import));
6115 new->next = imports_list;
6116 imports_list = new;
6117 new->name = name;
6118}
6119
6120void
6121aof_delete_import (name)
6122 char *name;
6123{
6124 struct import **old;
6125
6126 for (old = &imports_list; *old; old = & (*old)->next)
6127 {
6128 if ((*old)->name == name)
6129 {
6130 *old = (*old)->next;
6131 return;
6132 }
6133 }
6134}
6135
/* Nonzero makes aof_dump_imports emit the __main import/reference
   that pulls the startup code out of the library; presumably set when
   `main' is compiled -- set elsewhere in this file.  */
int arm_main_function = 0;
6137
6138void
6139aof_dump_imports (f)
6140 FILE *f;
6141{
6142 /* The AOF assembler needs this to cause the startup code to be extracted
6143 from the library. Brining in __main causes the whole thing to work
6144 automagically. */
6145 if (arm_main_function)
6146 {
6147 text_section ();
6148 fputs ("\tIMPORT __main\n", f);
6149 fputs ("\tDCD __main\n", f);
6150 }
6151
6152 /* Now dump the remaining imports. */
6153 while (imports_list)
6154 {
6155 fprintf (f, "\tIMPORT\t");
6156 assemble_name (f, imports_list->name);
6157 fputc ('\n', f);
6158 imports_list = imports_list->next;
6159 }
6160}
6161#endif /* AOF_ASSEMBLER */