/* Subroutines for insn-output.c for VAX.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "predict.h"
#include "basic-block.h"
#include "df.h"
#include "tree.h"
#include "calls.h"
#include "varasm.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "output.h"
#include "insn-attr.h"
#include "recog.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "flags.h"
#include "debug.h"
#include "diagnostic-core.h"
#include "reload.h"
#include "tm-preds.h"
#include "tm-constrs.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "wide-int.h"
#include "builtins.h"

static void vax_option_override (void);
static bool vax_legitimate_address_p (machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, int, int, int, int *, bool);
static rtx vax_function_arg (cumulative_args_t, machine_mode,
                             const_tree, bool);
static void vax_function_arg_advance (cumulative_args_t, machine_mode,
                                      const_tree, bool);
static rtx vax_struct_value_rtx (tree, int);
static rtx vax_builtin_setjmp_frame_value (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static int vax_return_pops_args (tree, tree, int);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

struct gcc_target targetm = TARGET_INITIALIZER;

/* Set global variables as needed for the options enabled.  */

static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}

static void
vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
{
  rtx x;

  x = plus_constant (Pmode, frame_pointer_rtx, offset);
  x = gen_rtx_MEM (SImode, x);
  x = gen_rtx_SET (VOIDmode, x, src);
  add_reg_note (insn, REG_CFA_OFFSET, x);
}

/* Generate the assembly code for function entry.  FILE is a stdio
   stream to output the code to.  SIZE is an int: how many units of
   temporary storage to allocate.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;
  HOST_WIDE_INT size;
  rtx insn;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
      mask |= 1 << regno;

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is as follows:

                <- CFA, AP
        r11
        r10
        ...     Registers saved as specified by MASK
        r3
        r2
        return-addr
        old fp
        old ap
        old psw
        zero
                <- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);

  offset = 16;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
        vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
        offset += 4;
      }

  /* Because add_reg_note pushes the notes, adding this one last means that
     it will be processed first.  This is required to allow the other
     notes to be interpreted properly.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
                plus_constant (Pmode, frame_pointer_rtx, offset));

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= STARTING_FRAME_OFFSET;
  emit_insn (gen_addsi3 (stack_pointer_rtx,
                         stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}

/* When debugging with stabs, we want to output an extra dummy label
   so that gas can distinguish between D_float and G_float prior to
   processing the .stabs directive identifying type double.  */
static void
vax_file_start (void)
{
  default_file_start ();

  if (write_symbols == DBX_DEBUG)
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
}

/* We can use the BSD C library routines for the libgcc calls that are
   still generated, since that's what they boil down to anyway.  When
   targeting ELF, avoid the user's namespace.  */

static void
vax_init_libfuncs (void)
{
  if (TARGET_BSD_DIVMOD)
    {
      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
    }
}

2a4bfeed
RS
265/* This is like nonimmediate_operand with a restriction on the type of MEM. */
266
static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
                         rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
          && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
              || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
        {
          rtx addr = XEXP (operands[i], 0);
          operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
        }
      else if (optimize_size && MEM_P (operands[i])
               && REG_P (XEXP (operands[i], 0))
               && (code != MINUS || operands[1] != const0_rtx)
               && find_regno_note (insn, REG_DEAD,
                                   REGNO (XEXP (operands[i], 0))))
        {
          low[i] = gen_rtx_MEM (SImode,
                                gen_rtx_POST_INC (Pmode,
                                                  XEXP (operands[i], 0)));
          operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
        }
      else
        {
          low[i] = operand_subword (operands[i], 0, 0, DImode);
          operands[i] = operand_subword (operands[i], 1, 0, DImode);
        }
    }
}

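/* Output to FILE the VAX assembler syntax for the address ADDR: register
   indirect, autoincrement/autodecrement, and the displacement/base/index
   forms handled by the PLUS case below.  */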
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
         REG.  One can be either a REG or a MULT of a REG and an appropriate
         constant, and the third can only be a constant or a MEM.

         We get these two or three things and put the constant or MEM in
         OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
         a register and can't tell yet if it is a base or index register,
         put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
          || MEM_P (XEXP (addr, 0)))
        {
          offset = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
               || MEM_P (XEXP (addr, 1)))
        {
          offset = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
        {
          ireg = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
        {
          ireg = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else if (REG_P (XEXP (addr, 1)))
        {
          reg1 = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (REG_P (XEXP (addr, 0)))
        {
          reg1 = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else
        gcc_unreachable ();

      if (REG_P (addr))
        {
          if (reg1)
            ireg = addr;
          else
            reg1 = addr;
        }
      else if (GET_CODE (addr) == MULT)
        ireg = addr;
      else
        {
          gcc_assert (GET_CODE (addr) == PLUS);
          if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
              || MEM_P (XEXP (addr, 0)))
            {
              if (offset)
                {
                  if (CONST_INT_P (offset))
                    offset = plus_constant (Pmode, XEXP (addr, 0),
                                            INTVAL (offset));
                  else
                    {
                      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
                      offset = plus_constant (Pmode, offset,
                                              INTVAL (XEXP (addr, 0)));
                    }
                }
              offset = XEXP (addr, 0);
            }
          else if (REG_P (XEXP (addr, 0)))
            {
              if (reg1)
                ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
              else
                reg1 = XEXP (addr, 0);
            }
          else
            {
              gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
              gcc_assert (!ireg);
              ireg = XEXP (addr, 0);
            }

          if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
              || MEM_P (XEXP (addr, 1)))
            {
              if (offset)
                {
                  if (CONST_INT_P (offset))
                    offset = plus_constant (Pmode, XEXP (addr, 1),
                                            INTVAL (offset));
                  else
                    {
                      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
                      offset = plus_constant (Pmode, offset,
                                              INTVAL (XEXP (addr, 1)));
                    }
                }
              offset = XEXP (addr, 1);
            }
          else if (REG_P (XEXP (addr, 1)))
            {
              if (reg1)
                ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
              else
                reg1 = XEXP (addr, 1);
            }
          else
            {
              gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
              gcc_assert (!ireg);
              ireg = XEXP (addr, 1);
            }
        }

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
        {
          if (breg
              || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
              || (offset
                  && (MEM_P (offset)
                      || (flag_pic && symbolic_operand (offset, SImode)))))
            {
              gcc_assert (!ireg);
              ireg = reg1;
            }
          else
            breg = reg1;
        }

      if (offset != 0)
        {
          if (flag_pic && symbolic_operand (offset, SImode))
            {
              if (breg && ireg)
                {
                  debug_rtx (orig);
                  output_operand_lossage ("symbol used with both base and indexed registers");
                }

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
              if (flag_pic > 1 && GET_CODE (offset) == CONST
                  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
                  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
                {
                  debug_rtx (orig);
                  output_operand_lossage ("symbol with offset used in PIC mode");
                }
#endif

              /* symbol(reg) isn't PIC, but symbol[reg] is.  */
              if (breg)
                {
                  ireg = breg;
                  breg = 0;
                }

            }

          output_address (offset);
        }

      if (breg != 0)
        fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
        {
          if (GET_CODE (ireg) == MULT)
            ireg = XEXP (ireg, 0);
          gcc_assert (REG_P (ireg));
          fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
        }
      break;

    default:
      output_addr_const (file, addr);
    }
}

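/* Output operand X to FILE, modified by the template letter CODE:
   '#' and '|' emit ASM_DOUBLE_CHAR and REGISTER_PREFIX, 'c'/'C' emit the
   (reversed) condition name, and 'D', 'P', 'N', 'R', 'H', 'h', 'B', 'b',
   'M' and 'x' print variously transformed integer constants as
   immediates.  */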
void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'c')
    fputs (cond_name (x), file);
  else if (code == 'C')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
                       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
                       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
        {
          debug_rtx (x);
          output_operand_lossage ("symbol used as immediate operand");
        }
      putc ('$', file);
      output_addr_const (file, x);
    }
}

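/* Return the VAX branch-condition mnemonic for comparison operator OP.  */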
const char *
cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case NE:
      return "neq";
    case EQ:
      return "eql";
    case GE:
      return "geq";
    case GT:
      return "gtr";
    case LE:
      return "leq";
    case LT:
      return "lss";
    case GEU:
      return "gequ";
    case GTU:
      return "gtru";
    case LEU:
      return "lequ";
    case LTU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

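/* Return the VAX branch-condition mnemonic for the reverse of comparison
   operator OP.  */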
const char *
rev_cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case EQ:
      return "neq";
    case NE:
      return "eql";
    case LT:
      return "geq";
    case LE:
      return "gtr";
    case GT:
      return "leq";
    case GE:
      return "lss";
    case LTU:
      return "gequ";
    case LEU:
      return "gtru";
    case GTU:
      return "lequ";
    case GEU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

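/* Return true if the float constant C is cheap to encode as an operand:
   0.0, 1.0, 2.0, a power of 2 no greater than 64, or the exact reciprocal
   of such a power of 2.  Used by the cost functions below.  */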
static bool
vax_float_literal (rtx c)
{
  machine_mode mode;
  REAL_VALUE_TYPE r, s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  REAL_VALUE_FROM_CONST_DOUBLE (r, c);

  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      real_from_integer (&s, mode, x, SIGNED);

      if (REAL_VALUES_EQUAL (r, s))
        return true;
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (REAL_VALUES_EQUAL (r, s))
        return true;
    }
  return false;
}


/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

   1 - symbolic address
   1 - pre-decrement
   1 - indexing and/or offset(register)
   2 - indirect */


static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;      /* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
        offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;       /* 2 on VAX 2 */
      break;
    case LABEL_REF:     /* this is probably a byte offset from the pc */
      if (offset == 0)
        offset = 1;
      break;
    case PLUS:
      if (plus_op0)
        plus_op1 = XEXP (addr, 0);
      else
        plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;        /* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}

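/* Worker for TARGET_ADDRESS_COST: register indirect costs 1, and anything
   more complex adds the extra cycles computed by vax_address_cost_1.  */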
static int
vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
                  addr_space_t as ATTRIBUTE_UNUSED,
                  bool speed ATTRIBUTE_UNUSED)
{
  return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
}

/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
               int *total, bool speed ATTRIBUTE_UNUSED)
{
  machine_mode mode = GET_MODE (x);
  int i = 0;                               /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
         1 byte literal constant format.  Compare to -1 should be made cheap
         so that decrement-and-branch insns can be formed more easily (if
         the value -1 is copied to a register some decrement-and-branch
         patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
        {
          *total = 0;
          return true;
        }
      if (outer_code == AND)
        {
          *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
          return true;
        }
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
          || (outer_code == COMPARE
              && INTVAL (x) == -1)
          || ((outer_code == PLUS || outer_code == MINUS)
              && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
        {
          *total = 1;
          return true;
        }
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        *total = vax_float_literal (x) ? 5 : 8;
      else
        *total = ((CONST_DOUBLE_HIGH (x) == 0
                   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
                  || (outer_code == PLUS
                      && CONST_DOUBLE_HIGH (x) == -1
                      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
                 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;              /* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;              /* Implies register operand.  */

    case MULT:
      switch (mode)
        {
        case DFmode:
          *total = 16;          /* 4 on VAX 9000 */
          break;
        case SFmode:
          *total = 9;           /* 4 on VAX 9000, 12 on VAX 2 */
          break;
        case DImode:
          *total = 16;          /* 6 on VAX 9000, 28 on VAX 2 */
          break;
        case SImode:
        case HImode:
        case QImode:
          *total = 10;          /* 3-4 on VAX 9000, 20-28 on VAX 2 */
          break;
        default:
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      break;

    case UDIV:
      if (mode != SImode)
        {
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
        *total = 30;            /* Highly variable.  */
      else if (mode == DFmode)
        /* divide takes 28 cycles if the result is not zero, 13 otherwise */
        *total = 24;
      else
        *total = 11;            /* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
        {
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      *total = 29;
      break;

    case FLOAT:
      *total = (6               /* 4 on VAX 9000 */
                + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;               /* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
        *total = 12;
      else
        *total = 10;            /* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;               /* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
        fmt = "e";              /* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
          && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
        fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
        {
          if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
            *total = 4;
          fmt = "e";
          i = 1;
        }
      break;

    case NEG:
      if (mode == DFmode)
        *total = 9;
      else if (mode == SFmode)
        *total = 6;
      else if (mode == DImode)
        *total = 4;
      else
        *total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
        *total = 5;             /* 7 on VAX 2 */
      else
        *total = 3;             /* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
        *total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;               /* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
         (in which case the relevant cost is of the operand inside
         the not) and not likely to be found anywhere else.  */
      if (code == NOT)
        op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
        {
        case CONST_INT:
          if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
              && GET_MODE (x) != QImode)
            *total += 1;        /* 2 on VAX 2 */
          break;
        case CONST:
        case LABEL_REF:
        case SYMBOL_REF:
          *total += 1;          /* 2 on VAX 2 */
          break;
        case CONST_DOUBLE:
          if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
            {
              /* Registers are faster than floating point constants -- even
                 those constants which can be encoded in a single byte.  */
              if (vax_float_literal (op))
                *total += 1;
              else
                *total += (GET_MODE (x) == DFmode) ? 3 : 2;
            }
          else
            {
              if (CONST_DOUBLE_HIGH (op) != 0
                  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
                *total += 2;
            }
          break;
        case MEM:
          *total += 1;          /* 2 on VAX 2 */
          if (!REG_P (XEXP (op, 0)))
            *total += vax_address_cost_1 (XEXP (op, 0));
          break;
        case REG:
        case SUBREG:
          break;
        default:
          *total += 1;
          break;
        }
    }
  return true;
}

/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
        .mask   ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
        addl2   $DELTA, 4(ap)   #adjust first argument
        jmp     FUNCTION+2      #jump beyond FUNCTION's entry mask
*/

static void
vax_output_mi_thunk (FILE * file,
                     tree thunk ATTRIBUTE_UNUSED,
                     HOST_WIDE_INT delta,
                     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                     tree function)
{
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  fprintf (file, "+2\n");
}

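/* Worker for TARGET_STRUCT_VALUE_RTX: the address of a structure value is
   passed in VAX_STRUCT_VALUE_REGNUM.  */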
static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}

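/* Worker for TARGET_BUILTIN_SETJMP_FRAME_VALUE: __builtin_setjmp records
   the hard frame pointer.  */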
static rtx
vax_builtin_setjmp_frame_value (void)
{
  return hard_frame_pointer_rtx;
}

/* Worker function for NOTICE_UPDATE_CC.  */

void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      if (GET_CODE (SET_SRC (exp)) == CALL)
        CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
               && GET_CODE (SET_DEST (exp)) != PC)
        {
          cc_status.flags = 0;
          /* The integer operations below don't set carry or
             set it in an incompatible way.  That's ok though
             as the Z bit is all we need when doing unsigned
             comparisons on the result of these insns (since
             they're always with 0).  Set CC_NO_OVERFLOW to
             generate the correct unsigned branches.  */
          switch (GET_CODE (SET_SRC (exp)))
            {
            case NEG:
              if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
                break;
            case AND:
            case IOR:
            case XOR:
            case NOT:
            case MEM:
            case REG:
              cc_status.flags = CC_NO_OVERFLOW;
              break;
            default:
              break;
            }
          cc_status.value1 = SET_DEST (exp);
          cc_status.value2 = SET_SRC (exp);
        }
    }
  else if (GET_CODE (exp) == PARALLEL
           && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
        CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
        {
          cc_status.flags = 0;
          cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
          cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
        }
      else
        /* PARALLELs whose first element sets the PC are aob,
           sob insns.  They do change the cc's.  */
        CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}

/* Output integer move instructions.  */

const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
                     machine_mode mode)
{
  rtx hi[3], lo[3];
  const char *pattern_hi, *pattern_lo;

  switch (mode)
    {
    case DImode:
      if (operands[1] == const0_rtx)
        return "clrq %0";
      if (TARGET_QMATH && optimize_size
          && (CONST_INT_P (operands[1])
              || GET_CODE (operands[1]) == CONST_DOUBLE))
        {
          unsigned HOST_WIDE_INT hval, lval;
          int n;

          if (GET_CODE (operands[1]) == CONST_DOUBLE)
            {
              gcc_assert (HOST_BITS_PER_WIDE_INT != 64);

              /* Make sure only the low 32 bits are valid.  */
              lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
              hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
            }
          else
            {
              lval = INTVAL (operands[1]);
              hval = 0;
            }

          /* Here we check whether the 64-bit value is really a 6-bit value
             shifted left by some arbitrary amount.  If so, we can use ashq
             to shift it to the correct value, saving 7 bytes
             (1 addr-mode-byte + 8 bytes - 1 shift byte - 1 short literal
             byte).  */
          if (lval != 0
              && (n = exact_log2 (lval & (- lval))) != -1
              && (lval >> n) < 64)
            {
              lval >>= n;

              /* On 32-bit hosts, if the 6 bits didn't overflow into the
                 upper 32-bit value, that value had better be 0.  If they
                 did overflow, make sure it wasn't by too much.  */
              if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
                {
                  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
                    n = 0;      /* failure */
                  else
                    lval |= hval << (32 - n);
                }
              /* If n is 0, then ashq is not the best way to emit this.  */
              if (n > 0)
                {
                  operands[1] = GEN_INT (lval);
                  operands[2] = GEN_INT (n);
                  return "ashq %2,%D1,%0";
                }
#if HOST_BITS_PER_WIDE_INT == 32
            }
          /* On 32-bit hosts, if the low 32-bit value is 0, check the
             upper 32-bit value.  */
          else if (hval != 0
                   && (n = exact_log2 (hval & (- hval)) - 1) != -1
                   && (hval >> n) < 64)
            {
              operands[1] = GEN_INT (hval >> n);
              operands[2] = GEN_INT (n + 32);
              return "ashq %2,%D1,%0";
#endif
            }
        }

      if (TARGET_QMATH
          && (!MEM_P (operands[0])
              || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
              || GET_CODE (XEXP (operands[0], 0)) == POST_INC
              || !illegal_addsub_di_memory_operand (operands[0], DImode))
          && ((CONST_INT_P (operands[1])
               && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
              || GET_CODE (operands[1]) == CONST_DOUBLE))
        {
          hi[0] = operands[0];
          hi[1] = operands[1];

          split_quadword_operands (insn, SET, hi, lo, 2);

          pattern_lo = vax_output_int_move (NULL, lo, SImode);
          pattern_hi = vax_output_int_move (NULL, hi, SImode);

          /* If the patterns are just movl/movl or pushl/pushl then a movq
             will be shorter (1 opcode byte + 1 addrmode byte + 8 immediate
             value bytes vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate
             value bytes).  */
          if ((!strncmp (pattern_lo, "movl", 4)
               && !strncmp (pattern_hi, "movl", 4))
              || (!strncmp (pattern_lo, "pushl", 5)
                  && !strncmp (pattern_hi, "pushl", 5)))
            return "movq %1,%0";

          if (MEM_P (operands[0])
              && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
            {
              output_asm_insn (pattern_hi, hi);
              operands[0] = lo[0];
              operands[1] = lo[1];
              operands[2] = lo[2];
              return pattern_lo;
            }
          else
            {
              output_asm_insn (pattern_lo, lo);
              operands[0] = hi[0];
              operands[1] = hi[1];
              operands[2] = hi[2];
              return pattern_hi;
            }
        }
      return "movq %1,%0";

    case SImode:
      if (symbolic_operand (operands[1], SImode))
        {
          if (push_operand (operands[0], SImode))
            return "pushab %a1";
          return "movab %a1,%0";
        }

      if (operands[1] == const0_rtx)
        {
          if (push_operand (operands[1], SImode))
            return "pushl %1";
          return "clrl %0";
        }

      if (CONST_INT_P (operands[1])
          && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          int n;
          if ((unsigned HOST_WIDE_INT)(~i) < 64)
            return "mcoml %N1,%0";
          if ((unsigned HOST_WIDE_INT)i < 0x100)
            return "movzbl %1,%0";
          if (i >= -0x80 && i < 0)
            return "cvtbl %1,%0";
          if (optimize_size
              && (n = exact_log2 (i & (-i))) != -1
              && ((unsigned HOST_WIDE_INT)i >> n) < 64)
            {
              operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
              operands[2] = GEN_INT (n);
              return "ashl %2,%1,%0";
            }
          if ((unsigned HOST_WIDE_INT)i < 0x10000)
            return "movzwl %1,%0";
          if (i >= -0x8000 && i < 0)
            return "cvtwl %1,%0";
        }
      if (push_operand (operands[0], SImode))
        return "pushl %1";
      return "movl %1,%0";

    case HImode:
      if (CONST_INT_P (operands[1]))
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          if (i == 0)
            return "clrw %0";
          else if ((unsigned HOST_WIDE_INT)i < 64)
            return "movw %1,%0";
          else if ((unsigned HOST_WIDE_INT)~i < 64)
            return "mcomw %H1,%0";
          else if ((unsigned HOST_WIDE_INT)i < 256)
            return "movzbw %1,%0";
          else if (i >= -0x80 && i < 0)
            return "cvtbw %1,%0";
        }
      return "movw %1,%0";

    case QImode:
      if (CONST_INT_P (operands[1]))
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          if (i == 0)
            return "clrb %0";
          else if ((unsigned HOST_WIDE_INT)~i < 64)
            return "mcomb %B1,%0";
        }
      return "movb %1,%0";

    default:
      gcc_unreachable ();
    }
}

1348
1349/* Output integer add instructions.
1350
1351 The space-time-opcode tradeoffs for addition vary by model of VAX.
1352
1353 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1354 but it not faster on other models.
1355
1356 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1357 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1358 a register is used in an address too soon after it is set.
1359 Compromise by using movab only when it is shorter than the add
1360 or the base register in the address is one of sp, ap, and fp,
1361 which are not modified very often. */
1362
1363const char *
ef4bddc2 1364vax_output_int_add (rtx insn, rtx *operands, machine_mode mode)
20acd226
SB
1365{
1366 switch (mode)
1367 {
c4e75102
MT
1368 case DImode:
1369 {
1370 rtx low[3];
1371 const char *pattern;
1372 int carry = 1;
1373 bool sub;
1374
1375 if (TARGET_QMATH && 0)
1376 debug_rtx (insn);
1377
1378 split_quadword_operands (insn, PLUS, operands, low, 3);
1379
1380 if (TARGET_QMATH)
1381 {
1382 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1383#ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1384 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1385 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1386#endif
1387
1388 /* No reason to add a 0 to the low part and thus no carry, so just
1389 emit the appropriate add/sub instruction. */
1390 if (low[2] == const0_rtx)
1391 return vax_output_int_add (NULL, operands, SImode);
1392
1393 /* Are we doing addition or subtraction? */
1394 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1395
1396 /* We can't use vax_output_int_add since some the patterns don't
1397 modify the carry bit. */
1398 if (sub)
1399 {
1400 if (low[2] == constm1_rtx)
1401 pattern = "decl %0";
1402 else
1403 pattern = "subl2 $%n2,%0";
1404 }
1405 else
1406 {
1407 if (low[2] == const1_rtx)
1408 pattern = "incl %0";
1409 else
1410 pattern = "addl2 %2,%0";
1411 }
1412 output_asm_insn (pattern, low);
1413
1414 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1415 two 32bit parts, we complement each and then add one to
1416 low part. We know that the low part can't overflow since
1417 it's value can never be 0. */
1418 if (sub)
1419 return "sbwc %N2,%0";
1420 return "adwc %2,%0";
1421 }
1422
1423 /* Add low parts. */
1424 if (rtx_equal_p (operands[0], operands[1]))
1425 {
1426 if (low[2] == const0_rtx)
1427 /* Should examine operand, punt if not POST_INC. */
1428 pattern = "tstl %0", carry = 0;
1429 else if (low[2] == const1_rtx)
1430 pattern = "incl %0";
1431 else
1432 pattern = "addl2 %2,%0";
1433 }
1434 else
1435 {
1436 if (low[2] == const0_rtx)
1437 pattern = "movl %1,%0", carry = 0;
1438 else
1439 pattern = "addl3 %2,%1,%0";
1440 }
1441 if (pattern)
1442 output_asm_insn (pattern, low);
1443 if (!carry)
1444 /* If CARRY is 0, we don't have any carry value to worry about. */
1445 return get_insn_template (CODE_FOR_addsi3, insn);
1446 /* %0 = C + %1 + %2 */
1447 if (!rtx_equal_p (operands[0], operands[1]))
1448 output_asm_insn ((operands[1] == const0_rtx
1449 ? "clrl %0"
1450 : "movl %1,%0"), operands);
1451 return "adwc %2,%0";
1452 }
1453
20acd226
SB
1454 case SImode:
1455 if (rtx_equal_p (operands[0], operands[1]))
1456 {
1457 if (operands[2] == const1_rtx)
1458 return "incl %0";
1459 if (operands[2] == constm1_rtx)
1460 return "decl %0";
d97c1295 1461 if (CONST_INT_P (operands[2])
c4e75102 1462 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
20acd226 1463 return "subl2 $%n2,%0";
d97c1295 1464 if (CONST_INT_P (operands[2])
c4e75102 1465 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
ff9d4590 1466 && REG_P (operands[1])
20acd226
SB
1467 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1468 || REGNO (operands[1]) > 11))
1469 return "movab %c2(%1),%0";
c4e75102
MT
1470 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1471 return "movab %a2[%0],%0";
20acd226
SB
1472 return "addl2 %2,%0";
1473 }
1474
1475 if (rtx_equal_p (operands[0], operands[2]))
c4e75102
MT
1476 {
1477 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1478 return "movab %a1[%0],%0";
1479 return "addl2 %1,%0";
1480 }
20acd226 1481
d97c1295 1482 if (CONST_INT_P (operands[2])
20acd226
SB
1483 && INTVAL (operands[2]) < 32767
1484 && INTVAL (operands[2]) > -32768
ff9d4590 1485 && REG_P (operands[1])
20acd226
SB
1486 && push_operand (operands[0], SImode))
1487 return "pushab %c2(%1)";
1488
d97c1295 1489 if (CONST_INT_P (operands[2])
c4e75102 1490 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
20acd226
SB
1491 return "subl3 $%n2,%1,%0";
1492
d97c1295 1493 if (CONST_INT_P (operands[2])
c4e75102 1494 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
ff9d4590 1495 && REG_P (operands[1])
20acd226
SB
1496 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1497 || REGNO (operands[1]) > 11))
1498 return "movab %c2(%1),%0";
1499
1500 /* Add this if using gcc on a VAX 3xxx:
1501 if (REG_P (operands[1]) && REG_P (operands[2]))
1502 return "movab (%1)[%2],%0";
1503 */
c4e75102
MT
1504
1505 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1506 {
1507 if (push_operand (operands[0], SImode))
1508 return "pushab %a2[%1]";
1509 return "movab %a2[%1],%0";
1510 }
1511
1512 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1513 {
1514 if (push_operand (operands[0], SImode))
1515 return "pushab %a1[%2]";
1516 return "movab %a1[%2],%0";
1517 }
1518
1519 if (flag_pic && REG_P (operands[0])
1520 && symbolic_operand (operands[2], SImode))
1521 return "movab %a2,%0;addl2 %1,%0";
1522
1523 if (flag_pic
1524 && (symbolic_operand (operands[1], SImode)
1525 || symbolic_operand (operands[1], SImode)))
1526 debug_rtx (insn);
1527
20acd226
SB
1528 return "addl3 %1,%2,%0";
1529
1530 case HImode:
1531 if (rtx_equal_p (operands[0], operands[1]))
1532 {
1533 if (operands[2] == const1_rtx)
1534 return "incw %0";
1535 if (operands[2] == constm1_rtx)
1536 return "decw %0";
d97c1295 1537 if (CONST_INT_P (operands[2])
c4e75102 1538 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
20acd226
SB
1539 return "subw2 $%n2,%0";
1540 return "addw2 %2,%0";
1541 }
1542 if (rtx_equal_p (operands[0], operands[2]))
1543 return "addw2 %1,%0";
d97c1295 1544 if (CONST_INT_P (operands[2])
c4e75102 1545 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
20acd226
SB
1546 return "subw3 $%n2,%1,%0";
1547 return "addw3 %1,%2,%0";
1548
1549 case QImode:
1550 if (rtx_equal_p (operands[0], operands[1]))
1551 {
1552 if (operands[2] == const1_rtx)
1553 return "incb %0";
1554 if (operands[2] == constm1_rtx)
1555 return "decb %0";
d97c1295 1556 if (CONST_INT_P (operands[2])
c4e75102 1557 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
20acd226
SB
1558 return "subb2 $%n2,%0";
1559 return "addb2 %2,%0";
1560 }
1561 if (rtx_equal_p (operands[0], operands[2]))
1562 return "addb2 %1,%0";
d97c1295 1563 if (CONST_INT_P (operands[2])
c4e75102 1564 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
20acd226
SB
1565 return "subb3 $%n2,%1,%0";
1566 return "addb3 %1,%2,%0";
1567
1568 default:
1569 gcc_unreachable ();
1570 }
1571}
1572
c4e75102 1573const char *
ef4bddc2 1574vax_output_int_subtract (rtx insn, rtx *operands, machine_mode mode)
c4e75102
MT
1575{
1576 switch (mode)
1577 {
1578 case DImode:
1579 {
1580 rtx low[3];
1581 const char *pattern;
1582 int carry = 1;
1583
1584 if (TARGET_QMATH && 0)
1585 debug_rtx (insn);
1586
1587 split_quadword_operands (insn, MINUS, operands, low, 3);
1588
1589 if (TARGET_QMATH)
1590 {
1591 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1592 {
1593 /* Negation is tricky. It's basically complement and increment.
1594 Negate hi, then lo, and subtract the carry back. */
1595 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1596 || (MEM_P (operands[0])
1597 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1598 fatal_insn ("illegal operand detected", insn);
1599 output_asm_insn ("mnegl %2,%0", operands);
1600 output_asm_insn ("mnegl %2,%0", low);
1601 return "sbwc $0,%0";
1602 }
1603 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1604 gcc_assert (rtx_equal_p (low[0], low[1]));
1605 if (low[2] == const1_rtx)
1606 output_asm_insn ("decl %0", low);
1607 else
1608 output_asm_insn ("subl2 %2,%0", low);
1609 return "sbwc %2,%0";
1610 }
1611
1612 /* Subtract low parts. */
1613 if (rtx_equal_p (operands[0], operands[1]))
1614 {
1615 if (low[2] == const0_rtx)
1616 pattern = 0, carry = 0;
1617 else if (low[2] == constm1_rtx)
1618 pattern = "decl %0";
1619 else
1620 pattern = "subl2 %2,%0";
1621 }
1622 else
1623 {
1624 if (low[2] == constm1_rtx)
1625 pattern = "decl %0";
1626 else if (low[2] == const0_rtx)
1627 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1628 else
1629 pattern = "subl3 %2,%1,%0";
1630 }
1631 if (pattern)
1632 output_asm_insn (pattern, low);
1633 if (carry)
1634 {
1635 if (!rtx_equal_p (operands[0], operands[1]))
1636 return "movl %1,%0;sbwc %2,%0";
1637 return "sbwc %2,%0";
1638 /* %0 = %2 - %1 - C */
1639 }
1640 return get_insn_template (CODE_FOR_subsi3, insn);
1641 }
1642
1643 default:
1644 gcc_unreachable ();
1645 }
1646}
1647
c4e75102 1648/* True if X is an rtx for a constant that is a valid address. */
fbf55580 1649
c4e75102 1650bool
fbf55580
MT
1651legitimate_constant_address_p (rtx x)
1652{
c4e75102
MT
1653 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1654 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1655 return true;
1656 if (GET_CODE (x) != CONST)
1657 return false;
1658#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1659 if (flag_pic
1660 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1661 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1662 return false;
1663#endif
1664 return true;
fbf55580
MT
1665}
1666
fbf55580
MT
1667/* The other macros defined here are used only in legitimate_address_p (). */
1668
1669/* Nonzero if X is a hard reg that can be used as an index
1670 or, if not strict, if it is a pseudo reg. */
b20f13e9 1671#define INDEX_REGISTER_P(X, STRICT) \
ff9d4590 1672(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
fbf55580
MT
1673
1674/* Nonzero if X is a hard reg that can be used as a base reg
1675 or, if not strict, if it is a pseudo reg. */
b20f13e9 1676#define BASE_REGISTER_P(X, STRICT) \
ff9d4590 1677(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
fbf55580
MT
1678
1679#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1680
1681/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1682 are no SYMBOL_REFs for external symbols present. */
1683
c4e75102
MT
1684static bool
1685indirectable_constant_address_p (rtx x, bool indirect)
fbf55580 1686{
c4e75102
MT
1687 if (GET_CODE (x) == SYMBOL_REF)
1688 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1689
1690 if (GET_CODE (x) == CONST)
1691 return !flag_pic
1692 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1693 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1694
1695 return CONSTANT_ADDRESS_P (x);
fbf55580
MT
1696}
1697
1698#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1699
c4e75102
MT
1700static bool
1701indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
fbf55580
MT
1702{
1703 return CONSTANT_ADDRESS_P (x);
1704}
1705
1706#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1707
c4e75102 1708/* True if X is an address which can be indirected. External symbols
fbf55580
MT
1709 could be in a sharable image library, so we disallow those. */
1710
c4e75102
MT
1711static bool
1712indirectable_address_p (rtx x, bool strict, bool indirect)
fbf55580 1713{
c4e75102
MT
1714 if (indirectable_constant_address_p (x, indirect)
1715 || BASE_REGISTER_P (x, strict))
1716 return true;
1717 if (GET_CODE (x) != PLUS
1718 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1719 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1720 return false;
1721 return indirectable_constant_address_p (XEXP (x, 1), indirect);
fbf55580
MT
1722}
1723
c4e75102 1724/* Return true if x is a valid address not using indexing.
fbf55580 1725 (This much is the easy part.) */
c4e75102
MT
1726static bool
1727nonindexed_address_p (rtx x, bool strict)
fbf55580
MT
1728{
1729 rtx xfoo0;
ff9d4590 1730 if (REG_P (x))
fbf55580 1731 {
c4e75102 1732 if (! reload_in_progress
f2034d06
JL
1733 || reg_equiv_mem (REGNO (x)) == 0
1734 || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
c4e75102 1735 return true;
fbf55580 1736 }
c4e75102
MT
1737 if (indirectable_constant_address_p (x, false))
1738 return true;
1739 if (indirectable_address_p (x, strict, false))
1740 return true;
fbf55580 1741 xfoo0 = XEXP (x, 0);
c4e75102
MT
1742 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1743 return true;
fbf55580
MT
1744 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1745 && BASE_REGISTER_P (xfoo0, strict))
c4e75102
MT
1746 return true;
1747 return false;
fbf55580
MT
1748}
1749
c4e75102 1750/* True if PROD is either a reg times size of mode MODE and MODE is less
fbf55580
MT
1751 than or equal 8 bytes, or just a reg if MODE is one byte. */
1752
c4e75102 1753static bool
ef4bddc2 1754index_term_p (rtx prod, machine_mode mode, bool strict)
fbf55580
MT
1755{
1756 rtx xfoo0, xfoo1;
1757
1758 if (GET_MODE_SIZE (mode) == 1)
1759 return BASE_REGISTER_P (prod, strict);
1760
1761 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
c4e75102 1762 return false;
fbf55580
MT
1763
1764 xfoo0 = XEXP (prod, 0);
1765 xfoo1 = XEXP (prod, 1);
1766
d97c1295 1767 if (CONST_INT_P (xfoo0)
fbf55580
MT
1768 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1769 && INDEX_REGISTER_P (xfoo1, strict))
c4e75102 1770 return true;
fbf55580 1771
d97c1295 1772 if (CONST_INT_P (xfoo1)
fbf55580
MT
1773 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1774 && INDEX_REGISTER_P (xfoo0, strict))
c4e75102 1775 return true;
fbf55580 1776
c4e75102 1777 return false;
fbf55580
MT
1778}
1779
c4e75102 1780/* Return true if X is the sum of a register
fbf55580 1781 and a valid index term for mode MODE. */
c4e75102 1782static bool
ef4bddc2 1783reg_plus_index_p (rtx x, machine_mode mode, bool strict)
fbf55580
MT
1784{
1785 rtx xfoo0, xfoo1;
1786
1787 if (GET_CODE (x) != PLUS)
c4e75102 1788 return false;
fbf55580
MT
1789
1790 xfoo0 = XEXP (x, 0);
1791 xfoo1 = XEXP (x, 1);
1792
1793 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
c4e75102 1794 return true;
fbf55580
MT
1795
1796 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
c4e75102 1797 return true;
fbf55580 1798
c4e75102 1799 return false;
fbf55580
MT
1800}
1801
c4e75102
MT
1802/* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1803static bool
ef4bddc2 1804indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
c4e75102
MT
1805{
1806 if (!CONSTANT_ADDRESS_P (xfoo0))
1807 return false;
1808 if (BASE_REGISTER_P (xfoo1, strict))
1809 return !flag_pic || mode == QImode;
1810 if (flag_pic && symbolic_operand (xfoo0, SImode))
1811 return false;
1812 return reg_plus_index_p (xfoo1, mode, strict);
1813}
1814
1815/* legitimate_address_p returns true if it recognizes an RTL expression "x"
fbf55580
MT
1816 that is a valid memory address for an instruction.
1817 The MODE argument is the machine mode for the MEM expression
1818 that wants to use this address. */
c4e75102 1819bool
ef4bddc2 1820vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
fbf55580
MT
1821{
1822 rtx xfoo0, xfoo1;
1823
1824 if (nonindexed_address_p (x, strict))
c4e75102 1825 return true;
fbf55580
MT
1826
1827 if (GET_CODE (x) != PLUS)
c4e75102 1828 return false;
fbf55580
MT
1829
1830 /* Handle <address>[index] represented with index-sum outermost */
1831
1832 xfoo0 = XEXP (x, 0);
1833 xfoo1 = XEXP (x, 1);
1834
1835 if (index_term_p (xfoo0, mode, strict)
1836 && nonindexed_address_p (xfoo1, strict))
c4e75102 1837 return true;
fbf55580
MT
1838
1839 if (index_term_p (xfoo1, mode, strict)
1840 && nonindexed_address_p (xfoo0, strict))
c4e75102 1841 return true;
fbf55580 1842
b20f13e9 1843 /* Handle offset(reg)[index] with offset added outermost */
fbf55580 1844
c4e75102
MT
1845 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1846 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1847 return true;
fbf55580 1848
c4e75102 1849 return false;
b20f13e9 1850}
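
/* Illustrative examples (hypothetical registers RB and RX): beyond the
   nonindexed forms, the PLUS handling above accepts SImode addresses such as
       (plus (mult (reg RX) (const_int 4)) (reg RB))             (rb)[rx]
       (plus (plus (reg RB) (mult (reg RX) (const_int 4)))
	     (const_int 8))                                      8(rb)[rx]
   where the scale factor must equal GET_MODE_SIZE (SImode).  */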
fbf55580 1851
/* Return true if X (a legitimate address expression) has an effect that
   depends on the machine mode it is used for.  On the VAX, predecrement
   and postincrement addresses depend on the mode (the amount of decrement
   or increment being the length of the operand), and so do all indexed
   addresses (because the index scale factor is the length of the
   operand).  */
1857
b0f6b612 1858static bool
5bfed9a9 1859vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
fbf55580
MT
1860{
1861 rtx xfoo0, xfoo1;
1862
b9a76028 1863 /* Auto-increment cases are now dealt with generically in recog.c. */
fbf55580 1864 if (GET_CODE (x) != PLUS)
c4e75102 1865 return false;
fbf55580
MT
1866
1867 xfoo0 = XEXP (x, 0);
1868 xfoo1 = XEXP (x, 1);
1869
c4e75102
MT
1870 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1871 return false;
1872 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1873 return false;
1874 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1875 return false;
1876 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1877 return false;
1878
1879 return true;
1880}
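
/* Illustrative example (hypothetical registers RB and RX): the address
   (plus (reg RB) (const_int 4)) is reported as mode-independent by the
   checks above, while (plus (reg RB) (reg RX)) falls through and is
   treated as mode-dependent, since an indexed access scales the index by
   the operand length.  */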
1881
1882static rtx
ef4bddc2 1883fixup_mathdi_operand (rtx x, machine_mode mode)
c4e75102
MT
1884{
1885 if (illegal_addsub_di_memory_operand (x, mode))
1886 {
1887 rtx addr = XEXP (x, 0);
1888 rtx temp = gen_reg_rtx (Pmode);
1889 rtx offset = 0;
1890#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1891 if (GET_CODE (addr) == CONST && flag_pic)
1892 {
1893 offset = XEXP (XEXP (addr, 0), 1);
1894 addr = XEXP (XEXP (addr, 0), 0);
1895 }
1896#endif
1897 emit_move_insn (temp, addr);
1898 if (offset)
1899 temp = gen_rtx_PLUS (Pmode, temp, offset);
1900 x = gen_rtx_MEM (DImode, temp);
1901 }
1902 return x;
1903}
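
/* In other words (an illustrative reading of the code above): when X is a
   DImode memory operand whose address cannot be used directly by the
   add/subtract-with-carry patterns, its address is first loaded into a
   fresh pseudo register (re-applying any PIC offset that was split out),
   and a new DImode MEM based on that register is returned.  */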
1904
1905void
1906vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1907{
1908 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1909 rtx temp;
1910
1911 rtx (*gen_old_insn)(rtx, rtx, rtx);
1912 rtx (*gen_si_insn)(rtx, rtx, rtx);
1913 rtx (*gen_insn)(rtx, rtx, rtx);
1914
1915 if (code == PLUS)
1916 {
1917 gen_old_insn = gen_adddi3_old;
1918 gen_si_insn = gen_addsi3;
1919 gen_insn = gen_adcdi3;
1920 }
1921 else if (code == MINUS)
1922 {
1923 gen_old_insn = gen_subdi3_old;
1924 gen_si_insn = gen_subsi3;
1925 gen_insn = gen_sbcdi3;
1926 }
1927 else
1928 gcc_unreachable ();
1929
  /* If this is addition (so the operands are commutative) and one addend
     duplicates the destination, make that addend the first addend.  */
1933 if (code == PLUS
1934 && rtx_equal_p (operands[0], operands[2])
1935 && !rtx_equal_p (operands[1], operands[2]))
1936 {
1937 temp = operands[2];
1938 operands[2] = operands[1];
1939 operands[1] = temp;
1940 }
1941
1942 if (!TARGET_QMATH)
1943 {
1944 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
1945 }
1946 else if (hi_only)
1947 {
1948 if (!rtx_equal_p (operands[0], operands[1])
1949 && (REG_P (operands[0]) && MEM_P (operands[1])))
1950 {
1951 emit_move_insn (operands[0], operands[1]);
1952 operands[1] = operands[0];
1953 }
1954
1955 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1956 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1957 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1958
1959 if (!rtx_equal_p (operands[0], operands[1]))
1960 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1961 operand_subword (operands[1], 0, 0, DImode));
1962
1963 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1964 operand_subword (operands[1], 1, 0, DImode),
1965 operand_subword (operands[2], 1, 0, DImode)));
1966 }
1967 else
1968 {
      /* If we are adding the same value to itself, that is really a multiply
	 by 2, which is just a left shift by 1.  */
1971 if (rtx_equal_p (operands[1], operands[2]))
1972 {
1973 gcc_assert (code != MINUS);
1974 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1975 return;
1976 }
1977
1978 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1979
      /* If an operand is the same as operands[0], use the operands[0] rtx,
	 because fixup would return an equivalent rtx but not an equal one.  */
1982
1983 if (rtx_equal_p (operands[0], operands[1]))
1984 operands[1] = operands[0];
1985 else
1986 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1987
1988 if (rtx_equal_p (operands[0], operands[2]))
1989 operands[2] = operands[0];
1990 else
1991 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1992
      /* If we are subtracting and the destination is not also the minuend
	 [d = a - b], then because the carry ops take only two operands we
	 would need a move before the subtract; and if d == b we would also
	 need a temporary, since [d = a; d -= d] would leave 0.  Instead we
	 rewrite d = a - b as d = -b; d += a.  Since -b can be formed in
	 place even when b == d, no temporary is needed.

	 For addition, since the carry ops take only two operands, if we
	 are not already adding into the destination, move the first addend
	 to the destination first.  */
2003
2004 gcc_assert (operands[1] != const0_rtx || code == MINUS);
2005 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2006 {
2007 if (code == MINUS && CONSTANT_P (operands[1]))
2008 {
2009 temp = gen_reg_rtx (DImode);
2010 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2011 code = PLUS;
2012 gen_insn = gen_adcdi3;
2013 operands[2] = operands[1];
2014 operands[1] = operands[0];
2015 }
2016 else
2017 emit_move_insn (operands[0], operands[1]);
2018 }
2019
2020 /* Subtracting a constant will have been rewritten to an addition of the
2021 negative of that constant before we get here. */
2022 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2023 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
2024 }
2025}
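
/* Worked example (illustrative only): for d = a - b where the minuend A is
   a constant distinct from D, the path above first emits d = 0 - b with
   sbcdi3 and then d = d + a with adcdi3, so the subtraction needs no
   scratch register even when D and B overlap.  */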
2026
2027bool
ef4bddc2 2028adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
c4e75102
MT
2029{
2030 HOST_WIDE_INT lo_offset;
2031 HOST_WIDE_INT hi_offset;
2032
2033 if (GET_CODE (lo) != GET_CODE (hi))
2034 return false;
2035
2036 if (REG_P (lo))
2037 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2038 if (CONST_INT_P (lo))
2039 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
2040 if (CONST_INT_P (lo))
2041 return mode != SImode;
2042
2043 if (!MEM_P (lo))
2044 return false;
2045
2046 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2047 return false;
2048
2049 lo = XEXP (lo, 0);
2050 hi = XEXP (hi, 0);
2051
2052 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2053 return rtx_equal_p (lo, hi);
2054
2055 switch (GET_CODE (lo))
2056 {
2057 case REG:
2058 case SYMBOL_REF:
2059 lo_offset = 0;
2060 break;
2061 case CONST:
2062 lo = XEXP (lo, 0);
2063 /* FALLTHROUGH */
2064 case PLUS:
2065 if (!CONST_INT_P (XEXP (lo, 1)))
2066 return false;
2067 lo_offset = INTVAL (XEXP (lo, 1));
2068 lo = XEXP (lo, 0);
2069 break;
2070 default:
2071 return false;
2072 }
2073
2074 switch (GET_CODE (hi))
2075 {
2076 case REG:
2077 case SYMBOL_REF:
2078 hi_offset = 0;
2079 break;
2080 case CONST:
2081 hi = XEXP (hi, 0);
2082 /* FALLTHROUGH */
2083 case PLUS:
2084 if (!CONST_INT_P (XEXP (hi, 1)))
2085 return false;
2086 hi_offset = INTVAL (XEXP (hi, 1));
2087 hi = XEXP (hi, 0);
2088 break;
2089 default:
2090 return false;
2091 }
2092
2093 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2094 return false;
fbf55580 2095
c4e75102
MT
2096 return rtx_equal_p (lo, hi)
2097 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
fbf55580 2098}
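
/* Illustrative example (hypothetical register RB): for MODE == SImode,
   LO == (mem:SI (reg RB)) and HI == (mem:SI (plus (reg RB) (const_int 4)))
   are adjacent, because both are non-volatile MEMs off the same base and
   their offsets differ by exactly GET_MODE_SIZE (SImode) == 4.  */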
3814318d
RH
2099
2100/* Output assembler code for a block containing the constant parts
2101 of a trampoline, leaving space for the variable parts. */
2102
2103/* On the VAX, the trampoline contains an entry mask and two instructions:
2104 .word NN
     movl $STATIC,r0	(store the function's static chain)
2106 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2107
2108static void
2109vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2110{
2111 assemble_aligned_integer (2, const0_rtx);
2112 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2113 assemble_aligned_integer (4, const0_rtx);
2114 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2115 assemble_aligned_integer (2, GEN_INT (0x9f17));
2116 assemble_aligned_integer (4, const0_rtx);
2117}
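
/* A sketch of the resulting byte layout, assuming the standard VAX
   encodings (MOVL == 0xd0, JMP == 0x17, immediate specifier == 0x8f,
   absolute-deferred specifier == 0x9f, register specifier == 0x5n):

     offset 0-1    entry mask (copied from the target function)
     offset 2-3    0xd0 0x8f   movl $imm32,...
     offset 4-7    placeholder for the static chain value
     offset 8      0x50 + STATIC_CHAIN_REGNUM  (destination register)
     offset 9-10   0x17 0x9f   jmp *$addr32
     offset 11-14  placeholder for the function address

   which matches the offsets 0, 4 and 11 patched by vax_trampoline_init
   below.  */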
2118
2119/* We copy the register-mask from the function's pure code
2120 to the start of the trampoline. */
2121
2122static void
2123vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2124{
2125 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2126 rtx mem;
2127
2128 emit_block_move (m_tramp, assemble_trampoline_template (),
2129 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2130
2131 mem = adjust_address (m_tramp, HImode, 0);
2132 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2133
2134 mem = adjust_address (m_tramp, SImode, 4);
2135 emit_move_insn (mem, cxt);
2136 mem = adjust_address (m_tramp, SImode, 11);
0a81f074 2137 emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
3814318d
RH
2138 emit_insn (gen_sync_istream ());
2139}
2140
079e7538
NF
2141/* Value is the number of bytes of arguments automatically
2142 popped when returning from a subroutine call.
2143 FUNDECL is the declaration node of the function (as a tree),
2144 FUNTYPE is the data type of the function (as a tree),
2145 or for a library call it is an identifier node for the subroutine name.
2146 SIZE is the number of bytes of arguments passed on the stack.
2147
   On the VAX, the RET insn pops at most 255 longwords (1020 bytes) of
   arguments; for anything larger this returns 0 and the caller must
   remove the arguments itself.  */
2149
2150static int
2151vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2152 tree funtype ATTRIBUTE_UNUSED, int size)
2153{
2154 return size > 255 * 4 ? 0 : size;
2155}
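
/* For illustration: a call that pushes three int arguments passes
   SIZE == 12, so RET pops all 12 bytes; a call that pushes more than
   255 longwords (SIZE > 1020) gets 0 back from this hook, leaving the
   stack cleanup to the caller.  */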
8f8a46ba
NF
2156
2157/* Define where to put the arguments to a function.
2158 Value is zero to push the argument on the stack,
2159 or a hard register in which to store the argument.
2160
2161 MODE is the argument's machine mode.
2162 TYPE is the data type of the argument (as a tree).
2163 This is null for libcalls where that information may
2164 not be available.
2165 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2166 the preceding args and about the function being called.
2167 NAMED is nonzero if this argument is a named parameter
2168 (otherwise it is an extra parameter matching an ellipsis). */
2169
2170/* On the VAX all args are pushed. */
2171
2172static rtx
d5cc9181 2173vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
ef4bddc2 2174 machine_mode mode ATTRIBUTE_UNUSED,
8f8a46ba
NF
2175 const_tree type ATTRIBUTE_UNUSED,
2176 bool named ATTRIBUTE_UNUSED)
2177{
2178 return NULL_RTX;
2179}
2180
2181/* Update the data in CUM to advance over an argument of mode MODE and
2182 data type TYPE. (TYPE is null for libcalls where that information
2183 may not be available.) */
2184
2185static void
ef4bddc2 2186vax_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
8f8a46ba
NF
2187 const_tree type, bool named ATTRIBUTE_UNUSED)
2188{
d5cc9181
JR
2189 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2190
8f8a46ba
NF
2191 *cum += (mode != BLKmode
2192 ? (GET_MODE_SIZE (mode) + 3) & ~3
2193 : (int_size_in_bytes (type) + 3) & ~3);
2194}
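
/* For illustration, the rounding above is the usual round-up-to-a-
   multiple-of-4: a QImode argument advances CUM by (1 + 3) & ~3 == 4,
   and a 6-byte BLKmode structure advances it by (6 + 3) & ~3 == 8.  */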