/* Subroutines for insn-output.cc for VAX.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "calls.h"
#include "varasm.h"
#include "conditions.h"
#include "output.h"
#include "expr.h"
#include "reload.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static void vax_option_override (void);
static bool vax_legitimate_address_p (machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
static machine_mode vax_cc_modes_compatible (machine_mode, machine_mode);
static rtx_insn *vax_md_asm_adjust (vec<rtx> &, vec<rtx> &,
				    vec<machine_mode> &, vec<const char *> &,
				    vec<rtx> &, HARD_REG_SET &, location_t);
static rtx vax_function_arg (cumulative_args_t, const function_arg_info &);
static void vax_function_arg_advance (cumulative_args_t,
				      const function_arg_info &);
static rtx vax_struct_value_rtx (tree, int);
static bool vax_lra_p (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static poly_int64 vax_return_pops_args (tree, tree, poly_int64);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
static HOST_WIDE_INT vax_starting_frame_offset (void);
\f
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Enable compare elimination pass.  */
#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM VAX_PSL_REGNUM

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

/* Return the narrowest CC mode that spans both modes offered.  */
#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE vax_cc_modes_compatible

/* Mark PSL as clobbered for compatibility with the CC0 representation.  */
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST vax_md_asm_adjust

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_LRA_P
#define TARGET_LRA_P vax_lra_p

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET vax_starting_frame_offset

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;
\f
/* Set global variables as needed for the options enabled.  */

static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}

static void
vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
{
  rtx x;

  x = plus_constant (Pmode, frame_pointer_rtx, offset);
  x = gen_rtx_MEM (SImode, x);
  x = gen_rtx_SET (x, src);
  add_reg_note (insn, REG_CFA_OFFSET, x);
}

/* Generate the assembly code for function entry.  FILE is a stdio
   stream to output the code to.  SIZE is an int: how many units of
   temporary storage to allocate.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;
  HOST_WIDE_INT size;
  rtx insn;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
      mask |= 1 << regno;

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is as follows:

		<- CFA, AP
	r11
	r10
	...	Registers saved as specified by MASK
	r3
	r2
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);

  offset = 16;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 4;
      }

  /* Because add_reg_note pushes the notes, adding this last means that
     it will be processed first.  This is required to allow the other
     notes to be interpreted properly.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
		plus_constant (Pmode, frame_pointer_rtx, offset));

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= vax_starting_frame_offset ();
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}
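
/* Illustrative example (not from the original source): a function that
   clobbers only r2 and r3 gets MASK = (1 << 2) | (1 << 3) = 0xc in the
   procedure entry mask, and those two registers are then described by the
   REG_CFA_OFFSET notes above at offsets 16 and 20 from the frame
   pointer.  */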

/* When debugging with stabs, we want to output an extra dummy label
   so that gas can distinguish between D_float and G_float prior to
   processing the .stabs directive identifying type double.  */
static void
vax_file_start (void)
{
  default_file_start ();

  if (write_symbols == DBX_DEBUG)
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
}

/* We can use the BSD C library routines for the libgcc calls that are
   still generated, since that's what they boil down to anyway.  When
   ELF, avoid the user's namespace.  */

static void
vax_init_libfuncs (void)
{
  if (TARGET_BSD_DIVMOD)
    {
      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
    }
}

/* This is like nonimmediate_operand with a restriction on the type of MEM.  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
\f
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT/ASHIFT of a REG and an
	 appropriate constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT/ASHIFT or REG in IREG, and the REG in BREG.  If we
	 have a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT
	       || GET_CODE (XEXP (addr, 1)) == ASHIFT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT
	       || GET_CODE (XEXP (addr, 0)) == ASHIFT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	gcc_unreachable ();

      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT || GET_CODE (addr) == ASHIFT)
	ireg = addr;
      else
	{
	  gcc_assert (GET_CODE (addr) == PLUS);
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 0),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 0)));
		    }
		}
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT
			  || GET_CODE (XEXP (addr, 0)) == ASHIFT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 0);
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 1),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 1)));
		    }
		}
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT
			  || GET_CODE (XEXP (addr, 1)) == ASHIFT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 1);
	    }
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      gcc_assert (!ireg);
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}

	    }

	  output_address (VOIDmode, offset);
	}

      if (breg != 0)
	fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
	{
	  if (GET_CODE (ireg) == MULT || GET_CODE (ireg) == ASHIFT)
	    ireg = XEXP (ireg, 0);
	  gcc_assert (REG_P (ireg));
	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
	}
      break;

    default:
      output_addr_const (file, addr);
    }
}
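
/* Illustrative example (not from the original source): for the address
   (plus (plus (reg r1) (mult (reg r2) (const_int 4))) (const_int 8))
   the code above prints "8(r1)[r2]", i.e. the constant offset first,
   the base register in parentheses and the index register in
   brackets.  */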
void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'k')
    fputs (cond_name (x), file);
  else if (code == 'K')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (GET_MODE (x), XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
\f
const char *
cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case NE:
      return "neq";
    case EQ:
      return "eql";
    case GE:
      return "geq";
    case GT:
      return "gtr";
    case LE:
      return "leq";
    case LT:
      return "lss";
    case GEU:
      return "gequ";
    case GTU:
      return "gtru";
    case LEU:
      return "lequ";
    case LTU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

const char *
rev_cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case EQ:
      return "neq";
    case NE:
      return "eql";
    case LT:
      return "geq";
    case LE:
      return "gtr";
    case GT:
      return "leq";
    case GE:
      return "lss";
    case LTU:
      return "gequ";
    case LEU:
      return "gtru";
    case GTU:
      return "lequ";
    case GEU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

static bool
vax_float_literal (rtx c)
{
  machine_mode mode;
  const REAL_VALUE_TYPE *r;
  REAL_VALUE_TYPE s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  r = CONST_DOUBLE_REAL_VALUE (c);

  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      real_from_integer (&s, mode, x, SIGNED);

      if (real_equal (r, &s))
	return true;
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (real_equal (r, &s))
	return true;
    }
  return false;
}
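
/* Illustrative note (not from the original source): with the tests above,
   0.0, 1.0 and 2.0 (the const_tiny_rtx entries) as well as the powers of
   two 1.0 through 64.0 and their reciprocals (0.5 down to 1/64) are all
   recognized here, and vax_rtx_costs below charges them less than other
   floating point constants.  */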

/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

	1 - symbolic address
	1 - pre-decrement
	1 - indexing and/or offset(register)
	2 - indirect */

static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
    case ASHIFT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}

static int
vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool speed ATTRIBUTE_UNUSED)
{
  return COSTS_N_INSNS (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
}
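
/* Illustrative note (not from the original source): a plain register
   operand short-circuits the REG_P test above and costs just
   COSTS_N_INSNS (1); any more complex address adds the cycle counts
   computed by vax_address_cost_1 on top of that base cost.  */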

/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
	       int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = COSTS_N_INSNS (1) / 2;
	  return true;
	}
      if (outer_code == AND)
	{
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077
		    ? COSTS_N_INSNS (1) : COSTS_N_INSNS (2));
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (3);
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	*total = vax_float_literal (x) ? COSTS_N_INSNS (5) : COSTS_N_INSNS (8);
      else
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64)
		  ? COSTS_N_INSNS (2) : COSTS_N_INSNS (5));
      return true;

    case POST_INC:
      *total = COSTS_N_INSNS (2);
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = COSTS_N_INSNS (3);
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case E_DFmode:
	  *total = COSTS_N_INSNS (16);	/* 4 on VAX 9000 */
	  break;
	case E_SFmode:
	  *total = COSTS_N_INSNS (9);	/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case E_DImode:
	  *total = COSTS_N_INSNS (16);	/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case E_SImode:
	case E_HImode:
	case E_QImode:
	  *total = COSTS_N_INSNS (10);	/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = COSTS_N_INSNS (17);
      break;

    case DIV:
      if (mode == DImode)
	*total = COSTS_N_INSNS (30);	/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = COSTS_N_INSNS (24);
      else
	*total = COSTS_N_INSNS (11);	/* 25 on VAX 2 */
      break;

    case MOD:
      *total = COSTS_N_INSNS (23);
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = COSTS_N_INSNS (29);
      break;

    case FLOAT:
      *total = COSTS_N_INSNS (6		/* 4 on VAX 9000 */
			      + (mode == DFmode)
			      + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = COSTS_N_INSNS (7);	/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = COSTS_N_INSNS (12);
      else
	*total = COSTS_N_INSNS (10);	/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = COSTS_N_INSNS (6);	/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e";	/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode		/* 6/8 on VAX 9000, 16/15 on VAX 2 */
		? COSTS_N_INSNS (13) : COSTS_N_INSNS (8));
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = COSTS_N_INSNS (3);
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = COSTS_N_INSNS (3);
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = COSTS_N_INSNS (4);
	  fmt = "e";
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = COSTS_N_INSNS (9);
      else if (mode == SFmode)
	*total = COSTS_N_INSNS (6);
      else if (mode == DImode)
	*total = COSTS_N_INSNS (4);
      else
	*total = COSTS_N_INSNS (2);
      break;

    case NOT:
      *total = COSTS_N_INSNS (2);
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = COSTS_N_INSNS (15);
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = COSTS_N_INSNS (5);	/* 7 on VAX 2 */
      else
	*total = COSTS_N_INSNS (3);	/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += COSTS_N_INSNS (vax_address_cost_1 (x));
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = COSTS_N_INSNS (3);	/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && mode != QImode)
	    *total += COSTS_N_INSNS (1);	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += COSTS_N_INSNS (1);		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += COSTS_N_INSNS (1);
	      else
		*total += (GET_MODE (x) == DFmode
			   ? COSTS_N_INSNS (3) : COSTS_N_INSNS (2));
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += COSTS_N_INSNS (2);
	    }
	  break;
	case MEM:
	  *total += COSTS_N_INSNS (1);		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += COSTS_N_INSNS (vax_address_cost_1 (XEXP (op, 0)));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += COSTS_N_INSNS (1);
	  break;
	}
    }
  return true;
}
\f
/* With ELF we do not support GOT entries for external `symbol+offset'
   references, so do not accept external symbol references if an offset
   is to be added.  Do not accept external symbol references at all if
   LOCAL_P is set.  This is for cases where making a reference indirect
   would make it invalid.  Do not accept any kind of symbols if SYMBOL_P
   is clear.  This is for situations where a reference is used as an
   immediate value for operations other than address loads (MOVA/PUSHA),
   as those operations do not support PC-relative immediates.  */

bool
vax_acceptable_pic_operand_p (rtx x ATTRIBUTE_UNUSED,
			      bool local_p ATTRIBUTE_UNUSED,
			      bool symbol_p ATTRIBUTE_UNUSED)
{
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    {
      x = XEXP (XEXP (x, 0), 0);
      local_p = true;
    }
  switch (GET_CODE (x))
    {
    case SYMBOL_REF:
      return symbol_p && !(local_p && !SYMBOL_REF_LOCAL_P (x));
    case LABEL_REF:
      return symbol_p && !(local_p && LABEL_REF_NONLOCAL_P (x));
    default:
      break;
    }
#endif
  return true;
}
\f
/* Given a comparison code (NE, EQ, etc.) and the operands of a COMPARE,
   return the mode to be used for the comparison.  As we have the same
   interpretation of condition codes across all the instructions we just
   return the narrowest mode suitable for the comparison code requested.  */

extern machine_mode
vax_select_cc_mode (enum rtx_code op,
		    rtx x ATTRIBUTE_UNUSED, rtx y ATTRIBUTE_UNUSED)
{
  switch (op)
    {
    default:
      gcc_unreachable ();
    case NE:
    case EQ:
      return CCZmode;
    case GE:
    case LT:
      return CCNmode;
    case GT:
    case LE:
      return CCNZmode;
    case GEU:
    case GTU:
    case LEU:
    case LTU:
      return CCmode;
    }
}

/* Return the narrowest CC mode that spans both modes offered.  If they
   intersect, this will be the wider of the two, and if they do not then
   find one that is a superset of both (i.e. CCNZmode for a pair
   consisting of CCNmode and CCZmode).  A wider CC writer will satisfy
   a narrower CC reader, e.g. a comparison operator that uses CCZmode
   can use a CCNZmode output of a previous instruction.  */

static machine_mode
vax_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  switch (m1)
    {
    default:
      gcc_unreachable ();
    case E_CCmode:
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	case E_CCNZmode:
	case E_CCNmode:
	case E_CCZmode:
	  return m1;
	}
    case E_CCNZmode:
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	  return m2;
	case E_CCNmode:
	case E_CCNZmode:
	case E_CCZmode:
	  return m1;
	}
    case E_CCNmode:
    case E_CCZmode:
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	case E_CCNZmode:
	  return m2;
	case E_CCNmode:
	case E_CCZmode:
	  return m1 == m2 ? m1 : E_CCNZmode;
	}
    }
}
\f
/* Mark PSL as clobbered for compatibility with the CC0 representation.  */

static rtx_insn *
vax_md_asm_adjust (vec<rtx> &outputs ATTRIBUTE_UNUSED,
		   vec<rtx> &inputs ATTRIBUTE_UNUSED,
		   vec<machine_mode> &input_modes ATTRIBUTE_UNUSED,
		   vec<const char *> &constraints ATTRIBUTE_UNUSED,
		   vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs,
		   location_t /*loc*/)
{
  clobbers.safe_push (gen_rtx_REG (CCmode, VAX_PSL_REGNUM));
  SET_HARD_REG_BIT (clobbered_regs, VAX_PSL_REGNUM);
  return NULL;
}
\f
/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
	addl2	$DELTA, 4(ap)	#adjust first argument
	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
*/

static void
vax_output_mi_thunk (FILE * file,
		     tree thunk ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT delta,
		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
		     tree function)
{
  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk));

  assemble_start_function (thunk, fnname);
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  fprintf (file, "+2\n");
  assemble_end_function (thunk, fnname);
}
\f
static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}

/* Return true if we use LRA instead of reload pass.  */

static bool
vax_lra_p (void)
{
  return TARGET_LRA;
}

/* Output integer move instructions.  */

bool
vax_maybe_split_dimode_move (rtx *operands)
{
  return (TARGET_QMATH
	  && (!MEM_P (operands[0])
	      || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[0], 0)) == POST_INC
	      || !illegal_addsub_di_memory_operand (operands[0], DImode))
	  && ((CONST_INT_P (operands[1])
	       && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	      || GET_CODE (operands[1]) == CONST_DOUBLE));
}

const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
		     machine_mode mode)
{
  rtx hi[3], lo[3];
  const char *pattern_hi, *pattern_lo;
  bool push_p;

  switch (mode)
    {
    case E_DImode:
      if (operands[1] == const0_rtx)
	return "clrq %0";
      if (TARGET_QMATH && optimize_size
	  && (CONST_INT_P (operands[1])
	      || GET_CODE (operands[1]) == CONST_DOUBLE))
	{
	  unsigned HOST_WIDE_INT hval, lval;
	  int n;

	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      gcc_assert (HOST_BITS_PER_WIDE_INT != 64);

	      /* Make sure only the low 32 bits are valid.  */
	      lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
	      hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
	    }
	  else
	    {
	      lval = INTVAL (operands[1]);
	      hval = 0;
	    }

	  /* Here we see if the 64bit value is really a 6bit value shifted
	     left by some arbitrary amount.  If so, we can use ashq to
	     shift it to the correct value, saving 7 bytes (1 addr-mode-byte
	     + 8 bytes - 1 shift byte - 1 short literal byte).  */
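	  /* Illustrative example (not from the original source): for the
	     constant 640 (i.e. 5 << 7) the code below finds n = 7 and
	     lval >> n = 5 < 64, so it emits "ashq $7,$5,%0" instead of a
	     full 8-byte immediate move.  */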
	  if (lval != 0
	      && (n = exact_log2 (lval & (- lval))) != -1
	      && (lval >> n) < 64)
	    {
	      lval >>= n;

	      /* On 32bit platforms, if the 6bits didn't overflow into the
		 upper 32bit value that value better be 0.  If we have
		 overflowed, make sure it wasn't too much.  */
	      if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
		{
		  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
		    n = 0;	/* failure */
		  else
		    lval |= hval << (32 - n);
		}
	      /* If n is 0, then ashq is not the best way to emit this.  */
	      if (n > 0)
		{
		  operands[1] = GEN_INT (lval);
		  operands[2] = GEN_INT (n);
		  return "ashq %2,%D1,%0";
		}
#if HOST_BITS_PER_WIDE_INT == 32
	    }
	  /* On 32bit platforms, if the low 32bit value is 0, check the
	     upper 32bit value.  */
	  else if (hval != 0
		   && (n = exact_log2 (hval & (- hval)) - 1) != -1
		   && (hval >> n) < 64)
	    {
	      operands[1] = GEN_INT (hval >> n);
	      operands[2] = GEN_INT (n + 32);
	      return "ashq %2,%D1,%0";
#endif
	    }
	}

      if (vax_maybe_split_dimode_move (operands))
	{
	  hi[0] = operands[0];
	  hi[1] = operands[1];

	  split_quadword_operands (insn, SET, hi, lo, 2);

	  pattern_lo = vax_output_int_move (NULL, lo, SImode);
	  pattern_hi = vax_output_int_move (NULL, hi, SImode);

	  /* If the patterns are just movl/movl or pushl/pushl then a movq
	     will be shorter (1 opcode byte + 1 addrmode byte + 8 immediate
	     value bytes vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate
	     value bytes).  */
	  if ((startswith (pattern_lo, "movl")
	       && startswith (pattern_hi, "movl"))
	      || (startswith (pattern_lo, "pushl")
		  && startswith (pattern_hi, "pushl")))
	    return "movq %1,%0";

	  if (MEM_P (operands[0])
	      && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
	    {
	      output_asm_insn (pattern_hi, hi);
	      operands[0] = lo[0];
	      operands[1] = lo[1];
	      operands[2] = lo[2];
	      return pattern_lo;
	    }
	  else
	    {
	      output_asm_insn (pattern_lo, lo);
	      operands[0] = hi[0];
	      operands[1] = hi[1];
	      operands[2] = hi[2];
	      return pattern_hi;
	    }
	}
      return "movq %1,%0";

    case E_SImode:
      push_p = push_operand (operands[0], SImode);

      if (symbolic_operand (operands[1], SImode))
	return push_p ? "pushab %a1" : "movab %a1,%0";

      if (operands[1] == const0_rtx)
	return push_p ? "pushl %1" : "clrl %0";

      if (CONST_INT_P (operands[1])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  int n;
	  if ((unsigned HOST_WIDE_INT)(~i) < 64)
	    return "mcoml %N1,%0";
	  if ((unsigned HOST_WIDE_INT)i < 0x100)
	    return "movzbl %1,%0";
	  if (i >= -0x80 && i < 0)
	    return "cvtbl %1,%0";
	  if (optimize_size
	      && (n = exact_log2 (i & (-i))) != -1
	      && ((unsigned HOST_WIDE_INT)i >> n) < 64)
	    {
	      operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
	      operands[2] = GEN_INT (n);
	      return "ashl %2,%1,%0";
	    }
	  if ((unsigned HOST_WIDE_INT)i < 0x10000)
	    return "movzwl %1,%0";
	  if (i >= -0x8000 && i < 0)
	    return "cvtwl %1,%0";
	}
      return push_p ? "pushl %1" : "movl %1,%0";

    case E_HImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrw %0";
	  else if ((unsigned HOST_WIDE_INT)i < 64)
	    return "movw %1,%0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomw %H1,%0";
	  else if ((unsigned HOST_WIDE_INT)i < 256)
	    return "movzbw %1,%0";
	  else if (i >= -0x80 && i < 0)
	    return "cvtbw %1,%0";
	}
      return "movw %1,%0";

    case E_QImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrb %0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomb %B1,%0";
	}
      return "movb %1,%0";

    default:
      gcc_unreachable ();
    }
}

/* Output integer add instructions.

   The space-time-opcode tradeoffs for addition vary by model of VAX.

   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.

   "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
   faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
   a register is used in an address too soon after it is set.
   Compromise by using movab only when it is shorter than the add
   or the base register in the address is one of sp, ap, and fp,
   which are not modified very often.  */
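
/* Illustrative example (not from the original source): under these rules a
   small immediate add such as r0 += 2 is still emitted as "addl2 $2,r0",
   while adding a larger constant to a stable base register, e.g. computing
   fp+200 into r0, is emitted as "movab 200(fp),r0".  */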

const char *
vax_output_int_add (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;
	bool sub;

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, PLUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	    gcc_assert (!flag_pic
			|| !non_pic_external_memory_operand (low[2], SImode));
	    gcc_assert (!flag_pic
			|| !non_pic_external_memory_operand (low[0], SImode));
#endif

	    /* No reason to add a 0 to the low part and thus no carry, so just
	       emit the appropriate add/sub instruction.  */
	    if (low[2] == const0_rtx)
	      return vax_output_int_add (NULL, operands, SImode);

	    /* Are we doing addition or subtraction?  */
	    sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;

	    /* We can't use vax_output_int_add since some of the patterns
	       don't modify the carry bit.  */
	    if (sub)
	      {
		if (low[2] == constm1_rtx)
		  pattern = "decl %0";
		else
		  pattern = "subl2 $%n2,%0";
	      }
	    else
	      {
		if (low[2] == const1_rtx)
		  pattern = "incl %0";
		else
		  pattern = "addl2 %2,%0";
	      }
	    output_asm_insn (pattern, low);

	    /* In 2's complement, -n = ~n + 1.  Since we are dealing with
	       two 32bit parts, we complement each and then add one to
	       the low part.  We know that the low part can't overflow since
	       its value can never be 0.  */
	    if (sub)
	      return "sbwc %N2,%0";
	    return "adwc %2,%0";
	  }

	/* Add low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      /* Should examine operand, punt if not POST_INC.  */
	      pattern = "tstl %0", carry = 0;
	    else if (low[2] == const1_rtx)
	      pattern = "incl %0";
	    else
	      pattern = "addl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == const0_rtx)
	      pattern = "movl %1,%0", carry = 0;
	    else
	      pattern = "addl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (!carry)
	  /* If CARRY is 0, we don't have any carry value to worry about.  */
	  return get_insn_template (CODE_FOR_addsi3, insn);
	/* %0 = C + %1 + %2 */
	if (!rtx_equal_p (operands[0], operands[1]))
	  output_asm_insn ((operands[1] == const0_rtx
			    ? "clrl %0"
			    : "movl %1,%0"), operands);
	return "adwc %2,%0";
      }

    case E_SImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incl %0";
	  if (operands[2] == constm1_rtx)
	    return "decl %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subl2 $%n2,%0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
	      && REG_P (operands[1])
	      && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
		  || REGNO (operands[1]) > 11))
	    return "movab %c2(%1),%0";
	  if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
	    return "movab %a2[%0],%0";
	  return "addl2 %2,%0";
	}

      if (rtx_equal_p (operands[0], operands[2]))
	{
	  if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
	    return "movab %a1[%0],%0";
	  return "addl2 %1,%0";
	}

      if (CONST_INT_P (operands[2])
	  && INTVAL (operands[2]) < 32767
	  && INTVAL (operands[2]) > -32768
	  && REG_P (operands[1])
	  && push_operand (operands[0], SImode))
	return "pushab %c2(%1)";

      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subl3 $%n2,%1,%0";

      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
	  && REG_P (operands[1])
	  && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
	      || REGNO (operands[1]) > 11))
	return "movab %c2(%1),%0";

      /* Add this if using gcc on a VAX 3xxx:
      if (REG_P (operands[1]) && REG_P (operands[2]))
	return "movab (%1)[%2],%0";
      */

      if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a2[%1]";
	  return "movab %a2[%1],%0";
	}

      if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a1[%2]";
	  return "movab %a1[%2],%0";
	}

      if (flag_pic && REG_P (operands[0])
	  && symbolic_operand (operands[2], SImode))
	return "movab %a2,%0;addl2 %1,%0";

      if (flag_pic
	  && (symbolic_operand (operands[1], SImode)
	      || symbolic_operand (operands[2], SImode)))
	debug_rtx (insn);

      return "addl3 %1,%2,%0";

    case E_HImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incw %0";
	  if (operands[2] == constm1_rtx)
	    return "decw %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subw2 $%n2,%0";
	  return "addw2 %2,%0";
	}
      if (rtx_equal_p (operands[0], operands[2]))
	return "addw2 %1,%0";
      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subw3 $%n2,%1,%0";
      return "addw3 %1,%2,%0";

    case E_QImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incb %0";
	  if (operands[2] == constm1_rtx)
	    return "decb %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subb2 $%n2,%0";
	  return "addb2 %2,%0";
	}
      if (rtx_equal_p (operands[0], operands[2]))
	return "addb2 %1,%0";
      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subb3 $%n2,%1,%0";
      return "addb3 %1,%2,%0";

    default:
      gcc_unreachable ();
    }
}

const char *
vax_output_int_subtract (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and
		   increment.  Negate hi, then lo, and subtract the carry
		   back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      pattern = 0, carry = 0;
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
    }
}
1732
c4e75102 1733/* True if X is an rtx for a constant that is a valid address. */
fbf55580 1734
c4e75102 1735bool
fbf55580
MT
1736legitimate_constant_address_p (rtx x)
1737{
c4e75102
MT
1738 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1739 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1740 return true;
1741 if (GET_CODE (x) != CONST)
1742 return false;
1743#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1744 if (flag_pic
1745 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1746 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1747 return false;
1748#endif
1749 return true;
fbf55580
MT
1750}
1751
fbf55580
MT
1752/* The other macros defined here are used only in legitimate_address_p (). */
1753
1754/* Nonzero if X is a hard reg that can be used as an index
1755 or, if not strict, if it is a pseudo reg. */
b20f13e9 1756#define INDEX_REGISTER_P(X, STRICT) \
ff9d4590 1757(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
fbf55580
MT
1758
1759/* Nonzero if X is a hard reg that can be used as a base reg
1760 or, if not strict, if it is a pseudo reg. */
b20f13e9 1761#define BASE_REGISTER_P(X, STRICT) \
ff9d4590 1762(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
fbf55580
MT
1763
1764#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1765
1766/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1767 are no SYMBOL_REFs for external symbols present. */
1768
c4e75102
MT
1769static bool
1770indirectable_constant_address_p (rtx x, bool indirect)
fbf55580 1771{
c4e75102
MT
1772 if (GET_CODE (x) == SYMBOL_REF)
1773 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1774
1775 if (GET_CODE (x) == CONST)
1776 return !flag_pic
1777 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1778 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1779
1780 return CONSTANT_ADDRESS_P (x);
fbf55580
MT
1781}
1782
1783#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1784
c4e75102
MT
1785static bool
1786indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
fbf55580
MT
1787{
1788 return CONSTANT_ADDRESS_P (x);
1789}
1790
1791#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1792
c4e75102 1793/* True if X is an address which can be indirected. External symbols
fbf55580
MT
1794 could be in a sharable image library, so we disallow those. */
1795
c4e75102
MT
1796static bool
1797indirectable_address_p (rtx x, bool strict, bool indirect)
fbf55580 1798{
c4e75102
MT
1799 if (indirectable_constant_address_p (x, indirect)
1800 || BASE_REGISTER_P (x, strict))
1801 return true;
1802 if (GET_CODE (x) != PLUS
1803 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1804 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1805 return false;
1806 return indirectable_constant_address_p (XEXP (x, 1), indirect);
fbf55580
MT
1807}
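/* Thus (reg:SI 2), (plus:SI (reg:SI 2) (const_int 4)) and a plain constant
   address all qualify, while under -fPIC a base register plus anything other
   than a CONST_INT offset -- e.g. (plus:SI (reg:SI 2) (symbol_ref "x")) --
   is refused.  (Register numbers here are illustrative only.)  */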
1808
c4e75102 1809/* Return true if x is a valid address not using indexing.
fbf55580 1810 (This much is the easy part.) */
c4e75102
MT
1811static bool
1812nonindexed_address_p (rtx x, bool strict)
fbf55580
MT
1813{
1814 rtx xfoo0;
ff9d4590 1815 if (REG_P (x))
fbf55580 1816 {
c4e75102 1817 if (! reload_in_progress
f2034d06
JL
1818 || reg_equiv_mem (REGNO (x)) == 0
1819 || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
c4e75102 1820 return true;
fbf55580 1821 }
c4e75102
MT
1822 if (indirectable_constant_address_p (x, false))
1823 return true;
1824 if (indirectable_address_p (x, strict, false))
1825 return true;
fbf55580 1826 xfoo0 = XEXP (x, 0);
c4e75102
MT
1827 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1828 return true;
fbf55580
MT
1829 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1830 && BASE_REGISTER_P (xfoo0, strict))
c4e75102
MT
1831 return true;
1832 return false;
fbf55580
MT
1833}
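/* In assembler terms the forms accepted above are register deferred (rN),
   displacement disp(rN), indirect forms such as @disp(rN) through the MEM
   case, autodecrement -(rN), autoincrement (rN)+, and plain constant
   addresses -- everything except the indexed modes, which are checked
   separately below.  */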
1834
c4e75102 1835/* True if PROD is either a reg times size of mode MODE and MODE is less
fbf55580
MT
 1836   than or equal to 8 bytes, or just a reg if MODE is one byte.  */
1837
c4e75102 1838static bool
ef4bddc2 1839index_term_p (rtx prod, machine_mode mode, bool strict)
fbf55580
MT
1840{
1841 rtx xfoo0, xfoo1;
c605a8bf 1842 bool log_p;
fbf55580
MT
1843
1844 if (GET_MODE_SIZE (mode) == 1)
1845 return BASE_REGISTER_P (prod, strict);
1846
c605a8bf
MR
1847 if ((GET_CODE (prod) != MULT && GET_CODE (prod) != ASHIFT)
1848 || GET_MODE_SIZE (mode) > 8)
c4e75102 1849 return false;
fbf55580 1850
c605a8bf 1851 log_p = GET_CODE (prod) == ASHIFT;
fbf55580
MT
1852 xfoo0 = XEXP (prod, 0);
1853 xfoo1 = XEXP (prod, 1);
1854
d97c1295 1855 if (CONST_INT_P (xfoo0)
c605a8bf 1856 && GET_MODE_SIZE (mode) == (log_p ? 1 << INTVAL (xfoo0) : INTVAL (xfoo0))
fbf55580 1857 && INDEX_REGISTER_P (xfoo1, strict))
c4e75102 1858 return true;
fbf55580 1859
d97c1295 1860 if (CONST_INT_P (xfoo1)
c605a8bf 1861 && GET_MODE_SIZE (mode) == (log_p ? 1 << INTVAL (xfoo1) : INTVAL (xfoo1))
fbf55580 1862 && INDEX_REGISTER_P (xfoo0, strict))
c4e75102 1863 return true;
fbf55580 1864
c4e75102 1865 return false;
fbf55580
MT
1866}
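/* So for an SImode operand either (mult (reg) (const_int 4)) or
   (ashift (reg) (const_int 2)) is a valid index term, reflecting the VAX
   rule that the index register is scaled by the operand length; for a
   QImode operand a bare register suffices, the scale factor being 1.  */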
1867
c4e75102 1868/* Return true if X is the sum of a register
fbf55580 1869 and a valid index term for mode MODE. */
c4e75102 1870static bool
ef4bddc2 1871reg_plus_index_p (rtx x, machine_mode mode, bool strict)
fbf55580
MT
1872{
1873 rtx xfoo0, xfoo1;
1874
1875 if (GET_CODE (x) != PLUS)
c4e75102 1876 return false;
fbf55580
MT
1877
1878 xfoo0 = XEXP (x, 0);
1879 xfoo1 = XEXP (x, 1);
1880
1881 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
c4e75102 1882 return true;
fbf55580
MT
1883
1884 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
c4e75102 1885 return true;
fbf55580 1886
c4e75102 1887 return false;
fbf55580
MT
1888}
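/* E.g. (plus (reg 2) (mult (reg 3) (const_int 4))) for SImode, which the
   assembler writes as (r2)[r3].  (Register numbers are illustrative.)  */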
1889
c4e75102
MT
1890/* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1891static bool
ef4bddc2 1892indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
c4e75102
MT
1893{
1894 if (!CONSTANT_ADDRESS_P (xfoo0))
1895 return false;
1896 if (BASE_REGISTER_P (xfoo1, strict))
1897 return !flag_pic || mode == QImode;
1898 if (flag_pic && symbolic_operand (xfoo0, SImode))
1899 return false;
1900 return reg_plus_index_p (xfoo1, mode, strict);
1901}
1902
 1903/* vax_legitimate_address_p returns true if it recognizes an RTL expression "x"
fbf55580
MT
1904 that is a valid memory address for an instruction.
1905 The MODE argument is the machine mode for the MEM expression
1906 that wants to use this address. */
c4e75102 1907bool
ef4bddc2 1908vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
fbf55580
MT
1909{
1910 rtx xfoo0, xfoo1;
1911
1912 if (nonindexed_address_p (x, strict))
c4e75102 1913 return true;
fbf55580
MT
1914
1915 if (GET_CODE (x) != PLUS)
c4e75102 1916 return false;
fbf55580
MT
1917
1918 /* Handle <address>[index] represented with index-sum outermost */
1919
1920 xfoo0 = XEXP (x, 0);
1921 xfoo1 = XEXP (x, 1);
1922
1923 if (index_term_p (xfoo0, mode, strict)
1924 && nonindexed_address_p (xfoo1, strict))
c4e75102 1925 return true;
fbf55580
MT
1926
1927 if (index_term_p (xfoo1, mode, strict)
1928 && nonindexed_address_p (xfoo0, strict))
c4e75102 1929 return true;
fbf55580 1930
b20f13e9 1931 /* Handle offset(reg)[index] with offset added outermost */
fbf55580 1932
c4e75102
MT
1933 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1934 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1935 return true;
fbf55580 1936
c4e75102 1937 return false;
b20f13e9 1938}
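/* Putting the pieces together, an SImode address such as
   (plus (plus (mult (reg 3) (const_int 4)) (reg 2)) (const_int 8))
   is accepted here -- a base-plus-index term with the displacement added
   outermost -- and corresponds to the assembler form 8(r2)[r3].
   (Register numbers and the displacement are illustrative only.)  */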
fbf55580 1939
c4e75102 1940/* Return true if x (a legitimate address expression) has an effect that
fbf55580
MT
1941 depends on the machine mode it is used for. On the VAX, the predecrement
1942 and postincrement address depend thus (the amount of decrement or
 1943   increment being the length of the operand) and all indexed addresses depend
1944 thus (because the index scale factor is the length of the operand). */
1945
b0f6b612 1946static bool
5bfed9a9 1947vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
fbf55580
MT
1948{
1949 rtx xfoo0, xfoo1;
1950
e53b6e56 1951 /* Auto-increment cases are now dealt with generically in recog.cc. */
fbf55580 1952 if (GET_CODE (x) != PLUS)
c4e75102 1953 return false;
fbf55580
MT
1954
1955 xfoo0 = XEXP (x, 0);
1956 xfoo1 = XEXP (x, 1);
1957
c4e75102
MT
1958 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1959 return false;
1960 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1961 return false;
1962 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1963 return false;
1964 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1965 return false;
1966
1967 return true;
1968}
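/* So a displacement address like (plus (reg 2) (const_int 4)) is mode
   independent, while an indexed address such as
   (plus (reg 2) (mult (reg 3) (const_int 4))) is mode dependent, because
   the scale factor embedded in the index term must match the size of the
   mode actually being accessed.  */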
1969
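/* Rewrite a DImode memory operand that the double-word add/subtract
   patterns cannot accept directly into (mem:DI (reg)), loading its address
   (plus any PIC offset) into a fresh pseudo first.  */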
1970static rtx
ef4bddc2 1971fixup_mathdi_operand (rtx x, machine_mode mode)
c4e75102
MT
1972{
1973 if (illegal_addsub_di_memory_operand (x, mode))
1974 {
1975 rtx addr = XEXP (x, 0);
1976 rtx temp = gen_reg_rtx (Pmode);
1977 rtx offset = 0;
1978#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1979 if (GET_CODE (addr) == CONST && flag_pic)
1980 {
1981 offset = XEXP (XEXP (addr, 0), 1);
1982 addr = XEXP (XEXP (addr, 0), 0);
1983 }
1984#endif
1985 emit_move_insn (temp, addr);
1986 if (offset)
1987 temp = gen_rtx_PLUS (Pmode, temp, offset);
1988 x = gen_rtx_MEM (DImode, temp);
1989 }
1990 return x;
1991}
1992
1993void
1994vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1995{
1996 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1997 rtx temp;
1998
1999 rtx (*gen_old_insn)(rtx, rtx, rtx);
2000 rtx (*gen_si_insn)(rtx, rtx, rtx);
2001 rtx (*gen_insn)(rtx, rtx, rtx);
2002
2003 if (code == PLUS)
2004 {
2005 gen_old_insn = gen_adddi3_old;
2006 gen_si_insn = gen_addsi3;
2007 gen_insn = gen_adcdi3;
2008 }
2009 else if (code == MINUS)
2010 {
2011 gen_old_insn = gen_subdi3_old;
2012 gen_si_insn = gen_subsi3;
2013 gen_insn = gen_sbcdi3;
2014 }
2015 else
2016 gcc_unreachable ();
2017
2018 /* If this is addition (thus operands are commutative) and if there is one
 2019     addend that duplicates the destination, we want that addend to be the
2020 first addend. */
2021 if (code == PLUS
2022 && rtx_equal_p (operands[0], operands[2])
2023 && !rtx_equal_p (operands[1], operands[2]))
2024 {
2025 temp = operands[2];
2026 operands[2] = operands[1];
2027 operands[1] = temp;
2028 }
2029
2030 if (!TARGET_QMATH)
2031 {
2032 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
2033 }
2034 else if (hi_only)
2035 {
2036 if (!rtx_equal_p (operands[0], operands[1])
2037 && (REG_P (operands[0]) && MEM_P (operands[1])))
2038 {
2039 emit_move_insn (operands[0], operands[1]);
2040 operands[1] = operands[0];
2041 }
2042
2043 operands[0] = fixup_mathdi_operand (operands[0], DImode);
2044 operands[1] = fixup_mathdi_operand (operands[1], DImode);
2045 operands[2] = fixup_mathdi_operand (operands[2], DImode);
2046
2047 if (!rtx_equal_p (operands[0], operands[1]))
2048 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
2049 operand_subword (operands[1], 0, 0, DImode));
2050
2051 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
2052 operand_subword (operands[1], 1, 0, DImode),
2053 operand_subword (operands[2], 1, 0, DImode)));
2054 }
2055 else
2056 {
294ca9ec
MR
2057 /* If we are adding a value to itself, that's really a multiply by 2,
2058 and that's just a left shift by 1. If subtracting, it's just 0. */
c4e75102
MT
2059 if (rtx_equal_p (operands[1], operands[2]))
2060 {
294ca9ec
MR
2061 if (code == PLUS)
2062 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
2063 else
2064 emit_move_insn (operands[0], const0_rtx);
c4e75102
MT
2065 return;
2066 }
2067
2068 operands[0] = fixup_mathdi_operand (operands[0], DImode);
2069
2070 /* If an operand is the same as operand[0], use the operand[0] rtx
 2071	 because fixup will return an equivalent rtx but not an equal one.  */
2072
2073 if (rtx_equal_p (operands[0], operands[1]))
2074 operands[1] = operands[0];
2075 else
2076 operands[1] = fixup_mathdi_operand (operands[1], DImode);
2077
2078 if (rtx_equal_p (operands[0], operands[2]))
2079 operands[2] = operands[0];
2080 else
2081 operands[2] = fixup_mathdi_operand (operands[2], DImode);
2082
ffb1dcf6
MR
2083 /* If we are adding or subtracting 0, then this is a move. */
2084 if (code == PLUS && operands[1] == const0_rtx)
2085 {
2086 temp = operands[2];
2087 operands[2] = operands[1];
2088 operands[1] = temp;
2089 }
2090 if (operands[2] == const0_rtx)
2091 {
2092 emit_move_insn (operands[0], operands[1]);
2093 return;
2094 }
2095
c4e75102
MT
2096 /* If we are subtracting not from ourselves [d = a - b], and because the
2097 carry ops are two operand only, we would need to do a move prior to
 2098	 the subtract.  And if d == b, we would need a temp; otherwise
 2099	 [d = a, d -= d] ends up with 0.  Instead we rewrite d = a - b
2100 into d = -b, d += a. Since -b can never overflow, even if b == d,
2101 no temp is needed.
2102
2103 If we are doing addition, since the carry ops are two operand, if
2104 we aren't adding to ourselves, move the first addend to the
2105 destination first. */
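      /* Concretely, with a constant minuend the code below turns d = 5 - b
	 into d = 0 - b (via the sbcdi3 pattern) followed by d = d + 5 (via
	 the adcdi3 pattern), while d = a - b with a in a register becomes
	 d = a; d -= b.  (The constant 5 is just an example.)  */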
2106
2107 gcc_assert (operands[1] != const0_rtx || code == MINUS);
2108 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2109 {
2110 if (code == MINUS && CONSTANT_P (operands[1]))
2111 {
c4e75102
MT
2112 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2113 code = PLUS;
2114 gen_insn = gen_adcdi3;
2115 operands[2] = operands[1];
2116 operands[1] = operands[0];
2117 }
2118 else
2119 emit_move_insn (operands[0], operands[1]);
2120 }
2121
2122 /* Subtracting a constant will have been rewritten to an addition of the
2123 negative of that constant before we get here. */
2124 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2125 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
2126 }
2127}
2128
3814318d
RH
2129/* Output assembler code for a block containing the constant parts
2130 of a trampoline, leaving space for the variable parts. */
2131
2132/* On the VAX, the trampoline contains an entry mask and two instructions:
2133 .word NN
2134 movl $STATIC,r0 (store the functions static chain)
2135 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2136
2137static void
2138vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2139{
2140 assemble_aligned_integer (2, const0_rtx);
2141 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2142 assemble_aligned_integer (4, const0_rtx);
2143 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2144 assemble_aligned_integer (2, GEN_INT (0x9f17));
2145 assemble_aligned_integer (4, const0_rtx);
2146}
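/* The 15 bytes assembled above are laid out as:

     offset  0:  .word 0	entry mask, later copied from FUNCTION
     offset  2:  0xd0 0x8f	movl with an immediate source ...
     offset  4:  .long 0	... to be filled with the static chain value
     offset  8:  0x5N		... register destination, N = STATIC_CHAIN_REGNUM
     offset  9:  0x17 0x9f	jmp with an absolute address ...
     offset 11:  .long 0	... to be filled with FUNCTION + 2

   (0xd0 and 0x17 are the VAX movl and jmp opcodes; 0x8f and 0x9f are the
   immediate and absolute addressing-mode bytes.)  vax_trampoline_init below
   patches offsets 0, 4 and 11.  */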
2147
2148/* We copy the register-mask from the function's pure code
2149 to the start of the trampoline. */
2150
2151static void
2152vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2153{
2154 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2155 rtx mem;
2156
2157 emit_block_move (m_tramp, assemble_trampoline_template (),
2158 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2159
2160 mem = adjust_address (m_tramp, HImode, 0);
2161 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2162
2163 mem = adjust_address (m_tramp, SImode, 4);
2164 emit_move_insn (mem, cxt);
2165 mem = adjust_address (m_tramp, SImode, 11);
0a81f074 2166 emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
3814318d
RH
2167 emit_insn (gen_sync_istream ());
2168}
2169
079e7538
NF
2170/* Value is the number of bytes of arguments automatically
2171 popped when returning from a subroutine call.
2172 FUNDECL is the declaration node of the function (as a tree),
2173 FUNTYPE is the data type of the function (as a tree),
2174 or for a library call it is an identifier node for the subroutine name.
2175 SIZE is the number of bytes of arguments passed on the stack.
2176
 2177   On the VAX, the RET insn pops at most 255 argument longwords (255 * 4 bytes) for any function.  */
2178
a20c5714 2179static poly_int64
079e7538 2180vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
a20c5714 2181 tree funtype ATTRIBUTE_UNUSED, poly_int64 size)
079e7538 2182{
a20c5714 2183 return size > 255 * 4 ? 0 : (HOST_WIDE_INT) size;
079e7538 2184}
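/* So a call that pushes, say, three int arguments (12 bytes) has them popped
   by the callee's ret, whereas anything larger than 255 longwords (1020
   bytes) is left for the caller to remove.  */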
8f8a46ba 2185
6783fdb7 2186/* Implement TARGET_FUNCTION_ARG. On the VAX all args are pushed. */
8f8a46ba
NF
2187
2188static rtx
6783fdb7 2189vax_function_arg (cumulative_args_t, const function_arg_info &)
8f8a46ba
NF
2190{
2191 return NULL_RTX;
2192}
2193
6930c98c 2194/* Update the data in CUM to advance over argument ARG. */
8f8a46ba
NF
2195
2196static void
6930c98c
RS
2197vax_function_arg_advance (cumulative_args_t cum_v,
2198 const function_arg_info &arg)
8f8a46ba 2199{
d5cc9181
JR
2200 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2201
6930c98c 2202 *cum += (arg.promoted_size_in_bytes () + 3) & ~3;
8f8a46ba 2203}
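/* Every argument therefore occupies a whole number of longwords of stack:
   e.g. a char argument advances CUM by 4 and a 6-byte struct by 8.  */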
2a31c321
RS
2204
2205static HOST_WIDE_INT
2206vax_starting_frame_offset (void)
2207{
2208 /* On ELF targets, reserve the top of the stack for exception handler
2209 stackadj value. */
2210 return TARGET_ELF ? -4 : 0;
2211}
2212