/* Subroutines for insn-output.cc for VAX.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "calls.h"
#include "varasm.h"
#include "conditions.h"
#include "output.h"
#include "expr.h"
#include "reload.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static void vax_option_override (void);
static bool vax_legitimate_address_p (machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
static machine_mode vax_cc_modes_compatible (machine_mode, machine_mode);
static rtx_insn *vax_md_asm_adjust (vec<rtx> &, vec<rtx> &,
                                    vec<machine_mode> &, vec<const char *> &,
                                    vec<rtx> &, HARD_REG_SET &, location_t);
static rtx vax_function_arg (cumulative_args_t, const function_arg_info &);
static void vax_function_arg_advance (cumulative_args_t,
                                      const function_arg_info &);
static rtx vax_struct_value_rtx (tree, int);
static bool vax_lra_p (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static poly_int64 vax_return_pops_args (tree, tree, poly_int64);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
static HOST_WIDE_INT vax_starting_frame_offset (void);
\f
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Enable compare elimination pass.  */
#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM VAX_PSL_REGNUM

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

/* Return the narrowest CC mode that spans both modes offered.  */
#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE vax_cc_modes_compatible

/* Mark PSL as clobbered for compatibility with the CC0 representation.  */
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST vax_md_asm_adjust

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_LRA_P
#define TARGET_LRA_P vax_lra_p

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET vax_starting_frame_offset

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;
\f
/* Set global variables as needed for the options enabled.  */

static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}

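/* Record in a REG_CFA_OFFSET note on INSN that SRC has been saved at
   OFFSET bytes from the frame pointer, for the benefit of the CFI
   machinery.  */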
static void
vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
{
  rtx x;

  x = plus_constant (Pmode, frame_pointer_rtx, offset);
  x = gen_rtx_MEM (SImode, x);
  x = gen_rtx_SET (x, src);
  add_reg_note (insn, REG_CFA_OFFSET, x);
}

/* Emit RTL for the prologue of the current function.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;
  HOST_WIDE_INT size;
  rtx insn;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
      mask |= 1 << regno;

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is as follows:

                <- CFA, AP
        r11
        r10
        ...     Registers saved as specified by MASK
        r3
        r2
        return-addr
        old fp
        old ap
        old psw
        zero
                <- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);

  offset = 16;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
        vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
        offset += 4;
      }

  /* Because add_reg_note pushes the notes, adding this last means that
     it will be processed first.  This is required to allow the other
     notes to be interpreted properly.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
                plus_constant (Pmode, frame_pointer_rtx, offset));

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= vax_starting_frame_offset ();
  emit_insn (gen_addsi3 (stack_pointer_rtx,
                         stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}

/* When debugging with stabs, we want to output an extra dummy label
   so that gas can distinguish between D_float and G_float prior to
   processing the .stabs directive identifying type double.  */
static void
vax_file_start (void)
{
  default_file_start ();
}

/* We can use the BSD C library routines for the libgcc calls that are
   still generated, since that's what they boil down to anyway.  When
   ELF, avoid the user's namespace.  */

static void
vax_init_libfuncs (void)
{
  if (TARGET_BSD_DIVMOD)
    {
      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
    }
}

/* Split each of the N quadword (DImode) OPERANDS into SImode halves,
   returning one half of each in LOW and replacing OPERANDS with the other;
   operands whose addresses have side effects are handled specially.  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
                         rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
          && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
              || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
        {
          rtx addr = XEXP (operands[i], 0);
          operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
        }
      else if (optimize_size && MEM_P (operands[i])
               && REG_P (XEXP (operands[i], 0))
               && (code != MINUS || operands[1] != const0_rtx)
               && find_regno_note (insn, REG_DEAD,
                                   REGNO (XEXP (operands[i], 0))))
        {
          low[i] = gen_rtx_MEM (SImode,
                                gen_rtx_POST_INC (Pmode,
                                                  XEXP (operands[i], 0)));
          operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
        }
      else
        {
          low[i] = operand_subword (operands[i], 0, 0, DImode);
          operands[i] = operand_subword (operands[i], 1, 0, DImode);
        }
    }
}
\f
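/* Output to FILE the VAX assembler syntax for the memory address ADDR,
   an RTL address expression.  */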
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
         REG.  One can be either a REG or a MULT/ASHIFT of a REG and an
         appropriate constant, and the third can only be a constant or a MEM.

         We get these two or three things and put the constant or MEM in
         OFFSET, the MULT/ASHIFT or REG in IREG, and the REG in BREG.  If we
         have a register and can't tell yet if it is a base or index register,
         put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
          || MEM_P (XEXP (addr, 0)))
        {
          offset = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
               || MEM_P (XEXP (addr, 1)))
        {
          offset = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (GET_CODE (XEXP (addr, 1)) == MULT
               || GET_CODE (XEXP (addr, 1)) == ASHIFT)
        {
          ireg = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (GET_CODE (XEXP (addr, 0)) == MULT
               || GET_CODE (XEXP (addr, 0)) == ASHIFT)
        {
          ireg = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else if (REG_P (XEXP (addr, 1)))
        {
          reg1 = XEXP (addr, 1);
          addr = XEXP (addr, 0);
        }
      else if (REG_P (XEXP (addr, 0)))
        {
          reg1 = XEXP (addr, 0);
          addr = XEXP (addr, 1);
        }
      else
        gcc_unreachable ();

      if (REG_P (addr))
        {
          if (reg1)
            ireg = addr;
          else
            reg1 = addr;
        }
      else if (GET_CODE (addr) == MULT || GET_CODE (addr) == ASHIFT)
        ireg = addr;
      else
        {
          gcc_assert (GET_CODE (addr) == PLUS);
          if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
              || MEM_P (XEXP (addr, 0)))
            {
              if (offset)
                {
                  if (CONST_INT_P (offset))
                    offset = plus_constant (Pmode, XEXP (addr, 0),
                                            INTVAL (offset));
                  else
                    {
                      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
                      offset = plus_constant (Pmode, offset,
                                              INTVAL (XEXP (addr, 0)));
                    }
                }
              offset = XEXP (addr, 0);
            }
          else if (REG_P (XEXP (addr, 0)))
            {
              if (reg1)
                ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
              else
                reg1 = XEXP (addr, 0);
            }
          else
            {
              gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT
                          || GET_CODE (XEXP (addr, 0)) == ASHIFT);
              gcc_assert (!ireg);
              ireg = XEXP (addr, 0);
            }

          if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
              || MEM_P (XEXP (addr, 1)))
            {
              if (offset)
                {
                  if (CONST_INT_P (offset))
                    offset = plus_constant (Pmode, XEXP (addr, 1),
                                            INTVAL (offset));
                  else
                    {
                      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
                      offset = plus_constant (Pmode, offset,
                                              INTVAL (XEXP (addr, 1)));
                    }
                }
              offset = XEXP (addr, 1);
            }
          else if (REG_P (XEXP (addr, 1)))
            {
              if (reg1)
                ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
              else
                reg1 = XEXP (addr, 1);
            }
          else
            {
              gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT
                          || GET_CODE (XEXP (addr, 1)) == ASHIFT);
              gcc_assert (!ireg);
              ireg = XEXP (addr, 1);
            }
        }

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
        {
          if (breg
              || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
              || (offset
                  && (MEM_P (offset)
                      || (flag_pic && symbolic_operand (offset, SImode)))))
            {
              gcc_assert (!ireg);
              ireg = reg1;
            }
          else
            breg = reg1;
        }

      if (offset != 0)
        {
          if (flag_pic && symbolic_operand (offset, SImode))
            {
              if (breg && ireg)
                {
                  debug_rtx (orig);
                  output_operand_lossage ("symbol used with both base and indexed registers");
                }

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
              if (flag_pic > 1 && GET_CODE (offset) == CONST
                  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
                  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
                {
                  debug_rtx (orig);
                  output_operand_lossage ("symbol with offset used in PIC mode");
                }
#endif

              /* symbol(reg) isn't PIC, but symbol[reg] is.  */
              if (breg)
                {
                  ireg = breg;
                  breg = 0;
                }

            }

          output_address (VOIDmode, offset);
        }

      if (breg != 0)
        fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
        {
          if (GET_CODE (ireg) == MULT || GET_CODE (ireg) == ASHIFT)
            ireg = XEXP (ireg, 0);
          gcc_assert (REG_P (ireg));
          fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
        }
      break;

    default:
      output_addr_const (file, addr);
    }
}

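/* Output operand X to FILE.  CODE is the letter or punctuation character,
   if any, given in the operand specification to select a VAX-specific
   output format.  */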
void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'k')
    fputs (cond_name (x), file);
  else if (code == 'K')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (GET_MODE (x), XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
                       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
                       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
        {
          debug_rtx (x);
          output_operand_lossage ("symbol used as immediate operand");
        }
      putc ('$', file);
      output_addr_const (file, x);
    }
}
\f
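/* Return the VAX condition-code mnemonic suffix (as used in branch
   instructions) for comparison code OP.  */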
const char *
cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case NE:
      return "neq";
    case EQ:
      return "eql";
    case GE:
      return "geq";
    case GT:
      return "gtr";
    case LE:
      return "leq";
    case LT:
      return "lss";
    case GEU:
      return "gequ";
    case GTU:
      return "gtru";
    case LEU:
      return "lequ";
    case LTU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

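/* Likewise, but for the reverse of comparison code OP.  */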
const char *
rev_cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case EQ:
      return "neq";
    case NE:
      return "eql";
    case LT:
      return "geq";
    case LE:
      return "gtr";
    case GT:
      return "leq";
    case GE:
      return "lss";
    case LTU:
      return "gequ";
    case LEU:
      return "gtru";
    case GTU:
      return "lequ";
    case GEU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

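/* Return true if C is a floating-point constant cheap enough to use as
   an immediate operand: only 0.0 and the powers of two from 1 to 64,
   together with their reciprocals, are recognized here.  */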
static bool
vax_float_literal (rtx c)
{
  machine_mode mode;
  const REAL_VALUE_TYPE *r;
  REAL_VALUE_TYPE s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  r = CONST_DOUBLE_REAL_VALUE (c);

  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      real_from_integer (&s, mode, x, SIGNED);

      if (real_equal (r, &s))
        return true;
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (real_equal (r, &s))
        return true;
    }
  return false;
}


/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

        1 - symbolic address
        1 - pre-decrement
        1 - indexing and/or offset(register)
        2 - indirect */


static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
    case ASHIFT:
      indexed = 1;      /* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
        offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;       /* 2 on VAX 2 */
      break;
    case LABEL_REF:     /* this is probably a byte offset from the pc */
      if (offset == 0)
        offset = 1;
      break;
    case PLUS:
      if (plus_op0)
        plus_op1 = XEXP (addr, 0);
      else
        plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;        /* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}

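/* Worker for TARGET_ADDRESS_COST.  */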
static int
vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
                  addr_space_t as ATTRIBUTE_UNUSED,
                  bool speed ATTRIBUTE_UNUSED)
{
  return COSTS_N_INSNS (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
}

/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
               int opno ATTRIBUTE_UNUSED,
               int *total, bool speed ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (x);
  int i = 0;                               /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
         1 byte literal constant format.  Compare to -1 should be made cheap
         so that decrement-and-branch insns can be formed more easily (if
         the value -1 is copied to a register some decrement-and-branch
         patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
        {
          *total = COSTS_N_INSNS (1) / 2;
          return true;
        }
      if (outer_code == AND)
        {
          *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077
                    ? COSTS_N_INSNS (1) : COSTS_N_INSNS (2));
          return true;
        }
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
          || (outer_code == COMPARE
              && INTVAL (x) == -1)
          || ((outer_code == PLUS || outer_code == MINUS)
              && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (3);
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        *total = vax_float_literal (x) ? COSTS_N_INSNS (5) : COSTS_N_INSNS (8);
      else
        *total = ((CONST_DOUBLE_HIGH (x) == 0
                   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
                  || (outer_code == PLUS
                      && CONST_DOUBLE_HIGH (x) == -1
                      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64)
                  ? COSTS_N_INSNS (2) : COSTS_N_INSNS (5));
      return true;

    case POST_INC:
      *total = COSTS_N_INSNS (2);
      return true;              /* Implies register operand.  */

    case PRE_DEC:
      *total = COSTS_N_INSNS (3);
      return true;              /* Implies register operand.  */

    case MULT:
      switch (mode)
        {
        case E_DFmode:
          *total = COSTS_N_INSNS (16);  /* 4 on VAX 9000 */
          break;
        case E_SFmode:
          *total = COSTS_N_INSNS (9);   /* 4 on VAX 9000, 12 on VAX 2 */
          break;
        case E_DImode:
          *total = COSTS_N_INSNS (16);  /* 6 on VAX 9000, 28 on VAX 2 */
          break;
        case E_SImode:
        case E_HImode:
        case E_QImode:
          *total = COSTS_N_INSNS (10);  /* 3-4 on VAX 9000, 20-28 on VAX 2 */
          break;
        default:
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      break;

    case UDIV:
      if (mode != SImode)
        {
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      *total = COSTS_N_INSNS (17);
      break;

    case DIV:
      if (mode == DImode)
        *total = COSTS_N_INSNS (30);    /* Highly variable.  */
      else if (mode == DFmode)
        /* divide takes 28 cycles if the result is not zero, 13 otherwise */
        *total = COSTS_N_INSNS (24);
      else
        *total = COSTS_N_INSNS (11);    /* 25 on VAX 2 */
      break;

    case MOD:
      *total = COSTS_N_INSNS (23);
      break;

    case UMOD:
      if (mode != SImode)
        {
          *total = MAX_COST;    /* Mode is not supported.  */
          return true;
        }
      *total = COSTS_N_INSNS (29);
      break;

    case FLOAT:
      *total = COSTS_N_INSNS (6         /* 4 on VAX 9000 */
                              + (mode == DFmode)
                              + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = COSTS_N_INSNS (7);       /* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
        *total = COSTS_N_INSNS (12);
      else
        *total = COSTS_N_INSNS (10);    /* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = COSTS_N_INSNS (6);       /* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
        fmt = "e";      /* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode          /* 6/8 on VAX 9000, 16/15 on VAX 2 */
                ? COSTS_N_INSNS (13) : COSTS_N_INSNS (8));
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
          && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
        fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = COSTS_N_INSNS (3);
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = COSTS_N_INSNS (3);
      if (CONST_INT_P (XEXP (x, 0)))
        {
          if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
            *total = COSTS_N_INSNS (4);
          fmt = "e";
          i = 1;
        }
      break;

    case NEG:
      if (mode == DFmode)
        *total = COSTS_N_INSNS (9);
      else if (mode == SFmode)
        *total = COSTS_N_INSNS (6);
      else if (mode == DImode)
        *total = COSTS_N_INSNS (4);
      else
        *total = COSTS_N_INSNS (2);
      break;

    case NOT:
      *total = COSTS_N_INSNS (2);
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = COSTS_N_INSNS (15);
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
        *total = COSTS_N_INSNS (5);     /* 7 on VAX 2 */
      else
        *total = COSTS_N_INSNS (3);     /* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
        *total += COSTS_N_INSNS (vax_address_cost_1 (x));
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = COSTS_N_INSNS (3);       /* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
         (in which case the relevant cost is of the operand inside
         the not) and not likely to be found anywhere else.  */
      if (code == NOT)
        op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
        {
        case CONST_INT:
          if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
              && mode != QImode)
            *total += COSTS_N_INSNS (1);        /* 2 on VAX 2 */
          break;
        case CONST:
        case LABEL_REF:
        case SYMBOL_REF:
          *total += COSTS_N_INSNS (1);          /* 2 on VAX 2 */
          break;
        case CONST_DOUBLE:
          if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
            {
              /* Registers are faster than floating point constants -- even
                 those constants which can be encoded in a single byte.  */
              if (vax_float_literal (op))
                *total += COSTS_N_INSNS (1);
              else
                *total += (GET_MODE (x) == DFmode
                           ? COSTS_N_INSNS (3) : COSTS_N_INSNS (2));
            }
          else
            {
              if (CONST_DOUBLE_HIGH (op) != 0
                  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
                *total += COSTS_N_INSNS (2);
            }
          break;
        case MEM:
          *total += COSTS_N_INSNS (1);          /* 2 on VAX 2 */
          if (!REG_P (XEXP (op, 0)))
            *total += COSTS_N_INSNS (vax_address_cost_1 (XEXP (op, 0)));
          break;
        case REG:
        case SUBREG:
          break;
        default:
          *total += COSTS_N_INSNS (1);
          break;
        }
    }
  return true;
}
\f
/* With ELF we do not support GOT entries for external `symbol+offset'
   references, so do not accept external symbol references if an offset
   is to be added.  Do not accept external symbol references at all if
   LOCAL_P is set.  This is for cases where making a reference indirect
   would make it invalid.  Do not accept any kind of symbols if SYMBOL_P
   is clear.  This is for situations where a reference is used as an
   immediate value for operations other than address loads (MOVA/PUSHA),
   as those operations do not support PC-relative immediates.  */

bool
vax_acceptable_pic_operand_p (rtx x ATTRIBUTE_UNUSED,
                              bool local_p ATTRIBUTE_UNUSED,
                              bool symbol_p ATTRIBUTE_UNUSED)
{
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    {
      x = XEXP (XEXP (x, 0), 0);
      local_p = true;
    }
  switch (GET_CODE (x))
    {
    case SYMBOL_REF:
      return symbol_p && !(local_p && !SYMBOL_REF_LOCAL_P (x));
    case LABEL_REF:
      return symbol_p && !(local_p && LABEL_REF_NONLOCAL_P (x));
    default:
      break;
    }
#endif
  return true;
}
\f
/* Given a comparison code (NE, EQ, etc.) and the operands of a COMPARE,
   return the mode to be used for the comparison.  As we have the same
   interpretation of condition codes across all the instructions we just
   return the narrowest mode suitable for the comparison code requested.  */

extern machine_mode
vax_select_cc_mode (enum rtx_code op,
                    rtx x ATTRIBUTE_UNUSED, rtx y ATTRIBUTE_UNUSED)
{
  switch (op)
    {
    default:
      gcc_unreachable ();
    case NE:
    case EQ:
      return CCZmode;
    case GE:
    case LT:
      return CCNmode;
    case GT:
    case LE:
      return CCNZmode;
    case GEU:
    case GTU:
    case LEU:
    case LTU:
      return CCmode;
    }
}

/* Return the narrowest CC mode that spans both modes offered.  If they
   intersect, this will be the wider of the two, and if they do not then
   find one that is a superset of both (i.e. CCNZmode for a pair
   consisting of CCNmode and CCZmode).  A wider CC writer will satisfy
   a narrower CC reader, e.g. a comparison operator that uses CCZmode
   can use a CCNZmode output of a previous instruction.  */

static machine_mode
vax_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  switch (m1)
    {
    default:
      gcc_unreachable ();
    case E_CCmode:
      switch (m2)
        {
        default:
          gcc_unreachable ();
        case E_CCmode:
        case E_CCNZmode:
        case E_CCNmode:
        case E_CCZmode:
          return m1;
        }
    case E_CCNZmode:
      switch (m2)
        {
        default:
          gcc_unreachable ();
        case E_CCmode:
          return m2;
        case E_CCNmode:
        case E_CCNZmode:
        case E_CCZmode:
          return m1;
        }
    case E_CCNmode:
    case E_CCZmode:
      switch (m2)
        {
        default:
          gcc_unreachable ();
        case E_CCmode:
        case E_CCNZmode:
          return m2;
        case E_CCNmode:
        case E_CCZmode:
          return m1 == m2 ? m1 : E_CCNZmode;
        }
    }
}
\f
/* Mark PSL as clobbered for compatibility with the CC0 representation.  */

static rtx_insn *
vax_md_asm_adjust (vec<rtx> &outputs ATTRIBUTE_UNUSED,
                   vec<rtx> &inputs ATTRIBUTE_UNUSED,
                   vec<machine_mode> &input_modes ATTRIBUTE_UNUSED,
                   vec<const char *> &constraints ATTRIBUTE_UNUSED,
                   vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs,
                   location_t /*loc*/)
{
  clobbers.safe_push (gen_rtx_REG (CCmode, VAX_PSL_REGNUM));
  SET_HARD_REG_BIT (clobbered_regs, VAX_PSL_REGNUM);
  return NULL;
}
\f
/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
        .mask   ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
        addl2   $DELTA, 4(ap)   #adjust first argument
        jmp     FUNCTION+2      #jump beyond FUNCTION's entry mask
*/

static void
vax_output_mi_thunk (FILE * file,
                     tree thunk ATTRIBUTE_UNUSED,
                     HOST_WIDE_INT delta,
                     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                     tree function)
{
  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk));

  assemble_start_function (thunk, fnname);
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  fprintf (file, "+2\n");
  assemble_end_function (thunk, fnname);
}
\f
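/* Worker for TARGET_STRUCT_VALUE_RTX: return the register used to pass
   the address of a structure return value.  */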
static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
                      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}

/* Return true if we use LRA instead of reload pass.  */

static bool
vax_lra_p (void)
{
  return TARGET_LRA;
}

/* Output integer move instructions.  */

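/* Return true if a DImode move with the given OPERANDS can be split into
   a pair of SImode moves.  */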
bool
vax_maybe_split_dimode_move (rtx *operands)
{
  return (TARGET_QMATH
          && (!MEM_P (operands[0])
              || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
              || GET_CODE (XEXP (operands[0], 0)) == POST_INC
              || !illegal_addsub_di_memory_operand (operands[0], DImode))
          && ((CONST_INT_P (operands[1])
               && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
              || GET_CODE (operands[1]) == CONST_DOUBLE));
}

const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
                     machine_mode mode)
{
  rtx hi[3], lo[3];
  const char *pattern_hi, *pattern_lo;
  bool push_p;

  switch (mode)
    {
    case E_DImode:
      if (operands[1] == const0_rtx)
        return "clrq %0";
      if (TARGET_QMATH && optimize_size
          && (CONST_INT_P (operands[1])
              || GET_CODE (operands[1]) == CONST_DOUBLE))
        {
          unsigned HOST_WIDE_INT hval, lval;
          int n;

          if (GET_CODE (operands[1]) == CONST_DOUBLE)
            {
              gcc_assert (HOST_BITS_PER_WIDE_INT != 64);

              /* Make sure only the low 32 bits are valid.  */
              lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
              hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
            }
          else
            {
              lval = INTVAL (operands[1]);
              hval = 0;
            }

          /* Check whether the 64-bit value is really a 6-bit value shifted
             left by some arbitrary amount.  If so, we can use ashq to emit
             it, saving 7 bytes (1 addr-mode byte + 8 immediate bytes
             - 1 shift byte - 1 short literal byte).  */
          if (lval != 0
              && (n = exact_log2 (lval & (- lval))) != -1
              && (lval >> n) < 64)
            {
              lval >>= n;

              /* On 32bit platforms, if the 6bits didn't overflow into the
                 upper 32bit value that value better be 0.  If we have
                 overflowed, make sure it wasn't too much.  */
              if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
                {
                  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
                    n = 0;      /* failure */
                  else
                    lval |= hval << (32 - n);
                }
              /* If n is 0, then ashq is not the best way to emit this.  */
              if (n > 0)
                {
                  operands[1] = GEN_INT (lval);
                  operands[2] = GEN_INT (n);
                  return "ashq %2,%D1,%0";
                }
#if HOST_BITS_PER_WIDE_INT == 32
            }
          /* On 32bit platforms, if the low 32bit value is 0, checkout the
             upper 32bit value.  */
          else if (hval != 0
                   && (n = exact_log2 (hval & (- hval)) - 1) != -1
                   && (hval >> n) < 64)
            {
              operands[1] = GEN_INT (hval >> n);
              operands[2] = GEN_INT (n + 32);
              return "ashq %2,%D1,%0";
#endif
            }
        }

      if (vax_maybe_split_dimode_move (operands))
        {
          hi[0] = operands[0];
          hi[1] = operands[1];

          split_quadword_operands (insn, SET, hi, lo, 2);

          pattern_lo = vax_output_int_move (NULL, lo, SImode);
          pattern_hi = vax_output_int_move (NULL, hi, SImode);

          /* If the patterns are just movl/movl or pushl/pushl then a movq
             will be shorter (1 opcode byte + 1 addrmode byte + 8 immediate
             value bytes vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate
             value bytes).  */
          if ((startswith (pattern_lo, "movl")
               && startswith (pattern_hi, "movl"))
              || (startswith (pattern_lo, "pushl")
                  && startswith (pattern_hi, "pushl")))
            return "movq %1,%0";

          if (MEM_P (operands[0])
              && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
            {
              output_asm_insn (pattern_hi, hi);
              operands[0] = lo[0];
              operands[1] = lo[1];
              operands[2] = lo[2];
              return pattern_lo;
            }
          else
            {
              output_asm_insn (pattern_lo, lo);
              operands[0] = hi[0];
              operands[1] = hi[1];
              operands[2] = hi[2];
              return pattern_hi;
            }
        }
      return "movq %1,%0";

    case E_SImode:
      push_p = push_operand (operands[0], SImode);

      if (symbolic_operand (operands[1], SImode))
        return push_p ? "pushab %a1" : "movab %a1,%0";

      if (operands[1] == const0_rtx)
        return push_p ? "pushl %1" : "clrl %0";

      if (CONST_INT_P (operands[1])
          && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          int n;
          if ((unsigned HOST_WIDE_INT)(~i) < 64)
            return "mcoml %N1,%0";
          if ((unsigned HOST_WIDE_INT)i < 0x100)
            return "movzbl %1,%0";
          if (i >= -0x80 && i < 0)
            return "cvtbl %1,%0";
          if (optimize_size
              && (n = exact_log2 (i & (-i))) != -1
              && ((unsigned HOST_WIDE_INT)i >> n) < 64)
            {
              operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
              operands[2] = GEN_INT (n);
              return "ashl %2,%1,%0";
            }
          if ((unsigned HOST_WIDE_INT)i < 0x10000)
            return "movzwl %1,%0";
          if (i >= -0x8000 && i < 0)
            return "cvtwl %1,%0";
        }
      return push_p ? "pushl %1" : "movl %1,%0";

    case E_HImode:
      if (CONST_INT_P (operands[1]))
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          if (i == 0)
            return "clrw %0";
          else if ((unsigned HOST_WIDE_INT)i < 64)
            return "movw %1,%0";
          else if ((unsigned HOST_WIDE_INT)~i < 64)
            return "mcomw %H1,%0";
          else if ((unsigned HOST_WIDE_INT)i < 256)
            return "movzbw %1,%0";
          else if (i >= -0x80 && i < 0)
            return "cvtbw %1,%0";
        }
      return "movw %1,%0";

    case E_QImode:
      if (CONST_INT_P (operands[1]))
        {
          HOST_WIDE_INT i = INTVAL (operands[1]);
          if (i == 0)
            return "clrb %0";
          else if ((unsigned HOST_WIDE_INT)~i < 64)
            return "mcomb %B1,%0";
        }
      return "movb %1,%0";

    default:
      gcc_unreachable ();
    }
}

/* Output integer add instructions.

   The space-time-opcode tradeoffs for addition vary by model of VAX.

   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.

   "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
   faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
   a register is used in an address too soon after it is set.
   Compromise by using movab only when it is shorter than the add
   or the base register in the address is one of sp, ap, and fp,
   which are not modified very often.  */

const char *
vax_output_int_add (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
        rtx low[3];
        const char *pattern;
        int carry = 1;
        bool sub;

        if (TARGET_QMATH && 0)
          debug_rtx (insn);

        split_quadword_operands (insn, PLUS, operands, low, 3);

        if (TARGET_QMATH)
          {
            gcc_assert (rtx_equal_p (operands[0], operands[1]));
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
            gcc_assert (!flag_pic
                        || !non_pic_external_memory_operand (low[2], SImode));
            gcc_assert (!flag_pic
                        || !non_pic_external_memory_operand (low[0], SImode));
#endif

            /* No reason to add a 0 to the low part and thus no carry, so just
               emit the appropriate add/sub instruction.  */
            if (low[2] == const0_rtx)
              return vax_output_int_add (NULL, operands, SImode);

            /* Are we doing addition or subtraction?  */
            sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;

            /* We can't use vax_output_int_add since some of the patterns
               don't modify the carry bit.  */
            if (sub)
              {
                if (low[2] == constm1_rtx)
                  pattern = "decl %0";
                else
                  pattern = "subl2 $%n2,%0";
              }
            else
              {
                if (low[2] == const1_rtx)
                  pattern = "incl %0";
                else
                  pattern = "addl2 %2,%0";
              }
            output_asm_insn (pattern, low);

            /* In 2's complement, -n = ~n + 1.  Since we are dealing with
               two 32bit parts, we complement each and then add one to
               the low part.  We know that the low part can't overflow since
               its value can never be 0.  */
            if (sub)
              return "sbwc %N2,%0";
            return "adwc %2,%0";
          }

        /* Add low parts.  */
        if (rtx_equal_p (operands[0], operands[1]))
          {
            if (low[2] == const0_rtx)
              /* Should examine operand, punt if not POST_INC.  */
              pattern = "tstl %0", carry = 0;
            else if (low[2] == const1_rtx)
              pattern = "incl %0";
            else
              pattern = "addl2 %2,%0";
          }
        else
          {
            if (low[2] == const0_rtx)
              pattern = "movl %1,%0", carry = 0;
            else
              pattern = "addl3 %2,%1,%0";
          }
        if (pattern)
          output_asm_insn (pattern, low);
        if (!carry)
          /* If CARRY is 0, we don't have any carry value to worry about.  */
          return get_insn_template (CODE_FOR_addsi3, insn);
        /* %0 = C + %1 + %2 */
        if (!rtx_equal_p (operands[0], operands[1]))
          output_asm_insn ((operands[1] == const0_rtx
                            ? "clrl %0"
                            : "movl %1,%0"), operands);
        return "adwc %2,%0";
      }

    case E_SImode:
      if (rtx_equal_p (operands[0], operands[1]))
        {
          if (operands[2] == const1_rtx)
            return "incl %0";
          if (operands[2] == constm1_rtx)
            return "decl %0";
          if (CONST_INT_P (operands[2])
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
            return "subl2 $%n2,%0";
          if (CONST_INT_P (operands[2])
              && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
              && REG_P (operands[1])
              && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
                  || REGNO (operands[1]) > 11))
            return "movab %c2(%1),%0";
          if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
            return "movab %a2[%0],%0";
          return "addl2 %2,%0";
        }

      if (rtx_equal_p (operands[0], operands[2]))
        {
          if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
            return "movab %a1[%0],%0";
          return "addl2 %1,%0";
        }

      if (CONST_INT_P (operands[2])
          && INTVAL (operands[2]) < 32767
          && INTVAL (operands[2]) > -32768
          && REG_P (operands[1])
          && push_operand (operands[0], SImode))
        return "pushab %c2(%1)";

      if (CONST_INT_P (operands[2])
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
        return "subl3 $%n2,%1,%0";

      if (CONST_INT_P (operands[2])
          && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
          && REG_P (operands[1])
          && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
              || REGNO (operands[1]) > 11))
        return "movab %c2(%1),%0";

      /* Add this if using gcc on a VAX 3xxx:
      if (REG_P (operands[1]) && REG_P (operands[2]))
        return "movab (%1)[%2],%0";
      */

      if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
        {
          if (push_operand (operands[0], SImode))
            return "pushab %a2[%1]";
          return "movab %a2[%1],%0";
        }

      if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
        {
          if (push_operand (operands[0], SImode))
            return "pushab %a1[%2]";
          return "movab %a1[%2],%0";
        }

      if (flag_pic && REG_P (operands[0])
          && symbolic_operand (operands[2], SImode))
        return "movab %a2,%0;addl2 %1,%0";

      if (flag_pic
          && (symbolic_operand (operands[1], SImode)
              || symbolic_operand (operands[2], SImode)))
        debug_rtx (insn);

      return "addl3 %1,%2,%0";

    case E_HImode:
      if (rtx_equal_p (operands[0], operands[1]))
        {
          if (operands[2] == const1_rtx)
            return "incw %0";
          if (operands[2] == constm1_rtx)
            return "decw %0";
          if (CONST_INT_P (operands[2])
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
            return "subw2 $%n2,%0";
          return "addw2 %2,%0";
        }
      if (rtx_equal_p (operands[0], operands[2]))
        return "addw2 %1,%0";
      if (CONST_INT_P (operands[2])
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
        return "subw3 $%n2,%1,%0";
      return "addw3 %1,%2,%0";

    case E_QImode:
      if (rtx_equal_p (operands[0], operands[1]))
        {
          if (operands[2] == const1_rtx)
            return "incb %0";
          if (operands[2] == constm1_rtx)
            return "decb %0";
          if (CONST_INT_P (operands[2])
              && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
            return "subb2 $%n2,%0";
          return "addb2 %2,%0";
        }
      if (rtx_equal_p (operands[0], operands[2]))
        return "addb2 %1,%0";
      if (CONST_INT_P (operands[2])
          && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
        return "subb3 $%n2,%1,%0";
      return "addb3 %1,%2,%0";

    default:
      gcc_unreachable ();
    }
}

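/* Output integer subtract instructions; only the DImode case is handled
   here.  */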
const char *
vax_output_int_subtract (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
        rtx low[3];
        const char *pattern;
        int carry = 1;

        if (TARGET_QMATH && 0)
          debug_rtx (insn);

        split_quadword_operands (insn, MINUS, operands, low, 3);

        if (TARGET_QMATH)
          {
            if (operands[1] == const0_rtx && low[1] == const0_rtx)
              {
                /* Negation is tricky.  It's basically complement and
                   increment.  Negate hi, then lo, and subtract the carry
                   back.  */
                if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
                    || (MEM_P (operands[0])
                        && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
                  fatal_insn ("illegal operand detected", insn);
                output_asm_insn ("mnegl %2,%0", operands);
                output_asm_insn ("mnegl %2,%0", low);
                return "sbwc $0,%0";
              }
            gcc_assert (rtx_equal_p (operands[0], operands[1]));
            gcc_assert (rtx_equal_p (low[0], low[1]));
            if (low[2] == const1_rtx)
              output_asm_insn ("decl %0", low);
            else
              output_asm_insn ("subl2 %2,%0", low);
            return "sbwc %2,%0";
          }

        /* Subtract low parts.  */
        if (rtx_equal_p (operands[0], operands[1]))
          {
            if (low[2] == const0_rtx)
              pattern = 0, carry = 0;
            else if (low[2] == constm1_rtx)
              pattern = "decl %0";
            else
              pattern = "subl2 %2,%0";
          }
        else
          {
            if (low[2] == constm1_rtx)
              pattern = "decl %0";
            else if (low[2] == const0_rtx)
              pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
            else
              pattern = "subl3 %2,%1,%0";
          }
        if (pattern)
          output_asm_insn (pattern, low);
        if (carry)
          {
            if (!rtx_equal_p (operands[0], operands[1]))
              return "movl %1,%0;sbwc %2,%0";
            return "sbwc %2,%0";
            /* %0 = %2 - %1 - C */
          }
        return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
    }
}

c4e75102 1730/* True if X is an rtx for a constant that is a valid address. */
fbf55580 1731
c4e75102 1732bool
fbf55580
MT
1733legitimate_constant_address_p (rtx x)
1734{
c4e75102
MT
1735 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1736 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1737 return true;
1738 if (GET_CODE (x) != CONST)
1739 return false;
1740#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1741 if (flag_pic
1742 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1743 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1744 return false;
1745#endif
1746 return true;
fbf55580
MT
1747}
1748
fbf55580
MT
1749/* The other macros defined here are used only in legitimate_address_p (). */
1750
1751/* Nonzero if X is a hard reg that can be used as an index
1752 or, if not strict, if it is a pseudo reg. */
b20f13e9 1753#define INDEX_REGISTER_P(X, STRICT) \
ff9d4590 1754(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
fbf55580
MT
1755
1756/* Nonzero if X is a hard reg that can be used as a base reg
1757 or, if not strict, if it is a pseudo reg. */
b20f13e9 1758#define BASE_REGISTER_P(X, STRICT) \
ff9d4590 1759(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
fbf55580
MT
1760
1761#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1762
1763/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1764 are no SYMBOL_REFs for external symbols present. */
1765
c4e75102
MT
1766static bool
1767indirectable_constant_address_p (rtx x, bool indirect)
fbf55580 1768{
c4e75102
MT
1769 if (GET_CODE (x) == SYMBOL_REF)
1770 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1771
1772 if (GET_CODE (x) == CONST)
1773 return !flag_pic
1774 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1775 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1776
1777 return CONSTANT_ADDRESS_P (x);
fbf55580
MT
1778}
1779
1780#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1781
c4e75102
MT
1782static bool
1783indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
fbf55580
MT
1784{
1785 return CONSTANT_ADDRESS_P (x);
1786}
1787
1788#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1789
c4e75102 1790/* True if X is an address which can be indirected. External symbols
fbf55580
MT
1791 could be in a sharable image library, so we disallow those. */
1792
c4e75102
MT
1793static bool
1794indirectable_address_p (rtx x, bool strict, bool indirect)
fbf55580 1795{
c4e75102
MT
1796 if (indirectable_constant_address_p (x, indirect)
1797 || BASE_REGISTER_P (x, strict))
1798 return true;
1799 if (GET_CODE (x) != PLUS
1800 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1801 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1802 return false;
1803 return indirectable_constant_address_p (XEXP (x, 1), indirect);
fbf55580
MT
1804}
1805
c4e75102 1806/* Return true if x is a valid address not using indexing.
fbf55580 1807 (This much is the easy part.) */
c4e75102
MT
1808static bool
1809nonindexed_address_p (rtx x, bool strict)
fbf55580
MT
1810{
1811 rtx xfoo0;
ff9d4590 1812 if (REG_P (x))
fbf55580 1813 {
c4e75102 1814 if (! reload_in_progress
f2034d06
JL
1815 || reg_equiv_mem (REGNO (x)) == 0
1816 || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
c4e75102 1817 return true;
fbf55580 1818 }
c4e75102
MT
1819 if (indirectable_constant_address_p (x, false))
1820 return true;
1821 if (indirectable_address_p (x, strict, false))
1822 return true;
fbf55580 1823 xfoo0 = XEXP (x, 0);
c4e75102
MT
1824 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1825 return true;
fbf55580
MT
1826 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1827 && BASE_REGISTER_P (xfoo0, strict))
c4e75102
MT
1828 return true;
1829 return false;
fbf55580
MT
1830}
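/* In VAX terms the cases above correspond roughly to the non-indexed
   addressing modes: register deferred ((Rn)) when X is a register,
   absolute or PC-relative when X is a constant, displacement (disp(Rn))
   for register plus constant, displacement deferred (@disp(Rn)) via the
   MEM case, and autodecrement and autoincrement (-(Rn) and (Rn)+).  */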
1831
c4e75102 1832/* True if PROD is a reg scaled by the size of mode MODE (via MULT, or via
fbf55580
MT
 1833   ASHIFT by its log2), MODE being at most 8 bytes, or just a reg if MODE is one byte.  */
1834
c4e75102 1835static bool
ef4bddc2 1836index_term_p (rtx prod, machine_mode mode, bool strict)
fbf55580
MT
1837{
1838 rtx xfoo0, xfoo1;
c605a8bf 1839 bool log_p;
fbf55580
MT
1840
1841 if (GET_MODE_SIZE (mode) == 1)
1842 return BASE_REGISTER_P (prod, strict);
1843
c605a8bf
MR
1844 if ((GET_CODE (prod) != MULT && GET_CODE (prod) != ASHIFT)
1845 || GET_MODE_SIZE (mode) > 8)
c4e75102 1846 return false;
fbf55580 1847
c605a8bf 1848 log_p = GET_CODE (prod) == ASHIFT;
fbf55580
MT
1849 xfoo0 = XEXP (prod, 0);
1850 xfoo1 = XEXP (prod, 1);
1851
d97c1295 1852 if (CONST_INT_P (xfoo0)
c605a8bf 1853 && GET_MODE_SIZE (mode) == (log_p ? 1 << INTVAL (xfoo0) : INTVAL (xfoo0))
fbf55580 1854 && INDEX_REGISTER_P (xfoo1, strict))
c4e75102 1855 return true;
fbf55580 1856
d97c1295 1857 if (CONST_INT_P (xfoo1)
c605a8bf 1858 && GET_MODE_SIZE (mode) == (log_p ? 1 << INTVAL (xfoo1) : INTVAL (xfoo1))
fbf55580 1859 && INDEX_REGISTER_P (xfoo0, strict))
c4e75102 1860 return true;
fbf55580 1861
c4e75102 1862 return false;
fbf55580
MT
1863}
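/* For example, for SImode (4 bytes) both (mult (reg) (const_int 4)) and
   (ashift (reg) (const_int 2)) are valid index terms, since the hardware
   scales the index register by the size of the operand being addressed.  */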
1864
c4e75102 1865/* Return true if X is the sum of a register
fbf55580 1866 and a valid index term for mode MODE. */
c4e75102 1867static bool
ef4bddc2 1868reg_plus_index_p (rtx x, machine_mode mode, bool strict)
fbf55580
MT
1869{
1870 rtx xfoo0, xfoo1;
1871
1872 if (GET_CODE (x) != PLUS)
c4e75102 1873 return false;
fbf55580
MT
1874
1875 xfoo0 = XEXP (x, 0);
1876 xfoo1 = XEXP (x, 1);
1877
1878 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
c4e75102 1879 return true;
fbf55580
MT
1880
1881 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
c4e75102 1882 return true;
fbf55580 1883
c4e75102 1884 return false;
fbf55580
MT
1885}
1886
c4e75102
MT
1887/* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1888static bool
ef4bddc2 1889indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
c4e75102
MT
1890{
1891 if (!CONSTANT_ADDRESS_P (xfoo0))
1892 return false;
1893 if (BASE_REGISTER_P (xfoo1, strict))
1894 return !flag_pic || mode == QImode;
1895 if (flag_pic && symbolic_operand (xfoo0, SImode))
1896 return false;
1897 return reg_plus_index_p (xfoo1, mode, strict);
1898}
1899
1900/* legitimate_address_p returns true if it recognizes an RTL expression "x"
fbf55580
MT
1901 that is a valid memory address for an instruction.
1902 The MODE argument is the machine mode for the MEM expression
1903 that wants to use this address. */
c4e75102 1904bool
ef4bddc2 1905vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
fbf55580
MT
1906{
1907 rtx xfoo0, xfoo1;
1908
1909 if (nonindexed_address_p (x, strict))
c4e75102 1910 return true;
fbf55580
MT
1911
1912 if (GET_CODE (x) != PLUS)
c4e75102 1913 return false;
fbf55580
MT
1914
1915 /* Handle <address>[index] represented with index-sum outermost */
1916
1917 xfoo0 = XEXP (x, 0);
1918 xfoo1 = XEXP (x, 1);
1919
1920 if (index_term_p (xfoo0, mode, strict)
1921 && nonindexed_address_p (xfoo1, strict))
c4e75102 1922 return true;
fbf55580
MT
1923
1924 if (index_term_p (xfoo1, mode, strict)
1925 && nonindexed_address_p (xfoo0, strict))
c4e75102 1926 return true;
fbf55580 1927
b20f13e9 1928 /* Handle offset(reg)[index] with offset added outermost */
fbf55580 1929
c4e75102
MT
1930 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1931 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1932 return true;
fbf55580 1933
c4e75102 1934 return false;
b20f13e9 1935}
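/* So, for an SImode reference, something like
     (plus (mult (reg r2) (const_int 4)) (plus (reg r1) (const_int 4)))
   is accepted and corresponds to the indexed VAX address 4(r1)[r2].  */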
fbf55580 1936
c4e75102 1937/* Return true if x (a legitimate address expression) has an effect that
fbf55580
MT
1938 depends on the machine mode it is used for. On the VAX, the predecrement
 1939   and postincrement addresses depend thus (the amount of decrement or
 1940   increment being the length of the operand) and all indexed addresses depend
1941 thus (because the index scale factor is the length of the operand). */
1942
b0f6b612 1943static bool
5bfed9a9 1944vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
fbf55580
MT
1945{
1946 rtx xfoo0, xfoo1;
1947
e53b6e56 1948 /* Auto-increment cases are now dealt with generically in recog.cc. */
fbf55580 1949 if (GET_CODE (x) != PLUS)
c4e75102 1950 return false;
fbf55580
MT
1951
1952 xfoo0 = XEXP (x, 0);
1953 xfoo1 = XEXP (x, 1);
1954
c4e75102
MT
1955 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1956 return false;
1957 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1958 return false;
1959 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1960 return false;
1961 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1962 return false;
1963
1964 return true;
1965}
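/* For example, the indexed address disp(r1)[r2] denotes disp + r1 + 1*r2
   for a byte reference but disp + r1 + 4*r2 for a longword reference, so
   any indexed address is mode dependent; a plain displacement such as
   4(r1) is not.  */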
1966
1967static rtx
ef4bddc2 1968fixup_mathdi_operand (rtx x, machine_mode mode)
c4e75102
MT
1969{
1970 if (illegal_addsub_di_memory_operand (x, mode))
1971 {
1972 rtx addr = XEXP (x, 0);
1973 rtx temp = gen_reg_rtx (Pmode);
1974 rtx offset = 0;
1975#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1976 if (GET_CODE (addr) == CONST && flag_pic)
1977 {
1978 offset = XEXP (XEXP (addr, 0), 1);
1979 addr = XEXP (XEXP (addr, 0), 0);
1980 }
1981#endif
1982 emit_move_insn (temp, addr);
1983 if (offset)
1984 temp = gen_rtx_PLUS (Pmode, temp, offset);
1985 x = gen_rtx_MEM (DImode, temp);
1986 }
1987 return x;
1988}
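/* A sketch of the effect when the predicate above matches: with -fPIC an
   operand such as (mem:DI (const (plus (symbol_ref "v") (const_int 8))))
   has the symbolic part of its address loaded into the fresh pseudo TEMP
   and is rebuilt as (mem:DI (plus TEMP (const_int 8))); without the PIC
   special case the whole address is simply moved into TEMP.  */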
1989
1990void
1991vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1992{
1993 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1994 rtx temp;
1995
1996 rtx (*gen_old_insn)(rtx, rtx, rtx);
1997 rtx (*gen_si_insn)(rtx, rtx, rtx);
1998 rtx (*gen_insn)(rtx, rtx, rtx);
1999
2000 if (code == PLUS)
2001 {
2002 gen_old_insn = gen_adddi3_old;
2003 gen_si_insn = gen_addsi3;
2004 gen_insn = gen_adcdi3;
2005 }
2006 else if (code == MINUS)
2007 {
2008 gen_old_insn = gen_subdi3_old;
2009 gen_si_insn = gen_subsi3;
2010 gen_insn = gen_sbcdi3;
2011 }
2012 else
2013 gcc_unreachable ();
2014
2015 /* If this is addition (thus operands are commutative) and if there is one
 2016      addend that duplicates the destination, we want that addend to be the
2017 first addend. */
2018 if (code == PLUS
2019 && rtx_equal_p (operands[0], operands[2])
2020 && !rtx_equal_p (operands[1], operands[2]))
2021 {
2022 temp = operands[2];
2023 operands[2] = operands[1];
2024 operands[1] = temp;
2025 }
2026
2027 if (!TARGET_QMATH)
2028 {
2029 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
2030 }
2031 else if (hi_only)
2032 {
2033 if (!rtx_equal_p (operands[0], operands[1])
2034 && (REG_P (operands[0]) && MEM_P (operands[1])))
2035 {
2036 emit_move_insn (operands[0], operands[1]);
2037 operands[1] = operands[0];
2038 }
2039
2040 operands[0] = fixup_mathdi_operand (operands[0], DImode);
2041 operands[1] = fixup_mathdi_operand (operands[1], DImode);
2042 operands[2] = fixup_mathdi_operand (operands[2], DImode);
2043
2044 if (!rtx_equal_p (operands[0], operands[1]))
2045 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
2046 operand_subword (operands[1], 0, 0, DImode));
2047
2048 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
2049 operand_subword (operands[1], 1, 0, DImode),
2050 operand_subword (operands[2], 1, 0, DImode)));
2051 }
2052 else
2053 {
294ca9ec
MR
2054 /* If we are adding a value to itself, that's really a multiply by 2,
2055 and that's just a left shift by 1. If subtracting, it's just 0. */
c4e75102
MT
2056 if (rtx_equal_p (operands[1], operands[2]))
2057 {
294ca9ec
MR
2058 if (code == PLUS)
2059 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
2060 else
2061 emit_move_insn (operands[0], const0_rtx);
c4e75102
MT
2062 return;
2063 }
2064
2065 operands[0] = fixup_mathdi_operand (operands[0], DImode);
2066
2067 /* If an operand is the same as operand[0], use the operand[0] rtx
 2068	 because fixup will return an equivalent rtx but not an equal one. */
2069
2070 if (rtx_equal_p (operands[0], operands[1]))
2071 operands[1] = operands[0];
2072 else
2073 operands[1] = fixup_mathdi_operand (operands[1], DImode);
2074
2075 if (rtx_equal_p (operands[0], operands[2]))
2076 operands[2] = operands[0];
2077 else
2078 operands[2] = fixup_mathdi_operand (operands[2], DImode);
2079
ffb1dcf6
MR
2080 /* If we are adding or subtracting 0, then this is a move. */
2081 if (code == PLUS && operands[1] == const0_rtx)
2082 {
2083 temp = operands[2];
2084 operands[2] = operands[1];
2085 operands[1] = temp;
2086 }
2087 if (operands[2] == const0_rtx)
2088 {
2089 emit_move_insn (operands[0], operands[1]);
2090 return;
2091 }
2092
c4e75102
MT
 2093      /* If we are subtracting and the destination is not the minuend
 2094	 [d = a - b], then because the carry ops take only two operands we
 2095	 would need a move prior to the subtract.  And if d == b, we would
 2096	 need a temp as well, since otherwise [d = a, d -= d] we would end
 2097	 up with 0.  Instead we rewrite d = a - b into d = -b, d += a.
 2098	 Since -b can never overflow, no temp is needed even if b == d.
2099
2100 If we are doing addition, since the carry ops are two operand, if
2101 we aren't adding to ourselves, move the first addend to the
2102 destination first. */
2103
2104 gcc_assert (operands[1] != const0_rtx || code == MINUS);
2105 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2106 {
2107 if (code == MINUS && CONSTANT_P (operands[1]))
2108 {
c4e75102
MT
2109 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2110 code = PLUS;
2111 gen_insn = gen_adcdi3;
2112 operands[2] = operands[1];
2113 operands[1] = operands[0];
2114 }
2115 else
2116 emit_move_insn (operands[0], operands[1]);
2117 }
2118
2119 /* Subtracting a constant will have been rewritten to an addition of the
2120 negative of that constant before we get here. */
2121 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2122 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
2123 }
2124}
2125
3814318d
RH
2126/* Output assembler code for a block containing the constant parts
2127 of a trampoline, leaving space for the variable parts. */
2128
2129/* On the VAX, the trampoline contains an entry mask and two instructions:
2130 .word NN
 2131      movl $STATIC,r0 (store the function's static chain)
2132 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2133
2134static void
2135vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2136{
2137 assemble_aligned_integer (2, const0_rtx);
2138 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2139 assemble_aligned_integer (4, const0_rtx);
2140 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2141 assemble_aligned_integer (2, GEN_INT (0x9f17));
2142 assemble_aligned_integer (4, const0_rtx);
2143}
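/* Assuming the integers above are emitted back to back with no padding,
   the 15-byte template is laid out roughly as follows:
     offset  0: .word 0     entry mask, later copied from the target function
     offset  2: 0xd0 0x8f   movl opcode plus immediate operand specifier
     offset  4: .long 0     the static chain value, patched below
     offset  8: 0x50+n      register operand, n = STATIC_CHAIN_REGNUM
     offset  9: 0x17 0x9f   jmp opcode plus absolute deferred specifier
     offset 11: .long 0     the function address, patched below
   which is why vax_trampoline_init below stores at offsets 0, 4 and 11.  */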
2144
2145/* We copy the register-mask from the function's pure code
2146 to the start of the trampoline. */
2147
2148static void
2149vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2150{
2151 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2152 rtx mem;
2153
2154 emit_block_move (m_tramp, assemble_trampoline_template (),
2155 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2156
2157 mem = adjust_address (m_tramp, HImode, 0);
2158 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2159
2160 mem = adjust_address (m_tramp, SImode, 4);
2161 emit_move_insn (mem, cxt);
2162 mem = adjust_address (m_tramp, SImode, 11);
0a81f074 2163 emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
3814318d
RH
2164 emit_insn (gen_sync_istream ());
2165}
2166
079e7538
NF
2167/* Value is the number of bytes of arguments automatically
2168 popped when returning from a subroutine call.
2169 FUNDECL is the declaration node of the function (as a tree),
2170 FUNTYPE is the data type of the function (as a tree),
2171 or for a library call it is an identifier node for the subroutine name.
2172 SIZE is the number of bytes of arguments passed on the stack.
2173
2174 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2175
a20c5714 2176static poly_int64
079e7538 2177vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
a20c5714 2178 tree funtype ATTRIBUTE_UNUSED, poly_int64 size)
079e7538 2179{
a20c5714 2180 return size > 255 * 4 ? 0 : (HOST_WIDE_INT) size;
079e7538 2181}
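/* For example, a call pushing 6 longwords (24 bytes) of arguments is popped
   by the callee's RET, while one pushing more than 255 longwords (1020
   bytes) exceeds what RET can pop, so 0 is returned and the caller must
   clean up.  */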
8f8a46ba 2182
6783fdb7 2183/* Implement TARGET_FUNCTION_ARG. On the VAX all args are pushed. */
8f8a46ba
NF
2184
2185static rtx
6783fdb7 2186vax_function_arg (cumulative_args_t, const function_arg_info &)
8f8a46ba
NF
2187{
2188 return NULL_RTX;
2189}
2190
6930c98c 2191/* Update the data in CUM to advance over argument ARG. */
8f8a46ba
NF
2192
2193static void
6930c98c
RS
2194vax_function_arg_advance (cumulative_args_t cum_v,
2195 const function_arg_info &arg)
8f8a46ba 2196{
d5cc9181
JR
2197 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2198
6930c98c 2199 *cum += (arg.promoted_size_in_bytes () + 3) & ~3;
8f8a46ba 2200}
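/* The (size + 3) & ~3 rounding means each argument advances CUM by a whole
   number of longwords: a QImode or HImode argument still advances it by 4
   bytes, a DImode argument by 8.  */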
2a31c321
RS
2201
2202static HOST_WIDE_INT
2203vax_starting_frame_offset (void)
2204{
 2205  /* On ELF targets, reserve the top of the stack for the exception
 2206     handler's stackadj value.  */
2207 return TARGET_ELF ? -4 : 0;
2208}
2209