]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/alpha/alpha.c
Merge basic-improvements-branch to trunk
[thirdparty/gcc.git] / gcc / config / alpha / alpha.c
CommitLineData
bf2a98b3 1/* Subroutines used for code generation on the DEC Alpha.
6d2bbb9f 2 Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
52470889 3 2000, 2001, 2002 Free Software Foundation, Inc.
0e0a0e7a 4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
bf2a98b3 5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING. If not, write to
77b72456 20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA. */
bf2a98b3 22
23
bf2a98b3 24#include "config.h"
769ea120 25#include "system.h"
805e22b2 26#include "coretypes.h"
27#include "tm.h"
bf2a98b3 28#include "rtl.h"
d8fc4d0b 29#include "tree.h"
bf2a98b3 30#include "regs.h"
31#include "hard-reg-set.h"
32#include "real.h"
33#include "insn-config.h"
34#include "conditions.h"
bf2a98b3 35#include "output.h"
36#include "insn-attr.h"
37#include "flags.h"
38#include "recog.h"
bf2a98b3 39#include "expr.h"
d8fc4d0b 40#include "optabs.h"
41#include "reload.h"
bf2a98b3 42#include "obstack.h"
0c0464e6 43#include "except.h"
44#include "function.h"
769ea120 45#include "toplev.h"
11016d99 46#include "ggc.h"
0f37b7a2 47#include "integrate.h"
0d50f0b7 48#include "tm_p.h"
a767736d 49#include "target.h"
50#include "target-def.h"
961d6ddd 51#include "debug.h"
a1f71e15 52#include "langhooks.h"
0c0464e6 53
65abff06 54/* Specify which cpu to schedule for. */
0c0464e6 55
f141a8b4 56enum processor_type alpha_cpu;
ace75b22 57static const char * const alpha_cpu_name[] =
07c1a295 58{
59 "ev4", "ev5", "ev6"
60};
0c5845b3 61
c4622276 62/* Specify how accurate floating-point traps need to be. */
63
64enum alpha_trap_precision alpha_tp;
65
66/* Specify the floating-point rounding mode. */
67
68enum alpha_fp_rounding_mode alpha_fprm;
69
70/* Specify which things cause traps. */
71
72enum alpha_fp_trap_mode alpha_fptm;
73
5f7b9df8 74/* Specify bit size of immediate TLS offsets. */
75
76int alpha_tls_size = 32;
77
c4622276 78/* Strings decoded into the above options. */
0c0464e6 79
ace75b22 80const char *alpha_cpu_string; /* -mcpu= */
27de1488 81const char *alpha_tune_string; /* -mtune= */
ace75b22 82const char *alpha_tp_string; /* -mtrap-precision=[p|s|i] */
83const char *alpha_fprm_string; /* -mfp-rounding-mode=[n|m|c|d] */
84const char *alpha_fptm_string; /* -mfp-trap-mode=[n|u|su|sui] */
85const char *alpha_mlat_string; /* -mmemory-latency= */
5f7b9df8 86const char *alpha_tls_size_string; /* -mtls-size=[16|32|64] */
c4622276 87
bf2a98b3 88/* Save information from a "cmpxx" operation until the branch or scc is
89 emitted. */
90
b18b881f 91struct alpha_compare alpha_compare;
bf2a98b3 92
e3e08e7f 93/* Nonzero if inside of a function, because the Alpha asm can't
449b7f2d 94 handle .files inside of functions. */
95
96static int inside_function = FALSE;
97
07c1a295 98/* The number of cycles of latency we should assume on memory reads. */
99
100int alpha_memory_latency = 3;
101
b9a5aa8e 102/* Whether the function needs the GP. */
103
104static int alpha_function_needs_gp;
105
849674a3 106/* The alias set for prologue/epilogue register save/restore. */
107
108static int alpha_sr_alias_set;
109
a314eb5e 110/* The assembler name of the current function. */
111
112static const char *alpha_fnname;
113
1f0ce6a6 114/* The next explicit relocation sequence number. */
115int alpha_next_sequence_number = 1;
116
117/* The literal and gpdisp sequence numbers for this insn, as printed
118 by %# and %* respectively. */
119int alpha_this_literal_sequence_number;
120int alpha_this_gpdisp_sequence_number;
121
0e0a0e7a 122/* Declarations of static functions. */
805e22b2 123static bool alpha_function_ok_for_sibcall
124 PARAMS ((tree, tree));
5f7b9df8 125static int tls_symbolic_operand_1
126 PARAMS ((rtx, enum machine_mode, int, int));
127static enum tls_model tls_symbolic_operand_type
128 PARAMS ((rtx));
484edb77 129static bool decl_in_text_section
130 PARAMS ((tree));
52470889 131static bool alpha_in_small_data_p
132 PARAMS ((tree));
7811991d 133static void alpha_encode_section_info
134 PARAMS ((tree, int));
7b4a38a6 135static const char *alpha_strip_name_encoding
136 PARAMS ((const char *));
792433e3 137static int some_small_symbolic_operand_1
367e2ab3 138 PARAMS ((rtx *, void *));
792433e3 139static int split_small_symbolic_operand_1
367e2ab3 140 PARAMS ((rtx *, void *));
b9a5aa8e 141static void alpha_set_memflags_1
32bd1bda 142 PARAMS ((rtx, int, int, int));
b9a5aa8e 143static rtx alpha_emit_set_const_1
32bd1bda 144 PARAMS ((rtx, enum machine_mode, HOST_WIDE_INT, int));
b9a5aa8e 145static void alpha_expand_unaligned_load_words
32bd1bda 146 PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
b9a5aa8e 147static void alpha_expand_unaligned_store_words
32bd1bda 148 PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
f2cc13dc 149static void alpha_init_builtins
150 PARAMS ((void));
151static rtx alpha_expand_builtin
152 PARAMS ((tree, rtx, rtx, enum machine_mode, int));
b9a5aa8e 153static void alpha_sa_mask
32bd1bda 154 PARAMS ((unsigned long *imaskP, unsigned long *fmaskP));
1f0ce6a6 155static int find_lo_sum
156 PARAMS ((rtx *, void *));
b9a5aa8e 157static int alpha_does_function_need_gp
32bd1bda 158 PARAMS ((void));
915c336f 159static int alpha_ra_ever_killed
160 PARAMS ((void));
6fec94c5 161static const char *get_trap_mode_suffix
162 PARAMS ((void));
163static const char *get_round_mode_suffix
164 PARAMS ((void));
5f7b9df8 165static const char *get_some_local_dynamic_name
166 PARAMS ((void));
167static int get_some_local_dynamic_name_1
168 PARAMS ((rtx *, void *));
915c336f 169static rtx set_frame_related_p
170 PARAMS ((void));
171static const char *alpha_lookup_xfloating_lib_func
172 PARAMS ((enum rtx_code));
173static int alpha_compute_xfloating_mode_arg
174 PARAMS ((enum rtx_code, enum alpha_fp_rounding_mode));
175static void alpha_emit_xfloating_libcall
176 PARAMS ((const char *, rtx, rtx[], int, rtx));
177static rtx alpha_emit_xfloating_compare
178 PARAMS ((enum rtx_code, rtx, rtx));
85ae73e8 179static void alpha_output_function_end_prologue
180 PARAMS ((FILE *));
747af5e7 181static int alpha_adjust_cost
182 PARAMS ((rtx, rtx, rtx, int));
183static int alpha_issue_rate
184 PARAMS ((void));
7eb0c947 185static int alpha_use_dfa_pipeline_interface
186 PARAMS ((void));
187static int alpha_multipass_dfa_lookahead
188 PARAMS ((void));
8df4a58b 189
bbfbe351 190#ifdef OBJECT_FORMAT_ELF
191static void alpha_elf_select_rtx_section
192 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
193#endif
194
cf73d31f 195#if TARGET_ABI_OPEN_VMS
196static bool alpha_linkage_symbol_p
197 PARAMS ((const char *symname));
198static void alpha_write_linkage
199 PARAMS ((FILE *, const char *, tree));
200#endif
201
6988553d 202#if TARGET_ABI_OSF
203static void alpha_output_mi_thunk_osf
eb344f43 204 PARAMS ((FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree));
6988553d 205#endif
206
1f3233d1 207static struct machine_function * alpha_init_machine_status
208 PARAMS ((void));
9caef960 209
210static void unicosmk_output_deferred_case_vectors PARAMS ((FILE *));
211static void unicosmk_gen_dsib PARAMS ((unsigned long *imaskP));
212static void unicosmk_output_ssib PARAMS ((FILE *, const char *));
213static int unicosmk_need_dex PARAMS ((rtx));
214
0dbd1c74 215/* Get the number of args of a function in one of two ways. */
9caef960 216#if TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK
0dbd1c74 217#define NUM_ARGS current_function_args_info.num_args
218#else
219#define NUM_ARGS current_function_args_info
220#endif
d2832bd8 221
d2832bd8 222#define REG_PV 27
223#define REG_RA 26
bf2a98b3 224\f
a767736d 225/* Initialize the GCC target structure. */
1467e953 226#if TARGET_ABI_OPEN_VMS
e3c541f0 227const struct attribute_spec vms_attribute_table[];
2cb4ac60 228static unsigned int vms_section_type_flags PARAMS ((tree, const char *, int));
29a0ebee 229static void vms_asm_named_section PARAMS ((const char *, unsigned int));
01d15dc5 230static void vms_asm_out_constructor PARAMS ((rtx, int));
231static void vms_asm_out_destructor PARAMS ((rtx, int));
e3c541f0 232# undef TARGET_ATTRIBUTE_TABLE
233# define TARGET_ATTRIBUTE_TABLE vms_attribute_table
2cb4ac60 234# undef TARGET_SECTION_TYPE_FLAGS
235# define TARGET_SECTION_TYPE_FLAGS vms_section_type_flags
2d280039 236#endif
a767736d 237
52470889 238#undef TARGET_IN_SMALL_DATA_P
239#define TARGET_IN_SMALL_DATA_P alpha_in_small_data_p
7811991d 240#undef TARGET_ENCODE_SECTION_INFO
241#define TARGET_ENCODE_SECTION_INFO alpha_encode_section_info
7b4a38a6 242#undef TARGET_STRIP_NAME_ENCODING
243#define TARGET_STRIP_NAME_ENCODING alpha_strip_name_encoding
52470889 244
9caef960 245#if TARGET_ABI_UNICOSMK
246static void unicosmk_asm_named_section PARAMS ((const char *, unsigned int));
247static void unicosmk_insert_attributes PARAMS ((tree, tree *));
248static unsigned int unicosmk_section_type_flags PARAMS ((tree, const char *,
249 int));
52470889 250static void unicosmk_unique_section PARAMS ((tree, int));
9caef960 251# undef TARGET_INSERT_ATTRIBUTES
252# define TARGET_INSERT_ATTRIBUTES unicosmk_insert_attributes
253# undef TARGET_SECTION_TYPE_FLAGS
254# define TARGET_SECTION_TYPE_FLAGS unicosmk_section_type_flags
52470889 255# undef TARGET_ASM_UNIQUE_SECTION
256# define TARGET_ASM_UNIQUE_SECTION unicosmk_unique_section
67c1e638 257# undef TARGET_ASM_GLOBALIZE_LABEL
258# define TARGET_ASM_GLOBALIZE_LABEL hook_FILEptr_constcharptr_void
9caef960 259#endif
260
58356836 261#undef TARGET_ASM_ALIGNED_HI_OP
262#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
263#undef TARGET_ASM_ALIGNED_DI_OP
264#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
265
266/* Default unaligned ops are provided for ELF systems. To get unaligned
267 data for non-ELF systems, we have to turn off auto alignment. */
268#ifndef OBJECT_FORMAT_ELF
269#undef TARGET_ASM_UNALIGNED_HI_OP
270#define TARGET_ASM_UNALIGNED_HI_OP "\t.align 0\n\t.word\t"
271#undef TARGET_ASM_UNALIGNED_SI_OP
272#define TARGET_ASM_UNALIGNED_SI_OP "\t.align 0\n\t.long\t"
273#undef TARGET_ASM_UNALIGNED_DI_OP
274#define TARGET_ASM_UNALIGNED_DI_OP "\t.align 0\n\t.quad\t"
275#endif
276
bbfbe351 277#ifdef OBJECT_FORMAT_ELF
278#undef TARGET_ASM_SELECT_RTX_SECTION
279#define TARGET_ASM_SELECT_RTX_SECTION alpha_elf_select_rtx_section
280#endif
281
85ae73e8 282#undef TARGET_ASM_FUNCTION_END_PROLOGUE
283#define TARGET_ASM_FUNCTION_END_PROLOGUE alpha_output_function_end_prologue
284
747af5e7 285#undef TARGET_SCHED_ADJUST_COST
286#define TARGET_SCHED_ADJUST_COST alpha_adjust_cost
287#undef TARGET_SCHED_ISSUE_RATE
288#define TARGET_SCHED_ISSUE_RATE alpha_issue_rate
7eb0c947 289#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
290#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE \
291 alpha_use_dfa_pipeline_interface
292#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
293#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
294 alpha_multipass_dfa_lookahead
747af5e7 295
5f7b9df8 296#undef TARGET_HAVE_TLS
297#define TARGET_HAVE_TLS HAVE_AS_TLS
298
f2cc13dc 299#undef TARGET_INIT_BUILTINS
300#define TARGET_INIT_BUILTINS alpha_init_builtins
301#undef TARGET_EXPAND_BUILTIN
302#define TARGET_EXPAND_BUILTIN alpha_expand_builtin
303
805e22b2 304#undef TARGET_FUNCTION_OK_FOR_SIBCALL
305#define TARGET_FUNCTION_OK_FOR_SIBCALL alpha_function_ok_for_sibcall
306
6988553d 307#if TARGET_ABI_OSF
308#undef TARGET_ASM_OUTPUT_MI_THUNK
309#define TARGET_ASM_OUTPUT_MI_THUNK alpha_output_mi_thunk_osf
eb344f43 310#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
a19ec9da 311#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
6988553d 312#endif
313
57e4bbfb 314struct gcc_target targetm = TARGET_INITIALIZER;
a767736d 315\f
/* Parse target option strings.

   Called once at startup (via OVERRIDE_OPTIONS) after all command-line
   switches have been decoded into the alpha_*_string variables.  Validates
   them, establishes the derived global settings (alpha_cpu, alpha_tp,
   alpha_fprm, alpha_fptm, alpha_tls_size, alpha_memory_latency, ...),
   and installs per-function init hooks.  Order matters throughout: later
   checks override earlier defaults, so do not reorder sections.  */

void
override_options ()
{
  int i;
  /* Table mapping -mcpu=/-mtune= names to a processor enum plus the
     instruction-set target_flags implied by that processor.  */
  static const struct cpu_table {
    const char *const name;
    const enum processor_type processor;
    const int flags;
  } cpu_table[] = {
#define EV5_MASK (MASK_CPU_EV5)
#define EV6_MASK (MASK_CPU_EV6|MASK_BWX|MASK_MAX|MASK_FIX)
    { "ev4",	PROCESSOR_EV4, 0 },
    { "ev45",	PROCESSOR_EV4, 0 },
    { "21064",	PROCESSOR_EV4, 0 },
    { "ev5",	PROCESSOR_EV5, EV5_MASK },
    { "21164",	PROCESSOR_EV5, EV5_MASK },
    { "ev56",	PROCESSOR_EV5, EV5_MASK|MASK_BWX },
    { "21164a",	PROCESSOR_EV5, EV5_MASK|MASK_BWX },
    { "pca56",	PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
    { "21164PC",PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
    { "21164pc",PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
    { "ev6",	PROCESSOR_EV6, EV6_MASK },
    { "21264",	PROCESSOR_EV6, EV6_MASK },
    { "ev67",	PROCESSOR_EV6, EV6_MASK|MASK_CIX },
    { "21264a",	PROCESSOR_EV6, EV6_MASK|MASK_CIX },
    { 0, 0, 0 }
  };

  /* Unicos/Mk doesn't have shared libraries.  */
  if (TARGET_ABI_UNICOSMK && flag_pic)
    {
      warning ("-f%s ignored for Unicos/Mk (not supported)",
	       (flag_pic > 1) ? "PIC" : "pic");
      flag_pic = 0;
    }

  /* On Unicos/Mk, the native compiler consistently generates /d suffices
     for floating-point instructions.  Make that the default for this
     target.  */
  if (TARGET_ABI_UNICOSMK)
    alpha_fprm = ALPHA_FPRM_DYN;
  else
    alpha_fprm = ALPHA_FPRM_NORM;

  /* Defaults; possibly overridden below by -mieee and the explicit
     -mtrap-precision= / -mfp-trap-mode= strings.  */
  alpha_tp = ALPHA_TP_PROG;
  alpha_fptm = ALPHA_FPTM_N;

  /* We cannot use su and sui qualifiers for conversion instructions on
     Unicos/Mk.  I'm not sure if this is due to assembler or hardware
     limitations.  Right now, we issue a warning if -mieee is specified
     and then ignore it; eventually, we should either get it right or
     disable the option altogether.  */

  if (TARGET_IEEE)
    {
      if (TARGET_ABI_UNICOSMK)
	warning ("-mieee not supported on Unicos/Mk");
      else
	{
	  alpha_tp = ALPHA_TP_INSN;
	  alpha_fptm = ALPHA_FPTM_SU;
	}
    }

  if (TARGET_IEEE_WITH_INEXACT)
    {
      if (TARGET_ABI_UNICOSMK)
	warning ("-mieee-with-inexact not supported on Unicos/Mk");
      else
	{
	  alpha_tp = ALPHA_TP_INSN;
	  alpha_fptm = ALPHA_FPTM_SUI;
	}
    }

  /* -mtrap-precision=[p|f|i] */
  if (alpha_tp_string)
    {
      if (! strcmp (alpha_tp_string, "p"))
	alpha_tp = ALPHA_TP_PROG;
      else if (! strcmp (alpha_tp_string, "f"))
	alpha_tp = ALPHA_TP_FUNC;
      else if (! strcmp (alpha_tp_string, "i"))
	alpha_tp = ALPHA_TP_INSN;
      else
	error ("bad value `%s' for -mtrap-precision switch", alpha_tp_string);
    }

  /* -mfp-rounding-mode=[n|m|c|d] */
  if (alpha_fprm_string)
    {
      if (! strcmp (alpha_fprm_string, "n"))
	alpha_fprm = ALPHA_FPRM_NORM;
      else if (! strcmp (alpha_fprm_string, "m"))
	alpha_fprm = ALPHA_FPRM_MINF;
      else if (! strcmp (alpha_fprm_string, "c"))
	alpha_fprm = ALPHA_FPRM_CHOP;
      else if (! strcmp (alpha_fprm_string,"d"))
	alpha_fprm = ALPHA_FPRM_DYN;
      else
	error ("bad value `%s' for -mfp-rounding-mode switch",
	       alpha_fprm_string);
    }

  /* -mfp-trap-mode=[n|u|su|sui] */
  if (alpha_fptm_string)
    {
      if (strcmp (alpha_fptm_string, "n") == 0)
	alpha_fptm = ALPHA_FPTM_N;
      else if (strcmp (alpha_fptm_string, "u") == 0)
	alpha_fptm = ALPHA_FPTM_U;
      else if (strcmp (alpha_fptm_string, "su") == 0)
	alpha_fptm = ALPHA_FPTM_SU;
      else if (strcmp (alpha_fptm_string, "sui") == 0)
	alpha_fptm = ALPHA_FPTM_SUI;
      else
	error ("bad value `%s' for -mfp-trap-mode switch", alpha_fptm_string);
    }

  /* -mtls-size=[16|32|64] */
  if (alpha_tls_size_string)
    {
      if (strcmp (alpha_tls_size_string, "16") == 0)
	alpha_tls_size = 16;
      else if (strcmp (alpha_tls_size_string, "32") == 0)
	alpha_tls_size = 32;
      else if (strcmp (alpha_tls_size_string, "64") == 0)
	alpha_tls_size = 64;
      else
	error ("bad value `%s' for -mtls-size switch", alpha_tls_size_string);
    }

  /* Default processor from the configure-time TARGET_CPU_DEFAULT mask.  */
  alpha_cpu
    = TARGET_CPU_DEFAULT & MASK_CPU_EV6 ? PROCESSOR_EV6
      : (TARGET_CPU_DEFAULT & MASK_CPU_EV5 ? PROCESSOR_EV5 : PROCESSOR_EV4);

  /* -mcpu= selects both the schedule model and the ISA flags.  */
  if (alpha_cpu_string)
    {
      for (i = 0; cpu_table [i].name; i++)
	if (! strcmp (alpha_cpu_string, cpu_table [i].name))
	  {
	    alpha_cpu = cpu_table [i].processor;
	    target_flags &= ~ (MASK_BWX | MASK_MAX | MASK_FIX | MASK_CIX
			       | MASK_CPU_EV5 | MASK_CPU_EV6);
	    target_flags |= cpu_table [i].flags;
	    break;
	  }
      if (! cpu_table [i].name)
	error ("bad value `%s' for -mcpu switch", alpha_cpu_string);
    }

  /* -mtune= overrides only the schedule model, not the ISA flags, and so
     is processed after -mcpu=.  */
  if (alpha_tune_string)
    {
      for (i = 0; cpu_table [i].name; i++)
	if (! strcmp (alpha_tune_string, cpu_table [i].name))
	  {
	    alpha_cpu = cpu_table [i].processor;
	    break;
	  }
      if (! cpu_table [i].name)
	error ("bad value `%s' for -mcpu switch", alpha_tune_string);
    }

  /* Do some sanity checks on the above options.  */

  if (TARGET_ABI_UNICOSMK && alpha_fptm != ALPHA_FPTM_N)
    {
      warning ("trap mode not supported on Unicos/Mk");
      alpha_fptm = ALPHA_FPTM_N;
    }

  if ((alpha_fptm == ALPHA_FPTM_SU || alpha_fptm == ALPHA_FPTM_SUI)
      && alpha_tp != ALPHA_TP_INSN && ! TARGET_CPU_EV6)
    {
      warning ("fp software completion requires -mtrap-precision=i");
      alpha_tp = ALPHA_TP_INSN;
    }

  if (TARGET_CPU_EV6)
    {
      /* Except for EV6 pass 1 (not released), we always have precise
	 arithmetic traps.  Which means we can do software completion
	 without minding trap shadows.  */
      alpha_tp = ALPHA_TP_PROG;
    }

  if (TARGET_FLOAT_VAX)
    {
      if (alpha_fprm == ALPHA_FPRM_MINF || alpha_fprm == ALPHA_FPRM_DYN)
	{
	  warning ("rounding mode not supported for VAX floats");
	  alpha_fprm = ALPHA_FPRM_NORM;
	}
      if (alpha_fptm == ALPHA_FPTM_SUI)
	{
	  warning ("trap mode not supported for VAX floats");
	  alpha_fptm = ALPHA_FPTM_SU;
	}
    }

  /* Decode -mmemory-latency: either a raw cycle count, "Ln" for a cache
     level whose latency is looked up per-cpu, or "main" for main memory.  */
  {
    char *end;
    int lat;

    if (!alpha_mlat_string)
      alpha_mlat_string = "L1";

    if (ISDIGIT ((unsigned char)alpha_mlat_string[0])
	&& (lat = strtol (alpha_mlat_string, &end, 10), *end == '\0'))
      ;
    else if ((alpha_mlat_string[0] == 'L' || alpha_mlat_string[0] == 'l')
	     && ISDIGIT ((unsigned char)alpha_mlat_string[1])
	     && alpha_mlat_string[2] == '\0')
      {
	static int const cache_latency[][4] =
	{
	  { 3, 30, -1 },	/* ev4 -- Bcache is a guess */
	  { 2, 12, 38 },	/* ev5 -- Bcache from PC164 LMbench numbers */
	  { 3, 12, 30 },	/* ev6 -- Bcache from DS20 LMbench.  */
	};

	lat = alpha_mlat_string[1] - '0';
	if (lat <= 0 || lat > 3 || cache_latency[alpha_cpu][lat-1] == -1)
	  {
	    warning ("L%d cache latency unknown for %s",
		     lat, alpha_cpu_name[alpha_cpu]);
	    lat = 3;
	  }
	else
	  lat = cache_latency[alpha_cpu][lat-1];
      }
    else if (! strcmp (alpha_mlat_string, "main"))
      {
	/* Most current memories have about 370ns latency.  This is
	   a reasonable guess for a fast cpu.  */
	lat = 150;
      }
    else
      {
	warning ("bad value `%s' for -mmemory-latency", alpha_mlat_string);
	lat = 3;
      }

    alpha_memory_latency = lat;
  }

  /* Default the definition of "small data" to 8 bytes.  */
  if (!g_switch_set)
    g_switch_value = 8;

  /* Infer TARGET_SMALL_DATA from -fpic/-fPIC.  */
  if (flag_pic == 1)
    target_flags |= MASK_SMALL_DATA;
  else if (flag_pic == 2)
    target_flags &= ~MASK_SMALL_DATA;

  /* Align labels and loops for optimal branching.  */
  /* ??? Kludge these by not doing anything if we don't optimize and also if
     we are writing ECOFF symbols to work around a bug in DEC's assembler.  */
  if (optimize > 0 && write_symbols != SDB_DEBUG)
    {
      if (align_loops <= 0)
	align_loops = 16;
      if (align_jumps <= 0)
	align_jumps = 16;
    }
  if (align_functions <= 0)
    align_functions = 16;

  /* Acquire a unique set number for our register saves and restores.  */
  alpha_sr_alias_set = new_alias_set ();

  /* Register variables and functions with the garbage collector.  */

  /* Set up function hooks.  */
  init_machine_status = alpha_init_machine_status;

  /* Tell the compiler when we're using VAX floating point.  */
  if (TARGET_FLOAT_VAX)
    {
      real_format_for_mode[SFmode - QFmode] = &vax_f_format;
      real_format_for_mode[DFmode - QFmode] = &vax_g_format;
      real_format_for_mode[TFmode - QFmode] = NULL;
    }
}
598\f
bf2a98b3 599/* Returns 1 if VALUE is a mask that contains full bytes of zero or ones. */
600
601int
602zap_mask (value)
603 HOST_WIDE_INT value;
604{
605 int i;
606
607 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
608 i++, value >>= 8)
609 if ((value & 0xff) != 0 && (value & 0xff) != 0xff)
610 return 0;
611
612 return 1;
613}
614
615/* Returns 1 if OP is either the constant zero or a register. If a
616 register, it must be in the proper mode unless MODE is VOIDmode. */
617
618int
619reg_or_0_operand (op, mode)
620 register rtx op;
621 enum machine_mode mode;
622{
5ce11454 623 return op == CONST0_RTX (mode) || register_operand (op, mode);
bf2a98b3 624}
625
f203253e 626/* Return 1 if OP is a constant in the range of 0-63 (for a shift) or
627 any register. */
628
629int
630reg_or_6bit_operand (op, mode)
631 register rtx op;
632 enum machine_mode mode;
633{
634 return ((GET_CODE (op) == CONST_INT
635 && (unsigned HOST_WIDE_INT) INTVAL (op) < 64)
636 || register_operand (op, mode));
637}
638
639
bf2a98b3 640/* Return 1 if OP is an 8-bit constant or any register. */
641
642int
643reg_or_8bit_operand (op, mode)
644 register rtx op;
645 enum machine_mode mode;
646{
647 return ((GET_CODE (op) == CONST_INT
648 && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100)
649 || register_operand (op, mode));
650}
651
f2cc13dc 652/* Return 1 if OP is a constant or any register. */
653
654int
655reg_or_const_int_operand (op, mode)
656 register rtx op;
657 enum machine_mode mode;
658{
659 return GET_CODE (op) == CONST_INT || register_operand (op, mode);
660}
661
14a2e7f0 662/* Return 1 if OP is an 8-bit constant. */
663
664int
665cint8_operand (op, mode)
666 register rtx op;
769ea120 667 enum machine_mode mode ATTRIBUTE_UNUSED;
14a2e7f0 668{
0e2eb331 669 return ((GET_CODE (op) == CONST_INT
a249931c 670 && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100));
14a2e7f0 671}
672
bf2a98b3 673/* Return 1 if the operand is a valid second operand to an add insn. */
674
675int
676add_operand (op, mode)
677 register rtx op;
678 enum machine_mode mode;
679{
680 if (GET_CODE (op) == CONST_INT)
c7412efd 681 /* Constraints I, J, O and P are covered by K. */
f7865966 682 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'K')
c7412efd 683 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
bf2a98b3 684
685 return register_operand (op, mode);
686}
687
688/* Return 1 if the operand is a valid second operand to a sign-extending
689 add insn. */
690
691int
692sext_add_operand (op, mode)
693 register rtx op;
694 enum machine_mode mode;
695{
696 if (GET_CODE (op) == CONST_INT)
c7412efd 697 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
698 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'));
bf2a98b3 699
7014838c 700 return reg_not_elim_operand (op, mode);
bf2a98b3 701}
702
703/* Return 1 if OP is the constant 4 or 8. */
704
705int
706const48_operand (op, mode)
707 register rtx op;
769ea120 708 enum machine_mode mode ATTRIBUTE_UNUSED;
bf2a98b3 709{
710 return (GET_CODE (op) == CONST_INT
711 && (INTVAL (op) == 4 || INTVAL (op) == 8));
712}
713
714/* Return 1 if OP is a valid first operand to an AND insn. */
715
716int
717and_operand (op, mode)
718 register rtx op;
719 enum machine_mode mode;
720{
721 if (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
722 return (zap_mask (CONST_DOUBLE_LOW (op))
723 && zap_mask (CONST_DOUBLE_HIGH (op)));
724
725 if (GET_CODE (op) == CONST_INT)
726 return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
727 || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100
728 || zap_mask (INTVAL (op)));
729
730 return register_operand (op, mode);
731}
732
7eb83e27 733/* Return 1 if OP is a valid first operand to an IOR or XOR insn. */
fb18a037 734
735int
7eb83e27 736or_operand (op, mode)
fb18a037 737 register rtx op;
738 enum machine_mode mode;
739{
740 if (GET_CODE (op) == CONST_INT)
741 return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
742 || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100);
743
744 return register_operand (op, mode);
745}
746
bf2a98b3 747/* Return 1 if OP is a constant that is the width, in bits, of an integral
748 mode smaller than DImode. */
749
750int
751mode_width_operand (op, mode)
752 register rtx op;
769ea120 753 enum machine_mode mode ATTRIBUTE_UNUSED;
bf2a98b3 754{
755 return (GET_CODE (op) == CONST_INT
34377880 756 && (INTVAL (op) == 8 || INTVAL (op) == 16
757 || INTVAL (op) == 32 || INTVAL (op) == 64));
bf2a98b3 758}
759
760/* Return 1 if OP is a constant that is the width of an integral machine mode
761 smaller than an integer. */
762
763int
764mode_mask_operand (op, mode)
765 register rtx op;
769ea120 766 enum machine_mode mode ATTRIBUTE_UNUSED;
bf2a98b3 767{
ae4cd3a5 768 if (GET_CODE (op) == CONST_INT)
769 {
770 HOST_WIDE_INT value = INTVAL (op);
bf2a98b3 771
ae4cd3a5 772 if (value == 0xff)
773 return 1;
774 if (value == 0xffff)
775 return 1;
776 if (value == 0xffffffff)
777 return 1;
778 if (value == -1)
779 return 1;
780 }
781 else if (HOST_BITS_PER_WIDE_INT == 32 && GET_CODE (op) == CONST_DOUBLE)
782 {
783 if (CONST_DOUBLE_LOW (op) == 0xffffffff && CONST_DOUBLE_HIGH (op) == 0)
784 return 1;
785 }
786
787 return 0;
bf2a98b3 788}
789
790/* Return 1 if OP is a multiple of 8 less than 64. */
791
792int
793mul8_operand (op, mode)
794 register rtx op;
769ea120 795 enum machine_mode mode ATTRIBUTE_UNUSED;
bf2a98b3 796{
797 return (GET_CODE (op) == CONST_INT
798 && (unsigned HOST_WIDE_INT) INTVAL (op) < 64
799 && (INTVAL (op) & 7) == 0);
800}
801
5ce11454 802/* Return 1 if OP is the zero constant for MODE. */
bf2a98b3 803
804int
5ce11454 805const0_operand (op, mode)
bf2a98b3 806 register rtx op;
807 enum machine_mode mode;
808{
5ce11454 809 return op == CONST0_RTX (mode);
bf2a98b3 810}
811
f62714a1 812/* Return 1 if OP is a hard floating-point register. */
813
814int
815hard_fp_register_operand (op, mode)
816 register rtx op;
817 enum machine_mode mode;
818{
cf3c6764 819 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
820 return 0;
821
822 if (GET_CODE (op) == SUBREG)
823 op = SUBREG_REG (op);
824 return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == FLOAT_REGS;
825}
826
827/* Return 1 if OP is a hard general register. */
828
829int
830hard_int_register_operand (op, mode)
831 register rtx op;
832 enum machine_mode mode;
833{
834 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
835 return 0;
836
837 if (GET_CODE (op) == SUBREG)
838 op = SUBREG_REG (op);
839 return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS;
f62714a1 840}
841
bf2a98b3 842/* Return 1 if OP is a register or a constant integer. */
843
844
845int
846reg_or_cint_operand (op, mode)
847 register rtx op;
848 enum machine_mode mode;
849{
0e2eb331 850 return (GET_CODE (op) == CONST_INT
0e2eb331 851 || register_operand (op, mode));
bf2a98b3 852}
853
8a5ff806 854/* Return 1 if OP is something that can be reloaded into a register;
855 if it is a MEM, it need not be valid. */
856
857int
858some_operand (op, mode)
859 register rtx op;
860 enum machine_mode mode;
861{
862 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
863 return 0;
864
865 switch (GET_CODE (op))
866 {
f2cc13dc 867 case REG:
868 case MEM:
869 case CONST_INT:
870 case CONST_DOUBLE:
871 case CONST_VECTOR:
872 case LABEL_REF:
873 case SYMBOL_REF:
874 case CONST:
875 case HIGH:
8a5ff806 876 return 1;
877
878 case SUBREG:
879 return some_operand (SUBREG_REG (op), VOIDmode);
99c14947 880
881 default:
882 break;
8a5ff806 883 }
884
885 return 0;
886}
887
bb4e263c 888/* Likewise, but don't accept constants. */
889
890int
891some_ni_operand (op, mode)
892 register rtx op;
893 enum machine_mode mode;
894{
895 if (GET_MODE (op) != mode && mode != VOIDmode)
896 return 0;
897
898 if (GET_CODE (op) == SUBREG)
899 op = SUBREG_REG (op);
900
901 return (GET_CODE (op) == REG || GET_CODE (op) == MEM);
902}
903
/* Return 1 if OP is a valid operand for the source of a move insn.
   Symbolic constants are restricted under -mexplicit-relocs; byte and
   word memory references require BWX; FP constants other than zero are
   rejected.  */

int
input_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_MODE_CLASS (mode) == MODE_FLOAT && GET_MODE (op) != mode)
    return 0;

  switch (GET_CODE (op))
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
      if (TARGET_EXPLICIT_RELOCS)
	{
	  /* We don't split symbolic operands into something unintelligible
	     until after reload, but we do not wish non-small, non-global
	     symbolic operands to be reconstructed from their high/lo_sum
	     form.  */
	  return (small_symbolic_operand (op, mode)
		  || global_symbolic_operand (op, mode)
		  || gotdtp_symbolic_operand (op, mode)
		  || gottp_symbolic_operand (op, mode));
	}

      /* This handles both the Windows/NT and OSF cases.  */
      return mode == ptr_mode || mode == DImode;

    case HIGH:
      return (TARGET_EXPLICIT_RELOCS
	      && local_symbolic_operand (XEXP (op, 0), mode));

    case REG:
    case ADDRESSOF:
      return 1;

    case SUBREG:
      if (register_operand (op, mode))
	return 1;
      /* ... fall through ...  */
      /* A SUBREG that is not a register operand is handled like a MEM
	 below — NOTE(review): presumably a SUBREG of memory after
	 reload; confirm against callers.  */
    case MEM:
      /* Without BWX, QImode and HImode loads/stores don't exist.  */
      return ((TARGET_BWX || (mode != HImode && mode != QImode))
	      && general_operand (op, mode));

    case CONST_DOUBLE:
    case CONST_VECTOR:
      /* Only the zero constant can be moved directly.  */
      return op == CONST0_RTX (mode);

    case CONST_INT:
      return mode == QImode || mode == HImode || add_operand (op, mode);

    case CONSTANT_P_RTX:
      return 1;

    default:
      break;
    }

  return 0;
}
969
16b3392b 970/* Return 1 if OP is a SYMBOL_REF for a function known to be in this
484edb77 971 file, and in the same section as the current function. */
bf2a98b3 972
973int
16b3392b 974current_file_function_operand (op, mode)
bf2a98b3 975 rtx op;
769ea120 976 enum machine_mode mode ATTRIBUTE_UNUSED;
bf2a98b3 977{
8dca7aa1 978 if (GET_CODE (op) != SYMBOL_REF)
979 return 0;
980
484edb77 981 /* Easy test for recursion. */
982 if (op == XEXP (DECL_RTL (current_function_decl), 0))
983 return 1;
8dca7aa1 984
484edb77 985 /* Otherwise, we need the DECL for the SYMBOL_REF, which we can't get.
986 So SYMBOL_REF_FLAG has been declared to imply that the function is
987 in the default text section. So we must also check that the current
988 function is also in the text section. */
989 if (SYMBOL_REF_FLAG (op) && decl_in_text_section (current_function_decl))
990 return 1;
991
992 return 0;
8dca7aa1 993}
994
995/* Return 1 if OP is a SYMBOL_REF for which we can make a call via bsr. */
996
997int
998direct_call_operand (op, mode)
999 rtx op;
1000 enum machine_mode mode;
1001{
1002 /* Must be defined in this file. */
1003 if (! current_file_function_operand (op, mode))
1004 return 0;
1005
1006 /* If profiling is implemented via linker tricks, we can't jump
1007 to the nogp alternate entry point. */
1008 /* ??? TARGET_PROFILING_NEEDS_GP isn't really the right test,
1009 but is approximately correct for the OSF ABIs. Don't know
1010 what to do for VMS, NT, or UMK. */
1011 if (! TARGET_PROFILING_NEEDS_GP
7811c823 1012 && ! current_function_profile)
8dca7aa1 1013 return 0;
eea11b32 1014
1015 return 1;
bf2a98b3 1016}
1017
8afb6db4 1018/* Return true if OP is a LABEL_REF, or SYMBOL_REF or CONST referencing
487724a7 1019 a (non-tls) variable known to be defined in this file. */
8afb6db4 1020
1f0ce6a6 1021int
1022local_symbolic_operand (op, mode)
1023 rtx op;
9caef960 1024 enum machine_mode mode;
1f0ce6a6 1025{
5f7b9df8 1026 const char *str;
1027
9caef960 1028 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1029 return 0;
1030
1f0ce6a6 1031 if (GET_CODE (op) == LABEL_REF)
1032 return 1;
1033
1034 if (GET_CODE (op) == CONST
1035 && GET_CODE (XEXP (op, 0)) == PLUS
1036 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
1037 op = XEXP (XEXP (op, 0), 0);
1038
1039 if (GET_CODE (op) != SYMBOL_REF)
1040 return 0;
1041
5f7b9df8 1042 /* Easy pickings. */
1043 if (CONSTANT_POOL_ADDRESS_P (op) || STRING_POOL_ADDRESS_P (op))
1044 return 1;
1045
1046 /* ??? SYMBOL_REF_FLAG is set for local function symbols, but we
1047 run into problems with the rtl inliner in that the symbol was
1048 once external, but is local after inlining, which results in
1049 unrecognizable insns. */
1050
1051 str = XSTR (op, 0);
1052
487724a7 1053 /* If @[LS], then alpha_encode_section_info sez it's local. */
5f7b9df8 1054 if (str[0] == '@' && (str[1] == 'L' || str[1] == 'S'))
1055 return 1;
1056
1057 /* If *$, then ASM_GENERATE_INTERNAL_LABEL sez it's local. */
1058 if (str[0] == '*' && str[1] == '$')
1059 return 1;
1060
1061 return 0;
1f0ce6a6 1062}
1063
5dcb037d 1064/* Return true if OP is a SYMBOL_REF or CONST referencing a variable
1065 known to be defined in this file in the small data area. */
1066
1067int
1068small_symbolic_operand (op, mode)
1069 rtx op;
1070 enum machine_mode mode ATTRIBUTE_UNUSED;
1071{
1072 const char *str;
1073
1074 if (! TARGET_SMALL_DATA)
1075 return 0;
1076
8afb6db4 1077 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1078 return 0;
1079
5dcb037d 1080 if (GET_CODE (op) == CONST
1081 && GET_CODE (XEXP (op, 0)) == PLUS
1082 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
1083 op = XEXP (XEXP (op, 0), 0);
1084
1085 if (GET_CODE (op) != SYMBOL_REF)
1086 return 0;
1087
1088 if (CONSTANT_POOL_ADDRESS_P (op))
8afb6db4 1089 return GET_MODE_SIZE (get_pool_mode (op)) <= (unsigned) g_switch_value;
5dcb037d 1090 else
1091 {
1092 str = XSTR (op, 0);
5f7b9df8 1093 return str[0] == '@' && str[1] == 'S';
5dcb037d 1094 }
1095}
1096
8afb6db4 1097/* Return true if OP is a SYMBOL_REF or CONST referencing a variable
1098 not known (or known not) to be defined in this file. */
1099
1100int
1101global_symbolic_operand (op, mode)
1102 rtx op;
1103 enum machine_mode mode;
1104{
5f7b9df8 1105 const char *str;
1106
8afb6db4 1107 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1108 return 0;
1109
1110 if (GET_CODE (op) == CONST
1111 && GET_CODE (XEXP (op, 0)) == PLUS
1112 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
1113 op = XEXP (XEXP (op, 0), 0);
1114
1115 if (GET_CODE (op) != SYMBOL_REF)
1116 return 0;
1117
5f7b9df8 1118 if (local_symbolic_operand (op, mode))
1119 return 0;
1120
1121 /* Also verify that it's not a TLS symbol. */
1122 str = XSTR (op, 0);
1123 return str[0] != '%' && str[0] != '@';
8afb6db4 1124}
1125
4901f901 1126/* Return 1 if OP is a valid operand for the MEM of a CALL insn. */
1127
1128int
1129call_operand (op, mode)
1130 rtx op;
1131 enum machine_mode mode;
1132{
1133 if (mode != Pmode)
1134 return 0;
1135
1467e953 1136 if (GET_CODE (op) == REG)
1137 {
1138 if (TARGET_ABI_OSF)
9f0ce87d 1139 {
1140 /* Disallow virtual registers to cope with pathalogical test cases
1141 such as compile/930117-1.c in which the virtual reg decomposes
1142 to the frame pointer. Which is a hard reg that is not $27. */
1143 return (REGNO (op) == 27 || REGNO (op) > LAST_VIRTUAL_REGISTER);
1144 }
1467e953 1145 else
1146 return 1;
1147 }
8afb6db4 1148 if (TARGET_ABI_UNICOSMK)
1149 return 0;
1150 if (GET_CODE (op) == SYMBOL_REF)
1151 return 1;
1467e953 1152
1153 return 0;
4901f901 1154}
1155
9caef960 1156/* Returns 1 if OP is a symbolic operand, i.e. a symbol_ref or a label_ref,
1157 possibly with an offset. */
1158
1159int
1160symbolic_operand (op, mode)
1161 register rtx op;
1162 enum machine_mode mode;
1163{
1164 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1165 return 0;
1166 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1167 return 1;
1168 if (GET_CODE (op) == CONST
1169 && GET_CODE (XEXP (op,0)) == PLUS
1170 && GET_CODE (XEXP (XEXP (op,0), 0)) == SYMBOL_REF
1171 && GET_CODE (XEXP (XEXP (op,0), 1)) == CONST_INT)
1172 return 1;
1173 return 0;
1174}
1175
5f7b9df8 1176/* Return true if OP is valid for a particular TLS relocation. */
1177
1178static int
1179tls_symbolic_operand_1 (op, mode, size, unspec)
1180 rtx op;
1181 enum machine_mode mode;
1182 int size, unspec;
1183{
1184 const char *str;
1185 int letter;
1186
1187 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1188 return 0;
1189
1190 if (GET_CODE (op) != CONST)
1191 return 0;
1192 op = XEXP (op, 0);
1193
1194 if (GET_CODE (op) != UNSPEC || XINT (op, 1) != unspec)
1195 return 0;
1196 op = XVECEXP (op, 0, 0);
1197
1198 if (GET_CODE (op) != SYMBOL_REF)
1199 return 0;
1200 str = XSTR (op, 0);
1201
1202 if (str[0] == '%')
1203 {
1204 if (size != 64)
1205 return 0;
1206 }
1207 else if (str[0] == '@')
1208 {
1209 if (alpha_tls_size > size)
1210 return 0;
1211 }
1212 else
1213 return 0;
1214
1215 letter = (unspec == UNSPEC_DTPREL ? 'D' : 'T');
1216
1217 return str[1] == letter;
1218}
1219
1220/* Return true if OP is valid for 16-bit DTP relative relocations. */
1221
1222int
1223dtp16_symbolic_operand (op, mode)
1224 rtx op;
1225 enum machine_mode mode;
1226{
1227 return tls_symbolic_operand_1 (op, mode, 16, UNSPEC_DTPREL);
1228}
1229
1230/* Return true if OP is valid for 32-bit DTP relative relocations. */
1231
1232int
1233dtp32_symbolic_operand (op, mode)
1234 rtx op;
1235 enum machine_mode mode;
1236{
1237 return tls_symbolic_operand_1 (op, mode, 32, UNSPEC_DTPREL);
1238}
1239
1240/* Return true if OP is valid for 64-bit DTP relative relocations. */
1241
1242int
1243gotdtp_symbolic_operand (op, mode)
1244 rtx op;
1245 enum machine_mode mode;
1246{
1247 return tls_symbolic_operand_1 (op, mode, 64, UNSPEC_DTPREL);
1248}
1249
1250/* Return true if OP is valid for 16-bit TP relative relocations. */
1251
1252int
1253tp16_symbolic_operand (op, mode)
1254 rtx op;
1255 enum machine_mode mode;
1256{
1257 return tls_symbolic_operand_1 (op, mode, 16, UNSPEC_TPREL);
1258}
1259
1260/* Return true if OP is valid for 32-bit TP relative relocations. */
1261
1262int
1263tp32_symbolic_operand (op, mode)
1264 rtx op;
1265 enum machine_mode mode;
1266{
1267 return tls_symbolic_operand_1 (op, mode, 32, UNSPEC_TPREL);
1268}
1269
1270/* Return true if OP is valid for 64-bit TP relative relocations. */
1271
1272int
1273gottp_symbolic_operand (op, mode)
1274 rtx op;
1275 enum machine_mode mode;
1276{
1277 return tls_symbolic_operand_1 (op, mode, 64, UNSPEC_TPREL);
1278}
1279
bf2a98b3 1280/* Return 1 if OP is a valid Alpha comparison operator. Here we know which
1281 comparisons are valid in which insn. */
1282
1283int
1284alpha_comparison_operator (op, mode)
1285 register rtx op;
1286 enum machine_mode mode;
1287{
1288 enum rtx_code code = GET_CODE (op);
1289
a4110d9a 1290 if (mode != GET_MODE (op) && mode != VOIDmode)
bf2a98b3 1291 return 0;
1292
1293 return (code == EQ || code == LE || code == LT
41ee2fd4 1294 || code == LEU || code == LTU);
bf2a98b3 1295}
1296
bbf31a61 1297/* Return 1 if OP is a valid Alpha comparison operator against zero.
1298 Here we know which comparisons are valid in which insn. */
1299
1300int
1301alpha_zero_comparison_operator (op, mode)
1302 register rtx op;
1303 enum machine_mode mode;
1304{
1305 enum rtx_code code = GET_CODE (op);
1306
1307 if (mode != GET_MODE (op) && mode != VOIDmode)
1308 return 0;
1309
1310 return (code == EQ || code == NE || code == LE || code == LT
1311 || code == LEU || code == LTU);
1312}
1313
2cc5a04b 1314/* Return 1 if OP is a valid Alpha swapped comparison operator. */
1315
1316int
1317alpha_swapped_comparison_operator (op, mode)
1318 register rtx op;
1319 enum machine_mode mode;
1320{
1321 enum rtx_code code = GET_CODE (op);
1322
a4110d9a 1323 if ((mode != GET_MODE (op) && mode != VOIDmode)
1324 || GET_RTX_CLASS (code) != '<')
2cc5a04b 1325 return 0;
1326
1327 code = swap_condition (code);
1328 return (code == EQ || code == LE || code == LT
41ee2fd4 1329 || code == LEU || code == LTU);
2cc5a04b 1330}
1331
bf2a98b3 1332/* Return 1 if OP is a signed comparison operation. */
1333
1334int
1335signed_comparison_operator (op, mode)
1336 register rtx op;
769ea120 1337 enum machine_mode mode ATTRIBUTE_UNUSED;
bf2a98b3 1338{
a4110d9a 1339 enum rtx_code code = GET_CODE (op);
99c14947 1340
a4110d9a 1341 if (mode != GET_MODE (op) && mode != VOIDmode)
1342 return 0;
bf2a98b3 1343
a4110d9a 1344 return (code == EQ || code == NE
1345 || code == LE || code == LT
1346 || code == GE || code == GT);
1347}
1348
1349/* Return 1 if OP is a valid Alpha floating point comparison operator.
1350 Here we know which comparisons are valid in which insn. */
1351
1352int
1353alpha_fp_comparison_operator (op, mode)
1354 register rtx op;
1355 enum machine_mode mode;
1356{
1357 enum rtx_code code = GET_CODE (op);
1358
1359 if (mode != GET_MODE (op) && mode != VOIDmode)
1360 return 0;
1361
1362 return (code == EQ || code == LE || code == LT || code == UNORDERED);
bf2a98b3 1363}
1364
1365/* Return 1 if this is a divide or modulus operator. */
1366
1367int
1368divmod_operator (op, mode)
1369 register rtx op;
769ea120 1370 enum machine_mode mode ATTRIBUTE_UNUSED;
bf2a98b3 1371{
1372 switch (GET_CODE (op))
1373 {
1374 case DIV: case MOD: case UDIV: case UMOD:
1375 return 1;
99c14947 1376
1377 default:
1378 break;
bf2a98b3 1379 }
1380
1381 return 0;
1382}
1383
1384/* Return 1 if this memory address is a known aligned register plus
1385 a constant. It must be a valid address. This means that we can do
1386 this as an aligned reference plus some offset.
1387
a99a652b 1388 Take into account what reload will do. */
bf2a98b3 1389
1390int
1391aligned_memory_operand (op, mode)
1392 register rtx op;
1393 enum machine_mode mode;
1394{
cc215844 1395 rtx base;
bf2a98b3 1396
a99a652b 1397 if (reload_in_progress)
1398 {
cc215844 1399 rtx tmp = op;
1400 if (GET_CODE (tmp) == SUBREG)
1401 tmp = SUBREG_REG (tmp);
1402 if (GET_CODE (tmp) == REG
1403 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
1404 {
1405 op = reg_equiv_memory_loc[REGNO (tmp)];
1406 if (op == 0)
1407 return 0;
1408 }
a99a652b 1409 }
bf2a98b3 1410
a99a652b 1411 if (GET_CODE (op) != MEM
cc215844 1412 || GET_MODE (op) != mode)
bf2a98b3 1413 return 0;
bf2a98b3 1414 op = XEXP (op, 0);
1415
cc215844 1416 /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
1417 sorts of constructs. Dig for the real base register. */
1418 if (reload_in_progress
1419 && GET_CODE (op) == PLUS
1420 && GET_CODE (XEXP (op, 0)) == PLUS)
1421 base = XEXP (XEXP (op, 0), 0);
1422 else
1423 {
1424 if (! memory_address_p (mode, op))
1425 return 0;
1426 base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
1427 }
bf2a98b3 1428
80909c64 1429 return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) >= 32);
bf2a98b3 1430}
1431
1432/* Similar, but return 1 if OP is a MEM which is not alignable. */
1433
1434int
1435unaligned_memory_operand (op, mode)
1436 register rtx op;
1437 enum machine_mode mode;
1438{
cc215844 1439 rtx base;
1440
1441 if (reload_in_progress)
bf2a98b3 1442 {
cc215844 1443 rtx tmp = op;
1444 if (GET_CODE (tmp) == SUBREG)
1445 tmp = SUBREG_REG (tmp);
1446 if (GET_CODE (tmp) == REG
1447 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
1448 {
1449 op = reg_equiv_memory_loc[REGNO (tmp)];
1450 if (op == 0)
1451 return 0;
1452 }
bf2a98b3 1453 }
1454
cc215844 1455 if (GET_CODE (op) != MEM
1456 || GET_MODE (op) != mode)
bf2a98b3 1457 return 0;
bf2a98b3 1458 op = XEXP (op, 0);
1459
cc215844 1460 /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
1461 sorts of constructs. Dig for the real base register. */
1462 if (reload_in_progress
1463 && GET_CODE (op) == PLUS
1464 && GET_CODE (XEXP (op, 0)) == PLUS)
1465 base = XEXP (XEXP (op, 0), 0);
1466 else
1467 {
1468 if (! memory_address_p (mode, op))
1469 return 0;
1470 base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
1471 }
bf2a98b3 1472
80909c64 1473 return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) < 32);
b044f41c 1474}
1475
1476/* Return 1 if OP is either a register or an unaligned memory location. */
1477
1478int
1479reg_or_unaligned_mem_operand (op, mode)
1480 rtx op;
1481 enum machine_mode mode;
1482{
1483 return register_operand (op, mode) || unaligned_memory_operand (op, mode);
bf2a98b3 1484}
1485
1486/* Return 1 if OP is any memory location. During reload a pseudo matches. */
1487
1488int
1489any_memory_operand (op, mode)
1490 register rtx op;
769ea120 1491 enum machine_mode mode ATTRIBUTE_UNUSED;
bf2a98b3 1492{
1493 return (GET_CODE (op) == MEM
1494 || (GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == REG)
1495 || (reload_in_progress && GET_CODE (op) == REG
1496 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1497 || (reload_in_progress && GET_CODE (op) == SUBREG
1498 && GET_CODE (SUBREG_REG (op)) == REG
1499 && REGNO (SUBREG_REG (op)) >= FIRST_PSEUDO_REGISTER));
1500}
1501
d882587d 1502/* Returns 1 if OP is not an eliminable register.
1503
1504 This exists to cure a pathological abort in the s8addq (et al) patterns,
1505
1506 long foo () { long t; bar(); return (long) &t * 26107; }
1507
1508 which run afoul of a hack in reload to cure a (presumably) similar
1509 problem with lea-type instructions on other targets. But there is
1510 one of us and many of them, so work around the problem by selectively
1511 preventing combine from making the optimization. */
1512
1513int
1514reg_not_elim_operand (op, mode)
1515 register rtx op;
1516 enum machine_mode mode;
1517{
1518 rtx inner = op;
1519 if (GET_CODE (op) == SUBREG)
1520 inner = SUBREG_REG (op);
1521 if (inner == frame_pointer_rtx || inner == arg_pointer_rtx)
1522 return 0;
1523
1524 return register_operand (op, mode);
1525}
b9a5aa8e 1526
1836306d 1527/* Return 1 is OP is a memory location that is not a reference (using
be2828ce 1528 an AND) to an unaligned location. Take into account what reload
1529 will do. */
1530
1531int
1532normal_memory_operand (op, mode)
1533 register rtx op;
ace75b22 1534 enum machine_mode mode ATTRIBUTE_UNUSED;
be2828ce 1535{
cc215844 1536 if (reload_in_progress)
be2828ce 1537 {
cc215844 1538 rtx tmp = op;
1539 if (GET_CODE (tmp) == SUBREG)
1540 tmp = SUBREG_REG (tmp);
1541 if (GET_CODE (tmp) == REG
1542 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
1543 {
1544 op = reg_equiv_memory_loc[REGNO (tmp)];
be2828ce 1545
cc215844 1546 /* This may not have been assigned an equivalent address if it will
1547 be eliminated. In that case, it doesn't matter what we do. */
1548 if (op == 0)
1549 return 1;
1550 }
be2828ce 1551 }
1552
1553 return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) != AND;
1554}
1836306d 1555
1556/* Accept a register, but not a subreg of any kind. This allows us to
1557 avoid pathological cases in reload wrt data movement common in
1558 int->fp conversion. */
1559
1560int
1561reg_no_subreg_operand (op, mode)
1562 register rtx op;
1563 enum machine_mode mode;
1564{
2f1a5feb 1565 if (GET_CODE (op) != REG)
1836306d 1566 return 0;
1567 return register_operand (op, mode);
1568}
14f7bc98 1569
45981c0a 1570/* Recognize an addition operation that includes a constant. Used to
14f7bc98 1571 convince reload to canonize (plus (plus reg c1) c2) during register
1572 elimination. */
1573
1574int
1575addition_operation (op, mode)
1576 register rtx op;
1577 enum machine_mode mode;
1578{
1579 if (GET_MODE (op) != mode && mode != VOIDmode)
1580 return 0;
1581 if (GET_CODE (op) == PLUS
1582 && register_operand (XEXP (op, 0), mode)
1583 && GET_CODE (XEXP (op, 1)) == CONST_INT
1584 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op, 1)), 'K'))
1585 return 1;
1586 return 0;
1587}
1588
f5a60074 1589/* Implements CONST_OK_FOR_LETTER_P. Return true if the value matches
1590 the range defined for C in [I-P]. */
1591
1592bool
1593alpha_const_ok_for_letter_p (value, c)
1594 HOST_WIDE_INT value;
1595 int c;
1596{
1597 switch (c)
1598 {
1599 case 'I':
1600 /* An unsigned 8 bit constant. */
1601 return (unsigned HOST_WIDE_INT) value < 0x100;
1602 case 'J':
1603 /* The constant zero. */
1604 return value == 0;
1605 case 'K':
1606 /* A signed 16 bit constant. */
1607 return (unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000;
1608 case 'L':
1609 /* A shifted signed 16 bit constant appropriate for LDAH. */
1610 return ((value & 0xffff) == 0
1611 && ((value) >> 31 == -1 || value >> 31 == 0));
1612 case 'M':
1613 /* A constant that can be AND'ed with using a ZAP insn. */
1614 return zap_mask (value);
1615 case 'N':
1616 /* A complemented unsigned 8 bit constant. */
1617 return (unsigned HOST_WIDE_INT) (~ value) < 0x100;
1618 case 'O':
1619 /* A negated unsigned 8 bit constant. */
1620 return (unsigned HOST_WIDE_INT) (- value) < 0x100;
1621 case 'P':
1622 /* The constant 1, 2 or 3. */
1623 return value == 1 || value == 2 || value == 3;
1624
1625 default:
1626 return false;
1627 }
1628}
1629
1630/* Implements CONST_DOUBLE_OK_FOR_LETTER_P. Return true if VALUE
1631 matches for C in [GH]. */
1632
1633bool
1634alpha_const_double_ok_for_letter_p (value, c)
1635 rtx value;
1636 int c;
1637{
1638 switch (c)
1639 {
1640 case 'G':
1641 /* The floating point zero constant. */
1642 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
1643 && value == CONST0_RTX (GET_MODE (value)));
1644
1645 case 'H':
1646 /* A valid operand of a ZAP insn. */
1647 return (GET_MODE (value) == VOIDmode
1648 && zap_mask (CONST_DOUBLE_LOW (value))
1649 && zap_mask (CONST_DOUBLE_HIGH (value)));
1650
1651 default:
1652 return false;
1653 }
1654}
1655
1656/* Implements CONST_DOUBLE_OK_FOR_LETTER_P. Return true if VALUE
1657 matches for C. */
1658
1659bool
1660alpha_extra_constraint (value, c)
1661 rtx value;
1662 int c;
1663{
1664 switch (c)
1665 {
1666 case 'Q':
1667 return normal_memory_operand (value, VOIDmode);
1668 case 'R':
1669 return direct_call_operand (value, Pmode);
1670 case 'S':
1671 return (GET_CODE (value) == CONST_INT
1672 && (unsigned HOST_WIDE_INT) INTVAL (value) < 64);
1673 case 'T':
1674 return GET_CODE (value) == HIGH;
1675 case 'U':
1676 return TARGET_ABI_UNICOSMK && symbolic_operand (value, VOIDmode);
f2cc13dc 1677 case 'W':
1678 return (GET_CODE (value) == CONST_VECTOR
1679 && value == CONST0_RTX (GET_MODE (value)));
f5a60074 1680 default:
1681 return false;
1682 }
1683}
1684
550e415f 1685/* Return 1 if this function can directly return via $26. */
1686
1687int
1688direct_return ()
1689{
9caef960 1690 return (! TARGET_ABI_OPEN_VMS && ! TARGET_ABI_UNICOSMK
1467e953 1691 && reload_completed
1692 && alpha_sa_size () == 0
550e415f 1693 && get_frame_size () == 0
1694 && current_function_outgoing_args_size == 0
1695 && current_function_pretend_args_size == 0);
1696}
ecb98d40 1697
1698/* Return the ADDR_VEC associated with a tablejump insn. */
1699
1700rtx
1701alpha_tablejump_addr_vec (insn)
1702 rtx insn;
1703{
1704 rtx tmp;
1705
1706 tmp = JUMP_LABEL (insn);
1707 if (!tmp)
1708 return NULL_RTX;
1709 tmp = NEXT_INSN (tmp);
1710 if (!tmp)
1711 return NULL_RTX;
1712 if (GET_CODE (tmp) == JUMP_INSN
1713 && GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)
1714 return PATTERN (tmp);
1715 return NULL_RTX;
1716}
1717
1718/* Return the label of the predicted edge, or CONST0_RTX if we don't know. */
1719
1720rtx
1721alpha_tablejump_best_label (insn)
1722 rtx insn;
1723{
1724 rtx jump_table = alpha_tablejump_addr_vec (insn);
1725 rtx best_label = NULL_RTX;
1726
1727 /* ??? Once the CFG doesn't keep getting completely rebuilt, look
1728 there for edge frequency counts from profile data. */
1729
1730 if (jump_table)
1731 {
1732 int n_labels = XVECLEN (jump_table, 1);
1733 int best_count = -1;
1734 int i, j;
1735
1736 for (i = 0; i < n_labels; i++)
1737 {
1738 int count = 1;
1739
1740 for (j = i + 1; j < n_labels; j++)
1741 if (XEXP (XVECEXP (jump_table, 1, i), 0)
1742 == XEXP (XVECEXP (jump_table, 1, j), 0))
1743 count++;
1744
1745 if (count > best_count)
1746 best_count = count, best_label = XVECEXP (jump_table, 1, i);
1747 }
1748 }
1749
1750 return best_label ? best_label : const0_rtx;
1751}
5f7b9df8 1752
1753/* Return the TLS model to use for SYMBOL. */
1754
1755static enum tls_model
1756tls_symbolic_operand_type (symbol)
1757 rtx symbol;
1758{
1759 const char *str;
1760
1761 if (GET_CODE (symbol) != SYMBOL_REF)
1762 return 0;
1763 str = XSTR (symbol, 0);
1764
1765 if (str[0] == '%')
1766 {
1767 /* ??? Be prepared for -ftls-model=local-dynamic. Perhaps we shouldn't
1768 have separately encoded local-ness. On well, maybe the user will use
1769 attribute visibility next time. At least we don't crash... */
1770 if (str[1] == 'G' || str[1] == 'D')
1771 return TLS_MODEL_GLOBAL_DYNAMIC;
1772 if (str[1] == 'T')
1773 return TLS_MODEL_INITIAL_EXEC;
1774 }
1775 else if (str[0] == '@')
1776 {
1777 if (str[1] == 'D')
1778 {
1779 /* Local dynamic is a waste if we're not going to combine
1780 the __tls_get_addr calls. So avoid it if not optimizing. */
1781 if (optimize)
1782 return TLS_MODEL_LOCAL_DYNAMIC;
1783 else
1784 return TLS_MODEL_GLOBAL_DYNAMIC;
1785 }
1786 if (str[1] == 'T')
1787 {
1788 /* 64-bit local exec is the same as initial exec except without
1789 the dynamic relocation. In either case we use a got entry. */
1790 if (alpha_tls_size == 64)
1791 return TLS_MODEL_INITIAL_EXEC;
1792 else
1793 return TLS_MODEL_LOCAL_EXEC;
1794 }
1795 }
1796
1797 return 0;
1798}
1799
14f7bc98 1800\f
484edb77 1801/* Return true if the function DECL will be placed in the default text
1802 section. */
1803/* ??? Ideally we'd be able to always move from a SYMBOL_REF back to the
1804 decl, as that would allow us to determine if two functions are in the
1805 same section, which is what we really want to know. */
1806
1807static bool
1808decl_in_text_section (decl)
1809 tree decl;
1810{
1811 return (DECL_SECTION_NAME (decl) == NULL_TREE
1812 && ! (flag_function_sections
1813 || (targetm.have_named_sections
1814 && DECL_ONE_ONLY (decl))));
1815}
1816
52470889 1817/* Return true if EXP should be placed in the small data section. */
1818
1819static bool
1820alpha_in_small_data_p (exp)
1821 tree exp;
1822{
0aad4cd2 1823 /* We want to merge strings, so we never consider them small data. */
1824 if (TREE_CODE (exp) == STRING_CST)
1825 return false;
1826
52470889 1827 if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
1828 {
1829 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
1830 if (strcmp (section, ".sdata") == 0
1831 || strcmp (section, ".sbss") == 0)
1832 return true;
1833 }
1834 else
1835 {
1836 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
1837
1838 /* If this is an incomplete type with size 0, then we can't put it
1839 in sdata because it might be too big when completed. */
1840 if (size > 0 && size <= g_switch_value)
1841 return true;
1842 }
1843
1844 return false;
1845}
1846
1f0ce6a6 1847/* If we are referencing a function that is static, make the SYMBOL_REF
1848 special. We use this to see indicate we can branch to this function
1849 without setting PV or restoring GP.
1850
1851 If this is a variable that is known to be defined locally, add "@v"
1852 to the name. If in addition the variable is to go in .sdata/.sbss,
1853 then add "@s" instead. */
1854
7811991d 1855static void
41eb471c 1856alpha_encode_section_info (decl, first)
1f0ce6a6 1857 tree decl;
41eb471c 1858 int first ATTRIBUTE_UNUSED;
1f0ce6a6 1859{
1860 const char *symbol_str;
5f7b9df8 1861 bool is_local;
1862 char encoding = 0;
33459320 1863 rtx rtl, symbol;
1f0ce6a6 1864
33459320 1865 rtl = DECL_P (decl) ? DECL_RTL (decl) : TREE_CST_RTL (decl);
1866
1867 /* Careful not to prod global register variables. */
1868 if (GET_CODE (rtl) != MEM)
1869 return;
1870 symbol = XEXP (rtl, 0);
1871 if (GET_CODE (symbol) != SYMBOL_REF)
1872 return;
1873
1f0ce6a6 1874 if (TREE_CODE (decl) == FUNCTION_DECL)
1875 {
484edb77 1876 /* We mark public functions once they are emitted; otherwise we
1877 don't know that they exist in this unit of translation. */
1878 if (TREE_PUBLIC (decl))
1879 return;
c67ce487 1880
484edb77 1881 /* Do not mark functions that are not in .text; otherwise we
1882 don't know that they are near enough for a direct branch. */
1883 if (! decl_in_text_section (decl))
1884 return;
1885
33459320 1886 SYMBOL_REF_FLAG (symbol) = 1;
1f0ce6a6 1887 return;
1888 }
1889
1890 /* Early out if we're not going to do anything with this data. */
1891 if (! TARGET_EXPLICIT_RELOCS)
1892 return;
1893
33459320 1894 symbol_str = XSTR (symbol, 0);
1f0ce6a6 1895
1896 /* A variable is considered "local" if it is defined in this module. */
33459320 1897 is_local = (*targetm.binds_local_p) (decl);
1f0ce6a6 1898
5f7b9df8 1899 /* Care for TLS variables. */
1900 if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
1901 {
24dfead4 1902 switch (decl_tls_model (decl))
5f7b9df8 1903 {
1904 case TLS_MODEL_GLOBAL_DYNAMIC:
1905 encoding = 'G';
1906 break;
1907 case TLS_MODEL_LOCAL_DYNAMIC:
1908 encoding = 'D';
1909 break;
1910 case TLS_MODEL_INITIAL_EXEC:
1911 case TLS_MODEL_LOCAL_EXEC:
1912 encoding = 'T';
1913 break;
1914 }
1915 }
1916 else if (is_local)
1917 {
1918 /* Determine if DECL will wind up in .sdata/.sbss. */
1919 if (alpha_in_small_data_p (decl))
1920 encoding = 'S';
1921 else
1922 encoding = 'L';
1923 }
1f0ce6a6 1924
1925 /* Finally, encode this into the symbol string. */
5f7b9df8 1926 if (encoding)
1f0ce6a6 1927 {
1f0ce6a6 1928 char *newstr;
1929 size_t len;
1930
5f7b9df8 1931 if (symbol_str[0] == (is_local ? '@' : '%'))
1f0ce6a6 1932 {
5f7b9df8 1933 if (symbol_str[1] == encoding)
1f0ce6a6 1934 return;
1935 symbol_str += 2;
1936 }
1937
1938 len = strlen (symbol_str) + 1;
1939 newstr = alloca (len + 2);
1940
5f7b9df8 1941 newstr[0] = (is_local ? '@' : '%');
1942 newstr[1] = encoding;
1f0ce6a6 1943 memcpy (newstr + 2, symbol_str, len);
1944
33459320 1945 XSTR (symbol, 0) = ggc_alloc_string (newstr, len + 2 - 1);
1f0ce6a6 1946 }
1f0ce6a6 1947}
1948
/* Undo the effects of the above: strip the '@'/'%' encoding prefix and
   any '*' user-label marker from STR.  */

static const char *
alpha_strip_name_encoding (str)
     const char *str;
{
  /* Skip the two-character section-info encoding.  */
  if (str[0] == '@' || str[0] == '%')
    str += 2;

  /* Skip the "don't prefix" marker.  */
  if (str[0] == '*')
    str++;

  return str;
}
1961
#if TARGET_ABI_OPEN_VMS
/* Return true if SYMNAME names a VMS linkage slot, i.e. ends in "..lk".  */
static bool
alpha_linkage_symbol_p (symname)
     const char *symname;
{
  int symlen = strlen (symname);

  return symlen > 4 && strcmp (symname + (symlen - 4), "..lk") == 0;
}

#define LINKAGE_SYMBOL_REF_P(X) \
  ((GET_CODE (X) == SYMBOL_REF \
    && alpha_linkage_symbol_p (XSTR (X, 0))) \
   || (GET_CODE (X) == CONST \
       && GET_CODE (XEXP (X, 0)) == PLUS \
       && GET_CODE (XEXP (XEXP (X, 0), 0)) == SYMBOL_REF \
       && alpha_linkage_symbol_p (XSTR (XEXP (XEXP (X, 0), 0), 0))))
#endif
1983
24b3c0ed 1984/* legitimate_address_p recognizes an RTL expression that is a valid
1985 memory address for an instruction. The MODE argument is the
1986 machine mode for the MEM expression that wants to use this address.
1987
1988 For Alpha, we have either a constant address or the sum of a
1989 register and a constant address, or just a register. For DImode,
1990 any of those forms can be surrounded with an AND that clear the
1991 low-order three bits; this is an "unaligned" access. */
1992
1993bool
1994alpha_legitimate_address_p (mode, x, strict)
1995 enum machine_mode mode;
1996 rtx x;
1997 int strict;
1998{
1999 /* If this is an ldq_u type address, discard the outer AND. */
2000 if (mode == DImode
2001 && GET_CODE (x) == AND
2002 && GET_CODE (XEXP (x, 1)) == CONST_INT
2003 && INTVAL (XEXP (x, 1)) == -8)
2004 x = XEXP (x, 0);
2005
2006 /* Discard non-paradoxical subregs. */
2007 if (GET_CODE (x) == SUBREG
2008 && (GET_MODE_SIZE (GET_MODE (x))
2009 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2010 x = SUBREG_REG (x);
2011
2012 /* Unadorned general registers are valid. */
2013 if (REG_P (x)
2014 && (strict
2015 ? STRICT_REG_OK_FOR_BASE_P (x)
2016 : NONSTRICT_REG_OK_FOR_BASE_P (x)))
2017 return true;
2018
2019 /* Constant addresses (i.e. +/- 32k) are valid. */
2020 if (CONSTANT_ADDRESS_P (x))
2021 return true;
2022
cf73d31f 2023#if TARGET_ABI_OPEN_VMS
2024 if (LINKAGE_SYMBOL_REF_P (x))
2025 return true;
2026#endif
2027
24b3c0ed 2028 /* Register plus a small constant offset is valid. */
2029 if (GET_CODE (x) == PLUS)
2030 {
2031 rtx ofs = XEXP (x, 1);
2032 x = XEXP (x, 0);
2033
2034 /* Discard non-paradoxical subregs. */
2035 if (GET_CODE (x) == SUBREG
2036 && (GET_MODE_SIZE (GET_MODE (x))
2037 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2038 x = SUBREG_REG (x);
2039
2040 if (REG_P (x))
2041 {
2042 if (! strict
2043 && NONSTRICT_REG_OK_FP_BASE_P (x)
2044 && GET_CODE (ofs) == CONST_INT)
2045 return true;
2046 if ((strict
2047 ? STRICT_REG_OK_FOR_BASE_P (x)
2048 : NONSTRICT_REG_OK_FOR_BASE_P (x))
2049 && CONSTANT_ADDRESS_P (ofs))
2050 return true;
2051 }
2052 else if (GET_CODE (x) == ADDRESSOF
2053 && GET_CODE (ofs) == CONST_INT)
2054 return true;
2055 }
2056
f5a60074 2057 /* If we're managing explicit relocations, LO_SUM is valid, as
2058 are small data symbols. */
2059 else if (TARGET_EXPLICIT_RELOCS)
1f0ce6a6 2060 {
f5a60074 2061 if (small_symbolic_operand (x, Pmode))
1f0ce6a6 2062 return true;
f5a60074 2063
2064 if (GET_CODE (x) == LO_SUM)
2065 {
2066 rtx ofs = XEXP (x, 1);
2067 x = XEXP (x, 0);
2068
2069 /* Discard non-paradoxical subregs. */
2070 if (GET_CODE (x) == SUBREG
2071 && (GET_MODE_SIZE (GET_MODE (x))
2072 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2073 x = SUBREG_REG (x);
2074
2075 /* Must have a valid base register. */
2076 if (! (REG_P (x)
2077 && (strict
2078 ? STRICT_REG_OK_FOR_BASE_P (x)
2079 : NONSTRICT_REG_OK_FOR_BASE_P (x))))
2080 return false;
2081
2082 /* The symbol must be local. */
5f7b9df8 2083 if (local_symbolic_operand (ofs, Pmode)
2084 || dtp32_symbolic_operand (ofs, Pmode)
2085 || tp32_symbolic_operand (ofs, Pmode))
f5a60074 2086 return true;
2087 }
1f0ce6a6 2088 }
2089
24b3c0ed 2090 return false;
2091}
2092
0d50f0b7 2093/* Try machine-dependent ways of modifying an illegitimate address
2094 to be legitimate. If we find one, return the new, valid address. */
2095
2096rtx
f5a60074 2097alpha_legitimize_address (x, scratch, mode)
0d50f0b7 2098 rtx x;
f5a60074 2099 rtx scratch;
0d50f0b7 2100 enum machine_mode mode ATTRIBUTE_UNUSED;
2101{
2102 HOST_WIDE_INT addend;
2103
2104 /* If the address is (plus reg const_int) and the CONST_INT is not a
2105 valid offset, compute the high part of the constant and add it to
2106 the register. Then our address is (plus temp low-part-const). */
2107 if (GET_CODE (x) == PLUS
2108 && GET_CODE (XEXP (x, 0)) == REG
2109 && GET_CODE (XEXP (x, 1)) == CONST_INT
2110 && ! CONSTANT_ADDRESS_P (XEXP (x, 1)))
2111 {
2112 addend = INTVAL (XEXP (x, 1));
2113 x = XEXP (x, 0);
2114 goto split_addend;
2115 }
2116
2117 /* If the address is (const (plus FOO const_int)), find the low-order
2118 part of the CONST_INT. Then load FOO plus any high-order part of the
2119 CONST_INT into a register. Our address is (plus reg low-part-const).
2120 This is done to reduce the number of GOT entries. */
f5a60074 2121 if (!no_new_pseudos
2122 && GET_CODE (x) == CONST
0d50f0b7 2123 && GET_CODE (XEXP (x, 0)) == PLUS
2124 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2125 {
2126 addend = INTVAL (XEXP (XEXP (x, 0), 1));
2127 x = force_reg (Pmode, XEXP (XEXP (x, 0), 0));
2128 goto split_addend;
2129 }
2130
2131 /* If we have a (plus reg const), emit the load as in (2), then add
2132 the two registers, and finally generate (plus reg low-part-const) as
2133 our address. */
f5a60074 2134 if (!no_new_pseudos
2135 && GET_CODE (x) == PLUS
0d50f0b7 2136 && GET_CODE (XEXP (x, 0)) == REG
2137 && GET_CODE (XEXP (x, 1)) == CONST
2138 && GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
2139 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)
2140 {
2141 addend = INTVAL (XEXP (XEXP (XEXP (x, 1), 0), 1));
2142 x = expand_simple_binop (Pmode, PLUS, XEXP (x, 0),
2143 XEXP (XEXP (XEXP (x, 1), 0), 0),
2144 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2145 goto split_addend;
2146 }
2147
1f0ce6a6 2148 /* If this is a local symbol, split the address into HIGH/LO_SUM parts. */
8afb6db4 2149 if (TARGET_EXPLICIT_RELOCS && symbolic_operand (x, Pmode))
1f0ce6a6 2150 {
5f7b9df8 2151 rtx r0, r16, eqv, tga, tp, insn, dest, seq;
2152
2153 switch (tls_symbolic_operand_type (x))
2154 {
2155 case TLS_MODEL_GLOBAL_DYNAMIC:
2156 start_sequence ();
2157
2158 r0 = gen_rtx_REG (Pmode, 0);
2159 r16 = gen_rtx_REG (Pmode, 16);
2160 tga = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_addr");
2161 dest = gen_reg_rtx (Pmode);
2162 seq = GEN_INT (alpha_next_sequence_number++);
2163
2164 emit_insn (gen_movdi_er_tlsgd (r16, pic_offset_table_rtx, x, seq));
2165 insn = gen_call_value_osf_tlsgd (r0, tga, seq);
2166 insn = emit_call_insn (insn);
2167 CONST_OR_PURE_CALL_P (insn) = 1;
2168 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);
2169
2170 insn = get_insns ();
2171 end_sequence ();
2172
2173 emit_libcall_block (insn, dest, r0, x);
2174 return dest;
2175
2176 case TLS_MODEL_LOCAL_DYNAMIC:
2177 start_sequence ();
2178
2179 r0 = gen_rtx_REG (Pmode, 0);
2180 r16 = gen_rtx_REG (Pmode, 16);
2181 tga = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_addr");
2182 scratch = gen_reg_rtx (Pmode);
2183 seq = GEN_INT (alpha_next_sequence_number++);
2184
2185 emit_insn (gen_movdi_er_tlsldm (r16, pic_offset_table_rtx, seq));
2186 insn = gen_call_value_osf_tlsldm (r0, tga, seq);
2187 insn = emit_call_insn (insn);
2188 CONST_OR_PURE_CALL_P (insn) = 1;
2189 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);
2190
2191 insn = get_insns ();
2192 end_sequence ();
2193
2194 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2195 UNSPEC_TLSLDM_CALL);
2196 emit_libcall_block (insn, scratch, r0, eqv);
2197
2198 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPREL);
2199 eqv = gen_rtx_CONST (Pmode, eqv);
2200
2201 if (alpha_tls_size == 64)
2202 {
2203 dest = gen_reg_rtx (Pmode);
2204 emit_insn (gen_rtx_SET (VOIDmode, dest, eqv));
2205 emit_insn (gen_adddi3 (dest, dest, scratch));
2206 return dest;
2207 }
2208 if (alpha_tls_size == 32)
2209 {
2210 insn = gen_rtx_HIGH (Pmode, eqv);
2211 insn = gen_rtx_PLUS (Pmode, scratch, insn);
2212 scratch = gen_reg_rtx (Pmode);
2213 emit_insn (gen_rtx_SET (VOIDmode, scratch, insn));
2214 }
2215 return gen_rtx_LO_SUM (Pmode, scratch, eqv);
2216
2217 case TLS_MODEL_INITIAL_EXEC:
2218 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
2219 eqv = gen_rtx_CONST (Pmode, eqv);
2220 tp = gen_reg_rtx (Pmode);
2221 scratch = gen_reg_rtx (Pmode);
2222 dest = gen_reg_rtx (Pmode);
2223
2224 emit_insn (gen_load_tp (tp));
2225 emit_insn (gen_rtx_SET (VOIDmode, scratch, eqv));
2226 emit_insn (gen_adddi3 (dest, tp, scratch));
2227 return dest;
2228
2229 case TLS_MODEL_LOCAL_EXEC:
2230 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
2231 eqv = gen_rtx_CONST (Pmode, eqv);
2232 tp = gen_reg_rtx (Pmode);
2233
2234 emit_insn (gen_load_tp (tp));
2235 if (alpha_tls_size == 32)
2236 {
2237 insn = gen_rtx_HIGH (Pmode, eqv);
2238 insn = gen_rtx_PLUS (Pmode, tp, insn);
2239 tp = gen_reg_rtx (Pmode);
2240 emit_insn (gen_rtx_SET (VOIDmode, tp, insn));
2241 }
2242 return gen_rtx_LO_SUM (Pmode, tp, eqv);
2243 }
2244
8afb6db4 2245 if (local_symbolic_operand (x, Pmode))
2246 {
2247 if (small_symbolic_operand (x, Pmode))
f5a60074 2248 return x;
8afb6db4 2249 else
2250 {
f5a60074 2251 if (!no_new_pseudos)
2252 scratch = gen_reg_rtx (Pmode);
2253 emit_insn (gen_rtx_SET (VOIDmode, scratch,
2254 gen_rtx_HIGH (Pmode, x)));
2255 return gen_rtx_LO_SUM (Pmode, scratch, x);
8afb6db4 2256 }
5dcb037d 2257 }
1f0ce6a6 2258 }
2259
0d50f0b7 2260 return NULL;
2261
2262 split_addend:
2263 {
f5a60074 2264 HOST_WIDE_INT low, high;
2265
2266 low = ((addend & 0xffff) ^ 0x8000) - 0x8000;
2267 addend -= low;
2268 high = ((addend & 0xffffffff) ^ 0x80000000) - 0x80000000;
2269 addend -= high;
2270
2271 if (addend)
2272 x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
2273 (no_new_pseudos ? scratch : NULL_RTX),
2274 1, OPTAB_LIB_WIDEN);
2275 if (high)
2276 x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
2277 (no_new_pseudos ? scratch : NULL_RTX),
2278 1, OPTAB_LIB_WIDEN);
2279
2280 return plus_constant (x, low);
0d50f0b7 2281 }
2282}
2283
805e22b2 2284/* We do not allow indirect calls to be optimized into sibling calls, nor
2285 can we allow a call to a function in a different compilation unit to
2286 be optimized into a sibcall. */
2287static bool
2288alpha_function_ok_for_sibcall (decl, exp)
2289 tree decl;
2290 tree exp ATTRIBUTE_UNUSED;
2291{
2292 return (decl
2293 && (! TREE_PUBLIC (decl)
2294 || (TREE_ASM_WRITTEN (decl) && (*targetm.binds_local_p) (decl))));
2295}
2296
f5a60074 2297/* For TARGET_EXPLICIT_RELOCS, we don't obfuscate a SYMBOL_REF to a
2298 small symbolic operand until after reload. At which point we need
2299 to replace (mem (symbol_ref)) with (mem (lo_sum $29 symbol_ref))
2300 so that sched2 has the proper dependency information. */
2301
2302int
792433e3 2303some_small_symbolic_operand (x, mode)
f5a60074 2304 rtx x;
2305 enum machine_mode mode ATTRIBUTE_UNUSED;
2306{
792433e3 2307 return for_each_rtx (&x, some_small_symbolic_operand_1, NULL);
367e2ab3 2308}
2309
2310static int
792433e3 2311some_small_symbolic_operand_1 (px, data)
367e2ab3 2312 rtx *px;
2313 void *data ATTRIBUTE_UNUSED;
2314{
2315 rtx x = *px;
f5a60074 2316
792433e3 2317 /* Don't re-split. */
2318 if (GET_CODE (x) == LO_SUM)
2319 return -1;
367e2ab3 2320
792433e3 2321 return small_symbolic_operand (x, Pmode) != 0;
f5a60074 2322}
2323
2324rtx
792433e3 2325split_small_symbolic_operand (x)
f5a60074 2326 rtx x;
2327{
455b78a0 2328 x = copy_insn (x);
792433e3 2329 for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
367e2ab3 2330 return x;
2331}
f5a60074 2332
367e2ab3 2333static int
792433e3 2334split_small_symbolic_operand_1 (px, data)
367e2ab3 2335 rtx *px;
2336 void *data ATTRIBUTE_UNUSED;
2337{
2338 rtx x = *px;
443bb1a6 2339
792433e3 2340 /* Don't re-split. */
2341 if (GET_CODE (x) == LO_SUM)
2342 return -1;
f5a60074 2343
367e2ab3 2344 if (small_symbolic_operand (x, Pmode))
2345 {
2346 x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
2347 *px = x;
792433e3 2348 return -1;
367e2ab3 2349 }
2350
792433e3 2351 return 0;
f5a60074 2352}
2353
0d50f0b7 2354/* Try a machine-dependent way of reloading an illegitimate address
2355 operand. If we find one, push the reload and return the new rtx. */
2356
2357rtx
2358alpha_legitimize_reload_address (x, mode, opnum, type, ind_levels)
2359 rtx x;
2360 enum machine_mode mode ATTRIBUTE_UNUSED;
2361 int opnum;
2362 int type;
2363 int ind_levels ATTRIBUTE_UNUSED;
2364{
2365 /* We must recognize output that we have already generated ourselves. */
2366 if (GET_CODE (x) == PLUS
2367 && GET_CODE (XEXP (x, 0)) == PLUS
2368 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2369 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2370 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2371 {
2372 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2373 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2374 opnum, type);
2375 return x;
2376 }
2377
2378 /* We wish to handle large displacements off a base register by
2379 splitting the addend across an ldah and the mem insn. This
2380 cuts number of extra insns needed from 3 to 1. */
2381 if (GET_CODE (x) == PLUS
2382 && GET_CODE (XEXP (x, 0)) == REG
2383 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2384 && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
2385 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2386 {
2387 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2388 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2389 HOST_WIDE_INT high
2390 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2391
2392 /* Check for 32-bit overflow. */
2393 if (high + low != val)
2394 return NULL_RTX;
2395
2396 /* Reload the high part into a base reg; leave the low part
2397 in the mem directly. */
2398 x = gen_rtx_PLUS (GET_MODE (x),
2399 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2400 GEN_INT (high)),
2401 GEN_INT (low));
2402
2403 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2404 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2405 opnum, type);
2406 return x;
2407 }
2408
2409 return NULL_RTX;
2410}
2411\f
bf2a98b3 2412/* REF is an alignable memory location. Place an aligned SImode
2413 reference into *PALIGNED_MEM and the number of bits to shift into
a99a652b 2414 *PBITNUM. SCRATCH is a free register for use in reloading out
2415 of range stack slots. */
bf2a98b3 2416
2417void
cc215844 2418get_aligned_mem (ref, paligned_mem, pbitnum)
2419 rtx ref;
bf2a98b3 2420 rtx *paligned_mem, *pbitnum;
2421{
2422 rtx base;
2423 HOST_WIDE_INT offset = 0;
2424
cc215844 2425 if (GET_CODE (ref) != MEM)
2426 abort ();
bf2a98b3 2427
cc215844 2428 if (reload_in_progress
2429 && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
a99a652b 2430 {
cc215844 2431 base = find_replacement (&XEXP (ref, 0));
2432
2433 if (! memory_address_p (GET_MODE (ref), base))
2434 abort ();
a99a652b 2435 }
bf2a98b3 2436 else
a99a652b 2437 {
a99a652b 2438 base = XEXP (ref, 0);
2439 }
bf2a98b3 2440
2441 if (GET_CODE (base) == PLUS)
2442 offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
2443
8259ab07 2444 *paligned_mem
2445 = widen_memory_access (ref, SImode, (offset & ~3) - offset);
bf2a98b3 2446
9caef960 2447 if (WORDS_BIG_ENDIAN)
2448 *pbitnum = GEN_INT (32 - (GET_MODE_BITSIZE (GET_MODE (ref))
2449 + (offset & 3) * 8));
2450 else
2451 *pbitnum = GEN_INT ((offset & 3) * 8);
bf2a98b3 2452}
2453
b044f41c 2454/* Similar, but just get the address. Handle the two reload cases.
2455 Add EXTRA_OFFSET to the address we return. */
bf2a98b3 2456
2457rtx
b044f41c 2458get_unaligned_address (ref, extra_offset)
bf2a98b3 2459 rtx ref;
b044f41c 2460 int extra_offset;
bf2a98b3 2461{
2462 rtx base;
2463 HOST_WIDE_INT offset = 0;
2464
cc215844 2465 if (GET_CODE (ref) != MEM)
2466 abort ();
bf2a98b3 2467
cc215844 2468 if (reload_in_progress
2469 && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
a99a652b 2470 {
a99a652b 2471 base = find_replacement (&XEXP (ref, 0));
cc215844 2472
2473 if (! memory_address_p (GET_MODE (ref), base))
2474 abort ();
a99a652b 2475 }
bf2a98b3 2476 else
a99a652b 2477 {
a99a652b 2478 base = XEXP (ref, 0);
2479 }
bf2a98b3 2480
2481 if (GET_CODE (base) == PLUS)
2482 offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
2483
b044f41c 2484 return plus_constant (base, offset + extra_offset);
bf2a98b3 2485}
14f7bc98 2486
f5a60074 2487/* On the Alpha, all (non-symbolic) constants except zero go into
2488 a floating-point register via memory. Note that we cannot
2489 return anything that is not a subset of CLASS, and that some
2490 symbolic constants cannot be dropped to memory. */
2491
2492enum reg_class
2493alpha_preferred_reload_class(x, class)
2494 rtx x;
2495 enum reg_class class;
2496{
2497 /* Zero is present in any register class. */
2498 if (x == CONST0_RTX (GET_MODE (x)))
2499 return class;
2500
2501 /* These sorts of constants we can easily drop to memory. */
2502 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2503 {
2504 if (class == FLOAT_REGS)
2505 return NO_REGS;
2506 if (class == ALL_REGS)
2507 return GENERAL_REGS;
2508 return class;
2509 }
2510
2511 /* All other kinds of constants should not (and in the case of HIGH
2512 cannot) be dropped to memory -- instead we use a GENERAL_REGS
2513 secondary reload. */
2514 if (CONSTANT_P (x))
2515 return (class == ALL_REGS ? GENERAL_REGS : class);
2516
2517 return class;
2518}
2519
14f7bc98 2520/* Loading and storing HImode or QImode values to and from memory
2521 usually requires a scratch register. The exceptions are loading
2522 QImode and HImode from an aligned address to a general register
2523 unless byte instructions are permitted.
2524
2525 We also cannot load an unaligned address or a paradoxical SUBREG
2526 into an FP register.
2527
2528 We also cannot do integral arithmetic into FP regs, as might result
2529 from register elimination into a DImode fp register. */
2530
2531enum reg_class
2532secondary_reload_class (class, mode, x, in)
2533 enum reg_class class;
2534 enum machine_mode mode;
2535 rtx x;
2536 int in;
2537{
d60e9bbd 2538 if ((mode == QImode || mode == HImode) && ! TARGET_BWX)
d2494d49 2539 {
d60e9bbd 2540 if (GET_CODE (x) == MEM
2541 || (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
2542 || (GET_CODE (x) == SUBREG
2543 && (GET_CODE (SUBREG_REG (x)) == MEM
2544 || (GET_CODE (SUBREG_REG (x)) == REG
2545 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER))))
d2494d49 2546 {
2547 if (!in || !aligned_memory_operand(x, mode))
2548 return GENERAL_REGS;
2549 }
2550 }
14f7bc98 2551
2552 if (class == FLOAT_REGS)
2553 {
2554 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
2555 return GENERAL_REGS;
2556
2557 if (GET_CODE (x) == SUBREG
2558 && (GET_MODE_SIZE (GET_MODE (x))
2559 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2560 return GENERAL_REGS;
2561
1f0ce6a6 2562 if (in && INTEGRAL_MODE_P (mode)
2563 && ! (memory_operand (x, mode) || x == const0_rtx))
14f7bc98 2564 return GENERAL_REGS;
2565 }
2566
2567 return NO_REGS;
2568}
bf2a98b3 2569\f
2570/* Subfunction of the following function. Update the flags of any MEM
2571 found in part of X. */
2572
2573static void
e2c8a34a 2574alpha_set_memflags_1 (x, in_struct_p, volatile_p, unchanging_p)
bf2a98b3 2575 rtx x;
e2c8a34a 2576 int in_struct_p, volatile_p, unchanging_p;
bf2a98b3 2577{
2578 int i;
2579
2580 switch (GET_CODE (x))
2581 {
2582 case SEQUENCE:
31d3e01c 2583 abort ();
2584
bf2a98b3 2585 case PARALLEL:
2586 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2587 alpha_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p,
e2c8a34a 2588 unchanging_p);
bf2a98b3 2589 break;
2590
2591 case INSN:
2592 alpha_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p,
e2c8a34a 2593 unchanging_p);
bf2a98b3 2594 break;
2595
2596 case SET:
2597 alpha_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p,
e2c8a34a 2598 unchanging_p);
bf2a98b3 2599 alpha_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p,
e2c8a34a 2600 unchanging_p);
bf2a98b3 2601 break;
2602
2603 case MEM:
2604 MEM_IN_STRUCT_P (x) = in_struct_p;
2605 MEM_VOLATILE_P (x) = volatile_p;
2606 RTX_UNCHANGING_P (x) = unchanging_p;
e2c8a34a 2607 /* Sadly, we cannot use alias sets because the extra aliasing
2608 produced by the AND interferes. Given that two-byte quantities
2609 are the only thing we would be able to differentiate anyway,
2610 there does not seem to be any point in convoluting the early
2611 out of the alias check. */
bf2a98b3 2612 break;
99c14947 2613
2614 default:
2615 break;
bf2a98b3 2616 }
2617}
2618
31d3e01c 2619/* Given INSN, which is an INSN list or the PATTERN of a single insn
2620 generated to perform a memory operation, look for any MEMs in either
2621 a SET_DEST or a SET_SRC and copy the in-struct, unchanging, and
2622 volatile flags from REF into each of the MEMs found. If REF is not
2623 a MEM, don't do anything. */
bf2a98b3 2624
2625void
2626alpha_set_memflags (insn, ref)
2627 rtx insn;
2628 rtx ref;
2629{
e2c8a34a 2630 int in_struct_p, volatile_p, unchanging_p;
849674a3 2631
2632 if (GET_CODE (ref) != MEM)
bf2a98b3 2633 return;
2634
849674a3 2635 in_struct_p = MEM_IN_STRUCT_P (ref);
2636 volatile_p = MEM_VOLATILE_P (ref);
2637 unchanging_p = RTX_UNCHANGING_P (ref);
849674a3 2638
2639 /* This is only called from alpha.md, after having had something
2640 generated from one of the insn patterns. So if everything is
2641 zero, the pattern is already up-to-date. */
e2c8a34a 2642 if (! in_struct_p && ! volatile_p && ! unchanging_p)
849674a3 2643 return;
2644
e2c8a34a 2645 alpha_set_memflags_1 (insn, in_struct_p, volatile_p, unchanging_p);
bf2a98b3 2646}
2647\f
2648/* Try to output insns to set TARGET equal to the constant C if it can be
ea5db00c 2649 done in less than N insns. Do all computations in MODE. Returns the place
2650 where the output has been placed if it can be done and the insns have been
2651 emitted. If it would take more than N insns, zero is returned and no
2652 insns and emitted. */
bf2a98b3 2653
ea5db00c 2654rtx
2655alpha_emit_set_const (target, mode, c, n)
bf2a98b3 2656 rtx target;
ea5db00c 2657 enum machine_mode mode;
bf2a98b3 2658 HOST_WIDE_INT c;
2659 int n;
6f86cb15 2660{
d2422fc2 2661 rtx result = 0;
2662 rtx orig_target = target;
6f86cb15 2663 int i;
2664
d2422fc2 2665 /* If we can't make any pseudos, TARGET is an SImode hard register, we
2666 can't load this constant in one insn, do this in DImode. */
2667 if (no_new_pseudos && mode == SImode
2668 && GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER
2669 && (result = alpha_emit_set_const_1 (target, mode, c, 1)) == 0)
2670 {
2671 target = gen_lowpart (DImode, target);
2672 mode = DImode;
2673 }
2674
65abff06 2675 /* Try 1 insn, then 2, then up to N. */
b8585446 2676 for (i = 1; i <= n; i++)
2677 {
2678 result = alpha_emit_set_const_1 (target, mode, c, i);
2679 if (result)
2680 {
2681 rtx insn = get_last_insn ();
2682 rtx set = single_set (insn);
2683 if (! CONSTANT_P (SET_SRC (set)))
2684 set_unique_reg_note (get_last_insn (), REG_EQUAL, GEN_INT (c));
2685 break;
2686 }
2687 }
6f86cb15 2688
d2422fc2 2689 /* Allow for the case where we changed the mode of TARGET. */
2690 if (result == target)
2691 result = orig_target;
2692
2693 return result;
6f86cb15 2694}
2695
2696/* Internal routine for the above to check for N or below insns. */
2697
2698static rtx
2699alpha_emit_set_const_1 (target, mode, c, n)
2700 rtx target;
2701 enum machine_mode mode;
2702 HOST_WIDE_INT c;
2703 int n;
bf2a98b3 2704{
bdb19034 2705 HOST_WIDE_INT new;
bf2a98b3 2706 int i, bits;
ea5db00c 2707 /* Use a pseudo if highly optimizing and still generating RTL. */
2708 rtx subtarget
d2422fc2 2709 = (flag_expensive_optimizations && !no_new_pseudos ? 0 : target);
301416af 2710 rtx temp, insn;
bf2a98b3 2711
bf2a98b3 2712 /* If this is a sign-extended 32-bit constant, we can do this in at most
2713 three insns, so do it if we have enough insns left. We always have
65abff06 2714 a sign-extended 32-bit constant when compiling on a narrow machine. */
bf2a98b3 2715
3bc2043a 2716 if (HOST_BITS_PER_WIDE_INT != 64
2717 || c >> 31 == -1 || c >> 31 == 0)
bf2a98b3 2718 {
bdb19034 2719 HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000;
bf2a98b3 2720 HOST_WIDE_INT tmp1 = c - low;
bdb19034 2721 HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
bf2a98b3 2722 HOST_WIDE_INT extra = 0;
2723
81d03ebd 2724 /* If HIGH will be interpreted as negative but the constant is
2725 positive, we must adjust it to do two ldha insns. */
2726
2727 if ((high & 0x8000) != 0 && c >= 0)
bf2a98b3 2728 {
2729 extra = 0x4000;
2730 tmp1 -= 0x40000000;
2731 high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
2732 }
2733
2734 if (c == low || (low == 0 && extra == 0))
3bc2043a 2735 {
2736 /* We used to use copy_to_suggested_reg (GEN_INT (c), target, mode)
2737 but that meant that we can't handle INT_MIN on 32-bit machines
2738 (like NT/Alpha), because we recurse indefinitely through
2739 emit_move_insn to gen_movdi. So instead, since we know exactly
2740 what we want, create it explicitly. */
2741
2742 if (target == NULL)
2743 target = gen_reg_rtx (mode);
941522d6 2744 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c)));
3bc2043a 2745 return target;
2746 }
6f86cb15 2747 else if (n >= 2 + (extra != 0))
bf2a98b3 2748 {
bdb19034 2749 temp = copy_to_suggested_reg (GEN_INT (high << 16), subtarget, mode);
ea5db00c 2750
301416af 2751 /* As of 2002-02-23, addsi3 is only available when not optimizing.
2752 This means that if we go through expand_binop, we'll try to
2753 generate extensions, etc, which will require new pseudos, which
2754 will fail during some split phases. The SImode add patterns
2755 still exist, but are not named. So build the insns by hand. */
2756
bf2a98b3 2757 if (extra != 0)
301416af 2758 {
2759 if (! subtarget)
2760 subtarget = gen_reg_rtx (mode);
2761 insn = gen_rtx_PLUS (mode, temp, GEN_INT (extra << 16));
2762 insn = gen_rtx_SET (VOIDmode, subtarget, insn);
2763 emit_insn (insn);
b8585446 2764 temp = subtarget;
301416af 2765 }
bf2a98b3 2766
301416af 2767 if (target == NULL)
2768 target = gen_reg_rtx (mode);
2769 insn = gen_rtx_PLUS (mode, temp, GEN_INT (low));
2770 insn = gen_rtx_SET (VOIDmode, target, insn);
2771 emit_insn (insn);
2772 return target;
bf2a98b3 2773 }
2774 }
2775
dacd345b 2776 /* If we couldn't do it that way, try some other methods. But if we have
07014ed9 2777 no instructions left, don't bother. Likewise, if this is SImode and
2778 we can't make pseudos, we can't do anything since the expand_binop
2779 and expand_unop calls will widen and try to make pseudos. */
bf2a98b3 2780
d2422fc2 2781 if (n == 1 || (mode == SImode && no_new_pseudos))
bf2a98b3 2782 return 0;
2783
dacd345b 2784 /* Next, see if we can load a related constant and then shift and possibly
bf2a98b3 2785 negate it to get the constant we want. Try this once each increasing
2786 numbers of insns. */
2787
2788 for (i = 1; i < n; i++)
2789 {
bdb19034 2790 /* First, see if minus some low bits, we've an easy load of
2791 high bits. */
2792
2793 new = ((c & 0xffff) ^ 0x8000) - 0x8000;
2794 if (new != 0
2795 && (temp = alpha_emit_set_const (subtarget, mode, c - new, i)) != 0)
2796 return expand_binop (mode, add_optab, temp, GEN_INT (new),
2797 target, 0, OPTAB_WIDEN);
2798
2799 /* Next try complementing. */
ea5db00c 2800 if ((temp = alpha_emit_set_const (subtarget, mode, ~ c, i)) != 0)
2801 return expand_unop (mode, one_cmpl_optab, temp, target, 0);
bf2a98b3 2802
ea5db00c 2803 /* Next try to form a constant and do a left shift. We can do this
bf2a98b3 2804 if some low-order bits are zero; the exact_log2 call below tells
2805 us that information. The bits we are shifting out could be any
2806 value, but here we'll just try the 0- and sign-extended forms of
2807 the constant. To try to increase the chance of having the same
2808 constant in more than one insn, start at the highest number of
2809 bits to shift, but try all possibilities in case a ZAPNOT will
2810 be useful. */
2811
2812 if ((bits = exact_log2 (c & - c)) > 0)
2813 for (; bits > 0; bits--)
dacd345b 2814 if ((temp = (alpha_emit_set_const
bdb19034 2815 (subtarget, mode, c >> bits, i))) != 0
ea5db00c 2816 || ((temp = (alpha_emit_set_const
2817 (subtarget, mode,
2818 ((unsigned HOST_WIDE_INT) c) >> bits, i)))
2819 != 0))
2820 return expand_binop (mode, ashl_optab, temp, GEN_INT (bits),
2821 target, 0, OPTAB_WIDEN);
bf2a98b3 2822
2823 /* Now try high-order zero bits. Here we try the shifted-in bits as
066efb8d 2824 all zero and all ones. Be careful to avoid shifting outside the
2825 mode and to avoid shifting outside the host wide int size. */
3bc2043a 2826 /* On narrow hosts, don't shift a 1 into the high bit, since we'll
2827 confuse the recursive call and set all of the high 32 bits. */
bf2a98b3 2828
066efb8d 2829 if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
3bc2043a 2830 - floor_log2 (c) - 1 - (HOST_BITS_PER_WIDE_INT < 64))) > 0)
bf2a98b3 2831 for (; bits > 0; bits--)
ea5db00c 2832 if ((temp = alpha_emit_set_const (subtarget, mode,
2833 c << bits, i)) != 0
2834 || ((temp = (alpha_emit_set_const
2835 (subtarget, mode,
2836 ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
2837 i)))
2838 != 0))
2839 return expand_binop (mode, lshr_optab, temp, GEN_INT (bits),
066efb8d 2840 target, 1, OPTAB_WIDEN);
bf2a98b3 2841
2842 /* Now try high-order 1 bits. We get that with a sign-extension.
066efb8d 2843 But one bit isn't enough here. Be careful to avoid shifting outside
65abff06 2844 the mode and to avoid shifting outside the host wide int size. */
9caef960 2845
066efb8d 2846 if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
2847 - floor_log2 (~ c) - 2)) > 0)
bf2a98b3 2848 for (; bits > 0; bits--)
ea5db00c 2849 if ((temp = alpha_emit_set_const (subtarget, mode,
2850 c << bits, i)) != 0
2851 || ((temp = (alpha_emit_set_const
2852 (subtarget, mode,
2853 ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
2854 i)))
2855 != 0))
2856 return expand_binop (mode, ashr_optab, temp, GEN_INT (bits),
2857 target, 0, OPTAB_WIDEN);
bf2a98b3 2858 }
2859
bdb19034 2860#if HOST_BITS_PER_WIDE_INT == 64
2861 /* Finally, see if can load a value into the target that is the same as the
2862 constant except that all bytes that are 0 are changed to be 0xff. If we
2863 can, then we can do a ZAPNOT to obtain the desired constant. */
2864
2865 new = c;
2866 for (i = 0; i < 64; i += 8)
2867 if ((new & ((HOST_WIDE_INT) 0xff << i)) == 0)
2868 new |= (HOST_WIDE_INT) 0xff << i;
e52799e9 2869
bdb19034 2870 /* We are only called for SImode and DImode. If this is SImode, ensure that
2871 we are sign extended to a full word. */
2872
2873 if (mode == SImode)
2874 new = ((new & 0xffffffff) ^ 0x80000000) - 0x80000000;
2875
2876 if (new != c && new != -1
2877 && (temp = alpha_emit_set_const (subtarget, mode, new, n - 1)) != 0)
2878 return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new),
e52799e9 2879 target, 0, OPTAB_WIDEN);
bdb19034 2880#endif
e52799e9 2881
bf2a98b3 2882 return 0;
2883}
996a379d 2884
2612f626 2885/* Having failed to find a 3 insn sequence in alpha_emit_set_const,
2886 fall back to a straight forward decomposition. We do this to avoid
2887 exponential run times encountered when looking for longer sequences
2888 with alpha_emit_set_const. */
2889
2890rtx
af792316 2891alpha_emit_set_long_const (target, c1, c2)
2612f626 2892 rtx target;
af792316 2893 HOST_WIDE_INT c1, c2;
2612f626 2894{
2612f626 2895 HOST_WIDE_INT d1, d2, d3, d4;
2612f626 2896
2897 /* Decompose the entire word */
af792316 2898#if HOST_BITS_PER_WIDE_INT >= 64
2899 if (c2 != -(c1 < 0))
2900 abort ();
2901 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2902 c1 -= d1;
2903 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2904 c1 = (c1 - d2) >> 32;
2905 d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2906 c1 -= d3;
2907 d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2908 if (c1 != d4)
2909 abort ();
2910#else
2911 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2912 c1 -= d1;
2913 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2914 if (c1 != d2)
2915 abort ();
2916 c2 += (d2 < 0);
2917 d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
2918 c2 -= d3;
2919 d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2920 if (c2 != d4)
2921 abort ();
2922#endif
2612f626 2923
2924 /* Construct the high word */
af792316 2925 if (d4)
2926 {
2927 emit_move_insn (target, GEN_INT (d4));
2928 if (d3)
2929 emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d3)));
2930 }
2612f626 2931 else
af792316 2932 emit_move_insn (target, GEN_INT (d3));
2612f626 2933
2934 /* Shift it into place */
af792316 2935 emit_move_insn (target, gen_rtx_ASHIFT (DImode, target, GEN_INT (32)));
2612f626 2936
af792316 2937 /* Add in the low bits. */
2938 if (d2)
2939 emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d2)));
2940 if (d1)
2941 emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1)));
2612f626 2942
af792316 2943 return target;
2612f626 2944}
2612f626 2945
cb6e3ae1 2946/* Expand a move instruction; return true if all work is done.
2947 We don't handle non-bwx subword loads here. */
2948
2949bool
2950alpha_expand_mov (mode, operands)
2951 enum machine_mode mode;
2952 rtx *operands;
2953{
2954 /* If the output is not a register, the input must be. */
2955 if (GET_CODE (operands[0]) == MEM
2956 && ! reg_or_0_operand (operands[1], mode))
2957 operands[1] = force_reg (mode, operands[1]);
2958
f5a60074 2959 /* Allow legitimize_address to perform some simplifications. */
62e050c6 2960 if (mode == Pmode && symbolic_operand (operands[1], mode))
1f0ce6a6 2961 {
05b07808 2962 rtx tmp;
2963
2964 /* With RTL inlining, at -O3, rtl is generated, stored, then actually
2965 compiled at the end of compilation. In the meantime, someone can
2966 re-encode-section-info on some symbol changing it e.g. from global
2967 to local-not-small. If this happens, we'd have emitted a plain
2968 load rather than a high+losum load and not recognize the insn.
2969
2970 So if rtl inlining is in effect, we delay the global/not-global
2971 decision until rest_of_compilation by wrapping it in an
2972 UNSPEC_SYMBOL. */
2973 if (TARGET_EXPLICIT_RELOCS && flag_inline_functions
2974 && rtx_equal_function_value_matters
2975 && global_symbolic_operand (operands[1], mode))
2976 {
2977 emit_insn (gen_movdi_er_maybe_g (operands[0], operands[1]));
2978 return true;
2979 }
2980
2981 tmp = alpha_legitimize_address (operands[1], operands[0], mode);
f5a60074 2982 if (tmp)
5dcb037d 2983 {
5f7b9df8 2984 if (tmp == operands[0])
2985 return true;
f5a60074 2986 operands[1] = tmp;
8afb6db4 2987 return false;
2988 }
1f0ce6a6 2989 }
2990
cb6e3ae1 2991 /* Early out for non-constants and valid constants. */
2992 if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode))
2993 return false;
2994
2995 /* Split large integers. */
2996 if (GET_CODE (operands[1]) == CONST_INT
2997 || GET_CODE (operands[1]) == CONST_DOUBLE)
2998 {
2999 HOST_WIDE_INT i0, i1;
d1bf99d0 3000 rtx temp = NULL_RTX;
cb6e3ae1 3001
3002 if (GET_CODE (operands[1]) == CONST_INT)
3003 {
3004 i0 = INTVAL (operands[1]);
3005 i1 = -(i0 < 0);
3006 }
3007 else if (HOST_BITS_PER_WIDE_INT >= 64)
3008 {
3009 i0 = CONST_DOUBLE_LOW (operands[1]);
3010 i1 = -(i0 < 0);
3011 }
3012 else
3013 {
3014 i0 = CONST_DOUBLE_LOW (operands[1]);
3015 i1 = CONST_DOUBLE_HIGH (operands[1]);
3016 }
3017
3018 if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0))
3019 temp = alpha_emit_set_const (operands[0], mode, i0, 3);
3020
3021 if (!temp && TARGET_BUILD_CONSTANTS)
3022 temp = alpha_emit_set_long_const (operands[0], i0, i1);
3023
3024 if (temp)
3025 {
3026 if (rtx_equal_p (operands[0], temp))
3027 return true;
3028 operands[1] = temp;
3029 return false;
3030 }
3031 }
3032
3033 /* Otherwise we've nothing left but to drop the thing to memory. */
3034 operands[1] = force_const_mem (DImode, operands[1]);
3035 if (reload_in_progress)
3036 {
3037 emit_move_insn (operands[0], XEXP (operands[1], 0));
3038 operands[1] = copy_rtx (operands[1]);
3039 XEXP (operands[1], 0) = operands[0];
3040 }
3041 else
3042 operands[1] = validize_mem (operands[1]);
3043 return false;
3044}
3045
3046/* Expand a non-bwx QImode or HImode move instruction;
3047 return true if all work is done. */
3048
3049bool
3050alpha_expand_mov_nobwx (mode, operands)
3051 enum machine_mode mode;
3052 rtx *operands;
3053{
3054 /* If the output is not a register, the input must be. */
3055 if (GET_CODE (operands[0]) == MEM)
3056 operands[1] = force_reg (mode, operands[1]);
3057
3058 /* Handle four memory cases, unaligned and aligned for either the input
3059 or the output. The only case where we can be called during reload is
3060 for aligned loads; all other cases require temporaries. */
3061
3062 if (GET_CODE (operands[1]) == MEM
3063 || (GET_CODE (operands[1]) == SUBREG
3064 && GET_CODE (SUBREG_REG (operands[1])) == MEM)
3065 || (reload_in_progress && GET_CODE (operands[1]) == REG
3066 && REGNO (operands[1]) >= FIRST_PSEUDO_REGISTER)
3067 || (reload_in_progress && GET_CODE (operands[1]) == SUBREG
3068 && GET_CODE (SUBREG_REG (operands[1])) == REG
3069 && REGNO (SUBREG_REG (operands[1])) >= FIRST_PSEUDO_REGISTER))
3070 {
3071 if (aligned_memory_operand (operands[1], mode))
3072 {
3073 if (reload_in_progress)
3074 {
3075 emit_insn ((mode == QImode
3076 ? gen_reload_inqi_help
3077 : gen_reload_inhi_help)
3078 (operands[0], operands[1],
3079 gen_rtx_REG (SImode, REGNO (operands[0]))));
3080 }
3081 else
3082 {
3083 rtx aligned_mem, bitnum;
3084 rtx scratch = gen_reg_rtx (SImode);
3085
3086 get_aligned_mem (operands[1], &aligned_mem, &bitnum);
3087
3088 emit_insn ((mode == QImode
3089 ? gen_aligned_loadqi
3090 : gen_aligned_loadhi)
3091 (operands[0], aligned_mem, bitnum, scratch));
3092 }
3093 }
3094 else
3095 {
3096 /* Don't pass these as parameters since that makes the generated
3097 code depend on parameter evaluation order which will cause
3098 bootstrap failures. */
3099
3100 rtx temp1 = gen_reg_rtx (DImode);
3101 rtx temp2 = gen_reg_rtx (DImode);
3102 rtx seq = ((mode == QImode
3103 ? gen_unaligned_loadqi
3104 : gen_unaligned_loadhi)
3105 (operands[0], get_unaligned_address (operands[1], 0),
3106 temp1, temp2));
3107
3108 alpha_set_memflags (seq, operands[1]);
3109 emit_insn (seq);
3110 }
3111 return true;
3112 }
3113
3114 if (GET_CODE (operands[0]) == MEM
3115 || (GET_CODE (operands[0]) == SUBREG
3116 && GET_CODE (SUBREG_REG (operands[0])) == MEM)
3117 || (reload_in_progress && GET_CODE (operands[0]) == REG
3118 && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER)
3119 || (reload_in_progress && GET_CODE (operands[0]) == SUBREG
3120 && GET_CODE (SUBREG_REG (operands[0])) == REG
3121 && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER))
3122 {
3123 if (aligned_memory_operand (operands[0], mode))
3124 {
3125 rtx aligned_mem, bitnum;
3126 rtx temp1 = gen_reg_rtx (SImode);
3127 rtx temp2 = gen_reg_rtx (SImode);
3128
3129 get_aligned_mem (operands[0], &aligned_mem, &bitnum);
3130
3131 emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
3132 temp1, temp2));
3133 }
3134 else
3135 {
3136 rtx temp1 = gen_reg_rtx (DImode);
3137 rtx temp2 = gen_reg_rtx (DImode);
3138 rtx temp3 = gen_reg_rtx (DImode);
3139 rtx seq = ((mode == QImode
3140 ? gen_unaligned_storeqi
3141 : gen_unaligned_storehi)
3142 (get_unaligned_address (operands[0], 0),
3143 operands[1], temp1, temp2, temp3));
3144
3145 alpha_set_memflags (seq, operands[0]);
3146 emit_insn (seq);
3147 }
3148 return true;
3149 }
3150
3151 return false;
3152}
3153
/* Generate an unsigned DImode to FP conversion.  This is the same code
   optabs would emit if we didn't have TFmode patterns.

   For SFmode, this is the only construction I've found that can pass
   gcc.c-torture/execute/ieee/rbug.c.  No scenario that uses DFmode
   intermediates will work, because you'll get intermediate rounding
   that ruins the end result.  Some of this could be fixed by turning
   on round-to-positive-infinity, but that requires diddling the fpsr,
   which kills performance.  I tried turning this around and converting
   to a negative number, so that I could turn on /m, but either I did
   it wrong or there's something else cause I wound up with the exact
   same single-bit error.  There is a branch-less form of this same code:

	srl     $16,1,$1
	and     $16,1,$2
	cmplt   $16,0,$3
	or      $1,$2,$2
	cmovge  $16,$16,$2
	itoft	$3,$f10
	itoft	$2,$f11
	cvtqs   $f11,$f11
	adds    $f11,$f11,$f0
	fcmoveq $f10,$f11,$f0

   I'm not using it because it's the same number of instructions as
   this branch-full form, and it has more serialized long latency
   instructions on the critical path.

   For DFmode, we can avoid rounding errors by breaking up the word
   into two pieces, converting them separately, and adding them back:

   LC0: .long 0,0x5f800000

	itoft	$16,$f11
	lda	$2,LC0
	cmplt	$16,0,$1
	cpyse	$f11,$f31,$f10
	cpyse	$f31,$f11,$f11
	s4addq	$1,$2,$1
	lds	$f12,0($1)
	cvtqt	$f10,$f10
	cvtqt	$f11,$f11
	addt	$f12,$f10,$f0
	addt	$f0,$f11,$f0

   This doesn't seem to be a clear-cut win over the optabs form.
   It probably all depends on the distribution of numbers being
   converted -- in the optabs form, all but high-bit-set has a
   much lower minimum execution time.

   OPERANDS[0] is the FP destination (its mode selects SFmode/DFmode
   output); OPERANDS[1] is the unsigned DImode source.  */

void
alpha_emit_floatuns (operands)
     rtx operands[2];
{
  rtx neglab, donelab, i0, i1, f0, in, out;
  enum machine_mode mode;

  out = operands[0];
  in = force_reg (DImode, operands[1]);
  mode = GET_MODE (out);
  neglab = gen_label_rtx ();
  donelab = gen_label_rtx ();
  i0 = gen_reg_rtx (DImode);
  i1 = gen_reg_rtx (DImode);
  f0 = gen_reg_rtx (mode);

  /* If the high bit is clear, a plain signed conversion is exact.  */
  emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);

  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
  emit_jump_insn (gen_jump (donelab));
  emit_barrier ();

  emit_label (neglab);

  /* High bit set: halve the value, OR in the lost low bit so the
     final rounding is correct, convert, then double the result.  */
  emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
  emit_insn (gen_anddi3 (i1, in, const1_rtx));
  emit_insn (gen_iordi3 (i0, i0, i1));
  emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));

  emit_label (donelab);
}
3236
/* Generate the comparison for a conditional branch.

   CODE is the comparison the user wrote; the operands come from the
   global ALPHA_COMPARE, which is consumed (zeroed) here.  Emits any
   required compare insn and returns the comparison rtx to be placed
   inside the branch pattern.  */

rtx
alpha_emit_conditional_branch (code)
     enum rtx_code code;
{
  enum rtx_code cmp_code, branch_code;
  enum machine_mode cmp_mode, branch_mode = VOIDmode;
  rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
  rtx tem;

  /* TFmode compares go through the X_floating soft-float library.  */
  if (alpha_compare.fp_p && GET_MODE (op0) == TFmode)
    {
      if (! TARGET_HAS_XFLOATING_LIBS)
	abort ();

      /* X_floating library comparison functions return
	   -1 unordered
	    0 false
	    1 true
	 Convert the compare against the raw return value.  */

      switch (code)
	{
	case UNORDERED:
	  cmp_code = EQ;
	  code = LT;
	  break;
	case ORDERED:
	  cmp_code = EQ;
	  code = GE;
	  break;
	case NE:
	  cmp_code = NE;
	  code = NE;
	  break;
	default:
	  cmp_code = code;
	  code = GT;
	  break;
	}

      /* The compare degenerates to an integer compare of the library
	 call's DImode result against zero.  */
      op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
      op1 = const0_rtx;
      alpha_compare.fp_p = 0;
    }

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
    case UNORDERED:
      /* We have these compares: */
      cmp_code = code, branch_code = NE;
      break;

    case NE:
    case ORDERED:
      /* These must be reversed.  */
      cmp_code = reverse_condition (code), branch_code = EQ;
      break;

    case GE:  case GT: case GEU:  case GTU:
      /* For FP, we swap them, for INT, we reverse them.  */
      if (alpha_compare.fp_p)
	{
	  cmp_code = swap_condition (code);
	  branch_code = NE;
	  tem = op0, op0 = op1, op1 = tem;
	}
      else
	{
	  cmp_code = reverse_condition (code);
	  branch_code = EQ;
	}
      break;

    default:
      abort ();
    }

  if (alpha_compare.fp_p)
    {
      cmp_mode = DFmode;
      if (flag_unsafe_math_optimizations)
	{
	  /* When we are not as concerned about non-finite values, and we
	     are comparing against zero, we can branch directly.  */
	  if (op1 == CONST0_RTX (DFmode))
	    cmp_code = NIL, branch_code = code;
	  else if (op0 == CONST0_RTX (DFmode))
	    {
	      /* Undo the swap we probably did just above.  */
	      tem = op0, op0 = op1, op1 = tem;
	      branch_code = swap_condition (cmp_code);
	      cmp_code = NIL;
	    }
	}
      else
	{
	  /* ??? We mark the branch mode to be CCmode to prevent the
	     compare and branch from being combined, since the compare
	     insn follows IEEE rules that the branch does not.  */
	  branch_mode = CCmode;
	}
    }
  else
    {
      cmp_mode = DImode;

      /* The following optimizations are only for signed compares.  */
      if (code != LEU && code != LTU && code != GEU && code != GTU)
	{
	  /* Whee.  Compare and branch against 0 directly.  */
	  if (op1 == const0_rtx)
	    cmp_code = NIL, branch_code = code;

	  /* We want to use cmpcc/bcc when we can, since there is a zero delay
	     bypass between logicals and br/cmov on EV5.  But we don't want to
	     force valid immediate constants into registers needlessly.  */
	  else if (GET_CODE (op1) == CONST_INT)
	    {
	      HOST_WIDE_INT v = INTVAL (op1), n = -v;

	      if (! CONST_OK_FOR_LETTER_P (v, 'I')
		  && (CONST_OK_FOR_LETTER_P (n, 'K')
		      || CONST_OK_FOR_LETTER_P (n, 'L')))
		{
		  /* Compare via addition against the negated constant:
		     x OP v  becomes  (x + -v) OP 0.  */
		  cmp_code = PLUS, branch_code = code;
		  op1 = GEN_INT (n);
		}
	    }
	}

      if (!reg_or_0_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (cmp_code != PLUS && !reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* Emit an initial compare instruction, if necessary.  */
  tem = op0;
  if (cmp_code != NIL)
    {
      tem = gen_reg_rtx (cmp_mode);
      emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1));
    }

  /* Zero the operands.  */
  memset (&alpha_compare, 0, sizeof (alpha_compare));

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, branch_mode, tem, CONST0_RTX (cmp_mode));
}
3392
/* Certain simplifications can be done to make invalid setcc operations
   valid.  Return the final comparison, or NULL if we can't work.

   CODE is the requested comparison; operands come from the global
   ALPHA_COMPARE, which is consumed (zeroed) here.  On success the
   returned rtx is a DImode comparison usable in a setcc pattern; any
   preparatory compare insn has already been emitted.  */

rtx
alpha_emit_setcc (code)
     enum rtx_code code;
{
  enum rtx_code cmp_code;
  rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
  int fp_p = alpha_compare.fp_p;
  rtx tmp;

  /* Zero the operands.  */
  memset (&alpha_compare, 0, sizeof (alpha_compare));

  /* TFmode compares go through the X_floating soft-float library.  */
  if (fp_p && GET_MODE (op0) == TFmode)
    {
      if (! TARGET_HAS_XFLOATING_LIBS)
	abort ();

      /* X_floating library comparison functions return
	   -1 unordered
	    0 false
	    1 true
	 Convert the compare against the raw return value.  */

      if (code == UNORDERED || code == ORDERED)
	cmp_code = EQ;
      else
	cmp_code = code;

      op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
      op1 = const0_rtx;
      fp_p = 0;

      if (code == UNORDERED)
	code = LT;
      else if (code == ORDERED)
	code = GE;
      else
	code = GT;
    }

  /* FP setcc needs the fp<->int move insns of TARGET_FIX.  */
  if (fp_p && !TARGET_FIX)
    return NULL_RTX;

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  */

  cmp_code = NIL;
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
    case UNORDERED:
      /* We have these compares.  */
      if (fp_p)
	cmp_code = code, code = NE;
      break;

    case NE:
      if (!fp_p && op1 == const0_rtx)
	break;
      /* FALLTHRU */

    case ORDERED:
      cmp_code = reverse_condition (code);
      code = EQ;
      break;

    case GE:  case GT: case GEU:  case GTU:
      /* These normally need swapping, but for integer zero we have
	 special patterns that recognize swapped operands.  */
      if (!fp_p && op1 == const0_rtx)
	break;
      code = swap_condition (code);
      if (fp_p)
	cmp_code = code, code = NE;
      tmp = op0, op0 = op1, op1 = tmp;
      break;

    default:
      abort ();
    }

  if (!fp_p)
    {
      if (!register_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (!reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* Emit an initial compare instruction, if necessary.  */
  if (cmp_code != NIL)
    {
      enum machine_mode mode = fp_p ? DFmode : DImode;

      tmp = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, tmp,
			      gen_rtx_fmt_ee (cmp_code, mode, op0, op1)));

      /* An FP compare result must be moved to an integer register to
	 feed the final DImode setcc.  */
      op0 = fp_p ? gen_lowpart (DImode, tmp) : tmp;
      op1 = const0_rtx;
    }

  /* Return the setcc comparison.  */
  return gen_rtx_fmt_ee (code, DImode, op0, op1);
}
3501
3a2a3a7f 3502
/* Rewrite a comparison against zero CMP of the form
   (CODE (cc0) (const_int 0)) so it can be written validly in
   a conditional move (if_then_else CMP ...).
   If both of the operands that set cc0 are nonzero we must emit
   an insn to perform the compare (it can't be done within
   the conditional move).

   MODE is the mode of the conditional move's data operands.  The
   compare operands come from the global ALPHA_COMPARE, which is
   consumed (zeroed) here.  Returns the comparison rtx for the cmov,
   or 0/NULL_RTX if no valid rewrite is possible.  */
rtx
alpha_emit_conditional_move (cmp, mode)
     rtx cmp;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (cmp);
  enum rtx_code cmov_code = NE;
  rtx op0 = alpha_compare.op0;
  rtx op1 = alpha_compare.op1;
  int fp_p = alpha_compare.fp_p;
  enum machine_mode cmp_mode
    = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
  enum machine_mode cmp_op_mode = fp_p ? DFmode : DImode;
  enum machine_mode cmov_mode = VOIDmode;
  int local_fast_math = flag_unsafe_math_optimizations;
  rtx tem;

  /* Zero the operands.  */
  memset (&alpha_compare, 0, sizeof (alpha_compare));

  /* Mixed compare-domain / move-domain: do the compare in one register
     file and cmov in the other, which requires the TARGET_FIX fp<->int
     move insns.  */
  if (fp_p != FLOAT_MODE_P (mode))
    {
      enum rtx_code cmp_code;

      if (! TARGET_FIX)
	return 0;

      /* If we have fp<->int register move instructions, do a cmov by
	 performing the comparison in fp registers, and move the
	 zero/nonzero value to integer registers, where we can then
	 use a normal cmov, or vice-versa.  */

      switch (code)
	{
	case EQ: case LE: case LT: case LEU: case LTU:
	  /* We have these compares.  */
	  cmp_code = code, code = NE;
	  break;

	case NE:
	  /* This must be reversed.  */
	  cmp_code = EQ, code = EQ;
	  break;

	case GE: case GT: case GEU: case GTU:
	  /* These normally need swapping, but for integer zero we have
	     special patterns that recognize swapped operands.  */
	  if (!fp_p && op1 == const0_rtx)
	    cmp_code = code, code = NE;
	  else
	    {
	      cmp_code = swap_condition (code);
	      code = NE;
	      tem = op0, op0 = op1, op1 = tem;
	    }
	  break;

	default:
	  abort ();
	}

      tem = gen_reg_rtx (cmp_op_mode);
      emit_insn (gen_rtx_SET (VOIDmode, tem,
			      gen_rtx_fmt_ee (cmp_code, cmp_op_mode,
					      op0, op1)));

      /* Switch domains: the remaining compare is of the (boolean)
	 compare result against zero, in the other register file.  */
      cmp_mode = cmp_op_mode = fp_p ? DImode : DFmode;
      op0 = gen_lowpart (cmp_op_mode, tem);
      op1 = CONST0_RTX (cmp_op_mode);
      fp_p = !fp_p;
      local_fast_math = 1;
    }

  /* We may be able to use a conditional move directly.
     This avoids emitting spurious compares.  */
  if (signed_comparison_operator (cmp, VOIDmode)
      && (!fp_p || local_fast_math)
      && (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode)))
    return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);

  /* We can't put the comparison inside the conditional move;
     emit a compare instruction and put that inside the
     conditional move.  Make sure we emit only comparisons we have;
     swap or reverse as necessary.  */

  if (no_new_pseudos)
    return NULL_RTX;

  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
      /* We have these compares: */
      break;

    case NE:
      /* This must be reversed.  */
      code = reverse_condition (code);
      cmov_code = EQ;
      break;

    case GE:  case GT:  case GEU:  case GTU:
      /* These must be swapped.  */
      if (op1 != CONST0_RTX (cmp_mode))
	{
	  code = swap_condition (code);
	  tem = op0, op0 = op1, op1 = tem;
	}
      break;

    default:
      abort ();
    }

  if (!fp_p)
    {
      if (!reg_or_0_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (!reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* ??? We mark the branch mode to be CCmode to prevent the compare
     and cmov from being combined, since the compare insn follows IEEE
     rules that the cmov does not.  */
  if (fp_p && !local_fast_math)
    cmov_mode = CCmode;

  tem = gen_reg_rtx (cmp_op_mode);
  emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_op_mode, op0, op1));
  return gen_rtx_fmt_ee (cmov_code, cmov_mode, tem, CONST0_RTX (cmp_op_mode));
}
bbf31a61 3640
/* Simplify a conditional move of two constants into a setcc with
   arithmetic.  This is done with a splitter since combine would
   just undo the work if done during code generation.  It also catches
   cases we wouldn't have before cse.

   CODE compares COND against zero; DEST receives T_RTX (a CONST_INT)
   when true, F_RTX when false.  Returns nonzero on success, zero if
   the constant pair doesn't fit one of the recognized shapes (the
   caller then keeps the cmov form).  */

int
alpha_split_conditional_move (code, dest, cond, t_rtx, f_rtx)
     enum rtx_code code;
     rtx dest, cond, t_rtx, f_rtx;
{
  HOST_WIDE_INT t, f, diff;
  enum machine_mode mode;
  rtx target, subtarget, tmp;

  mode = GET_MODE (dest);
  t = INTVAL (t_rtx);
  f = INTVAL (f_rtx);
  diff = t - f;

  /* Canonicalize so DIFF is positive for EQ/NE, and GE/GT become
     LT/LE, by reversing the condition and exchanging t/f.  */
  if (((code == NE || code == EQ) && diff < 0)
      || (code == GE || code == GT))
    {
      code = reverse_condition (code);
      diff = t, t = f, f = diff;
      diff = t - f;
    }

  subtarget = target = dest;
  if (mode != DImode)
    {
      target = gen_lowpart (DImode, dest);
      if (! no_new_pseudos)
	subtarget = gen_reg_rtx (DImode);
      else
	subtarget = target;
    }
  /* Below, we must be careful to use copy_rtx on target and subtarget
     in intermediate insns, as they may be a subreg rtx, which may not
     be shared.  */

  if (f == 0 && exact_log2 (diff) > 0
      /* On EV6, we've got enough shifters to make non-arithmatic shifts
	 viable over a longer latency cmove.  On EV5, the E0 slot is a
	 scarce resource, and on EV4 shift has the same latency as a cmove.  */
      && (diff <= 8 || alpha_cpu == PROCESSOR_EV6))
    {
      /* dest = (cond) << log2(t).  Note t == diff here since f == 0.  */
      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      tmp = gen_rtx_ASHIFT (DImode, copy_rtx (subtarget),
			    GEN_INT (exact_log2 (t)));
      emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
    }
  else if (f == 0 && t == -1)
    {
      /* dest = -(cond).  */
      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      emit_insn (gen_negdi2 (target, copy_rtx (subtarget)));
    }
  else if (diff == 1 || diff == 4 || diff == 8)
    {
      /* dest = (cond) * diff + f, using addq or sNaddq.  */
      rtx add_op;

      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      if (diff == 1)
	emit_insn (gen_adddi3 (target, copy_rtx (subtarget), GEN_INT (f)));
      else
	{
	  add_op = GEN_INT (f);
	  if (sext_add_operand (add_op, mode))
	    {
	      tmp = gen_rtx_MULT (DImode, copy_rtx (subtarget),
				  GEN_INT (diff));
	      tmp = gen_rtx_PLUS (DImode, tmp, add_op);
	      emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
	    }
	  else
	    return 0;
	}
    }
  else
    return 0;

  return 1;
}
34377880 3729\f
915c336f 3730/* Look up the function X_floating library function name for the
3731 given operation. */
3732
3733static const char *
3734alpha_lookup_xfloating_lib_func (code)
3735 enum rtx_code code;
3736{
3737 struct xfloating_op
3738 {
e99c3a1d 3739 const enum rtx_code code;
3740 const char *const func;
915c336f 3741 };
3742
3743 static const struct xfloating_op vms_xfloating_ops[] =
3744 {
3745 { PLUS, "OTS$ADD_X" },
3746 { MINUS, "OTS$SUB_X" },
3747 { MULT, "OTS$MUL_X" },
3748 { DIV, "OTS$DIV_X" },
3749 { EQ, "OTS$EQL_X" },
3750 { NE, "OTS$NEQ_X" },
3751 { LT, "OTS$LSS_X" },
3752 { LE, "OTS$LEQ_X" },
3753 { GT, "OTS$GTR_X" },
3754 { GE, "OTS$GEQ_X" },
3755 { FIX, "OTS$CVTXQ" },
3756 { FLOAT, "OTS$CVTQX" },
3757 { UNSIGNED_FLOAT, "OTS$CVTQUX" },
3758 { FLOAT_EXTEND, "OTS$CVT_FLOAT_T_X" },
3759 { FLOAT_TRUNCATE, "OTS$CVT_FLOAT_X_T" },
3760 };
3761
3762 static const struct xfloating_op osf_xfloating_ops[] =
3763 {
3764 { PLUS, "_OtsAddX" },
3765 { MINUS, "_OtsSubX" },
3766 { MULT, "_OtsMulX" },
3767 { DIV, "_OtsDivX" },
3768 { EQ, "_OtsEqlX" },
3769 { NE, "_OtsNeqX" },
3770 { LT, "_OtsLssX" },
3771 { LE, "_OtsLeqX" },
3772 { GT, "_OtsGtrX" },
3773 { GE, "_OtsGeqX" },
3774 { FIX, "_OtsCvtXQ" },
3775 { FLOAT, "_OtsCvtQX" },
3776 { UNSIGNED_FLOAT, "_OtsCvtQUX" },
3777 { FLOAT_EXTEND, "_OtsConvertFloatTX" },
3778 { FLOAT_TRUNCATE, "_OtsConvertFloatXT" },
3779 };
3780
3781 const struct xfloating_op *ops;
3098b2d3 3782 const long n = ARRAY_SIZE (osf_xfloating_ops);
915c336f 3783 long i;
3784
3785 /* How irritating. Nothing to key off for the table. Hardcode
3786 knowledge of the G_floating routines. */
3787 if (TARGET_FLOAT_VAX)
3788 {
1467e953 3789 if (TARGET_ABI_OPEN_VMS)
915c336f 3790 {
3791 if (code == FLOAT_EXTEND)
3792 return "OTS$CVT_FLOAT_G_X";
3793 if (code == FLOAT_TRUNCATE)
3794 return "OTS$CVT_FLOAT_X_G";
3795 }
3796 else
3797 {
3798 if (code == FLOAT_EXTEND)
3799 return "_OtsConvertFloatGX";
3800 if (code == FLOAT_TRUNCATE)
3801 return "_OtsConvertFloatXG";
3802 }
3803 }
3804
1467e953 3805 if (TARGET_ABI_OPEN_VMS)
915c336f 3806 ops = vms_xfloating_ops;
3807 else
3808 ops = osf_xfloating_ops;
3809
3810 for (i = 0; i < n; ++i)
3811 if (ops[i].code == code)
3812 return ops[i].func;
3813
3814 abort();
3815}
3816
3817/* Most X_floating operations take the rounding mode as an argument.
3818 Compute that here. */
3819
3820static int
3821alpha_compute_xfloating_mode_arg (code, round)
3822 enum rtx_code code;
3823 enum alpha_fp_rounding_mode round;
3824{
3825 int mode;
3826
3827 switch (round)
3828 {
3829 case ALPHA_FPRM_NORM:
3830 mode = 2;
3831 break;
3832 case ALPHA_FPRM_MINF:
3833 mode = 1;
3834 break;
3835 case ALPHA_FPRM_CHOP:
3836 mode = 0;
3837 break;
3838 case ALPHA_FPRM_DYN:
3839 mode = 4;
3840 break;
3841 default:
3842 abort ();
3843
3844 /* XXX For reference, round to +inf is mode = 3. */
3845 }
3846
3847 if (code == FLOAT_TRUNCATE && alpha_fptm == ALPHA_FPTM_N)
3848 mode |= 0x10000;
3849
3850 return mode;
3851}
3852
/* Emit an X_floating library function call.

   Note that these functions do not follow normal calling conventions:
   TFmode arguments are passed in two integer registers (as opposed to
   indirect); TFmode return values appear in R16+R17.

   FUNC is the function name to call.
   TARGET is where the output belongs.
   OPERANDS are the inputs.
   NOPERANDS is the count of inputs.
   EQUIV is the expression equivalent for the function.
*/

static void
alpha_emit_xfloating_libcall (func, target, operands, noperands, equiv)
     const char *func;
     rtx target;
     rtx operands[];
     int noperands;
     rtx equiv;
{
  rtx usage = NULL_RTX, tmp, reg;
  int regno = 16, i;

  start_sequence ();

  /* Load each argument into the next argument register(s), recording
     each register in USAGE so the call insn is known to read it.  */
  for (i = 0; i < noperands; ++i)
    {
      switch (GET_MODE (operands[i]))
	{
	case TFmode:
	  /* A TFmode argument occupies an even/odd integer reg pair.  */
	  reg = gen_rtx_REG (TFmode, regno);
	  regno += 2;
	  break;

	case DFmode:
	  /* DFmode arguments go in the FP register file (regno + 32).  */
	  reg = gen_rtx_REG (DFmode, regno + 32);
	  regno += 1;
	  break;

	case VOIDmode:
	  if (GET_CODE (operands[i]) != CONST_INT)
	    abort ();
	  /* FALLTHRU */
	case DImode:
	  reg = gen_rtx_REG (DImode, regno);
	  regno += 1;
	  break;

	default:
	  abort ();
	}

      emit_move_insn (reg, operands[i]);
      usage = alloc_EXPR_LIST (0, gen_rtx_USE (VOIDmode, reg), usage);
    }

  /* Pick the return-value register by the target's mode.  */
  switch (GET_MODE (target))
    {
    case TFmode:
      reg = gen_rtx_REG (TFmode, 16);
      break;
    case DFmode:
      reg = gen_rtx_REG (DFmode, 32);
      break;
    case DImode:
      reg = gen_rtx_REG (DImode, 0);
      break;
    default:
      abort ();
    }

  tmp = gen_rtx_MEM (QImode, gen_rtx_SYMBOL_REF (Pmode, (char *) func));
  tmp = emit_call_insn (GEN_CALL_VALUE (reg, tmp, const0_rtx,
					const0_rtx, const0_rtx));
  CALL_INSN_FUNCTION_USAGE (tmp) = usage;

  tmp = get_insns ();
  end_sequence ();

  /* Wrap the whole sequence as a libcall so later passes may CSE it
     against EQUIV.  */
  emit_libcall_block (tmp, target, reg, equiv);
}
3935
3936/* Emit an X_floating library function call for arithmetic (+,-,*,/). */
3937
3938void
3939alpha_emit_xfloating_arith (code, operands)
3940 enum rtx_code code;
3941 rtx operands[];
3942{
3943 const char *func;
3944 int mode;
b90b6519 3945 rtx out_operands[3];
915c336f 3946
3947 func = alpha_lookup_xfloating_lib_func (code);
3948 mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
3949
b90b6519 3950 out_operands[0] = operands[1];
3951 out_operands[1] = operands[2];
3952 out_operands[2] = GEN_INT (mode);
3953 alpha_emit_xfloating_libcall (func, operands[0], out_operands, 3,
915c336f 3954 gen_rtx_fmt_ee (code, TFmode, operands[1],
3955 operands[2]));
3956}
3957
3958/* Emit an X_floating library function call for a comparison. */
3959
3960static rtx
3961alpha_emit_xfloating_compare (code, op0, op1)
3962 enum rtx_code code;
3963 rtx op0, op1;
3964{
3965 const char *func;
3966 rtx out, operands[2];
3967
3968 func = alpha_lookup_xfloating_lib_func (code);
3969
3970 operands[0] = op0;
3971 operands[1] = op1;
3972 out = gen_reg_rtx (DImode);
3973
d1324b4b 3974 /* ??? Strange mode for equiv because what's actually returned
3975 is -1,0,1, not a proper boolean value. */
3976 alpha_emit_xfloating_libcall (func, out, operands, 2,
3977 gen_rtx_fmt_ee (code, CCmode, op0, op1));
915c336f 3978
3979 return out;
3980}
3981
3982/* Emit an X_floating library function call for a conversion. */
3983
3984void
3985alpha_emit_xfloating_cvt (code, operands)
3986 enum rtx_code code;
3987 rtx operands[];
3988{
3989 int noperands = 1, mode;
b90b6519 3990 rtx out_operands[2];
915c336f 3991 const char *func;
3992
3993 func = alpha_lookup_xfloating_lib_func (code);
3994
b90b6519 3995 out_operands[0] = operands[1];
3996
915c336f 3997 switch (code)
3998 {
3999 case FIX:
4000 mode = alpha_compute_xfloating_mode_arg (code, ALPHA_FPRM_CHOP);
b90b6519 4001 out_operands[1] = GEN_INT (mode);
8581412d 4002 noperands = 2;
915c336f 4003 break;
4004 case FLOAT_TRUNCATE:
4005 mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
b90b6519 4006 out_operands[1] = GEN_INT (mode);
8581412d 4007 noperands = 2;
915c336f 4008 break;
4009 default:
4010 break;
4011 }
4012
b90b6519 4013 alpha_emit_xfloating_libcall (func, operands[0], out_operands, noperands,
915c336f 4014 gen_rtx_fmt_e (code, GET_MODE (operands[0]),
4015 operands[1]));
4016}
3420680b 4017
/* Split a TFmode OP[1] into DImode OP[2,3] and likewise for
   OP[0] into OP[0,1].  Naturally, output operand ordering is
   little-endian.

   NOTE: assignment order is load-bearing — each half of the source
   must be derived before its slot in OPERANDS[] is overwritten with
   a destination half.  */

void
alpha_split_tfmode_pair (operands)
     rtx operands[4];
{
  /* Derive the two source halves into operands[2,3] first, while
     operands[1] still holds the original source.  */
  if (GET_CODE (operands[1]) == REG)
    {
      operands[3] = gen_rtx_REG (DImode, REGNO (operands[1]) + 1);
      operands[2] = gen_rtx_REG (DImode, REGNO (operands[1]));
    }
  else if (GET_CODE (operands[1]) == MEM)
    {
      operands[3] = adjust_address (operands[1], DImode, 8);
      operands[2] = adjust_address (operands[1], DImode, 0);
    }
  else if (operands[1] == CONST0_RTX (TFmode))
    operands[2] = operands[3] = const0_rtx;
  else
    abort ();

  /* Now overwrite operands[0,1] with the destination halves; the high
     half must be stored into operands[1] before operands[0] itself is
     replaced.  */
  if (GET_CODE (operands[0]) == REG)
    {
      operands[1] = gen_rtx_REG (DImode, REGNO (operands[0]) + 1);
      operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
    }
  else if (GET_CODE (operands[0]) == MEM)
    {
      operands[1] = adjust_address (operands[0], DImode, 8);
      operands[0] = adjust_address (operands[0], DImode, 0);
    }
  else
    abort ();
}
2267ca84 4054
/* Implement negtf2 or abstf2.  Op0 is destination, op1 is source,
   op2 is a register containing the sign bit, operation is the
   logical operation to be performed.

   The TFmode operands are split into DImode halves (via
   alpha_split_tfmode_pair) and OPERATION is applied to the high half
   only; the low half is copied.  Ordering of the emitted moves is
   chosen so overlapping source/destination halves are not clobbered
   before being read.  */

void
alpha_split_tfmode_frobsign (operands, operation)
     rtx operands[3];
     rtx (*operation) PARAMS ((rtx, rtx, rtx));
{
  rtx high_bit = operands[2];
  rtx scratch;
  int move;

  alpha_split_tfmode_pair (operands);

  /* Detect three flavors of operand overlap.
     move ==  1: no overlap, copy low half after the logical op.
     move ==  0: dest low == source low, no copy needed.
     move ==  2: dest overlaps source high half, copy first half early
		 and fix up afterwards.
     move == -1: dest low == source high, copy low half first.  */
  move = 1;
  if (rtx_equal_p (operands[0], operands[2]))
    move = 0;
  else if (rtx_equal_p (operands[1], operands[2]))
    {
      if (rtx_equal_p (operands[0], high_bit))
	move = 2;
      else
	move = -1;
    }

  if (move < 0)
    emit_move_insn (operands[0], operands[2]);

  /* ??? If the destination overlaps both source tf and high_bit, then
     assume source tf is dead in its entirety and use the other half
     for a scratch register.  Otherwise "scratch" is just the proper
     destination register.  */
  scratch = operands[move < 2 ? 1 : 3];

  emit_insn ((*operation) (scratch, high_bit, operands[3]));

  if (move > 0)
    {
      emit_move_insn (operands[0], operands[2]);
      if (move > 1)
	emit_move_insn (operands[1], scratch);
    }
}
915c336f 4100\f
34377880 4101/* Use ext[wlq][lh] as the Architecture Handbook describes for extracting
4102 unaligned data:
4103
4104 unsigned: signed:
4105 word: ldq_u r1,X(r11) ldq_u r1,X(r11)
4106 ldq_u r2,X+1(r11) ldq_u r2,X+1(r11)
4107 lda r3,X(r11) lda r3,X+2(r11)
4108 extwl r1,r3,r1 extql r1,r3,r1
4109 extwh r2,r3,r2 extqh r2,r3,r2
4110 or r1.r2.r1 or r1,r2,r1
4111 sra r1,48,r1
4112
4113 long: ldq_u r1,X(r11) ldq_u r1,X(r11)
4114 ldq_u r2,X+3(r11) ldq_u r2,X+3(r11)
4115 lda r3,X(r11) lda r3,X(r11)
4116 extll r1,r3,r1 extll r1,r3,r1
4117 extlh r2,r3,r2 extlh r2,r3,r2
4118 or r1.r2.r1 addl r1,r2,r1
4119
4120 quad: ldq_u r1,X(r11)
4121 ldq_u r2,X+7(r11)
4122 lda r3,X(r11)
4123 extql r1,r3,r1
4124 extqh r2,r3,r2
4125 or r1.r2.r1
4126*/
4127
/* Expand an unaligned load of SIZE bytes (2, 4 or 8) located at MEM+OFS
   into register TGT.  If SIGN is nonzero, the 2- and 4-byte results are
   sign extended.  The expansion loads the two aligned quadwords covering
   the field, extracts the low and high fragments, and ORs them together,
   following the Architecture Handbook sequences in the comment above.  */

void
alpha_expand_unaligned_load (tgt, mem, size, ofs, sign)
     rtx tgt, mem;
     HOST_WIDE_INT size, ofs;
     int sign;
{
  rtx meml, memh, addr, extl, exth, tmp, mema;
  enum machine_mode mode;

  meml = gen_reg_rtx (DImode);
  memh = gen_reg_rtx (DImode);
  addr = gen_reg_rtx (DImode);
  extl = gen_reg_rtx (DImode);
  exth = gen_reg_rtx (DImode);

  /* A LO_SUM address cannot survive being wrapped in the AND below;
     force it into a register first.  */
  mema = XEXP (mem, 0);
  if (GET_CODE (mema) == LO_SUM)
    mema = force_reg (Pmode, mema);

  /* AND addresses cannot be in any alias set, since they may implicitly
     alias surrounding code.  Ideally we'd have some alias set that
     covered all types except those with alignment 8 or higher.  */

  /* meml <- aligned quadword containing the first byte of the field.  */
  tmp = change_address (mem, DImode,
			gen_rtx_AND (DImode,
				     plus_constant (mema, ofs),
				     GEN_INT (-8)));
  set_mem_alias_set (tmp, 0);
  emit_move_insn (meml, tmp);

  /* memh <- aligned quadword containing the last byte of the field.
     If the field happens to be aligned, this is the same quadword.  */
  tmp = change_address (mem, DImode,
			gen_rtx_AND (DImode,
				     plus_constant (mema, ofs + size - 1),
				     GEN_INT (-8)));
  set_mem_alias_set (tmp, 0);
  emit_move_insn (memh, tmp);

  if (WORDS_BIG_ENDIAN && sign && (size == 2 || size == 4))
    {
      /* Big-endian signed case: extract so the field lands in the high
	 end of the register, then arithmetic-shift it down into place,
	 which performs the sign extension for free.  */
      emit_move_insn (addr, plus_constant (mema, -1));

      emit_insn (gen_extqh_be (extl, meml, addr));
      emit_insn (gen_extxl_be (exth, memh, GEN_INT (64), addr));

      addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
      addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (64 - size*8),
			   addr, 1, OPTAB_WIDEN);
    }
  else if (sign && size == 2)
    {
      /* Little-endian signed word: the lda offset of +2 positions the
	 field at the top of the register (per the Handbook sequence),
	 and the sra by 48 sign-extends while moving it down.  */
      emit_move_insn (addr, plus_constant (mema, ofs+2));

      emit_insn (gen_extxl_le (extl, meml, GEN_INT (64), addr));
      emit_insn (gen_extqh_le (exth, memh, addr));

      /* We must use tgt here for the target.  Alpha-vms port fails if we use
	 addr for the target, because addr is marked as a pointer and combine
	 knows that pointers are always sign-extended 32 bit values.  */
      addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
      addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48),
			   addr, 1, OPTAB_WIDEN);
    }
  else
    {
      /* Unsigned cases (and the signed quadword case): pick the
	 extract pattern matching SIZE, then OR the two fragments.  */
      if (WORDS_BIG_ENDIAN)
	{
	  emit_move_insn (addr, plus_constant (mema, ofs+size-1));
	  switch ((int) size)
	    {
	    case 2:
	      emit_insn (gen_extwh_be (extl, meml, addr));
	      mode = HImode;
	      break;

	    case 4:
	      emit_insn (gen_extlh_be (extl, meml, addr));
	      mode = SImode;
	      break;

	    case 8:
	      emit_insn (gen_extqh_be (extl, meml, addr));
	      mode = DImode;
	      break;

	    default:
	      abort ();
	    }
	  emit_insn (gen_extxl_be (exth, memh, GEN_INT (size*8), addr));
	}
      else
	{
	  emit_move_insn (addr, plus_constant (mema, ofs));
	  emit_insn (gen_extxl_le (extl, meml, GEN_INT (size*8), addr));
	  switch ((int) size)
	    {
	    case 2:
	      emit_insn (gen_extwh_le (exth, memh, addr));
	      mode = HImode;
	      break;

	    case 4:
	      emit_insn (gen_extlh_le (exth, memh, addr));
	      mode = SImode;
	      break;

	    case 8:
	      emit_insn (gen_extqh_le (exth, memh, addr));
	      mode = DImode;
	      break;

	    default:
	      abort();
	    }
	}

      /* Combine the fragments; pass SIGN so expand_binop can widen
	 with the requested extension.  */
      addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl),
			   gen_lowpart (mode, exth), gen_lowpart (mode, tgt),
			   sign, OPTAB_WIDEN);
    }

  /* expand_binop may or may not have used TGT as its target; copy the
     result over only when it didn't.  */
  if (addr != tgt)
    emit_move_insn (tgt, gen_lowpart(GET_MODE (tgt), addr));
}
4251
/* Similarly, use ins and msk instructions to perform unaligned stores.
   Store the SIZE-byte (2, 4 or 8) value SRC at DST+OFS.  The expansion
   is a read-modify-write of the two aligned quadwords covering the
   field: load both, mask out the destination bytes, insert the shifted
   source bytes, and store the quadwords back.  When SRC is const0_rtx
   the insert step is skipped and only the masking is emitted.  */

void
alpha_expand_unaligned_store (dst, src, size, ofs)
     rtx dst, src;
     HOST_WIDE_INT size, ofs;
{
  rtx dstl, dsth, addr, insl, insh, meml, memh, dsta;

  dstl = gen_reg_rtx (DImode);
  dsth = gen_reg_rtx (DImode);
  insl = gen_reg_rtx (DImode);
  insh = gen_reg_rtx (DImode);

  /* A LO_SUM address cannot survive being wrapped in the AND below;
     force it into a register first.  */
  dsta = XEXP (dst, 0);
  if (GET_CODE (dsta) == LO_SUM)
    dsta = force_reg (Pmode, dsta);

  /* AND addresses cannot be in any alias set, since they may implicitly
     alias surrounding code.  Ideally we'd have some alias set that
     covered all types except those with alignment 8 or higher.  */

  /* meml/memh: the aligned quadwords containing the first and last
     bytes of the field, respectively.  */
  meml = change_address (dst, DImode,
			 gen_rtx_AND (DImode,
				      plus_constant (dsta, ofs),
				      GEN_INT (-8)));
  set_mem_alias_set (meml, 0);

  memh = change_address (dst, DImode,
			 gen_rtx_AND (DImode,
				      plus_constant (dsta, ofs + size - 1),
				      GEN_INT (-8)));
  set_mem_alias_set (memh, 0);

  /* Load the current contents so the untouched bytes survive.  */
  emit_move_insn (dsth, memh);
  emit_move_insn (dstl, meml);
  if (WORDS_BIG_ENDIAN)
    {
      addr = copy_addr_to_reg (plus_constant (dsta, ofs+size-1));

      if (src != const0_rtx)
	{
	  /* Build the insert halves of the source value.  */
	  switch ((int) size)
	    {
	    case 2:
	      emit_insn (gen_inswl_be (insh, gen_lowpart (HImode,src), addr));
	      break;
	    case 4:
	      emit_insn (gen_insll_be (insh, gen_lowpart (SImode,src), addr));
	      break;
	    case 8:
	      emit_insn (gen_insql_be (insh, gen_lowpart (DImode,src), addr));
	      break;
	    }
	  emit_insn (gen_insxh (insl, gen_lowpart (DImode, src),
				GEN_INT (size*8), addr));
	}

      /* Clear the destination bytes in the high quadword.  The mask
	 constant matches the field width (0xffff / 0xffffffff / all-ones).  */
      switch ((int) size)
	{
	case 2:
	  emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffff), addr));
	  break;
	case 4:
	  {
	    /* immed_double_const is used so 0xffffffff is built correctly
	       even when HOST_WIDE_INT is only 32 bits.  */
	    rtx msk = immed_double_const (0xffffffff, 0, DImode);
	    emit_insn (gen_mskxl_be (dsth, dsth, msk, addr));
	    break;
	  }
	case 8:
	  emit_insn (gen_mskxl_be (dsth, dsth, constm1_rtx, addr));
	  break;
	}

      emit_insn (gen_mskxh (dstl, dstl, GEN_INT (size*8), addr));
    }
  else
    {
      addr = copy_addr_to_reg (plus_constant (dsta, ofs));

      if (src != const0_rtx)
	{
	  /* Build the insert halves of the source value.  */
	  emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
				GEN_INT (size*8), addr));

	  switch ((int) size)
	    {
	    case 2:
	      emit_insn (gen_inswl_le (insl, gen_lowpart (HImode, src), addr));
	      break;
	    case 4:
	      emit_insn (gen_insll_le (insl, gen_lowpart (SImode, src), addr));
	      break;
	    case 8:
	      emit_insn (gen_insql_le (insl, src, addr));
	      break;
	    }
	}

      emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));

      /* Clear the destination bytes in the low quadword; mask constant
	 as in the big-endian arm above.  */
      switch ((int) size)
	{
	case 2:
	  emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffff), addr));
	  break;
	case 4:
	  {
	    rtx msk = immed_double_const (0xffffffff, 0, DImode);
	    emit_insn (gen_mskxl_le (dstl, dstl, msk, addr));
	    break;
	  }
	case 8:
	  emit_insn (gen_mskxl_le (dstl, dstl, constm1_rtx, addr));
	  break;
	}
    }

  /* Merge the inserted source bytes into the masked quadwords.
     Skipped for a zero store: masking alone already wrote zeros.  */
  if (src != const0_rtx)
    {
      dsth = expand_binop (DImode, ior_optab, insh, dsth, dsth, 0, OPTAB_WIDEN);
      dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN);
    }

  if (WORDS_BIG_ENDIAN)
    {
      emit_move_insn (meml, dstl);
      emit_move_insn (memh, dsth);
    }
  else
    {
      /* Must store high before low for degenerate case of aligned.  */
      emit_move_insn (memh, dsth);
      emit_move_insn (meml, dstl);
    }
}
4388
2cc46ade 4389/* The block move code tries to maximize speed by separating loads and
4390 stores at the expense of register pressure: we load all of the data
4391 before we store it back out. There are two secondary effects worth
4392 mentioning, that this speeds copying to/from aligned and unaligned
4393 buffers, and that it makes the code significantly easier to write. */
34377880 4394
2cc46ade 4395#define MAX_MOVE_WORDS 8
4396
/* Load an integral number of consecutive unaligned quadwords.
   WORDS quadwords are loaded from SMEM+OFS into OUT_REGS[0..WORDS-1]
   using WORDS+1 aligned ldq_u-style loads plus extract/merge pairs,
   as part of the block-move expansion above.  */

static void
alpha_expand_unaligned_load_words (out_regs, smem, words, ofs)
     rtx *out_regs;
     rtx smem;
     HOST_WIDE_INT words, ofs;
{
  rtx const im8 = GEN_INT (-8);
  rtx const i64 = GEN_INT (64);
  rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1];
  rtx sreg, areg, tmp, smema;
  HOST_WIDE_INT i;

  /* A LO_SUM address cannot survive being wrapped in the AND below;
     force it into a register first.  */
  smema = XEXP (smem, 0);
  if (GET_CODE (smema) == LO_SUM)
    smema = force_reg (Pmode, smema);

  /* Generate all the tmp registers we need.  */
  for (i = 0; i < words; ++i)
    {
      data_regs[i] = out_regs[i];
      ext_tmps[i] = gen_reg_rtx (DImode);
    }
  /* One extra register for the quadword past the end of the block.  */
  data_regs[words] = gen_reg_rtx (DImode);

  if (ofs != 0)
    smem = adjust_address (smem, GET_MODE (smem), ofs);

  /* Load up all of the source data.  The addresses are rounded down
     to aligned quadword boundaries (AND with -8), so these MEMs must
     not carry an alias set -- see the comment in
     alpha_expand_unaligned_load.  */
  for (i = 0; i < words; ++i)
    {
      tmp = change_address (smem, DImode,
			    gen_rtx_AND (DImode,
					 plus_constant (smema, 8*i),
					 im8));
      set_mem_alias_set (tmp, 0);
      emit_move_insn (data_regs[i], tmp);
    }

  /* And the quadword covering the last byte of the block.  */
  tmp = change_address (smem, DImode,
			gen_rtx_AND (DImode,
				     plus_constant (smema, 8*words - 1),
				     im8));
  set_mem_alias_set (tmp, 0);
  emit_move_insn (data_regs[words], tmp);

  /* Extract the half-word fragments.  Unfortunately DEC decided to make
     extxh with offset zero a noop instead of zeroing the register, so
     we must take care of that edge condition ourselves with cmov.  */

  sreg = copy_addr_to_reg (smema);
  /* areg = low three address bits; zero means the source is actually
     aligned and the high fragment must be forced to zero below.  */
  areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL,
		       1, OPTAB_WIDEN);
  if (WORDS_BIG_ENDIAN)
    emit_move_insn (sreg, plus_constant (sreg, 7));
  for (i = 0; i < words; ++i)
    {
      if (WORDS_BIG_ENDIAN)
	{
	  emit_insn (gen_extqh_be (data_regs[i], data_regs[i], sreg));
	  emit_insn (gen_extxl_be (ext_tmps[i], data_regs[i+1], i64, sreg));
	}
      else
	{
	  emit_insn (gen_extxl_le (data_regs[i], data_regs[i], i64, sreg));
	  emit_insn (gen_extqh_le (ext_tmps[i], data_regs[i+1], sreg));
	}
      /* The cmov mentioned above: if the address was aligned (areg == 0),
	 replace the high fragment with zero.  */
      emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
			      gen_rtx_IF_THEN_ELSE (DImode,
						    gen_rtx_EQ (DImode, areg,
								const0_rtx),
						    const0_rtx, ext_tmps[i])));
    }

  /* Merge the half-words into whole words.  */
  for (i = 0; i < words; ++i)
    {
      out_regs[i] = expand_binop (DImode, ior_optab, data_regs[i],
				  ext_tmps[i], data_regs[i], 1, OPTAB_WIDEN);
    }
}
4479
/* Store an integral number of consecutive unaligned quadwords.  DATA_REGS
   may be NULL to store zeros.  WORDS quadwords are written at DMEM+OFS;
   only the two quadwords at the very ends of the destination need a
   read-modify-write (mask/merge), while the interior quadwords can be
   stored whole.  */

static void
alpha_expand_unaligned_store_words (data_regs, dmem, words, ofs)
     rtx *data_regs;
     rtx dmem;
     HOST_WIDE_INT words, ofs;
{
  rtx const im8 = GEN_INT (-8);
  rtx const i64 = GEN_INT (64);
  rtx ins_tmps[MAX_MOVE_WORDS];
  rtx st_tmp_1, st_tmp_2, dreg;
  rtx st_addr_1, st_addr_2, dmema;
  HOST_WIDE_INT i;

  /* A LO_SUM address cannot survive being wrapped in the AND below;
     force it into a register first.  */
  dmema = XEXP (dmem, 0);
  if (GET_CODE (dmema) == LO_SUM)
    dmema = force_reg (Pmode, dmema);

  /* Generate all the tmp registers we need.  */
  if (data_regs != NULL)
    for (i = 0; i < words; ++i)
      ins_tmps[i] = gen_reg_rtx(DImode);
  st_tmp_1 = gen_reg_rtx(DImode);
  st_tmp_2 = gen_reg_rtx(DImode);

  if (ofs != 0)
    dmem = adjust_address (dmem, GET_MODE (dmem), ofs);

  /* st_addr_1/st_addr_2: the first and last aligned quadwords touched
     by the store.  The AND-rounded addresses must not carry an alias
     set -- see alpha_expand_unaligned_load.  */
  st_addr_2 = change_address (dmem, DImode,
			      gen_rtx_AND (DImode,
					   plus_constant (dmema, words*8 - 1),
					   im8));
  set_mem_alias_set (st_addr_2, 0);

  st_addr_1 = change_address (dmem, DImode,
			      gen_rtx_AND (DImode, dmema, im8));
  set_mem_alias_set (st_addr_1, 0);

  /* Load up the destination end bits.  */
  emit_move_insn (st_tmp_2, st_addr_2);
  emit_move_insn (st_tmp_1, st_addr_1);

  /* Shift the input data into place.  */
  dreg = copy_addr_to_reg (dmema);
  if (WORDS_BIG_ENDIAN)
    emit_move_insn (dreg, plus_constant (dreg, 7));
  if (data_regs != NULL)
    {
      /* Split each source word into a low and high insert half.  */
      for (i = words-1; i >= 0; --i)
	{
	  if (WORDS_BIG_ENDIAN)
	    {
	      emit_insn (gen_insql_be (ins_tmps[i], data_regs[i], dreg));
	      emit_insn (gen_insxh (data_regs[i], data_regs[i], i64, dreg));
	    }
	  else
	    {
	      emit_insn (gen_insxh (ins_tmps[i], data_regs[i], i64, dreg));
	      emit_insn (gen_insql_le (data_regs[i], data_regs[i], dreg));
	    }
	}
      /* Combine adjacent halves into the interior quadwords.  */
      for (i = words-1; i > 0; --i)
	{
	  ins_tmps[i-1] = expand_binop (DImode, ior_optab, data_regs[i],
					ins_tmps[i-1], ins_tmps[i-1], 1,
					OPTAB_WIDEN);
	}
    }

  /* Split and merge the ends with the destination data.  */
  if (WORDS_BIG_ENDIAN)
    {
      emit_insn (gen_mskxl_be (st_tmp_2, st_tmp_2, constm1_rtx, dreg));
      emit_insn (gen_mskxh (st_tmp_1, st_tmp_1, i64, dreg));
    }
  else
    {
      emit_insn (gen_mskxh (st_tmp_2, st_tmp_2, i64, dreg));
      emit_insn (gen_mskxl_le (st_tmp_1, st_tmp_1, constm1_rtx, dreg));
    }

  if (data_regs != NULL)
    {
      st_tmp_2 = expand_binop (DImode, ior_optab, st_tmp_2, ins_tmps[words-1],
			       st_tmp_2, 1, OPTAB_WIDEN);
      st_tmp_1 = expand_binop (DImode, ior_optab, st_tmp_1, data_regs[0],
			       st_tmp_1, 1, OPTAB_WIDEN);
    }

  /* Store it all.  The end quadwords are stored around the interior
     ones; note the ordering of the two end stores differs by
     endianness (cf. the aligned-degenerate-case comment in
     alpha_expand_unaligned_store).  */
  if (WORDS_BIG_ENDIAN)
    emit_move_insn (st_addr_1, st_tmp_1);
  else
    emit_move_insn (st_addr_2, st_tmp_2);
  for (i = words-1; i > 0; --i)
    {
      rtx tmp = change_address (dmem, DImode,
				gen_rtx_AND (DImode,
					     plus_constant(dmema,
					     WORDS_BIG_ENDIAN ? i*8-1 : i*8),
					     im8));
      set_mem_alias_set (tmp, 0);
      emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx);
    }
  if (WORDS_BIG_ENDIAN)
    emit_move_insn (st_addr_2, st_tmp_2);
  else
    emit_move_insn (st_addr_1, st_tmp_1);
}
4591
4592
/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.

   Returns 1 if the move was expanded inline, 0 to punt to a library
   call (block larger than MAX_MOVE_WORDS quadwords).  The strategy is
   described at the MAX_MOVE_WORDS definition: load the whole block
   into registers in the widest chunks the source alignment allows,
   then store it back out in the widest chunks the destination
   alignment allows.  */

int
alpha_expand_block_move (operands)
     rtx operands[];
{
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
  HOST_WIDE_INT bytes = orig_bytes;
  /* Alignments tracked in bits from here on.  */
  HOST_WIDE_INT src_align = INTVAL (align_rtx) * BITS_PER_UNIT;
  HOST_WIDE_INT dst_align = src_align;
  rtx orig_src = operands[1];
  rtx orig_dst = operands[0];
  rtx data_regs[2 * MAX_MOVE_WORDS + 16];
  rtx tmp;
  unsigned int i, words, ofs, nregs = 0;

  if (orig_bytes <= 0)
    return 1;
  else if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
    return 0;

  /* Look for additional alignment information from recorded register info.  */

  tmp = XEXP (orig_src, 0);
  if (GET_CODE (tmp) == REG)
    src_align = MAX (src_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
  else if (GET_CODE (tmp) == PLUS
	   && GET_CODE (XEXP (tmp, 0)) == REG
	   && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
    {
      /* reg+const: the effective alignment is the register's alignment
	 capped by what the constant offset preserves.  */
      unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
      unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));

      if (a > src_align)
	{
	  if (a >= 64 && c % 8 == 0)
	    src_align = 64;
	  else if (a >= 32 && c % 4 == 0)
	    src_align = 32;
	  else if (a >= 16 && c % 2 == 0)
	    src_align = 16;
	}
    }

  /* Same dance for the destination pointer.  */
  tmp = XEXP (orig_dst, 0);
  if (GET_CODE (tmp) == REG)
    dst_align = MAX (dst_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
  else if (GET_CODE (tmp) == PLUS
	   && GET_CODE (XEXP (tmp, 0)) == REG
	   && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
    {
      unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
      unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));

      if (a > dst_align)
	{
	  if (a >= 64 && c % 8 == 0)
	    dst_align = 64;
	  else if (a >= 32 && c % 4 == 0)
	    dst_align = 32;
	  else if (a >= 16 && c % 2 == 0)
	    dst_align = 16;
	}
    }

  /* Load the entire block into registers.  */
  if (GET_CODE (XEXP (orig_src, 0)) == ADDRESSOF)
    {
      /* The source is a pseudo that hasn't been spilled; try to read
	 straight out of the existing register.  */
      enum machine_mode mode;

      tmp = XEXP (XEXP (orig_src, 0), 0);

      /* Don't use the existing register if we're reading more than
	 is held in the register.  Nor if there is not a mode that
	 handles the exact size.  */
      mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
      if (mode != BLKmode
	  && GET_MODE_SIZE (GET_MODE (tmp)) >= bytes)
	{
	  if (mode == TImode)
	    {
	      data_regs[nregs] = gen_lowpart (DImode, tmp);
	      data_regs[nregs + 1] = gen_highpart (DImode, tmp);
	      nregs += 2;
	    }
	  else
	    data_regs[nregs++] = gen_lowpart (mode, tmp);

	  goto src_done;
	}

      /* No appropriate mode; fall back on memory.  */
      orig_src = replace_equiv_address (orig_src,
					copy_addr_to_reg (XEXP (orig_src, 0)));
      src_align = GET_MODE_BITSIZE (GET_MODE (tmp));
    }

  /* Read the block in the largest units the source alignment permits:
     aligned DImode, aligned SImode, unaligned quadwords, then the
     sub-word stragglers.  */
  ofs = 0;
  if (src_align >= 64 && bytes >= 8)
    {
      words = bytes / 8;

      for (i = 0; i < words; ++i)
	data_regs[nregs + i] = gen_reg_rtx (DImode);

      for (i = 0; i < words; ++i)
	emit_move_insn (data_regs[nregs + i],
			adjust_address (orig_src, DImode, ofs + i * 8));

      nregs += words;
      bytes -= words * 8;
      ofs += words * 8;
    }

  if (src_align >= 32 && bytes >= 4)
    {
      words = bytes / 4;

      for (i = 0; i < words; ++i)
	data_regs[nregs + i] = gen_reg_rtx (SImode);

      for (i = 0; i < words; ++i)
	emit_move_insn (data_regs[nregs + i],
			adjust_address (orig_src, SImode, ofs + i * 4));

      nregs += words;
      bytes -= words * 4;
      ofs += words * 4;
    }

  if (bytes >= 8)
    {
      words = bytes / 8;

      /* words+1 registers: the helper reads one quadword past the end.  */
      for (i = 0; i < words+1; ++i)
	data_regs[nregs + i] = gen_reg_rtx (DImode);

      alpha_expand_unaligned_load_words (data_regs + nregs, orig_src,
					 words, ofs);

      nregs += words;
      bytes -= words * 8;
      ofs += words * 8;
    }

  if (! TARGET_BWX && bytes >= 4)
    {
      data_regs[nregs++] = tmp = gen_reg_rtx (SImode);
      alpha_expand_unaligned_load (tmp, orig_src, 4, ofs, 0);
      bytes -= 4;
      ofs += 4;
    }

  if (bytes >= 2)
    {
      if (src_align >= 16)
	{
	  do {
	    data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
	    emit_move_insn (tmp, adjust_address (orig_src, HImode, ofs));
	    bytes -= 2;
	    ofs += 2;
	  } while (bytes >= 2);
	}
      else if (! TARGET_BWX)
	{
	  data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
	  alpha_expand_unaligned_load (tmp, orig_src, 2, ofs, 0);
	  bytes -= 2;
	  ofs += 2;
	}
    }

  /* Remaining bytes one at a time (with BWX, also any leftover pairs
     fall through to here as two byte loads).  */
  while (bytes > 0)
    {
      data_regs[nregs++] = tmp = gen_reg_rtx (QImode);
      emit_move_insn (tmp, adjust_address (orig_src, QImode, ofs));
      bytes -= 1;
      ofs += 1;
    }

 src_done:

  if (nregs > ARRAY_SIZE (data_regs))
    abort ();

  /* Now save it back out again.  */

  i = 0, ofs = 0;

  if (GET_CODE (XEXP (orig_dst, 0)) == ADDRESSOF)
    {
      /* Destination is an unspilled pseudo; try to write the register
	 directly.  */
      enum machine_mode mode;
      tmp = XEXP (XEXP (orig_dst, 0), 0);

      mode = mode_for_size (orig_bytes * BITS_PER_UNIT, MODE_INT, 1);
      if (GET_MODE (tmp) == mode)
	{
	  if (nregs == 1)
	    {
	      emit_move_insn (tmp, data_regs[0]);
	      i = 1;
	      goto dst_done;
	    }

	  else if (nregs == 2 && mode == TImode)
	    {
	      /* Undo the subregging done above when copying between
		 two TImode registers.  */
	      if (GET_CODE (data_regs[0]) == SUBREG
		  && GET_MODE (SUBREG_REG (data_regs[0])) == TImode)
		emit_move_insn (tmp, SUBREG_REG (data_regs[0]));
	      else
		{
		  rtx seq;

		  start_sequence ();
		  emit_move_insn (gen_lowpart (DImode, tmp), data_regs[0]);
		  emit_move_insn (gen_highpart (DImode, tmp), data_regs[1]);
		  seq = get_insns ();
		  end_sequence ();

		  emit_no_conflict_block (seq, tmp, data_regs[0],
					  data_regs[1], NULL_RTX);
		}

	      i = 2;
	      goto dst_done;
	    }
	}

      /* ??? If nregs > 1, consider reconstructing the word in regs.  */
      /* ??? Optimize mode < dst_mode with strict_low_part.  */

      /* No appropriate mode; fall back on memory.  We can speed things
	 up by recognizing extra alignment information.  */
      orig_dst = replace_equiv_address (orig_dst,
					copy_addr_to_reg (XEXP (orig_dst, 0)));
      dst_align = GET_MODE_BITSIZE (GET_MODE (tmp));
    }

  /* Write out the data in whatever chunks reading the source allowed.  */
  if (dst_align >= 64)
    {
      while (i < nregs && GET_MODE (data_regs[i]) == DImode)
	{
	  emit_move_insn (adjust_address (orig_dst, DImode, ofs),
			  data_regs[i]);
	  ofs += 8;
	  i++;
	}
    }

  if (dst_align >= 32)
    {
      /* If the source has remaining DImode regs, write them out in
	 two pieces.  */
      while (i < nregs && GET_MODE (data_regs[i]) == DImode)
	{
	  tmp = expand_binop (DImode, lshr_optab, data_regs[i], GEN_INT (32),
			      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_move_insn (adjust_address (orig_dst, SImode, ofs),
			  gen_lowpart (SImode, data_regs[i]));
	  emit_move_insn (adjust_address (orig_dst, SImode, ofs + 4),
			  gen_lowpart (SImode, tmp));
	  ofs += 8;
	  i++;
	}

      while (i < nregs && GET_MODE (data_regs[i]) == SImode)
	{
	  emit_move_insn (adjust_address (orig_dst, SImode, ofs),
			  data_regs[i]);
	  ofs += 4;
	  i++;
	}
    }

  if (i < nregs && GET_MODE (data_regs[i]) == DImode)
    {
      /* Write out a remaining block of words using unaligned methods.  */

      for (words = 1; i + words < nregs; words++)
	if (GET_MODE (data_regs[i + words]) != DImode)
	  break;

      if (words == 1)
	alpha_expand_unaligned_store (orig_dst, data_regs[i], 8, ofs);
      else
	alpha_expand_unaligned_store_words (data_regs + i, orig_dst,
					    words, ofs);

      i += words;
      ofs += words * 8;
    }

  /* Due to the above, this won't be aligned.  */
  /* ??? If we have more than one of these, consider constructing full
     words in registers and using alpha_expand_unaligned_store_words.  */
  while (i < nregs && GET_MODE (data_regs[i]) == SImode)
    {
      alpha_expand_unaligned_store (orig_dst, data_regs[i], 4, ofs);
      ofs += 4;
      i++;
    }

  if (dst_align >= 16)
    while (i < nregs && GET_MODE (data_regs[i]) == HImode)
      {
	emit_move_insn (adjust_address (orig_dst, HImode, ofs), data_regs[i]);
	i++;
	ofs += 2;
      }
  else
    while (i < nregs && GET_MODE (data_regs[i]) == HImode)
      {
	alpha_expand_unaligned_store (orig_dst, data_regs[i], 2, ofs);
	i++;
	ofs += 2;
      }

  while (i < nregs && GET_MODE (data_regs[i]) == QImode)
    {
      emit_move_insn (adjust_address (orig_dst, QImode, ofs), data_regs[i]);
      i++;
      ofs += 1;
    }

 dst_done:

  /* Every loaded register must have been stored exactly once.  */
  if (i != nregs)
    abort ();

  return 1;
}
4936
/* Expand a block clear (memset to zero).

   operands[0] is the pointer to the destination.
   operands[1] is the number of bytes to clear.
   operands[2] is the alignment.

   Returns 1 if the clear was expanded inline, 0 to punt to a library
   call (block larger than MAX_MOVE_WORDS quadwords).  Works through an
   unaligned prefix, then the aligned middle, then trailing pieces,
   widening stores as alignment allows.  */

int
alpha_expand_block_clear (operands)
     rtx operands[];
{
  rtx bytes_rtx	= operands[1];
  rtx align_rtx = operands[2];
  HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
  HOST_WIDE_INT bytes = orig_bytes;
  /* Alignment in bits; alignofs is the byte distance from the start of
     the block to the next naturally-aligned boundary (0 = aligned).  */
  HOST_WIDE_INT align = INTVAL (align_rtx) * BITS_PER_UNIT;
  HOST_WIDE_INT alignofs = 0;
  rtx orig_dst = operands[0];
  rtx tmp;
  int i, words, ofs = 0;

  if (orig_bytes <= 0)
    return 1;
  if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
    return 0;

  /* Look for stricter alignment.  */
  tmp = XEXP (orig_dst, 0);
  if (GET_CODE (tmp) == REG)
    align = MAX (align, REGNO_POINTER_ALIGN (REGNO (tmp)));
  else if (GET_CODE (tmp) == PLUS
	   && GET_CODE (XEXP (tmp, 0)) == REG
	   && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
    {
      /* reg+const: take the register's alignment and record how far
	 the constant offset puts us from the aligned boundary.  */
      HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
      int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));

      if (a > align)
	{
	  if (a >= 64)
	    align = a, alignofs = 8 - c % 8;
	  else if (a >= 32)
	    align = a, alignofs = 4 - c % 4;
	  else if (a >= 16)
	    align = a, alignofs = 2 - c % 2;
	}
    }
  else if (GET_CODE (tmp) == ADDRESSOF)
    {
      /* Destination is an unspilled pseudo; if its mode matches the
	 block size exactly, just zero the register.  */
      enum machine_mode mode;

      mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
      if (GET_MODE (XEXP (tmp, 0)) == mode)
	{
	  emit_move_insn (XEXP (tmp, 0), const0_rtx);
	  return 1;
	}

      /* No appropriate mode; fall back on memory.  */
      orig_dst = replace_equiv_address (orig_dst, copy_addr_to_reg (tmp));
      align = GET_MODE_BITSIZE (GET_MODE (XEXP (tmp, 0)));
    }

  /* Handle an unaligned prefix first.  */

  if (alignofs > 0)
    {
#if HOST_BITS_PER_WIDE_INT >= 64
      /* Given that alignofs is bounded by align, the only time BWX could
	 generate three stores is for a 7 byte fill.  Prefer two individual
	 stores over a load/mask/store sequence.  */
      if ((!TARGET_BWX || alignofs == 7)
	       && align >= 32
	       && !(alignofs == 4 && bytes >= 4))
	{
	  /* Clear the prefix with one read-modify-write of the aligned
	     word/quadword that contains it.  */
	  enum machine_mode mode = (align >= 64 ? DImode : SImode);
	  int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs;
	  rtx mem, tmp;
	  HOST_WIDE_INT mask;

	  mem = adjust_address (orig_dst, mode, ofs - inv_alignofs);
	  set_mem_alias_set (mem, 0);

	  /* Mask keeps the bytes before the block... */
	  mask = ~(~(HOST_WIDE_INT)0 << (inv_alignofs * 8));
	  if (bytes < alignofs)
	    {
	      /* ...and, if the whole block fits inside this word, also
		 the bytes after it.  */
	      mask |= ~(HOST_WIDE_INT)0 << ((inv_alignofs + bytes) * 8);
	      ofs += bytes;
	      bytes = 0;
	    }
	  else
	    {
	      bytes -= alignofs;
	      ofs += alignofs;
	    }
	  alignofs = 0;

	  tmp = expand_binop (mode, and_optab, mem, GEN_INT (mask),
			      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_move_insn (mem, tmp);
	}
#endif

      /* With BWX, peel the prefix off with byte/word stores.  */
      if (TARGET_BWX && (alignofs & 1) && bytes >= 1)
	{
	  emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
	  bytes -= 1;
	  ofs += 1;
	  alignofs -= 1;
	}
      if (TARGET_BWX && align >= 16 && (alignofs & 3) == 2 && bytes >= 2)
	{
	  emit_move_insn (adjust_address (orig_dst, HImode, ofs), const0_rtx);
	  bytes -= 2;
	  ofs += 2;
	  alignofs -= 2;
	}
      if (alignofs == 4 && bytes >= 4)
	{
	  emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
	  bytes -= 4;
	  ofs += 4;
	  alignofs = 0;
	}

      /* If we've not used the extra lead alignment information by now,
	 we won't be able to.  Downgrade align to match what's left over.  */
      if (alignofs > 0)
	{
	  alignofs = alignofs & -alignofs;
	  align = MIN (align, alignofs * BITS_PER_UNIT);
	}
    }

  /* Handle a block of contiguous long-words.  */

  if (align >= 64 && bytes >= 8)
    {
      words = bytes / 8;

      for (i = 0; i < words; ++i)
	emit_move_insn (adjust_address (orig_dst, DImode, ofs + i * 8),
			const0_rtx);

      bytes -= words * 8;
      ofs += words * 8;
    }

  /* If the block is large and appropriately aligned, emit a single
     store followed by a sequence of stq_u insns.  */

  if (align >= 32 && bytes > 16)
    {
      rtx orig_dsta;

      emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
      bytes -= 4;
      ofs += 4;

      /* A LO_SUM address cannot survive being wrapped in the AND below;
	 force it into a register first.  */
      orig_dsta = XEXP (orig_dst, 0);
      if (GET_CODE (orig_dsta) == LO_SUM)
	orig_dsta = force_reg (Pmode, orig_dsta);

      words = bytes / 8;
      for (i = 0; i < words; ++i)
	{
	  /* AND-rounded addresses must carry no alias set -- see
	     alpha_expand_unaligned_load.  */
	  rtx mem
	    = change_address (orig_dst, DImode,
			      gen_rtx_AND (DImode,
					   plus_constant (orig_dsta, ofs + i*8),
					   GEN_INT (-8)));
	  set_mem_alias_set (mem, 0);
	  emit_move_insn (mem, const0_rtx);
	}

      /* Depending on the alignment, the first stq_u may have overlapped
	 with the initial stl, which means that the last stq_u didn't
	 write as much as it would appear.  Leave those questionable bytes
	 unaccounted for.  */
      bytes -= words * 8 - 4;
      ofs += words * 8 - 4;
    }

  /* Handle a smaller block of aligned words.  */

  if ((align >= 64 && bytes == 4)
      || (align == 32 && bytes >= 4))
    {
      words = bytes / 4;

      for (i = 0; i < words; ++i)
	emit_move_insn (adjust_address (orig_dst, SImode, ofs + i * 4),
			const0_rtx);

      bytes -= words * 4;
      ofs += words * 4;
    }

  /* An unaligned block uses stq_u stores for as many as possible.  */

  if (bytes >= 8)
    {
      words = bytes / 8;

      alpha_expand_unaligned_store_words (NULL, orig_dst, words, ofs);

      bytes -= words * 8;
      ofs += words * 8;
    }

  /* Next clean up any trailing pieces.  */

#if HOST_BITS_PER_WIDE_INT >= 64
  /* Count the number of bits in BYTES for which aligned stores could
     be emitted.  */
  words = 0;
  for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1)
    if (bytes & i)
      words += 1;

  /* If we have appropriate alignment (and it wouldn't take too many
     instructions otherwise), mask out the bytes we need.  */
  if (TARGET_BWX ? words > 2 : bytes > 0)
    {
      if (align >= 64)
	{
	  /* Read-modify-write of the final quadword, keeping the bytes
	     beyond the block.  */
	  rtx mem, tmp;
	  HOST_WIDE_INT mask;

	  mem = adjust_address (orig_dst, DImode, ofs);
	  set_mem_alias_set (mem, 0);

	  mask = ~(HOST_WIDE_INT)0 << (bytes * 8);

	  tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask),
			      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_move_insn (mem, tmp);
	  return 1;
	}
      else if (align >= 32 && bytes < 4)
	{
	  /* Same trick with the final longword.  */
	  rtx mem, tmp;
	  HOST_WIDE_INT mask;

	  mem = adjust_address (orig_dst, SImode, ofs);
	  set_mem_alias_set (mem, 0);

	  mask = ~(HOST_WIDE_INT)0 << (bytes * 8);

	  tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask),
			      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_move_insn (mem, tmp);
	  return 1;
	}
    }
#endif

  if (!TARGET_BWX && bytes >= 4)
    {
      alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs);
      bytes -= 4;
      ofs += 4;
    }

  if (bytes >= 2)
    {
      if (align >= 16)
	{
	  do {
	    emit_move_insn (adjust_address (orig_dst, HImode, ofs),
			    const0_rtx);
	    bytes -= 2;
	    ofs += 2;
	  } while (bytes >= 2);
	}
      else if (! TARGET_BWX)
	{
	  alpha_expand_unaligned_store (orig_dst, const0_rtx, 2, ofs);
	  bytes -= 2;
	  ofs += 2;
	}
    }

  while (bytes > 0)
    {
      emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
      bytes -= 1;
      ofs += 1;
    }

  return 1;
}
f2cc13dc 5225
5226/* Returns a mask so that zap(x, value) == x & mask. */
5227
5228rtx
5229alpha_expand_zap_mask (value)
5230 HOST_WIDE_INT value;
5231{
5232 rtx result;
5233 int i;
5234
5235 if (HOST_BITS_PER_WIDE_INT >= 64)
5236 {
5237 HOST_WIDE_INT mask = 0;
5238
5239 for (i = 7; i >= 0; --i)
5240 {
5241 mask <<= 8;
5242 if (!((value >> i) & 1))
5243 mask |= 0xff;
5244 }
5245
5246 result = gen_int_mode (mask, DImode);
5247 }
5248 else if (HOST_BITS_PER_WIDE_INT == 32)
5249 {
5250 HOST_WIDE_INT mask_lo = 0, mask_hi = 0;
5251
5252 for (i = 7; i >= 4; --i)
5253 {
5254 mask_hi <<= 8;
5255 if (!((value >> i) & 1))
5256 mask_hi |= 0xff;
5257 }
5258
5259 for (i = 3; i >= 0; --i)
5260 {
5261 mask_lo <<= 8;
5262 if (!((value >> i) & 1))
5263 mask_lo |= 0xff;
5264 }
5265
5266 result = immed_double_const (mask_lo, mask_hi, DImode);
5267 }
5268 else
5269 abort ();
5270
5271 return result;
5272}
5273
5274void
5275alpha_expand_builtin_vector_binop (gen, mode, op0, op1, op2)
5276 rtx (*gen) PARAMS ((rtx, rtx, rtx));
5277 enum machine_mode mode;
5278 rtx op0, op1, op2;
5279{
5280 op0 = gen_lowpart (mode, op0);
5281
5282 if (op1 == const0_rtx)
5283 op1 = CONST0_RTX (mode);
5284 else
5285 op1 = gen_lowpart (mode, op1);
ae4cd3a5 5286
5287 if (op2 == const0_rtx)
f2cc13dc 5288 op2 = CONST0_RTX (mode);
5289 else
5290 op2 = gen_lowpart (mode, op2);
5291
5292 emit_insn ((*gen) (op0, op1, op2));
5293}
bf2a98b3 5294\f
5295/* Adjust the cost of a scheduling dependency. Return the new cost of
5296 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
5297
747af5e7 5298static int
bf2a98b3 5299alpha_adjust_cost (insn, link, dep_insn, cost)
5300 rtx insn;
5301 rtx link;
5302 rtx dep_insn;
5303 int cost;
5304{
d2832bd8 5305 enum attr_type insn_type, dep_insn_type;
bf2a98b3 5306
5307 /* If the dependence is an anti-dependence, there is no cost. For an
5308 output dependence, there is sometimes a cost, but it doesn't seem
5309 worth handling those few cases. */
bf2a98b3 5310 if (REG_NOTE_KIND (link) != 0)
7eb0c947 5311 return cost;
bf2a98b3 5312
d2832bd8 5313 /* If we can't recognize the insns, we can't really do anything. */
5314 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
5315 return cost;
5316
5317 insn_type = get_attr_type (insn);
5318 dep_insn_type = get_attr_type (dep_insn);
5319
07c1a295 5320 /* Bring in the user-defined memory latency. */
e7a69d05 5321 if (dep_insn_type == TYPE_ILD
5322 || dep_insn_type == TYPE_FLD
5323 || dep_insn_type == TYPE_LDSYM)
07c1a295 5324 cost += alpha_memory_latency-1;
5325
7eb0c947 5326 /* Everything else handled in DFA bypasses now. */
3680ac41 5327
bf2a98b3 5328 return cost;
5329}
747af5e7 5330
7eb0c947 5331/* The number of instructions that can be issued per cycle. */
5332
747af5e7 5333static int
5334alpha_issue_rate ()
5335{
5336 return (alpha_cpu == PROCESSOR_EV4 ? 2 : 4);
5337}
5338
/* All Alpha variants are described with the DFA pipeline model.  */

static int
alpha_use_dfa_pipeline_interface ()
{
  return 1;
}
5344
7eb0c947 5345/* How many alternative schedules to try. This should be as wide as the
5346 scheduling freedom in the DFA, but no wider. Making this value too
5347 large results extra work for the scheduler.
5348
5349 For EV4, loads can be issued to either IB0 or IB1, thus we have 2
5350 alternative schedules. For EV5, we can choose between E0/E1 and
5351 FA/FM. For EV6, an arithmatic insn can be issued to U0/U1/L0/L1. */
5352
5353static int
5354alpha_multipass_dfa_lookahead ()
5355{
5356 return (alpha_cpu == PROCESSOR_EV6 ? 4 : 2);
5357}
0c0464e6 5358\f
5f7b9df8 5359/* Machine-specific function data. */
5360
1f3233d1 5361struct machine_function GTY(())
5f7b9df8 5362{
1f3233d1 5363 /* For unicosmk. */
5f7b9df8 5364 /* List of call information words for calls from this function. */
5365 struct rtx_def *first_ciw;
5366 struct rtx_def *last_ciw;
5367 int ciw_count;
5368
5369 /* List of deferred case vectors. */
5370 struct rtx_def *addr_list;
1f3233d1 5371
5372 /* For OSF. */
5f7b9df8 5373 const char *some_ld_name;
5f7b9df8 5374};
5375
1f3233d1 5376/* How to allocate a 'struct machine_function'. */
9caef960 5377
1f3233d1 5378static struct machine_function *
5379alpha_init_machine_status ()
9caef960 5380{
1f3233d1 5381 return ((struct machine_function *)
5382 ggc_alloc_cleared (sizeof (struct machine_function)));
9caef960 5383}
9caef960 5384
0c0464e6 5385/* Functions to save and restore alpha_return_addr_rtx. */
5386
0c0464e6 5387/* Start the ball rolling with RETURN_ADDR_RTX. */
5388
5389rtx
5390alpha_return_addr (count, frame)
5391 int count;
769ea120 5392 rtx frame ATTRIBUTE_UNUSED;
0c0464e6 5393{
0c0464e6 5394 if (count != 0)
5395 return const0_rtx;
5396
0f37b7a2 5397 return get_hard_reg_initial_val (Pmode, REG_RA);
0c0464e6 5398}
5399
66561750 5400/* Return or create a pseudo containing the gp value for the current
5401 function. Needed only if TARGET_LD_BUGGY_LDGP. */
5402
5403rtx
5404alpha_gp_save_rtx ()
5405{
09a6a91b 5406 rtx r = get_hard_reg_initial_val (DImode, 29);
5407 if (GET_CODE (r) != MEM)
5408 r = gen_mem_addressof (r, NULL_TREE);
5409 return r;
66561750 5410}
5411
0c0464e6 5412static int
5413alpha_ra_ever_killed ()
5414{
5a965225 5415 rtx top;
5416
0f37b7a2 5417 if (!has_hard_reg_initial_val (Pmode, REG_RA))
0c0464e6 5418 return regs_ever_live[REG_RA];
5419
5a965225 5420 push_topmost_sequence ();
5421 top = get_insns ();
5422 pop_topmost_sequence ();
5423
5424 return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL_RTX);
0c0464e6 5425}
5426
bf2a98b3 5427\f
6fec94c5 5428/* Return the trap mode suffix applicable to the current
65abff06 5429 instruction, or NULL. */
bf2a98b3 5430
6fec94c5 5431static const char *
5432get_trap_mode_suffix ()
bf2a98b3 5433{
6fec94c5 5434 enum attr_trap_suffix s = get_attr_trap_suffix (current_output_insn);
bf2a98b3 5435
6fec94c5 5436 switch (s)
bf2a98b3 5437 {
6fec94c5 5438 case TRAP_SUFFIX_NONE:
5439 return NULL;
c4622276 5440
6fec94c5 5441 case TRAP_SUFFIX_SU:
bc16f0c1 5442 if (alpha_fptm >= ALPHA_FPTM_SU)
6fec94c5 5443 return "su";
5444 return NULL;
c4622276 5445
6fec94c5 5446 case TRAP_SUFFIX_SUI:
5447 if (alpha_fptm >= ALPHA_FPTM_SUI)
5448 return "sui";
5449 return NULL;
5450
5451 case TRAP_SUFFIX_V_SV:
39344852 5452 switch (alpha_fptm)
5453 {
5454 case ALPHA_FPTM_N:
6fec94c5 5455 return NULL;
39344852 5456 case ALPHA_FPTM_U:
6fec94c5 5457 return "v";
39344852 5458 case ALPHA_FPTM_SU:
5459 case ALPHA_FPTM_SUI:
6fec94c5 5460 return "sv";
39344852 5461 }
5462 break;
5463
6fec94c5 5464 case TRAP_SUFFIX_V_SV_SVI:
b5ea3193 5465 switch (alpha_fptm)
5466 {
5467 case ALPHA_FPTM_N:
6fec94c5 5468 return NULL;
b5ea3193 5469 case ALPHA_FPTM_U:
6fec94c5 5470 return "v";
b5ea3193 5471 case ALPHA_FPTM_SU:
6fec94c5 5472 return "sv";
b5ea3193 5473 case ALPHA_FPTM_SUI:
6fec94c5 5474 return "svi";
b5ea3193 5475 }
5476 break;
5477
6fec94c5 5478 case TRAP_SUFFIX_U_SU_SUI:
c4622276 5479 switch (alpha_fptm)
5480 {
5481 case ALPHA_FPTM_N:
6fec94c5 5482 return NULL;
c4622276 5483 case ALPHA_FPTM_U:
6fec94c5 5484 return "u";
c4622276 5485 case ALPHA_FPTM_SU:
6fec94c5 5486 return "su";
c4622276 5487 case ALPHA_FPTM_SUI:
6fec94c5 5488 return "sui";
c4622276 5489 }
5490 break;
6fec94c5 5491 }
5492 abort ();
5493}
c4622276 5494
6fec94c5 5495/* Return the rounding mode suffix applicable to the current
65abff06 5496 instruction, or NULL. */
6fec94c5 5497
5498static const char *
5499get_round_mode_suffix ()
5500{
5501 enum attr_round_suffix s = get_attr_round_suffix (current_output_insn);
5502
5503 switch (s)
5504 {
5505 case ROUND_SUFFIX_NONE:
5506 return NULL;
5507 case ROUND_SUFFIX_NORMAL:
5508 switch (alpha_fprm)
c4622276 5509 {
6fec94c5 5510 case ALPHA_FPRM_NORM:
5511 return NULL;
5512 case ALPHA_FPRM_MINF:
5513 return "m";
5514 case ALPHA_FPRM_CHOP:
5515 return "c";
5516 case ALPHA_FPRM_DYN:
5517 return "d";
c4622276 5518 }
5519 break;
5520
6fec94c5 5521 case ROUND_SUFFIX_C:
5522 return "c";
5523 }
5524 abort ();
5525}
5526
5f7b9df8 5527/* Locate some local-dynamic symbol still in use by this function
5528 so that we can print its name in some movdi_er_tlsldm pattern. */
5529
5530static const char *
5531get_some_local_dynamic_name ()
5532{
5533 rtx insn;
5534
5535 if (cfun->machine->some_ld_name)
5536 return cfun->machine->some_ld_name;
5537
5538 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
5539 if (INSN_P (insn)
5540 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
5541 return cfun->machine->some_ld_name;
5542
5543 abort ();
5544}
5545
5546static int
5547get_some_local_dynamic_name_1 (px, data)
5548 rtx *px;
5549 void *data ATTRIBUTE_UNUSED;
5550{
5551 rtx x = *px;
5552
5553 if (GET_CODE (x) == SYMBOL_REF)
5554 {
5555 const char *str = XSTR (x, 0);
5556 if (str[0] == '@' && str[1] == 'D')
5557 {
5558 cfun->machine->some_ld_name = str;
5559 return 1;
5560 }
5561 }
5562
5563 return 0;
5564}
5565
6fec94c5 5566/* Print an operand. Recognize special options, documented below. */
5567
5568void
5569print_operand (file, x, code)
5570 FILE *file;
5571 rtx x;
5572 int code;
5573{
5574 int i;
5575
5576 switch (code)
5577 {
5578 case '~':
5579 /* Print the assembler name of the current function. */
5580 assemble_name (file, alpha_fnname);
5581 break;
5582
5f7b9df8 5583 case '&':
5584 assemble_name (file, get_some_local_dynamic_name ());
5585 break;
5586
6fec94c5 5587 case '/':
5588 {
5589 const char *trap = get_trap_mode_suffix ();
5590 const char *round = get_round_mode_suffix ();
5591
5592 if (trap || round)
9caef960 5593 fprintf (file, (TARGET_AS_SLASH_BEFORE_SUFFIX ? "/%s%s" : "%s%s"),
5594 (trap ? trap : ""), (round ? round : ""));
6fec94c5 5595 break;
5596 }
5597
8df4a58b 5598 case ',':
5599 /* Generates single precision instruction suffix. */
6fec94c5 5600 fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file);
8df4a58b 5601 break;
5602
5603 case '-':
5604 /* Generates double precision instruction suffix. */
6fec94c5 5605 fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
8df4a58b 5606 break;
5607
1f0ce6a6 5608 case '#':
5609 if (alpha_this_literal_sequence_number == 0)
5610 alpha_this_literal_sequence_number = alpha_next_sequence_number++;
5611 fprintf (file, "%d", alpha_this_literal_sequence_number);
5612 break;
5613
5614 case '*':
5615 if (alpha_this_gpdisp_sequence_number == 0)
5616 alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++;
5617 fprintf (file, "%d", alpha_this_gpdisp_sequence_number);
5618 break;
5619
5620 case 'H':
5621 if (GET_CODE (x) == HIGH)
5dcb037d 5622 output_addr_const (file, XEXP (x, 0));
1f0ce6a6 5623 else
5624 output_operand_lossage ("invalid %%H value");
5625 break;
5626
ad2ed779 5627 case 'J':
5f7b9df8 5628 {
5629 const char *lituse;
5630
5631 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD_CALL)
5632 {
5633 x = XVECEXP (x, 0, 0);
5634 lituse = "lituse_tlsgd";
5635 }
5636 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM_CALL)
5637 {
5638 x = XVECEXP (x, 0, 0);
5639 lituse = "lituse_tlsldm";
5640 }
5641 else if (GET_CODE (x) == CONST_INT)
5642 lituse = "lituse_jsr";
5643 else
5644 {
5645 output_operand_lossage ("invalid %%J value");
5646 break;
5647 }
5648
5649 if (x != const0_rtx)
5650 fprintf (file, "\t\t!%s!%d", lituse, (int) INTVAL (x));
5651 }
ad2ed779 5652 break;
5653
bf2a98b3 5654 case 'r':
5655 /* If this operand is the constant zero, write it as "$31". */
5656 if (GET_CODE (x) == REG)
5657 fprintf (file, "%s", reg_names[REGNO (x)]);
5658 else if (x == CONST0_RTX (GET_MODE (x)))
5659 fprintf (file, "$31");
5660 else
5661 output_operand_lossage ("invalid %%r value");
bf2a98b3 5662 break;
5663
5664 case 'R':
5665 /* Similar, but for floating-point. */
5666 if (GET_CODE (x) == REG)
5667 fprintf (file, "%s", reg_names[REGNO (x)]);
5668 else if (x == CONST0_RTX (GET_MODE (x)))
5669 fprintf (file, "$f31");
5670 else
5671 output_operand_lossage ("invalid %%R value");
bf2a98b3 5672 break;
5673
5674 case 'N':
5675 /* Write the 1's complement of a constant. */
5676 if (GET_CODE (x) != CONST_INT)
5677 output_operand_lossage ("invalid %%N value");
5678
61a63ca5 5679 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
bf2a98b3 5680 break;
5681
5682 case 'P':
5683 /* Write 1 << C, for a constant C. */
5684 if (GET_CODE (x) != CONST_INT)
5685 output_operand_lossage ("invalid %%P value");
5686
61a63ca5 5687 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) 1 << INTVAL (x));
bf2a98b3 5688 break;
5689
5690 case 'h':
5691 /* Write the high-order 16 bits of a constant, sign-extended. */
5692 if (GET_CODE (x) != CONST_INT)
5693 output_operand_lossage ("invalid %%h value");
5694
61a63ca5 5695 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) >> 16);
bf2a98b3 5696 break;
5697
5698 case 'L':
5699 /* Write the low-order 16 bits of a constant, sign-extended. */
5700 if (GET_CODE (x) != CONST_INT)
5701 output_operand_lossage ("invalid %%L value");
5702
61a63ca5 5703 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5704 (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000));
bf2a98b3 5705 break;
5706
5707 case 'm':
5708 /* Write mask for ZAP insn. */
5709 if (GET_CODE (x) == CONST_DOUBLE)
5710 {
5711 HOST_WIDE_INT mask = 0;
5712 HOST_WIDE_INT value;
5713
5714 value = CONST_DOUBLE_LOW (x);
5715 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5716 i++, value >>= 8)
5717 if (value & 0xff)
5718 mask |= (1 << i);
5719
5720 value = CONST_DOUBLE_HIGH (x);
5721 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5722 i++, value >>= 8)
5723 if (value & 0xff)
5724 mask |= (1 << (i + sizeof (int)));
5725
61a63ca5 5726 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask & 0xff);
bf2a98b3 5727 }
5728
5729 else if (GET_CODE (x) == CONST_INT)
5730 {
5731 HOST_WIDE_INT mask = 0, value = INTVAL (x);
5732
5733 for (i = 0; i < 8; i++, value >>= 8)
5734 if (value & 0xff)
5735 mask |= (1 << i);
5736
61a63ca5 5737 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask);
bf2a98b3 5738 }
5739 else
5740 output_operand_lossage ("invalid %%m value");
5741 break;
5742
5743 case 'M':
34377880 5744 /* 'b', 'w', 'l', or 'q' as the value of the constant. */
bf2a98b3 5745 if (GET_CODE (x) != CONST_INT
34377880 5746 || (INTVAL (x) != 8 && INTVAL (x) != 16
5747 && INTVAL (x) != 32 && INTVAL (x) != 64))
bf2a98b3 5748 output_operand_lossage ("invalid %%M value");
5749
5750 fprintf (file, "%s",
34377880 5751 (INTVAL (x) == 8 ? "b"
5752 : INTVAL (x) == 16 ? "w"
5753 : INTVAL (x) == 32 ? "l"
5754 : "q"));
bf2a98b3 5755 break;
5756
5757 case 'U':
5758 /* Similar, except do it from the mask. */
ae4cd3a5 5759 if (GET_CODE (x) == CONST_INT)
5760 {
5761 HOST_WIDE_INT value = INTVAL (x);
5762
5763 if (value == 0xff)
5764 {
5765 fputc ('b', file);
5766 break;
5767 }
5768 if (value == 0xffff)
5769 {
5770 fputc ('w', file);
5771 break;
5772 }
5773 if (value == 0xffffffff)
5774 {
5775 fputc ('l', file);
5776 break;
5777 }
5778 if (value == -1)
5779 {
5780 fputc ('q', file);
5781 break;
5782 }
5783 }
5784 else if (HOST_BITS_PER_WIDE_INT == 32
5785 && GET_CODE (x) == CONST_DOUBLE
5786 && CONST_DOUBLE_LOW (x) == 0xffffffff
5787 && CONST_DOUBLE_HIGH (x) == 0)
5788 {
5789 fputc ('l', file);
5790 break;
5791 }
5792 output_operand_lossage ("invalid %%U value");
bf2a98b3 5793 break;
5794
5795 case 's':
9caef960 5796 /* Write the constant value divided by 8 for little-endian mode or
5797 (56 - value) / 8 for big-endian mode. */
5798
bf2a98b3 5799 if (GET_CODE (x) != CONST_INT
9caef960 5800 || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN
5801 ? 56
5802 : 64)
5803 || (INTVAL (x) & 7) != 0)
bf2a98b3 5804 output_operand_lossage ("invalid %%s value");
5805
9caef960 5806 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5807 WORDS_BIG_ENDIAN
5808 ? (56 - INTVAL (x)) / 8
5809 : INTVAL (x) / 8);
bf2a98b3 5810 break;
5811
5812 case 'S':
5813 /* Same, except compute (64 - c) / 8 */
5814
5815 if (GET_CODE (x) != CONST_INT
5816 && (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
5817 && (INTVAL (x) & 7) != 8)
5818 output_operand_lossage ("invalid %%s value");
5819
61a63ca5 5820 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (64 - INTVAL (x)) / 8);
bf2a98b3 5821 break;
5822
9caef960 5823 case 't':
5824 {
5825 /* On Unicos/Mk systems: use a DEX expression if the symbol
5826 clashes with a register name. */
5827 int dex = unicosmk_need_dex (x);
5828 if (dex)
5829 fprintf (file, "DEX(%d)", dex);
5830 else
5831 output_addr_const (file, x);
5832 }
5833 break;
5834
62dc3582 5835 case 'C': case 'D': case 'c': case 'd':
bf2a98b3 5836 /* Write out comparison name. */
62dc3582 5837 {
5838 enum rtx_code c = GET_CODE (x);
5839
5840 if (GET_RTX_CLASS (c) != '<')
5841 output_operand_lossage ("invalid %%C value");
5842
f3d263a7 5843 else if (code == 'D')
62dc3582 5844 c = reverse_condition (c);
5845 else if (code == 'c')
5846 c = swap_condition (c);
5847 else if (code == 'd')
5848 c = swap_condition (reverse_condition (c));
5849
5850 if (c == LEU)
5851 fprintf (file, "ule");
5852 else if (c == LTU)
5853 fprintf (file, "ult");
a4110d9a 5854 else if (c == UNORDERED)
5855 fprintf (file, "un");
62dc3582 5856 else
5857 fprintf (file, "%s", GET_RTX_NAME (c));
5858 }
8ad50a44 5859 break;
5860
bf2a98b3 5861 case 'E':
5862 /* Write the divide or modulus operator. */
5863 switch (GET_CODE (x))
5864 {
5865 case DIV:
5866 fprintf (file, "div%s", GET_MODE (x) == SImode ? "l" : "q");
5867 break;
5868 case UDIV:
5869 fprintf (file, "div%su", GET_MODE (x) == SImode ? "l" : "q");
5870 break;
5871 case MOD:
5872 fprintf (file, "rem%s", GET_MODE (x) == SImode ? "l" : "q");
5873 break;
5874 case UMOD:
5875 fprintf (file, "rem%su", GET_MODE (x) == SImode ? "l" : "q");
5876 break;
5877 default:
5878 output_operand_lossage ("invalid %%E value");
5879 break;
5880 }
5881 break;
5882
bf2a98b3 5883 case 'A':
5884 /* Write "_u" for unaligned access. */
5885 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
5886 fprintf (file, "_u");
5887 break;
5888
5889 case 0:
5890 if (GET_CODE (x) == REG)
5891 fprintf (file, "%s", reg_names[REGNO (x)]);
5892 else if (GET_CODE (x) == MEM)
5893 output_address (XEXP (x, 0));
5f7b9df8 5894 else if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
5895 {
5896 switch (XINT (XEXP (x, 0), 1))
5897 {
5898 case UNSPEC_DTPREL:
5899 case UNSPEC_TPREL:
5900 output_addr_const (file, XVECEXP (XEXP (x, 0), 0, 0));
5901 break;
5902 default:
5903 output_operand_lossage ("unknown relocation unspec");
5904 break;
5905 }
5906 }
bf2a98b3 5907 else
5908 output_addr_const (file, x);
5909 break;
5910
5911 default:
5912 output_operand_lossage ("invalid %%xn code");
5913 }
5914}
6e0fe99e 5915
/* Print a memory address operand to FILE.  Handles the Alpha-specific
   forms: an AND wrapper (unaligned access), base+offset, LO_SUM
   relocations (gprel/tprel/dtprel), and -- on VMS -- bare symbolic
   addresses.  */

void
print_operand_address (file, addr)
    FILE *file;
    rtx addr;
{
  /* $31 reads as zero, so it serves as the "no base register" default.  */
  int basereg = 31;
  HOST_WIDE_INT offset = 0;

  /* Strip the alignment AND used by unaligned-access patterns.  */
  if (GET_CODE (addr) == AND)
    addr = XEXP (addr, 0);

  /* Split off a constant displacement.  */
  if (GET_CODE (addr) == PLUS
      && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    {
      offset = INTVAL (XEXP (addr, 1));
      addr = XEXP (addr, 0);
    }

  if (GET_CODE (addr) == LO_SUM)
    {
      /* Low part of a relocation.  RELOC16 is used when the base is
	 the GP ($29), RELOCLO otherwise; a NULL reloc16 marks TLS
	 relocations that never use the 16-bit form.  */
      const char *reloc16, *reloclo;
      rtx op1 = XEXP (addr, 1);

      if (GET_CODE (op1) == CONST && GET_CODE (XEXP (op1, 0)) == UNSPEC)
	{
	  op1 = XEXP (op1, 0);
	  switch (XINT (op1, 1))
	    {
	    case UNSPEC_DTPREL:
	      reloc16 = NULL;
	      reloclo = (alpha_tls_size == 16 ? "dtprel" : "dtprello");
	      break;
	    case UNSPEC_TPREL:
	      reloc16 = NULL;
	      reloclo = (alpha_tls_size == 16 ? "tprel" : "tprello");
	      break;
	    default:
	      output_operand_lossage ("unknown relocation unspec");
	      return;
	    }

	  output_addr_const (file, XVECEXP (op1, 0, 0));
	}
      else
	{
	  reloc16 = "gprel";
	  reloclo = "gprellow";
	  output_addr_const (file, op1);
	}

      if (offset)
	{
	  fputc ('+', file);
	  fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
	}

      addr = XEXP (addr, 0);
      if (GET_CODE (addr) == REG)
	basereg = REGNO (addr);
      else if (GET_CODE (addr) == SUBREG
	       && GET_CODE (SUBREG_REG (addr)) == REG)
	basereg = subreg_regno (addr);
      else
	abort ();

      fprintf (file, "($%d)\t\t!%s", basereg,
	       (basereg == 29 ? reloc16 : reloclo));
      return;
    }

  if (GET_CODE (addr) == REG)
    basereg = REGNO (addr);
  else if (GET_CODE (addr) == SUBREG
	   && GET_CODE (SUBREG_REG (addr)) == REG)
    basereg = subreg_regno (addr);
  else if (GET_CODE (addr) == CONST_INT)
    offset = INTVAL (addr);

#if TARGET_ABI_OPEN_VMS
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      fprintf (file, "%s", XSTR (addr, 0));
      return;
    }
  else if (GET_CODE (addr) == CONST
	   && GET_CODE (XEXP (addr, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (addr, 0), 0)) == SYMBOL_REF)
    {
      /* NOTE(review): INTVAL is HOST_WIDE_INT but is printed with %d;
	 assumes the symbolic offset fits in an int -- verify on hosts
	 where HOST_WIDE_INT is wider than int.  */
      fprintf (file, "%s+%d",
	       XSTR (XEXP (XEXP (addr, 0), 0), 0),
	       INTVAL (XEXP (XEXP (addr, 0), 1)));
      return;
    }
#endif

  else
    abort ();

  fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
  fprintf (file, "($%d)", basereg);
}
bf2a98b3 6017\f
9e042f31 6018/* Emit RTL insns to initialize the variable parts of a trampoline at
6019 TRAMP. FNADDR is an RTX for the address of the function's pure
6020 code. CXT is an RTX for the static chain value for the function.
96297568 6021
6022 The three offset parameters are for the individual template's
6023 layout. A JMPOFS < 0 indicates that the trampoline does not
6024 contain instructions at all.
6025
9e042f31 6026 We assume here that a function will be called many more times than
6027 its address is taken (e.g., it might be passed to qsort), so we
6028 take the trouble to initialize the "hint" field in the JMP insn.
6029 Note that the hint field is PC (new) + 4 * bits 13:0. */
6030
6031void
96297568 6032alpha_initialize_trampoline (tramp, fnaddr, cxt, fnofs, cxtofs, jmpofs)
6033 rtx tramp, fnaddr, cxt;
6034 int fnofs, cxtofs, jmpofs;
9e042f31 6035{
6036 rtx temp, temp1, addr;
17683b9f 6037 /* VMS really uses DImode pointers in memory at this point. */
1467e953 6038 enum machine_mode mode = TARGET_ABI_OPEN_VMS ? Pmode : ptr_mode;
9e042f31 6039
17683b9f 6040#ifdef POINTERS_EXTEND_UNSIGNED
6041 fnaddr = convert_memory_address (mode, fnaddr);
6042 cxt = convert_memory_address (mode, cxt);
6043#endif
6044
9e042f31 6045 /* Store function address and CXT. */
46ba8e1c 6046 addr = memory_address (mode, plus_constant (tramp, fnofs));
7014838c 6047 emit_move_insn (gen_rtx_MEM (mode, addr), fnaddr);
46ba8e1c 6048 addr = memory_address (mode, plus_constant (tramp, cxtofs));
7014838c 6049 emit_move_insn (gen_rtx_MEM (mode, addr), cxt);
96297568 6050
6051 /* This has been disabled since the hint only has a 32k range, and in
65abff06 6052 no existing OS is the stack within 32k of the text segment. */
96297568 6053 if (0 && jmpofs >= 0)
6054 {
6055 /* Compute hint value. */
6056 temp = force_operand (plus_constant (tramp, jmpofs+4), NULL_RTX);
6057 temp = expand_binop (DImode, sub_optab, fnaddr, temp, temp, 1,
6058 OPTAB_WIDEN);
6059 temp = expand_shift (RSHIFT_EXPR, Pmode, temp,
6060 build_int_2 (2, 0), NULL_RTX, 1);
6de9716c 6061 temp = expand_and (SImode, gen_lowpart (SImode, temp),
6062 GEN_INT (0x3fff), 0);
96297568 6063
6064 /* Merge in the hint. */
6065 addr = memory_address (SImode, plus_constant (tramp, jmpofs));
7014838c 6066 temp1 = force_reg (SImode, gen_rtx_MEM (SImode, addr));
6de9716c 6067 temp1 = expand_and (SImode, temp1, GEN_INT (0xffffc000), NULL_RTX);
96297568 6068 temp1 = expand_binop (SImode, ior_optab, temp1, temp, temp1, 1,
6069 OPTAB_WIDEN);
7014838c 6070 emit_move_insn (gen_rtx_MEM (SImode, addr), temp1);
96297568 6071 }
9e042f31 6072
6073#ifdef TRANSFER_FROM_TRAMPOLINE
7014838c 6074 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__enable_execute_stack"),
f0bdd254 6075 0, VOIDmode, 1, tramp, Pmode);
9e042f31 6076#endif
6077
96297568 6078 if (jmpofs >= 0)
6079 emit_insn (gen_imb ());
9e042f31 6080}
6081\f
915c336f 6082/* Determine where to put an argument to a function.
6083 Value is zero to push the argument on the stack,
6084 or a hard register in which to store the argument.
6085
6086 MODE is the argument's machine mode.
6087 TYPE is the data type of the argument (as a tree).
6088 This is null for libcalls where that information may
6089 not be available.
6090 CUM is a variable of type CUMULATIVE_ARGS which gives info about
6091 the preceding args and about the function being called.
6092 NAMED is nonzero if this argument is a named parameter
6093 (otherwise it is an extra parameter matching an ellipsis).
6094
6095 On Alpha the first 6 words of args are normally in registers
6096 and the rest are pushed. */
6097
6098rtx
4e5bf555 6099function_arg (cum, mode, type, named)
915c336f 6100 CUMULATIVE_ARGS cum;
6101 enum machine_mode mode;
6102 tree type;
6103 int named ATTRIBUTE_UNUSED;
6104{
6105 int basereg;
57e47080 6106 int num_args;
915c336f 6107
9caef960 6108 /* Set up defaults for FP operands passed in FP registers, and
6109 integral operands passed in integer registers. */
6110 if (TARGET_FPREGS
6111 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
6112 || GET_MODE_CLASS (mode) == MODE_FLOAT))
6113 basereg = 32 + 16;
6114 else
6115 basereg = 16;
6116
6117 /* ??? Irritatingly, the definition of CUMULATIVE_ARGS is different for
6118 the three platforms, so we can't avoid conditional compilation. */
1467e953 6119#if TARGET_ABI_OPEN_VMS
9caef960 6120 {
6121 if (mode == VOIDmode)
6122 return alpha_arg_info_reg_val (cum);
1467e953 6123
9caef960 6124 num_args = cum.num_args;
6125 if (num_args >= 6 || MUST_PASS_IN_STACK (mode, type))
6126 return NULL_RTX;
6127 }
1467e953 6128#else
9caef960 6129#if TARGET_ABI_UNICOSMK
6130 {
6131 int size;
915c336f 6132
9caef960 6133 /* If this is the last argument, generate the call info word (CIW). */
6134 /* ??? We don't include the caller's line number in the CIW because
6135 I don't know how to determine it if debug infos are turned off. */
6136 if (mode == VOIDmode)
6137 {
6138 int i;
6139 HOST_WIDE_INT lo;
6140 HOST_WIDE_INT hi;
6141 rtx ciw;
6142
6143 lo = 0;
6144
6145 for (i = 0; i < cum.num_reg_words && i < 5; i++)
6146 if (cum.reg_args_type[i])
6147 lo |= (1 << (7 - i));
6148
6149 if (cum.num_reg_words == 6 && cum.reg_args_type[5])
6150 lo |= 7;
6151 else
6152 lo |= cum.num_reg_words;
6153
6154#if HOST_BITS_PER_WIDE_INT == 32
6155 hi = (cum.num_args << 20) | cum.num_arg_words;
6156#else
e162157f 6157 lo = lo | ((HOST_WIDE_INT) cum.num_args << 52)
6158 | ((HOST_WIDE_INT) cum.num_arg_words << 32);
9caef960 6159 hi = 0;
6160#endif
6161 ciw = immed_double_const (lo, hi, DImode);
6162
6163 return gen_rtx_UNSPEC (DImode, gen_rtvec (1, ciw),
6164 UNSPEC_UMK_LOAD_CIW);
6165 }
6166
6167 size = ALPHA_ARG_SIZE (mode, type, named);
6168 num_args = cum.num_reg_words;
6169 if (MUST_PASS_IN_STACK (mode, type)
6170 || cum.num_reg_words + size > 6 || cum.force_stack)
6171 return NULL_RTX;
6172 else if (type && TYPE_MODE (type) == BLKmode)
6173 {
6174 rtx reg1, reg2;
6175
6176 reg1 = gen_rtx_REG (DImode, num_args + 16);
6177 reg1 = gen_rtx_EXPR_LIST (DImode, reg1, const0_rtx);
6178
6179 /* The argument fits in two registers. Note that we still need to
6180 reserve a register for empty structures. */
6181 if (size == 0)
6182 return NULL_RTX;
6183 else if (size == 1)
6184 return gen_rtx_PARALLEL (mode, gen_rtvec (1, reg1));
6185 else
6186 {
6187 reg2 = gen_rtx_REG (DImode, num_args + 17);
6188 reg2 = gen_rtx_EXPR_LIST (DImode, reg2, GEN_INT (8));
6189 return gen_rtx_PARALLEL (mode, gen_rtvec (2, reg1, reg2));
6190 }
6191 }
6192 }
6193#else
6194 {
6195 if (cum >= 6)
6196 return NULL_RTX;
6197 num_args = cum;
6198
6199 /* VOID is passed as a special flag for "last argument". */
6200 if (type == void_type_node)
6201 basereg = 16;
6202 else if (MUST_PASS_IN_STACK (mode, type))
6203 return NULL_RTX;
6204 else if (FUNCTION_ARG_PASS_BY_REFERENCE (cum, mode, type, named))
6205 basereg = 16;
6206 }
6207#endif /* TARGET_ABI_UNICOSMK */
1467e953 6208#endif /* TARGET_ABI_OPEN_VMS */
915c336f 6209
57e47080 6210 return gen_rtx_REG (mode, num_args + basereg);
915c336f 6211}
6212
e7aabeab 6213tree
6214alpha_build_va_list ()
bf2a98b3 6215{
0054fd98 6216 tree base, ofs, record, type_decl;
bf2a98b3 6217
9caef960 6218 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
e7aabeab 6219 return ptr_type_node;
6220
a1f71e15 6221 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
0054fd98 6222 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
6223 TREE_CHAIN (record) = type_decl;
6224 TYPE_NAME (record) = type_decl;
6225
e7aabeab 6226 /* C++? SET_IS_AGGR_TYPE (record, 1); */
bf2a98b3 6227
e7aabeab 6228 ofs = build_decl (FIELD_DECL, get_identifier ("__offset"),
6229 integer_type_node);
6230 DECL_FIELD_CONTEXT (ofs) = record;
fc4c89ed 6231
e7aabeab 6232 base = build_decl (FIELD_DECL, get_identifier ("__base"),
6233 ptr_type_node);
6234 DECL_FIELD_CONTEXT (base) = record;
6235 TREE_CHAIN (base) = ofs;
fc4c89ed 6236
e7aabeab 6237 TYPE_FIELDS (record) = base;
6238 layout_type (record);
6239
6240 return record;
6241}
6242
/* Implement va_start: initialize VALIST so subsequent va_arg calls can
   walk the argument save area.  NEXTARG is the address past the last
   named argument (used only for VMS).  */

void
alpha_va_start (valist, nextarg)
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT offset;
  tree t, offset_field, base_field;

  /* Nothing sensible to do for an erroneous declaration.  */
  if (TREE_CODE (TREE_TYPE (valist)) == ERROR_MARK)
    return;

  if (TARGET_ABI_UNICOSMK)
    std_expand_builtin_va_start (valist, nextarg);
  /* NOTE(review): on UNICOSMK control falls through into the VMS/OSF
     code below after the standard expansion; there is no early return
     here -- confirm this fall-through is intentional, since valist is
     a plain pointer type on that target.  */

  /* For Unix, SETUP_INCOMING_VARARGS moves the starting address base
     up by 48, storing fp arg registers in the first 48 bytes, and the
     integer arg registers in the next 48 bytes.  This is only done,
     however, if any integer registers need to be stored.

     If no integer registers need be stored, then we must subtract 48
     in order to account for the integer arg registers which are counted
     in argsize above, but which are not actually stored on the stack.  */

  if (NUM_ARGS <= 6)
    offset = TARGET_ABI_OPEN_VMS ? UNITS_PER_WORD : 6 * UNITS_PER_WORD;
  else
    offset = -6 * UNITS_PER_WORD;

  if (TARGET_ABI_OPEN_VMS)
    {
      /* VMS: va_list is a single pointer just past the named args.  */
      nextarg = plus_constant (nextarg, offset);
      nextarg = plus_constant (nextarg, NUM_ARGS * UNITS_PER_WORD);
      t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
		 make_tree (ptr_type_node, nextarg));
      TREE_SIDE_EFFECTS (t) = 1;

      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* OSF: fill in the two-field record built by alpha_build_va_list:
	 __base gets the (adjusted) incoming args pointer, __offset the
	 byte count of named arguments already consumed.  */
      base_field = TYPE_FIELDS (TREE_TYPE (valist));
      offset_field = TREE_CHAIN (base_field);

      base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
			  valist, base_field);
      offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
			    valist, offset_field);

      t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
      t = build (PLUS_EXPR, ptr_type_node, t, build_int_2 (offset, 0));
      t = build (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      t = build_int_2 (NUM_ARGS * UNITS_PER_WORD, 0);
      t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
}
6303
/* Implement `va_arg' for the OSF/1 ABI: return an rtx for the address
   from which an argument of type TYPE should be loaded, advancing the
   va_list VALIST past it.  VMS and Unicos/Mk use a pointer-typed
   va_list and are handled by the generic expander.  */

rtx
alpha_va_arg (valist, type)
     tree valist, type;
{
  rtx addr;
  tree t, type_size, rounded_size;
  tree offset_field, base_field, addr_tree, addend;
  tree wide_type, wide_ofs;
  int indirect = 0;

  if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
    return std_expand_builtin_va_arg (valist, type);

  /* Arguments occupy whole 8-byte slots; round the size up to a
     multiple of 8.  Use zero for erroneous or incomplete types so we
     at least don't crash on bad input.  */
  if (type == error_mark_node
      || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
      || TREE_OVERFLOW (type_size))
    rounded_size = size_zero_node;
  else
    rounded_size = fold (build (MULT_EXPR, sizetype,
				fold (build (TRUNC_DIV_EXPR, sizetype,
					     fold (build (PLUS_EXPR, sizetype,
							  type_size,
							  size_int (7))),
					     size_int (8))),
				size_int (8)));

  base_field = TYPE_FIELDS (TREE_TYPE (valist));
  offset_field = TREE_CHAIN (base_field);

  base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
		      valist, base_field);
  offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
			valist, offset_field);

  /* If the type could not be passed in registers, skip the block
     reserved for the registers.  */
  if (MUST_PASS_IN_STACK (TYPE_MODE (type), type))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
		 build (MAX_EXPR, TREE_TYPE (offset_field),
			offset_field, build_int_2 (6*8, 0)));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Work with the offset as a 64-bit signed value.  */
  wide_type = make_signed_type (64);
  wide_ofs = save_expr (build1 (CONVERT_EXPR, wide_type, offset_field));

  addend = wide_ofs;

  if (TYPE_MODE (type) == TFmode || TYPE_MODE (type) == TCmode)
    {
      /* TFmode/TCmode arguments are passed by reference: fetch the
	 one-word pointer here and dereference it at the end.  */
      indirect = 1;
      rounded_size = size_int (UNITS_PER_WORD);
    }
  else if (FLOAT_TYPE_P (type))
    {
      tree fpaddend, cond;

      /* While the argument is still within the register save area
	 (offset < 6*8), FP values live 6*8 bytes below the integer
	 slots; see the layout comment in alpha_va_start.  */
      fpaddend = fold (build (PLUS_EXPR, TREE_TYPE (addend),
			      addend, build_int_2 (-6*8, 0)));

      cond = fold (build (LT_EXPR, integer_type_node,
			  wide_ofs, build_int_2 (6*8, 0)));

      addend = fold (build (COND_EXPR, TREE_TYPE (addend), cond,
			    fpaddend, addend));
    }

  addr_tree = build (PLUS_EXPR, TREE_TYPE (base_field),
		     base_field, addend);

  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);

  /* Advance __offset past the argument.  */
  t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
	     build (PLUS_EXPR, TREE_TYPE (offset_field),
		    offset_field, rounded_size));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (indirect)
    {
      /* By-reference argument: ADDR currently holds the address of
	 the pointer; load through it.  */
      addr = force_reg (Pmode, addr);
      addr = gen_rtx_MEM (Pmode, addr);
    }

  return addr;
}
6393\f
f2cc13dc 6394/* Builtins. */
6395
/* Function codes for the Alpha-specific builtins.  The order of the
   enumerators must match the code_for_builtin[] table below, which is
   indexed by this enumeration.  */

enum alpha_builtin
{
  ALPHA_BUILTIN_CMPBGE,
  ALPHA_BUILTIN_EXTBL,
  ALPHA_BUILTIN_EXTWL,
  ALPHA_BUILTIN_EXTLL,
  ALPHA_BUILTIN_EXTQL,
  ALPHA_BUILTIN_EXTWH,
  ALPHA_BUILTIN_EXTLH,
  ALPHA_BUILTIN_EXTQH,
  ALPHA_BUILTIN_INSBL,
  ALPHA_BUILTIN_INSWL,
  ALPHA_BUILTIN_INSLL,
  ALPHA_BUILTIN_INSQL,
  ALPHA_BUILTIN_INSWH,
  ALPHA_BUILTIN_INSLH,
  ALPHA_BUILTIN_INSQH,
  ALPHA_BUILTIN_MSKBL,
  ALPHA_BUILTIN_MSKWL,
  ALPHA_BUILTIN_MSKLL,
  ALPHA_BUILTIN_MSKQL,
  ALPHA_BUILTIN_MSKWH,
  ALPHA_BUILTIN_MSKLH,
  ALPHA_BUILTIN_MSKQH,
  ALPHA_BUILTIN_UMULH,
  ALPHA_BUILTIN_ZAP,
  ALPHA_BUILTIN_ZAPNOT,
  ALPHA_BUILTIN_AMASK,
  ALPHA_BUILTIN_IMPLVER,
  ALPHA_BUILTIN_RPCC,
  ALPHA_BUILTIN_THREAD_POINTER,
  ALPHA_BUILTIN_SET_THREAD_POINTER,

  /* TARGET_MAX */
  ALPHA_BUILTIN_MINUB8,
  ALPHA_BUILTIN_MINSB8,
  ALPHA_BUILTIN_MINUW4,
  ALPHA_BUILTIN_MINSW4,
  ALPHA_BUILTIN_MAXUB8,
  ALPHA_BUILTIN_MAXSB8,
  ALPHA_BUILTIN_MAXUW4,
  ALPHA_BUILTIN_MAXSW4,
  ALPHA_BUILTIN_PERR,
  ALPHA_BUILTIN_PKLB,
  ALPHA_BUILTIN_PKWB,
  ALPHA_BUILTIN_UNPKBL,
  ALPHA_BUILTIN_UNPKBW,

  /* TARGET_CIX */
  ALPHA_BUILTIN_CTTZ,
  ALPHA_BUILTIN_CTLZ,
  ALPHA_BUILTIN_CTPOP,

  ALPHA_BUILTIN_max
};
6451
/* Named-pattern insn codes for each builtin, indexed by enum
   alpha_builtin.  Keep this table in the same order as the
   enumeration above.  */

static unsigned int const code_for_builtin[ALPHA_BUILTIN_max] = {
  CODE_FOR_builtin_cmpbge,
  CODE_FOR_builtin_extbl,
  CODE_FOR_builtin_extwl,
  CODE_FOR_builtin_extll,
  CODE_FOR_builtin_extql,
  CODE_FOR_builtin_extwh,
  CODE_FOR_builtin_extlh,
  CODE_FOR_builtin_extqh,
  CODE_FOR_builtin_insbl,
  CODE_FOR_builtin_inswl,
  CODE_FOR_builtin_insll,
  CODE_FOR_builtin_insql,
  CODE_FOR_builtin_inswh,
  CODE_FOR_builtin_inslh,
  CODE_FOR_builtin_insqh,
  CODE_FOR_builtin_mskbl,
  CODE_FOR_builtin_mskwl,
  CODE_FOR_builtin_mskll,
  CODE_FOR_builtin_mskql,
  CODE_FOR_builtin_mskwh,
  CODE_FOR_builtin_msklh,
  CODE_FOR_builtin_mskqh,
  CODE_FOR_umuldi3_highpart,
  CODE_FOR_builtin_zap,
  CODE_FOR_builtin_zapnot,
  CODE_FOR_builtin_amask,
  CODE_FOR_builtin_implver,
  CODE_FOR_builtin_rpcc,
  CODE_FOR_load_tp,
  CODE_FOR_set_tp,

  /* TARGET_MAX */
  CODE_FOR_builtin_minub8,
  CODE_FOR_builtin_minsb8,
  CODE_FOR_builtin_minuw4,
  CODE_FOR_builtin_minsw4,
  CODE_FOR_builtin_maxub8,
  CODE_FOR_builtin_maxsb8,
  CODE_FOR_builtin_maxuw4,
  CODE_FOR_builtin_maxsw4,
  CODE_FOR_builtin_perr,
  CODE_FOR_builtin_pklb,
  CODE_FOR_builtin_pkwb,
  CODE_FOR_builtin_unpkbl,
  CODE_FOR_builtin_unpkbw,

  /* TARGET_CIX */
  CODE_FOR_builtin_cttz,
  CODE_FOR_builtin_ctlz,
  CODE_FOR_builtin_ctpop
};
6504
/* Description of one builtin to register with the front end.  */
struct alpha_builtin_def
{
  const char *name;		/* User-visible "__builtin_alpha_*" name.  */
  enum alpha_builtin code;	/* Function code; indexes code_for_builtin.  */
  unsigned int target_mask;	/* target_flags bits that must all be set
				   for the builtin to be available
				   (zero means always available).  */
};
6511
/* Builtins taking no arguments and returning long.  */
static struct alpha_builtin_def const zero_arg_builtins[] = {
  { "__builtin_alpha_implver",	ALPHA_BUILTIN_IMPLVER,	0 },
  { "__builtin_alpha_rpcc",	ALPHA_BUILTIN_RPCC,	0 }
};
6516
/* Builtins taking one long argument and returning long.  */
static struct alpha_builtin_def const one_arg_builtins[] = {
  { "__builtin_alpha_amask",	ALPHA_BUILTIN_AMASK,	0 },
  { "__builtin_alpha_pklb",	ALPHA_BUILTIN_PKLB,	MASK_MAX },
  { "__builtin_alpha_pkwb",	ALPHA_BUILTIN_PKWB,	MASK_MAX },
  { "__builtin_alpha_unpkbl",	ALPHA_BUILTIN_UNPKBL,	MASK_MAX },
  { "__builtin_alpha_unpkbw",	ALPHA_BUILTIN_UNPKBW,	MASK_MAX },
  { "__builtin_alpha_cttz",	ALPHA_BUILTIN_CTTZ,	MASK_CIX },
  { "__builtin_alpha_ctlz",	ALPHA_BUILTIN_CTLZ,	MASK_CIX },
  { "__builtin_alpha_ctpop",	ALPHA_BUILTIN_CTPOP,	MASK_CIX }
};
6527
/* Builtins taking two long arguments and returning long.  */
static struct alpha_builtin_def const two_arg_builtins[] = {
  { "__builtin_alpha_cmpbge",	ALPHA_BUILTIN_CMPBGE,	0 },
  { "__builtin_alpha_extbl",	ALPHA_BUILTIN_EXTBL,	0 },
  { "__builtin_alpha_extwl",	ALPHA_BUILTIN_EXTWL,	0 },
  { "__builtin_alpha_extll",	ALPHA_BUILTIN_EXTLL,	0 },
  { "__builtin_alpha_extql",	ALPHA_BUILTIN_EXTQL,	0 },
  { "__builtin_alpha_extwh",	ALPHA_BUILTIN_EXTWH,	0 },
  { "__builtin_alpha_extlh",	ALPHA_BUILTIN_EXTLH,	0 },
  { "__builtin_alpha_extqh",	ALPHA_BUILTIN_EXTQH,	0 },
  { "__builtin_alpha_insbl",	ALPHA_BUILTIN_INSBL,	0 },
  { "__builtin_alpha_inswl",	ALPHA_BUILTIN_INSWL,	0 },
  { "__builtin_alpha_insll",	ALPHA_BUILTIN_INSLL,	0 },
  { "__builtin_alpha_insql",	ALPHA_BUILTIN_INSQL,	0 },
  { "__builtin_alpha_inswh",	ALPHA_BUILTIN_INSWH,	0 },
  { "__builtin_alpha_inslh",	ALPHA_BUILTIN_INSLH,	0 },
  { "__builtin_alpha_insqh",	ALPHA_BUILTIN_INSQH,	0 },
  { "__builtin_alpha_mskbl",	ALPHA_BUILTIN_MSKBL,	0 },
  { "__builtin_alpha_mskwl",	ALPHA_BUILTIN_MSKWL,	0 },
  { "__builtin_alpha_mskll",	ALPHA_BUILTIN_MSKLL,	0 },
  { "__builtin_alpha_mskql",	ALPHA_BUILTIN_MSKQL,	0 },
  { "__builtin_alpha_mskwh",	ALPHA_BUILTIN_MSKWH,	0 },
  { "__builtin_alpha_msklh",	ALPHA_BUILTIN_MSKLH,	0 },
  { "__builtin_alpha_mskqh",	ALPHA_BUILTIN_MSKQH,	0 },
  { "__builtin_alpha_umulh",	ALPHA_BUILTIN_UMULH,	0 },
  { "__builtin_alpha_zap",	ALPHA_BUILTIN_ZAP,	0 },
  { "__builtin_alpha_zapnot",	ALPHA_BUILTIN_ZAPNOT,	0 },
  { "__builtin_alpha_minub8",	ALPHA_BUILTIN_MINUB8,	MASK_MAX },
  { "__builtin_alpha_minsb8",	ALPHA_BUILTIN_MINSB8,	MASK_MAX },
  { "__builtin_alpha_minuw4",	ALPHA_BUILTIN_MINUW4,	MASK_MAX },
  { "__builtin_alpha_minsw4",	ALPHA_BUILTIN_MINSW4,	MASK_MAX },
  { "__builtin_alpha_maxub8",	ALPHA_BUILTIN_MAXUB8,	MASK_MAX },
  { "__builtin_alpha_maxsb8",	ALPHA_BUILTIN_MAXSB8,	MASK_MAX },
  { "__builtin_alpha_maxuw4",	ALPHA_BUILTIN_MAXUW4,	MASK_MAX },
  { "__builtin_alpha_maxsw4",	ALPHA_BUILTIN_MAXSW4,	MASK_MAX },
  { "__builtin_alpha_perr",	ALPHA_BUILTIN_PERR,	MASK_MAX }
};
6564
6565static void
6566alpha_init_builtins ()
6567{
6568 const struct alpha_builtin_def *p;
6569 tree ftype;
6570 size_t i;
6571
6572 ftype = build_function_type (long_integer_type_node, void_list_node);
6573
6574 p = zero_arg_builtins;
6575 for (i = 0; i < ARRAY_SIZE (zero_arg_builtins); ++i, ++p)
6576 if ((target_flags & p->target_mask) == p->target_mask)
a06abcfb 6577 builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6578 NULL, NULL_TREE);
f2cc13dc 6579
8b55c4ba 6580 ftype = build_function_type_list (long_integer_type_node,
6581 long_integer_type_node, NULL_TREE);
f2cc13dc 6582
6583 p = one_arg_builtins;
6584 for (i = 0; i < ARRAY_SIZE (one_arg_builtins); ++i, ++p)
6585 if ((target_flags & p->target_mask) == p->target_mask)
a06abcfb 6586 builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6587 NULL, NULL_TREE);
f2cc13dc 6588
8b55c4ba 6589 ftype = build_function_type_list (long_integer_type_node,
6590 long_integer_type_node,
6591 long_integer_type_node, NULL_TREE);
f2cc13dc 6592
6593 p = two_arg_builtins;
6594 for (i = 0; i < ARRAY_SIZE (two_arg_builtins); ++i, ++p)
6595 if ((target_flags & p->target_mask) == p->target_mask)
a06abcfb 6596 builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6597 NULL, NULL_TREE);
938e069b 6598
6599 ftype = build_function_type (ptr_type_node, void_list_node);
6600 builtin_function ("__builtin_thread_pointer", ftype,
a06abcfb 6601 ALPHA_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6602 NULL, NULL_TREE);
938e069b 6603
8b55c4ba 6604 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
938e069b 6605 builtin_function ("__builtin_set_thread_pointer", ftype,
a06abcfb 6606 ALPHA_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
6607 NULL, NULL_TREE);
f2cc13dc 6608}
6609
6610/* Expand an expression EXP that calls a built-in function,
6611 with result going to TARGET if that's convenient
6612 (and in mode MODE if that's convenient).
6613 SUBTARGET may be used as the target for computing one of EXP's operands.
6614 IGNORE is nonzero if the value is to be ignored. */
6615
6616static rtx
6617alpha_expand_builtin (exp, target, subtarget, mode, ignore)
6618 tree exp;
6619 rtx target;
6620 rtx subtarget ATTRIBUTE_UNUSED;
6621 enum machine_mode mode ATTRIBUTE_UNUSED;
6622 int ignore ATTRIBUTE_UNUSED;
6623{
f2cc13dc 6624#define MAX_ARGS 2
6625
6626 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6627 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6628 tree arglist = TREE_OPERAND (exp, 1);
6629 enum insn_code icode;
6630 rtx op[MAX_ARGS], pat;
6631 int arity;
938e069b 6632 bool nonvoid;
f2cc13dc 6633
6634 if (fcode >= ALPHA_BUILTIN_max)
6635 internal_error ("bad builtin fcode");
6636 icode = code_for_builtin[fcode];
6637 if (icode == 0)
6638 internal_error ("bad builtin fcode");
6639
938e069b 6640 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
6641
f2cc13dc 6642 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6643 arglist;
6644 arglist = TREE_CHAIN (arglist), arity++)
6645 {
6646 const struct insn_operand_data *insn_op;
6647
6648 tree arg = TREE_VALUE (arglist);
6649 if (arg == error_mark_node)
6650 return NULL_RTX;
6651 if (arity > MAX_ARGS)
6652 return NULL_RTX;
6653
938e069b 6654 insn_op = &insn_data[icode].operand[arity + nonvoid];
6655
6656 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
f2cc13dc 6657
f2cc13dc 6658 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6659 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
6660 }
6661
938e069b 6662 if (nonvoid)
6663 {
6664 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6665 if (!target
6666 || GET_MODE (target) != tmode
6667 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6668 target = gen_reg_rtx (tmode);
6669 }
f2cc13dc 6670
6671 switch (arity)
6672 {
6673 case 0:
6674 pat = GEN_FCN (icode) (target);
6675 break;
6676 case 1:
938e069b 6677 if (nonvoid)
6678 pat = GEN_FCN (icode) (target, op[0]);
6679 else
6680 pat = GEN_FCN (icode) (op[0]);
f2cc13dc 6681 break;
6682 case 2:
6683 pat = GEN_FCN (icode) (target, op[0], op[1]);
6684 break;
6685 default:
6686 abort ();
6687 }
6688 if (!pat)
6689 return NULL_RTX;
6690 emit_insn (pat);
6691
938e069b 6692 if (nonvoid)
6693 return target;
6694 else
6695 return const0_rtx;
f2cc13dc 6696}
6697\f
bf2a98b3 6698/* This page contains routines that are used to determine what the function
6699 prologue and epilogue code will do and write them out. */
6700
6701/* Compute the size of the save area in the stack. */
6702
8df4a58b 6703/* These variables are used for communication between the following functions.
6704 They indicate various things about the current function being compiled
6705 that are used to tell what kind of prologue, epilogue and procedure
   descriptor to generate.  */
8df4a58b 6707
6708/* Nonzero if we need a stack procedure. */
b19d7ab1 6709enum alpha_procedure_types {PT_NULL = 0, PT_REGISTER = 1, PT_STACK = 2};
6710static enum alpha_procedure_types alpha_procedure_type;
8df4a58b 6711
6712/* Register number (either FP or SP) that is used to unwind the frame. */
b9a5aa8e 6713static int vms_unwind_regno;
8df4a58b 6714
6715/* Register number used to save FP. We need not have one for RA since
6716 we don't modify it for register procedures. This is only defined
6717 for register frame procedures. */
b9a5aa8e 6718static int vms_save_fp_regno;
8df4a58b 6719
6720/* Register number used to reference objects off our PV. */
b9a5aa8e 6721static int vms_base_regno;
8df4a58b 6722
2cf1388a 6723/* Compute register masks for saved registers. */
8df4a58b 6724
/* Store in *IMASKP and *FMASKP bit masks of the integer and
   floating-point registers that the current function must save,
   with bit I of *FMASKP corresponding to hard register I + 32.  */

static void
alpha_sa_mask (imaskP, fmaskP)
    unsigned long *imaskP;
    unsigned long *fmaskP;
{
  unsigned long imask = 0;
  unsigned long fmask = 0;
  unsigned int i;

  /* Irritatingly, there are two kinds of thunks -- those created with
     TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
     through the regular part of the compiler.  In the
     TARGET_ASM_OUTPUT_MI_THUNK case we don't have valid register life
     info, but assemble_start_function wants to output .frame and
     .mask directives.  */
  if (current_function_is_thunk && !no_new_pseudos)
    {
      *imaskP = 0;
      *fmaskP = 0;
      return;
    }

  /* VMS stack procedures always save the frame pointer.  */
  if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
    imask |= (1L << HARD_FRAME_POINTER_REGNUM);

  /* One for every register we have to save.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (! fixed_regs[i] && ! call_used_regs[i]
	&& regs_ever_live[i] && i != REG_RA
	&& (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
      {
	if (i < 32)
	  imask |= (1L << i);
	else
	  fmask |= (1L << (i - 32));
      }

  /* We need to restore these for the handler.  */
  if (current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
	imask |= 1L << regno;
      }

  /* If any register spilled, then spill the return address also.  */
  /* ??? This is required by the Digital stack unwind specification
     and isn't needed if we're doing Dwarf2 unwinding.  */
  if (imask || fmask || alpha_ra_ever_killed ())
    imask |= (1L << REG_RA);

  *imaskP = imask;
  *fmaskP = fmask;
}
6781
/* Return the size in bytes of the register save area needed by the
   current function.  As a side effect, set the per-function globals
   alpha_procedure_type and (on VMS) vms_base_regno, vms_save_fp_regno
   and vms_unwind_regno.  */

int
alpha_sa_size ()
{
  unsigned long mask[2];
  int sa_size = 0;
  int i, j;

  alpha_sa_mask (&mask[0], &mask[1]);

  if (TARGET_ABI_UNICOSMK)
    {
      /* Unicos/Mk uses a fixed-size save area (14 slots) whenever
	 anything needs saving; see also the PT_STACK case below.  */
      if (mask[0] || mask[1])
	sa_size = 14;
    }
  else
    {
      /* One 8-byte slot per saved register.  */
      for (j = 0; j < 2; ++j)
	for (i = 0; i < 32; ++i)
	  if ((mask[j] >> i) & 1)
	    sa_size++;
    }

  if (TARGET_ABI_UNICOSMK)
    {
      /* We might not need to generate a frame if we don't make any calls
	 (including calls to __T3E_MISMATCH if this is a vararg function),
	 don't have any local variables which require stack slots, don't
	 use alloca and have not determined that we need a frame for other
	 reasons.  */

      alpha_procedure_type
	= (sa_size || get_frame_size() != 0
	   || current_function_outgoing_args_size
	   || current_function_stdarg || current_function_calls_alloca
	   || frame_pointer_needed)
	  ? PT_STACK : PT_REGISTER;

      /* Always reserve space for saving callee-saved registers if we
	 need a frame as required by the calling convention.  */
      if (alpha_procedure_type == PT_STACK)
        sa_size = 14;
    }
  else if (TARGET_ABI_OPEN_VMS)
    {
      /* Start by assuming we can use a register procedure if we don't
	 make any calls (REG_RA not used) or need to save any
	 registers and a stack procedure if we do.  */
      if ((mask[0] >> REG_RA) & 1)
	alpha_procedure_type = PT_STACK;
      else if (get_frame_size() != 0)
	alpha_procedure_type = PT_REGISTER;
      else
	alpha_procedure_type = PT_NULL;

      /* Don't reserve space for saving FP & RA yet.  Do that later after we've
	 made the final decision on stack procedure vs register procedure.  */
      if (alpha_procedure_type == PT_STACK)
	sa_size -= 2;

      /* Decide whether to refer to objects off our PV via FP or PV.
	 If we need FP for something else or if we receive a nonlocal
	 goto (which expects PV to contain the value), we must use PV.
	 Otherwise, start by assuming we can use FP.  */

      vms_base_regno
	= (frame_pointer_needed
	   || current_function_has_nonlocal_label
	   || alpha_procedure_type == PT_STACK
	   || current_function_outgoing_args_size)
	  ? REG_PV : HARD_FRAME_POINTER_REGNUM;

      /* If we want to copy PV into FP, we need to find some register
	 in which to save FP.  */

      vms_save_fp_regno = -1;
      if (vms_base_regno == HARD_FRAME_POINTER_REGNUM)
	for (i = 0; i < 32; i++)
	  if (! fixed_regs[i] && call_used_regs[i] && ! regs_ever_live[i])
	    vms_save_fp_regno = i;

      /* A register procedure needs somewhere to save FP; fall back to
	 a stack procedure if no register was found.  */
      if (vms_save_fp_regno == -1 && alpha_procedure_type == PT_REGISTER)
	vms_base_regno = REG_PV, alpha_procedure_type = PT_STACK;
      else if (alpha_procedure_type == PT_NULL)
	vms_base_regno = REG_PV;

      /* Stack unwinding should be done via FP unless we use it for PV.  */
      vms_unwind_regno = (vms_base_regno == REG_PV
			  ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);

      /* If this is a stack procedure, allow space for saving FP and RA.  */
      if (alpha_procedure_type == PT_STACK)
	sa_size += 2;
    }
  else
    {
      /* Our size must be even (multiple of 16 bytes).  */
      if (sa_size & 1)
	sa_size++;
    }

  return sa_size * 8;
}
6884
6885int
6886alpha_pv_save_size ()
6887{
6888 alpha_sa_size ();
b19d7ab1 6889 return alpha_procedure_type == PT_STACK ? 8 : 0;
8df4a58b 6890}
6891
6892int
6893alpha_using_fp ()
6894{
6895 alpha_sa_size ();
b9a5aa8e 6896 return vms_unwind_regno == HARD_FRAME_POINTER_REGNUM;
8df4a58b 6897}
6898
#if TARGET_ABI_OPEN_VMS

/* Machine attributes recognized on VMS.  All three apply only to
   declarations (decl_req) and take no arguments.  */
const struct attribute_spec vms_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "overlaid",   0, 0, true,  false, false, NULL },
  { "global",     0, 0, true,  false, false, NULL },
  { "initialize", 0, 0, true,  false, false, NULL },
  { NULL,         0, 0, false, false, false, NULL }
};

#endif
6911
1f0ce6a6 6912static int
6913find_lo_sum (px, data)
6914 rtx *px;
6915 void *data ATTRIBUTE_UNUSED;
6916{
6917 return GET_CODE (*px) == LO_SUM;
6918}
6919
/* Return 1 if the current function needs its GP (global pointer)
   loaded, 0 otherwise.  Only meaningful for the OSF ABI, where the
   GP is variable.  */

static int
alpha_does_function_need_gp ()
{
  rtx insn;

  /* The GP being variable is an OSF abi thing.  */
  if (! TARGET_ABI_OSF)
    return 0;

  /* Profiling via mcount may need the GP; see prologue handling of
     TARGET_PROFILING_NEEDS_GP.  */
  if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
    return 1;

  if (current_function_is_thunk)
    return 1;

  /* If we need a GP (we have a LDSYM insn or a CALL_INSN), load it first.
     Even if we are a static function, we still need to do this in case
     our address is taken and passed to something like qsort.  */

  push_topmost_sequence ();
  insn = get_insns ();
  pop_topmost_sequence ();

  /* Scan the whole insn chain for symbolic loads (LDSYM), calls (JSR)
     or, under explicit relocations, LO_SUM addresses -- any of which
     requires the GP.  */
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& GET_CODE (PATTERN (insn)) != USE
	&& GET_CODE (PATTERN (insn)) != CLOBBER)
      {
	enum attr_type type = get_attr_type (insn);
	if (type == TYPE_LDSYM || type == TYPE_JSR)
	  return 1;
	if (TARGET_EXPLICIT_RELOCS
	    && for_each_rtx (&PATTERN (insn), find_lo_sum, NULL) > 0)
	  return 1;
      }

  return 0;
}
6958
16b3392b 6959/* Write a version stamp. Don't write anything if we are running as a
6960 cross-compiler. Otherwise, use the versions in /usr/include/stamp.h. */
6961
60ac811a 6962#ifdef HAVE_STAMP_H
16b3392b 6963#include <stamp.h>
6964#endif
6965
void
alpha_write_verstamp (file)
     FILE *file ATTRIBUTE_UNUSED;
{
#ifdef MS_STAMP
  /* MS_STAMP and LS_STAMP come from <stamp.h> (included above when
     HAVE_STAMP_H); when cross-compiling they are absent and nothing
     is emitted.  */
  fprintf (file, "\t.verstamp %d %d\n", MS_STAMP, LS_STAMP);
#endif
}
7d73bc2a 6974\f
5a965225 6975/* Helper function to set RTX_FRAME_RELATED_P on instructions, including
6976 sequences. */
6977
6978static rtx
6979set_frame_related_p ()
6980{
31d3e01c 6981 rtx seq = get_insns ();
6982 rtx insn;
6983
5a965225 6984 end_sequence ();
6985
31d3e01c 6986 if (!seq)
6987 return NULL_RTX;
6988
6989 if (INSN_P (seq))
5a965225 6990 {
31d3e01c 6991 insn = seq;
6992 while (insn != NULL_RTX)
6993 {
6994 RTX_FRAME_RELATED_P (insn) = 1;
6995 insn = NEXT_INSN (insn);
6996 }
6997 seq = emit_insn (seq);
5a965225 6998 }
6999 else
7000 {
7001 seq = emit_insn (seq);
7002 RTX_FRAME_RELATED_P (seq) = 1;
5a965225 7003 }
31d3e01c 7004 return seq;
5a965225 7005}
7006
/* Evaluate EXP inside a fresh insn sequence and mark all resulting
   insns as frame-related; expands to the emitted insn(s).  */
#define FRP(exp)  (start_sequence (), exp, set_frame_related_p ())
7008
bf2a98b3 7009/* Write function prologue. */
7010
8df4a58b 7011/* On vms we have two kinds of functions:
7012
7013 - stack frame (PROC_STACK)
7014 these are 'normal' functions with local vars and which are
7015 calling other functions
7016 - register frame (PROC_REGISTER)
7017 keeps all data in registers, needs no stack
7018
7019 We must pass this to the assembler so it can generate the
7020 proper pdsc (procedure descriptor)
7021 This is done with the '.pdesc' command.
7022
b9a5aa8e 7023 On not-vms, we don't really differentiate between the two, as we can
7024 simply allocate stack without saving registers. */
8df4a58b 7025
7026void
b9a5aa8e 7027alpha_expand_prologue ()
8df4a58b 7028{
b9a5aa8e 7029 /* Registers to save. */
8df4a58b 7030 unsigned long imask = 0;
7031 unsigned long fmask = 0;
7032 /* Stack space needed for pushing registers clobbered by us. */
7033 HOST_WIDE_INT sa_size;
7034 /* Complete stack size needed. */
7035 HOST_WIDE_INT frame_size;
7036 /* Offset from base reg to register save area. */
b9a5aa8e 7037 HOST_WIDE_INT reg_offset;
849674a3 7038 rtx sa_reg, mem;
8df4a58b 7039 int i;
7040
7041 sa_size = alpha_sa_size ();
8df4a58b 7042
b9a5aa8e 7043 frame_size = get_frame_size ();
1467e953 7044 if (TARGET_ABI_OPEN_VMS)
b9a5aa8e 7045 frame_size = ALPHA_ROUND (sa_size
b19d7ab1 7046 + (alpha_procedure_type == PT_STACK ? 8 : 0)
b9a5aa8e 7047 + frame_size
7048 + current_function_pretend_args_size);
9caef960 7049 else if (TARGET_ABI_UNICOSMK)
7050 /* We have to allocate space for the DSIB if we generate a frame. */
7051 frame_size = ALPHA_ROUND (sa_size
b19d7ab1 7052 + (alpha_procedure_type == PT_STACK ? 48 : 0))
9caef960 7053 + ALPHA_ROUND (frame_size
7054 + current_function_outgoing_args_size);
b9a5aa8e 7055 else
7056 frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
7057 + sa_size
7058 + ALPHA_ROUND (frame_size
7059 + current_function_pretend_args_size));
8df4a58b 7060
1467e953 7061 if (TARGET_ABI_OPEN_VMS)
b9a5aa8e 7062 reg_offset = 8;
7063 else
7064 reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);
8df4a58b 7065
b9a5aa8e 7066 alpha_sa_mask (&imask, &fmask);
8df4a58b 7067
a314eb5e 7068 /* Emit an insn to reload GP, if needed. */
1467e953 7069 if (TARGET_ABI_OSF)
a314eb5e 7070 {
7071 alpha_function_needs_gp = alpha_does_function_need_gp ();
7072 if (alpha_function_needs_gp)
7073 emit_insn (gen_prologue_ldgp ());
7074 }
7075
30dceb30 7076 /* TARGET_PROFILING_NEEDS_GP actually implies that we need to insert
7077 the call to mcount ourselves, rather than having the linker do it
7078 magically in response to -pg. Since _mcount has special linkage,
7079 don't represent the call as a call. */
7811c823 7080 if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
30dceb30 7081 emit_insn (gen_prologue_mcount ());
9caef960 7082
7083 if (TARGET_ABI_UNICOSMK)
7084 unicosmk_gen_dsib (&imask);
7085
8df4a58b 7086 /* Adjust the stack by the frame size. If the frame size is > 4096
7087 bytes, we need to be sure we probe somewhere in the first and last
7088 4096 bytes (we can probably get away without the latter test) and
7089 every 8192 bytes in between. If the frame size is > 32768, we
7090 do this in a loop. Otherwise, we generate the explicit probe
7091 instructions.
7092
7093 Note that we are only allowed to adjust sp once in the prologue. */
7094
b9a5aa8e 7095 if (frame_size <= 32768)
8df4a58b 7096 {
7097 if (frame_size > 4096)
7098 {
7099 int probed = 4096;
7100
b9a5aa8e 7101 do
9caef960 7102 emit_insn (gen_probe_stack (GEN_INT (TARGET_ABI_UNICOSMK
7103 ? -probed + 64
7104 : -probed)));
b9a5aa8e 7105 while ((probed += 8192) < frame_size);
8df4a58b 7106
7107 /* We only have to do this probe if we aren't saving registers. */
7108 if (sa_size == 0 && probed + 4096 < frame_size)
b9a5aa8e 7109 emit_insn (gen_probe_stack (GEN_INT (-frame_size)));
8df4a58b 7110 }
7111
7112 if (frame_size != 0)
205b281f 7113 FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
9caef960 7114 GEN_INT (TARGET_ABI_UNICOSMK
7115 ? -frame_size + 64
7116 : -frame_size))));
8df4a58b 7117 }
7118 else
7119 {
b9a5aa8e 7120 /* Here we generate code to set R22 to SP + 4096 and set R23 to the
8df4a58b 7121 number of 8192 byte blocks to probe. We then probe each block
7122 in the loop and then set SP to the proper location. If the
7123 amount remaining is > 4096, we have to do one more probe if we
7124 are not saving any registers. */
7125
7126 HOST_WIDE_INT blocks = (frame_size + 4096) / 8192;
7127 HOST_WIDE_INT leftover = frame_size + 4096 - blocks * 8192;
b9a5aa8e 7128 rtx ptr = gen_rtx_REG (DImode, 22);
7129 rtx count = gen_rtx_REG (DImode, 23);
cd28cb76 7130 rtx seq;
8df4a58b 7131
b9a5aa8e 7132 emit_move_insn (count, GEN_INT (blocks));
9caef960 7133 emit_insn (gen_adddi3 (ptr, stack_pointer_rtx,
7134 GEN_INT (TARGET_ABI_UNICOSMK ? 4096 - 64 : 4096)));
8df4a58b 7135
b9a5aa8e 7136 /* Because of the difficulty in emitting a new basic block this
7137 late in the compilation, generate the loop as a single insn. */
7138 emit_insn (gen_prologue_stack_probe_loop (count, ptr));
8df4a58b 7139
7140 if (leftover > 4096 && sa_size == 0)
b9a5aa8e 7141 {
7142 rtx last = gen_rtx_MEM (DImode, plus_constant (ptr, -leftover));
7143 MEM_VOLATILE_P (last) = 1;
7144 emit_move_insn (last, const0_rtx);
7145 }
8df4a58b 7146
1467e953 7147 if (TARGET_ABI_WINDOWS_NT)
f88f2646 7148 {
7149 /* For NT stack unwind (done by 'reverse execution'), it's
7150 not OK to take the result of a loop, even though the value
7151 is already in ptr, so we reload it via a single operation
cd28cb76 7152 and subtract it to sp.
7153
7154 Yes, that's correct -- we have to reload the whole constant
7155 into a temporary via ldah+lda then subtract from sp. To
7156 ensure we get ldah+lda, we use a special pattern. */
f88f2646 7157
7158 HOST_WIDE_INT lo, hi;
05bea6dd 7159 lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
7160 hi = frame_size - lo;
5a965225 7161
cd28cb76 7162 emit_move_insn (ptr, GEN_INT (hi));
7163 emit_insn (gen_nt_lda (ptr, GEN_INT (lo)));
7164 seq = emit_insn (gen_subdi3 (stack_pointer_rtx, stack_pointer_rtx,
7165 ptr));
f88f2646 7166 }
7167 else
7168 {
f88f2646 7169 seq = emit_insn (gen_adddi3 (stack_pointer_rtx, ptr,
7170 GEN_INT (-leftover)));
f88f2646 7171 }
cd28cb76 7172
7173 /* This alternative is special, because the DWARF code cannot
7174 possibly intuit through the loop above. So we invent this
7175 note it looks at instead. */
7176 RTX_FRAME_RELATED_P (seq) = 1;
7177 REG_NOTES (seq)
7178 = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7179 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7180 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
9caef960 7181 GEN_INT (TARGET_ABI_UNICOSMK
7182 ? -frame_size + 64
7183 : -frame_size))),
cd28cb76 7184 REG_NOTES (seq));
8df4a58b 7185 }
7186
9caef960 7187 if (!TARGET_ABI_UNICOSMK)
8df4a58b 7188 {
9caef960 7189 /* Cope with very large offsets to the register save area. */
7190 sa_reg = stack_pointer_rtx;
7191 if (reg_offset + sa_size > 0x8000)
7192 {
7193 int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
7194 HOST_WIDE_INT bias;
8df4a58b 7195
9caef960 7196 if (low + sa_size <= 0x8000)
7197 bias = reg_offset - low, reg_offset = low;
7198 else
7199 bias = reg_offset, reg_offset = 0;
8df4a58b 7200
9caef960 7201 sa_reg = gen_rtx_REG (DImode, 24);
7202 FRP (emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx,
7203 GEN_INT (bias))));
7204 }
b9a5aa8e 7205
9caef960 7206 /* Save regs in stack order. Beginning with VMS PV. */
b19d7ab1 7207 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
9caef960 7208 {
7209 mem = gen_rtx_MEM (DImode, stack_pointer_rtx);
7210 set_mem_alias_set (mem, alpha_sr_alias_set);
7211 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_PV)));
7212 }
8df4a58b 7213
9caef960 7214 /* Save register RA next. */
7215 if (imask & (1L << REG_RA))
7216 {
7217 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
7218 set_mem_alias_set (mem, alpha_sr_alias_set);
7219 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
7220 imask &= ~(1L << REG_RA);
7221 reg_offset += 8;
7222 }
8df4a58b 7223
9caef960 7224 /* Now save any other registers required to be saved. */
7225 for (i = 0; i < 32; i++)
7226 if (imask & (1L << i))
7227 {
7228 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
7229 set_mem_alias_set (mem, alpha_sr_alias_set);
7230 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
7231 reg_offset += 8;
7232 }
8df4a58b 7233
9caef960 7234 for (i = 0; i < 32; i++)
7235 if (fmask & (1L << i))
7236 {
7237 mem = gen_rtx_MEM (DFmode, plus_constant (sa_reg, reg_offset));
7238 set_mem_alias_set (mem, alpha_sr_alias_set);
7239 FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
7240 reg_offset += 8;
7241 }
7242 }
b19d7ab1 7243 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
9caef960 7244 {
7245 /* The standard frame on the T3E includes space for saving registers.
7246 We just have to use it. We don't have to save the return address and
7247 the old frame pointer here - they are saved in the DSIB. */
7248
7249 reg_offset = -56;
7250 for (i = 9; i < 15; i++)
7251 if (imask & (1L << i))
7252 {
7253 mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
7254 reg_offset));
7255 set_mem_alias_set (mem, alpha_sr_alias_set);
7256 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
7257 reg_offset -= 8;
7258 }
7259 for (i = 2; i < 10; i++)
7260 if (fmask & (1L << i))
7261 {
7262 mem = gen_rtx_MEM (DFmode, plus_constant (hard_frame_pointer_rtx,
7263 reg_offset));
7264 set_mem_alias_set (mem, alpha_sr_alias_set);
7265 FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
7266 reg_offset -= 8;
7267 }
7268 }
8df4a58b 7269
1467e953 7270 if (TARGET_ABI_OPEN_VMS)
8df4a58b 7271 {
b19d7ab1 7272 if (alpha_procedure_type == PT_REGISTER)
7273 /* Register frame procedures save the fp.
7274 ?? Ought to have a dwarf2 save for this. */
6d50e356 7275 emit_move_insn (gen_rtx_REG (DImode, vms_save_fp_regno),
7276 hard_frame_pointer_rtx);
8df4a58b 7277
b19d7ab1 7278 if (alpha_procedure_type != PT_NULL && vms_base_regno != REG_PV)
6d50e356 7279 emit_insn (gen_force_movdi (gen_rtx_REG (DImode, vms_base_regno),
7280 gen_rtx_REG (DImode, REG_PV)));
8df4a58b 7281
b19d7ab1 7282 if (alpha_procedure_type != PT_NULL
7283 && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
205b281f 7284 FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
8df4a58b 7285
b9a5aa8e 7286 /* If we have to allocate space for outgoing args, do it now. */
7287 if (current_function_outgoing_args_size != 0)
205b281f 7288 FRP (emit_move_insn
7289 (stack_pointer_rtx,
7290 plus_constant (hard_frame_pointer_rtx,
7291 - (ALPHA_ROUND
7292 (current_function_outgoing_args_size)))));
b9a5aa8e 7293 }
9caef960 7294 else if (!TARGET_ABI_UNICOSMK)
b9a5aa8e 7295 {
7296 /* If we need a frame pointer, set it from the stack pointer. */
7297 if (frame_pointer_needed)
7298 {
7299 if (TARGET_CAN_FAULT_IN_PROLOGUE)
5a965225 7300 FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
8df4a58b 7301 else
205b281f 7302 /* This must always be the last instruction in the
7303 prologue, thus we emit a special move + clobber. */
5a965225 7304 FRP (emit_insn (gen_init_fp (hard_frame_pointer_rtx,
7305 stack_pointer_rtx, sa_reg)));
8df4a58b 7306 }
8df4a58b 7307 }
7308
b9a5aa8e 7309 /* The ABIs for VMS and OSF/1 say that while we can schedule insns into
7310 the prologue, for exception handling reasons, we cannot do this for
7311 any insn that might fault. We could prevent this for mems with a
7312 (clobber:BLK (scratch)), but this doesn't work for fp insns. So we
7313 have to prevent all such scheduling with a blockage.
8df4a58b 7314
b9a5aa8e 7315 Linux, on the other hand, never bothered to implement OSF/1's
7316 exception handling, and so doesn't care about such things. Anyone
7317 planning to use dwarf2 frame-unwind info can also omit the blockage. */
8df4a58b 7318
b9a5aa8e 7319 if (! TARGET_CAN_FAULT_IN_PROLOGUE)
7320 emit_insn (gen_blockage ());
1fce2e8a 7321}
7322
/* Output the textual info surrounding the prologue.

   FILE is the assembler output stream, FNNAME the function's assembler
   name, and DECL its FUNCTION_DECL (used only on Unicos/Mk here).
   Emits the .ent/.frame/.mask/.fmask directives and the entry label,
   recomputing frame_size/reg_offset with the same formulas used by
   alpha_expand_prologue so the directives match the emitted code.  */

void
alpha_start_function (file, fnname, decl)
     FILE *file;
     const char *fnname;
     tree decl ATTRIBUTE_UNUSED;
{
  unsigned long imask = 0;
  unsigned long fmask = 0;
  /* Stack space needed for pushing registers clobbered by us.  */
  HOST_WIDE_INT sa_size;
  /* Complete stack size needed.  */
  HOST_WIDE_INT frame_size;
  /* Offset from base reg to register save area.  */
  HOST_WIDE_INT reg_offset;
  /* +6 leaves room for the longest suffix appended below ("..en" plus
     the Unicos/Mk globalizing ':') and the terminating NUL.  */
  char *entry_label = (char *) alloca (strlen (fnname) + 6);
  int i;

  /* Don't emit an extern directive for functions defined in the same file.  */
  if (TARGET_ABI_UNICOSMK)
    {
      tree name_tree;
      name_tree = get_identifier (fnname);
      TREE_ASM_WRITTEN (name_tree) = 1;
    }

  alpha_fnname = fnname;
  sa_size = alpha_sa_size ();

  /* Compute the frame size exactly as alpha_expand_prologue does, per ABI:
     VMS reserves 8 bytes for the PV in stack procedures, Unicos/Mk 48
     bytes for its standard frame, OSF rounds args and locals separately.  */
  frame_size = get_frame_size ();
  if (TARGET_ABI_OPEN_VMS)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
			      + frame_size
			      + current_function_pretend_args_size);
  else if (TARGET_ABI_UNICOSMK)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
		 + ALPHA_ROUND (frame_size
			      + current_function_outgoing_args_size);
  else
    frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
		  + sa_size
		  + ALPHA_ROUND (frame_size
				 + current_function_pretend_args_size));

  if (TARGET_ABI_OPEN_VMS)
    reg_offset = 8;
  else
    reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);

  alpha_sa_mask (&imask, &fmask);

  /* Ecoff can handle multiple .file directives, so put out file and lineno.
     We have to do that before the .ent directive as we cannot switch
     files within procedures with native ecoff because line numbers are
     linked to procedure descriptors.
     Outputting the lineno helps debugging of one line functions as they
     would otherwise get no line number at all. Please note that we would
     like to put out last_linenum from final.c, but it is not accessible.  */

  if (write_symbols == SDB_DEBUG)
    {
#ifdef ASM_OUTPUT_SOURCE_FILENAME
      ASM_OUTPUT_SOURCE_FILENAME (file,
				  DECL_SOURCE_FILE (current_function_decl));
#endif
#ifdef ASM_OUTPUT_SOURCE_LINE
      if (debug_info_level != DINFO_LEVEL_TERSE)
	ASM_OUTPUT_SOURCE_LINE (file,
				DECL_SOURCE_LINE (current_function_decl));
#endif
    }

  /* Issue function start and label.  */
  if (TARGET_ABI_OPEN_VMS
      || (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive))
    {
      fputs ("\t.ent ", file);
      assemble_name (file, fnname);
      putc ('\n', file);

      /* If the function needs GP, we'll write the "..ng" label there.
	 Otherwise, do it here.  */
      if (TARGET_ABI_OSF
	  && ! alpha_function_needs_gp
	  && ! current_function_is_thunk)
	{
	  putc ('$', file);
	  assemble_name (file, fnname);
	  fputs ("..ng:\n", file);
	}
    }

  strcpy (entry_label, fnname);
  if (TARGET_ABI_OPEN_VMS)
    strcat (entry_label, "..en");

  /* For public functions, the label must be globalized by appending an
     additional colon.  */
  if (TARGET_ABI_UNICOSMK && TREE_PUBLIC (decl))
    strcat (entry_label, ":");

  ASM_OUTPUT_LABEL (file, entry_label);
  inside_function = TRUE;

  if (TARGET_ABI_OPEN_VMS)
    fprintf (file, "\t.base $%d\n", vms_base_regno);

  if (!TARGET_ABI_OPEN_VMS && !TARGET_ABI_UNICOSMK && TARGET_IEEE_CONFORMANT
      && !flag_inhibit_size_directive)
    {
      /* Set flags in procedure descriptor to request IEEE-conformant
	 math-library routines.  The value we set it to is PDSC_EXC_IEEE
	 (/usr/include/pdsc.h).  */
      fputs ("\t.eflag 48\n", file);
    }

  /* Set up offsets to alpha virtual arg/local debugging pointer.  */
  alpha_auto_offset = -frame_size + current_function_pretend_args_size;
  alpha_arg_offset = -frame_size + 48;

  /* Describe our frame.  If the frame size is larger than an integer,
     print it as zero to avoid an assembler error.  We won't be
     properly describing such a frame, but that's the best we can do.  */
  if (TARGET_ABI_UNICOSMK)
    ;
  else if (TARGET_ABI_OPEN_VMS)
    {
      fprintf (file, "\t.frame $%d,", vms_unwind_regno);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
	       frame_size >= ((HOST_WIDE_INT) 1 << 31) ? 0 : frame_size);
      fputs (",$26,", file);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, reg_offset);
      fputs ("\n", file);
    }
  else if (!flag_inhibit_size_directive)
    {
      fprintf (file, "\t.frame $%d,",
	       (frame_pointer_needed
		? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM));
      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
	       frame_size >= (1l << 31) ? 0 : frame_size);
      fprintf (file, ",$26,%d\n", current_function_pretend_args_size);
    }

  /* Describe which registers were spilled.  */
  if (TARGET_ABI_UNICOSMK)
    ;
  else if (TARGET_ABI_OPEN_VMS)
    {
      if (imask)
	/* ??? Does VMS care if mask contains ra?  The old code didn't
	   set it, so I don't here.  */
	fprintf (file, "\t.mask 0x%lx,0\n", imask & ~(1L << REG_RA));
      if (fmask)
	fprintf (file, "\t.fmask 0x%lx,0\n", fmask);
      if (alpha_procedure_type == PT_REGISTER)
	fprintf (file, "\t.fp_save $%d\n", vms_save_fp_regno);
    }
  else if (!flag_inhibit_size_directive)
    {
      if (imask)
	{
	  fprintf (file, "\t.mask 0x%lx,", imask);
	  fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		   frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
	  putc ('\n', file);

	  /* Account for the integer save area when positioning the
	     floating-point mask's offset below.  */
	  for (i = 0; i < 32; ++i)
	    if (imask & (1L << i))
	      reg_offset += 8;
	}

      if (fmask)
	{
	  fprintf (file, "\t.fmask 0x%lx,", fmask);
	  fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		   frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
	  putc ('\n', file);
	}
    }

#if TARGET_ABI_OPEN_VMS
  /* Ifdef'ed cause link_section are only available then.  */
  readonly_data_section ();
  fprintf (file, "\t.align 3\n");
  assemble_name (file, fnname); fputs ("..na:\n", file);
  fputs ("\t.ascii \"", file);
  assemble_name (file, fnname);
  fputs ("\\0\"\n", file);
  alpha_need_linkage (fnname, 1);
  text_section ();
#endif
}
bf2a98b3 7519
b9a5aa8e 7520/* Emit the .prologue note at the scheduled end of the prologue. */
16b3392b 7521
85ae73e8 7522static void
7523alpha_output_function_end_prologue (file)
b9a5aa8e 7524 FILE *file;
7525{
9caef960 7526 if (TARGET_ABI_UNICOSMK)
7527 ;
7528 else if (TARGET_ABI_OPEN_VMS)
b9a5aa8e 7529 fputs ("\t.prologue\n", file);
1467e953 7530 else if (TARGET_ABI_WINDOWS_NT)
b9a5aa8e 7531 fputs ("\t.prologue 0\n", file);
7532 else if (!flag_inhibit_size_directive)
961d6ddd 7533 fprintf (file, "\t.prologue %d\n",
7534 alpha_function_needs_gp || current_function_is_thunk);
bf2a98b3 7535}
7536
/* Write function epilogue. */

/* ??? At some point we will want to support full unwind, and so will
   need to mark the epilogue as well.  At the moment, we just confuse
   dwarf2out.

   NOTE(review): FRP is redefined here to a plain pass-through so the
   epilogue insns below are NOT annotated; presumably the prologue's
   earlier definition of FRP marked insns frame-related -- confirm
   against the prior #define before changing.  */
#undef FRP
#define FRP(exp) exp
7544
/* Expand the epilogue as RTL: restore saved registers, reload the frame
   pointer, and deallocate the stack frame.  The frame layout constants
   must mirror those computed in the prologue/start-function code.  */

void
alpha_expand_epilogue ()
{
  /* Registers to save.  */
  unsigned long imask = 0;
  unsigned long fmask = 0;
  /* Stack space needed for pushing registers clobbered by us.  */
  HOST_WIDE_INT sa_size;
  /* Complete stack size needed.  */
  HOST_WIDE_INT frame_size;
  /* Offset from base reg to register save area.  */
  HOST_WIDE_INT reg_offset;
  int fp_is_frame_pointer, fp_offset;
  rtx sa_reg, sa_reg_exp = NULL;
  rtx sp_adj1, sp_adj2, mem;
  rtx eh_ofs;
  int i;

  sa_size = alpha_sa_size ();

  /* Recompute the frame size with the same per-ABI formulas used when
     the frame was laid out.  */
  frame_size = get_frame_size ();
  if (TARGET_ABI_OPEN_VMS)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
			      + frame_size
			      + current_function_pretend_args_size);
  else if (TARGET_ABI_UNICOSMK)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
		 + ALPHA_ROUND (frame_size
			      + current_function_outgoing_args_size);
  else
    frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
		  + sa_size
		  + ALPHA_ROUND (frame_size
				 + current_function_pretend_args_size));

  if (TARGET_ABI_OPEN_VMS)
    {
      if (alpha_procedure_type == PT_STACK)
	reg_offset = 8;
      else
	reg_offset = 0;
    }
  else
    reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);

  alpha_sa_mask (&imask, &fmask);

  fp_is_frame_pointer
    = ((TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
       || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed));
  fp_offset = 0;
  sa_reg = stack_pointer_rtx;

  if (current_function_calls_eh_return)
    eh_ofs = EH_RETURN_STACKADJ_RTX;
  else
    eh_ofs = NULL_RTX;

  if (!TARGET_ABI_UNICOSMK && sa_size)
    {
      /* If we have a frame pointer, restore SP from it.  */
      if ((TARGET_ABI_OPEN_VMS
	   && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
	  || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed))
	FRP (emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx));

      /* Cope with very large offsets to the register save area: bias the
	 base into $22 so each restore uses a 16-bit displacement.  */
      if (reg_offset + sa_size > 0x8000)
	{
	  int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
	  HOST_WIDE_INT bias;

	  if (low + sa_size <= 0x8000)
	    bias = reg_offset - low, reg_offset = low;
	  else
	    bias = reg_offset, reg_offset = 0;

	  sa_reg = gen_rtx_REG (DImode, 22);
	  sa_reg_exp = plus_constant (stack_pointer_rtx, bias);

	  FRP (emit_move_insn (sa_reg, sa_reg_exp));
	}

      /* Restore registers in order, excepting a true frame pointer.  */

      /* RA is first in the save area; restore it before the loop.  */
      mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
      if (! eh_ofs)
	set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));

      reg_offset += 8;
      imask &= ~(1L << REG_RA);

      for (i = 0; i < 32; ++i)
	if (imask & (1L << i))
	  {
	    /* A true frame pointer must be restored last (see below),
	       so only record its slot offset here.  */
	    if (i == HARD_FRAME_POINTER_REGNUM && fp_is_frame_pointer)
	      fp_offset = reg_offset;
	    else
	      {
		mem = gen_rtx_MEM (DImode, plus_constant(sa_reg, reg_offset));
		set_mem_alias_set (mem, alpha_sr_alias_set);
		FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
	      }
	    reg_offset += 8;
	  }

      for (i = 0; i < 32; ++i)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant(sa_reg, reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
	    reg_offset += 8;
	  }
    }
  else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
    {
      /* Restore callee-saved general-purpose registers.  */

      /* Unicos/Mk saves downward from fp-56 (see the prologue code).  */
      reg_offset = -56;

      for (i = 9; i < 15; i++)
	if (imask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
						     reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
	    reg_offset -= 8;
	  }

      for (i = 2; i < 10; i++)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant(hard_frame_pointer_rtx,
						     reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
	    reg_offset -= 8;
	  }

      /* Restore the return address from the DSIB.  */

      mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx, -8));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));
    }

  if (frame_size || eh_ofs)
    {
      sp_adj1 = stack_pointer_rtx;

      if (eh_ofs)
	{
	  sp_adj1 = gen_rtx_REG (DImode, 23);
	  emit_move_insn (sp_adj1,
			  gen_rtx_PLUS (Pmode, stack_pointer_rtx, eh_ofs));
	}

      /* If the stack size is large, begin computation into a temporary
	 register so as not to interfere with a potential fp restore,
	 which must be consecutive with an SP restore.  */
      if (frame_size < 32768
	  && ! (TARGET_ABI_UNICOSMK && current_function_calls_alloca))
	sp_adj2 = GEN_INT (frame_size);
      else if (TARGET_ABI_UNICOSMK)
	{
	  sp_adj1 = gen_rtx_REG (DImode, 23);
	  FRP (emit_move_insn (sp_adj1, hard_frame_pointer_rtx));
	  sp_adj2 = const0_rtx;
	}
      else if (frame_size < 0x40007fffL)
	{
	  /* Split the adjustment into a high part (computed into $23,
	     or reused from sa_reg if it holds the same value) and a
	     16-bit low part folded into the final add.  */
	  int low = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;

	  sp_adj2 = plus_constant (sp_adj1, frame_size - low);
	  if (sa_reg_exp && rtx_equal_p (sa_reg_exp, sp_adj2))
	    sp_adj1 = sa_reg;
	  else
	    {
	      sp_adj1 = gen_rtx_REG (DImode, 23);
	      FRP (emit_move_insn (sp_adj1, sp_adj2));
	    }
	  sp_adj2 = GEN_INT (low);
	}
      else
	{
	  rtx tmp = gen_rtx_REG (DImode, 23);
	  FRP (sp_adj2 = alpha_emit_set_const (tmp, DImode, frame_size, 3));
	  if (!sp_adj2)
	    {
	      /* We can't drop new things to memory this late, afaik,
		 so build it up by pieces.  */
	      FRP (sp_adj2 = alpha_emit_set_long_const (tmp, frame_size,
							-(frame_size < 0)));
	      if (!sp_adj2)
		abort ();
	    }
	}

      /* From now on, things must be in order.  So emit blockages.  */

      /* Restore the frame pointer.  */
      if (TARGET_ABI_UNICOSMK)
	{
	  emit_insn (gen_blockage ());
	  mem = gen_rtx_MEM (DImode,
			     plus_constant (hard_frame_pointer_rtx, -16));
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
	}
      else if (fp_is_frame_pointer)
	{
	  emit_insn (gen_blockage ());
	  mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, fp_offset));
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
	}
      else if (TARGET_ABI_OPEN_VMS)
	{
	  emit_insn (gen_blockage ());
	  FRP (emit_move_insn (hard_frame_pointer_rtx,
			       gen_rtx_REG (DImode, vms_save_fp_regno)));
	}

      /* Restore the stack pointer.  */
      emit_insn (gen_blockage ());
      if (sp_adj2 == const0_rtx)
	FRP (emit_move_insn (stack_pointer_rtx, sp_adj1));
      else
	FRP (emit_move_insn (stack_pointer_rtx,
			     gen_rtx_PLUS (DImode, sp_adj1, sp_adj2)));
    }
  else
    {
      if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_REGISTER)
	{
	  emit_insn (gen_blockage ());
	  FRP (emit_move_insn (hard_frame_pointer_rtx,
			       gen_rtx_REG (DImode, vms_save_fp_regno)));
	}
      else if (TARGET_ABI_UNICOSMK && alpha_procedure_type != PT_STACK)
	{
	  /* Decrement the frame pointer if the function does not have a
	     frame.  */

	  emit_insn (gen_blockage ());
	  FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
				      hard_frame_pointer_rtx, GEN_INT (-1))));
	}
    }
}
cf73d31f 7800\f
#if TARGET_ABI_OPEN_VMS
#include <splay-tree.h>

/* Structure to collect function names for final output
   in link section.  */

/* How a linkage entry is referenced: unused, defined locally, or
   defined in another translation unit.  */
enum links_kind {KIND_UNUSED, KIND_LOCAL, KIND_EXTERN};
/* What kind of relocation the linkage entry needs.  */
enum reloc_kind {KIND_LINKAGE, KIND_CODEADDR};

/* Per-function record: its sequence number and the splay tree of
   linkage entries it references.  */
struct alpha_funcs
{
  int num;
  splay_tree links;
};

/* One linkage-section entry.  */
struct alpha_links
{
  int num;			/* Sequence number of the entry.  */
  rtx linkage;			/* RTX referring to the linkage slot.  */
  enum links_kind lkind;
  enum reloc_kind rkind;
};

static splay_tree alpha_funcs_tree;
static splay_tree alpha_links_tree;

static int mark_alpha_links_node PARAMS ((splay_tree_node, void *));
static void mark_alpha_links PARAMS ((void *));
static int alpha_write_one_linkage PARAMS ((splay_tree_node, void *));

/* Running count of functions recorded in alpha_funcs_tree.  */
static int alpha_funcs_num;
#endif
b9a5aa8e 7833
/* Output the rest of the textual info surrounding the epilogue.

   FILE is the assembler stream, FNNAME the function's assembler name,
   DECL its FUNCTION_DECL (consulted for locality and, on VMS, for the
   linkage section).  */

void
alpha_end_function (file, fnname, decl)
     FILE *file;
     const char *fnname;
     tree decl;
{
  /* End the function.  */
  if (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive)
    {
      fputs ("\t.end ", file);
      assemble_name (file, fnname);
      putc ('\n', file);
    }
  inside_function = FALSE;

#if TARGET_ABI_OPEN_VMS
  alpha_write_linkage (file, fnname, decl);
#endif

  /* Show that we know this function if it is called again.

     Do this only for functions whose symbols bind locally.

     Don't do this for functions not defined in the .text section, as
     otherwise it's not unlikely that the destination is out of range
     for a direct branch.  */

  if ((*targetm.binds_local_p) (decl) && decl_in_text_section (decl))
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;

  /* Output jump tables and the static subroutine information block.  */
  if (TARGET_ABI_UNICOSMK)
    {
      unicosmk_output_ssib (file, fnname);
      unicosmk_output_deferred_case_vectors (file);
    }
}
961d6ddd 7873
#if TARGET_ABI_OSF
/* Emit a tail call to FUNCTION after adjusting THIS by DELTA.

   In order to avoid the hordes of differences between generated code
   with and without TARGET_EXPLICIT_RELOCS, and to avoid duplicating
   lots of code loading up large constants, generate rtl and emit it
   instead of going straight to text.

   Not sure why this idea hasn't been explored before...

   FILE receives the assembler output; DELTA is added to the incoming
   `this' pointer; if VCALL_OFFSET is nonzero, a further adjustment is
   loaded from the vtable at that offset.  */

static void
alpha_output_mi_thunk_osf (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  HOST_WIDE_INT hi, lo;
  rtx this, insn, funexp;

  /* We always require a valid GP.  */
  emit_insn (gen_prologue_ldgp ());
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $16.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 17);
  else
    this = gen_rtx_REG (Pmode, 16);

  /* Add DELTA.  When possible we use ldah+lda.  Otherwise load the
     entire constant for the add.  */
  /* Split DELTA into sign-extended 16-bit low and 32-bit high parts;
     if that round-trips exactly, two adds suffice.  */
  lo = ((delta & 0xffff) ^ 0x8000) - 0x8000;
  hi = (((delta - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (hi + lo == delta)
    {
      if (hi)
	emit_insn (gen_adddi3 (this, this, GEN_INT (hi)));
      if (lo)
	emit_insn (gen_adddi3 (this, this, GEN_INT (lo)));
    }
  else
    {
      rtx tmp = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 0),
					   delta, -(delta < 0));
      emit_insn (gen_adddi3 (this, this, tmp));
    }

  /* Add a delta stored in the vtable at VCALL_OFFSET.  */
  if (vcall_offset)
    {
      rtx tmp, tmp2;

      /* Load the vptr, then the adjustment slot at VCALL_OFFSET.  */
      tmp = gen_rtx_REG (Pmode, 0);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));

      lo = ((vcall_offset & 0xffff) ^ 0x8000) - 0x8000;
      hi = (((vcall_offset - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
      if (hi + lo == vcall_offset)
	{
	  if (hi)
	    emit_insn (gen_adddi3 (tmp, tmp, GEN_INT (hi)));
	}
      else
	{
	  tmp2 = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 1),
					    vcall_offset, -(vcall_offset < 0));
	  emit_insn (gen_adddi3 (tmp, tmp, tmp2));
	  lo = 0;
	}
      /* Fold the low part into the load's address when possible.  */
      if (lo)
	tmp2 = gen_rtx_PLUS (Pmode, tmp, GEN_INT (lo));
      else
	tmp2 = tmp;
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp2));

      emit_insn (gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (! TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();
}
#endif /* TARGET_ABI_OSF */
449b7f2d 7977\f
/* Debugging support.  */

#include "gstab.h"

/* Count the number of sdb related labels that are generated (to find
   block start and end boundaries).  */

int sdb_label_count = 0;

/* Next label # for each statement.  */

static int sym_lineno = 0;

/* Count the number of .file directives, so that .loc is up to date.  */

static int num_source_filenames = 0;

/* Name of the file containing the current function.  */

static const char *current_function_file = "";

/* Offsets to alpha virtual arg/local debugging pointers.  */

long alpha_arg_offset;
long alpha_auto_offset;
8003\f
8004/* Emit a new filename to a stream. */
8005
8006void
8007alpha_output_filename (stream, name)
8008 FILE *stream;
1dd6c958 8009 const char *name;
449b7f2d 8010{
8011 static int first_time = TRUE;
8012 char ltext_label_name[100];
8013
8014 if (first_time)
8015 {
8016 first_time = FALSE;
8017 ++num_source_filenames;
8018 current_function_file = name;
8019 fprintf (stream, "\t.file\t%d ", num_source_filenames);
8020 output_quoted_string (stream, name);
8021 fprintf (stream, "\n");
8022 if (!TARGET_GAS && write_symbols == DBX_DEBUG)
8023 fprintf (stream, "\t#@stabs\n");
8024 }
8025
8763f243 8026 else if (write_symbols == DBX_DEBUG)
449b7f2d 8027 {
8028 ASM_GENERATE_INTERNAL_LABEL (ltext_label_name, "Ltext", 0);
95d655c3 8029 fprintf (stream, "%s", ASM_STABS_OP);
449b7f2d 8030 output_quoted_string (stream, name);
8031 fprintf (stream, ",%d,0,0,%s\n", N_SOL, &ltext_label_name[1]);
8032 }
8033
8034 else if (name != current_function_file
be3797c1 8035 && strcmp (name, current_function_file) != 0)
449b7f2d 8036 {
8037 if (inside_function && ! TARGET_GAS)
8038 fprintf (stream, "\t#.file\t%d ", num_source_filenames);
8039 else
8040 {
8041 ++num_source_filenames;
8042 current_function_file = name;
8043 fprintf (stream, "\t.file\t%d ", num_source_filenames);
8044 }
8045
8046 output_quoted_string (stream, name);
8047 fprintf (stream, "\n");
8048 }
8049}
8050\f
8051/* Emit a linenumber to a stream. */
8052
8053void
8054alpha_output_lineno (stream, line)
8055 FILE *stream;
8056 int line;
8057{
8763f243 8058 if (write_symbols == DBX_DEBUG)
449b7f2d 8059 {
8060 /* mips-tfile doesn't understand .stabd directives. */
8061 ++sym_lineno;
95d655c3 8062 fprintf (stream, "$LM%d:\n%s%d,0,%d,$LM%d\n",
449b7f2d 8063 sym_lineno, ASM_STABN_OP, N_SLINE, line, sym_lineno);
8064 }
8065 else
cffb5069 8066 fprintf (stream, "\n\t.loc\t%d %d\n", num_source_filenames, line);
449b7f2d 8067}
c4622276 8068\f
/* Structure to show the current status of registers and memory.  */

struct shadow_summary
{
  struct {
    unsigned int i     : 31;	/* Mask of int regs */
    unsigned int fp    : 31;	/* Mask of fp regs */
    unsigned int mem   : 1;	/* mem == imem | fpmem */
  } used, defd;			/* Resources read vs. written.  */
};

static void summarize_insn PARAMS ((rtx, struct shadow_summary *, int));
static void alpha_handle_trap_shadows PARAMS ((rtx));
b9a5aa8e 8082
/* Summarize the effects of expression X on the machine.  Update SUM, a
   pointer to the summary structure.  SET is nonzero if the insn is
   setting the object, otherwise zero.  */

static void
summarize_insn (x, sum, set)
     rtx x;
     struct shadow_summary *sum;
     int set;
{
  const char *format_ptr;
  int i, j;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
      /* ??? Note that this case would be incorrect if the Alpha had a
	 ZERO_EXTRACT in SET_DEST.  */
    case SET:
      summarize_insn (SET_SRC (x), sum, 0);
      summarize_insn (SET_DEST (x), sum, 1);
      break;

    case CLOBBER:
      summarize_insn (XEXP (x, 0), sum, 1);
      break;

    case USE:
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    case ASM_OPERANDS:
      /* Only the inputs are scanned; outputs are covered via the
	 containing SET/CLOBBER.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	summarize_insn (ASM_OPERANDS_INPUT (x, i), sum, 0);
      break;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	summarize_insn (XVECEXP (x, 0, i), sum, 0);
      break;

    case SUBREG:
      summarize_insn (SUBREG_REG (x), sum, 0);
      break;

    case REG:
      {
	int regno = REGNO (x);
	unsigned long mask = ((unsigned long) 1) << (regno % 32);

	/* Registers 31 and 63 ($31/$f31) are the hardwired zero
	   registers -- they carry no dependence, so skip them.  */
	if (regno == 31 || regno == 63)
	  break;

	if (set)
	  {
	    if (regno < 32)
	      sum->defd.i |= mask;
	    else
	      sum->defd.fp |= mask;
	  }
	else
	  {
	    if (regno < 32)
	      sum->used.i |= mask;
	    else
	      sum->used.fp |= mask;
	  }
      }
      break;

    case MEM:
      if (set)
	sum->defd.mem = 1;
      else
	sum->used.mem = 1;

      /* Find the regs used in memory address computation: */
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    case CONST_INT:   case CONST_DOUBLE:
    case SYMBOL_REF:  case LABEL_REF:     case CONST:
    case SCRATCH:     case ASM_INPUT:
      break;

      /* Handle common unary and binary ops for efficiency.  */
    case COMPARE:  case PLUS:    case MINUS:   case MULT:      case DIV:
    case MOD:      case UDIV:    case UMOD:    case AND:       case IOR:
    case XOR:      case ASHIFT:  case ROTATE:  case ASHIFTRT:  case LSHIFTRT:
    case ROTATERT: case SMIN:    case SMAX:    case UMIN:      case UMAX:
    case NE:       case EQ:      case GE:      case GT:        case LE:
    case LT:       case GEU:     case GTU:     case LEU:       case LTU:
      summarize_insn (XEXP (x, 0), sum, 0);
      summarize_insn (XEXP (x, 1), sum, 0);
      break;

    case NEG:  case NOT:  case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:  case FLOAT_EXTEND:  case FLOAT_TRUNCATE:  case FLOAT:
    case FIX:  case UNSIGNED_FLOAT:  case UNSIGNED_FIX:  case ABS:
    case SQRT:  case FFS:
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    default:
      /* Anything else: walk the generic RTX format string, recursing
	 into expression ('e') and vector ('E') operands.  */
      format_ptr = GET_RTX_FORMAT (GET_CODE (x));
      for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
	switch (format_ptr[i])
	  {
	  case 'e':
	    summarize_insn (XEXP (x, i), sum, 0);
	    break;

	  case 'E':
	    for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      summarize_insn (XVECEXP (x, i, j), sum, 0);
	    break;

	  case 'i':
	    break;

	  default:
	    abort ();
	  }
    }
}
c4622276 8210
b9a5aa8e 8211/* Ensure a sufficient number of `trapb' insns are in the code when
8212 the user requests code with a trap precision of functions or
8213 instructions.
8214
8215 In naive mode, when the user requests a trap-precision of
8216 "instruction", a trapb is needed after every instruction that may
8217 generate a trap. This ensures that the code is resumption safe but
8218 it is also slow.
8219
8220 When optimizations are turned on, we delay issuing a trapb as long
8221 as possible. In this context, a trap shadow is the sequence of
8222 instructions that starts with a (potentially) trap generating
8223 instruction and extends to the next trapb or call_pal instruction
8224 (but GCC never generates call_pal by itself). We can delay (and
8225 therefore sometimes omit) a trapb subject to the following
8226 conditions:
8227
8228 (a) On entry to the trap shadow, if any Alpha register or memory
8229 location contains a value that is used as an operand value by some
8230 instruction in the trap shadow (live on entry), then no instruction
8231 in the trap shadow may modify the register or memory location.
8232
8233 (b) Within the trap shadow, the computation of the base register
8234 for a memory load or store instruction may not involve using the
8235 result of an instruction that might generate an UNPREDICTABLE
8236 result.
8237
8238 (c) Within the trap shadow, no register may be used more than once
8239 as a destination register. (This is to make life easier for the
8240 trap-handler.)
c4622276 8241
18adf4f6 8242 (d) The trap shadow may not include any branch instructions. */
c4622276 8243
18adf4f6 8244static void
8245alpha_handle_trap_shadows (insns)
8246 rtx insns;
c4622276 8247{
18adf4f6 8248 struct shadow_summary shadow;
8249 int trap_pending, exception_nesting;
b9b4428b 8250 rtx i, n;
c4622276 8251
18adf4f6 8252 trap_pending = 0;
8253 exception_nesting = 0;
8254 shadow.used.i = 0;
8255 shadow.used.fp = 0;
8256 shadow.used.mem = 0;
8257 shadow.defd = shadow.used;
8258
8259 for (i = insns; i ; i = NEXT_INSN (i))
8260 {
8261 if (GET_CODE (i) == NOTE)
8262 {
8263 switch (NOTE_LINE_NUMBER (i))
8264 {
8265 case NOTE_INSN_EH_REGION_BEG:
8266 exception_nesting++;
8267 if (trap_pending)
8268 goto close_shadow;
8269 break;
8270
8271 case NOTE_INSN_EH_REGION_END:
8272 exception_nesting--;
8273 if (trap_pending)
8274 goto close_shadow;
8275 break;
8276
8277 case NOTE_INSN_EPILOGUE_BEG:
8278 if (trap_pending && alpha_tp >= ALPHA_TP_FUNC)
8279 goto close_shadow;
8280 break;
8281 }
8282 }
8283 else if (trap_pending)
8284 {
8285 if (alpha_tp == ALPHA_TP_FUNC)
8286 {
8287 if (GET_CODE (i) == JUMP_INSN
8288 && GET_CODE (PATTERN (i)) == RETURN)
8289 goto close_shadow;
8290 }
8291 else if (alpha_tp == ALPHA_TP_INSN)
8292 {
8293 if (optimize > 0)
8294 {
8295 struct shadow_summary sum;
8296
8297 sum.used.i = 0;
8298 sum.used.fp = 0;
8299 sum.used.mem = 0;
a886cc41 8300 sum.defd = sum.used;
18adf4f6 8301
8302 switch (GET_CODE (i))
8303 {
8304 case INSN:
fad0a39b 8305 /* Annoyingly, get_attr_trap will abort on these. */
8306 if (GET_CODE (PATTERN (i)) == USE
8307 || GET_CODE (PATTERN (i)) == CLOBBER)
18adf4f6 8308 break;
8309
8310 summarize_insn (PATTERN (i), &sum, 0);
8311
8312 if ((sum.defd.i & shadow.defd.i)
8313 || (sum.defd.fp & shadow.defd.fp))
8314 {
8315 /* (c) would be violated */
8316 goto close_shadow;
8317 }
8318
8319 /* Combine shadow with summary of current insn: */
8320 shadow.used.i |= sum.used.i;
8321 shadow.used.fp |= sum.used.fp;
8322 shadow.used.mem |= sum.used.mem;
8323 shadow.defd.i |= sum.defd.i;
8324 shadow.defd.fp |= sum.defd.fp;
8325 shadow.defd.mem |= sum.defd.mem;
8326
8327 if ((sum.defd.i & shadow.used.i)
8328 || (sum.defd.fp & shadow.used.fp)
8329 || (sum.defd.mem & shadow.used.mem))
8330 {
8331 /* (a) would be violated (also takes care of (b)) */
8332 if (get_attr_trap (i) == TRAP_YES
8333 && ((sum.defd.i & sum.used.i)
8334 || (sum.defd.fp & sum.used.fp)))
8335 abort ();
8336
8337 goto close_shadow;
8338 }
8339 break;
8340
8341 case JUMP_INSN:
8342 case CALL_INSN:
8343 case CODE_LABEL:
8344 goto close_shadow;
8345
8346 default:
c4622276 8347 abort ();
18adf4f6 8348 }
8349 }
8350 else
8351 {
8352 close_shadow:
b9b4428b 8353 n = emit_insn_before (gen_trapb (), i);
8354 PUT_MODE (n, TImode);
8355 PUT_MODE (i, TImode);
18adf4f6 8356 trap_pending = 0;
8357 shadow.used.i = 0;
8358 shadow.used.fp = 0;
8359 shadow.used.mem = 0;
8360 shadow.defd = shadow.used;
8361 }
8362 }
8363 }
c4622276 8364
609d4083 8365 if ((exception_nesting > 0 || alpha_tp >= ALPHA_TP_FUNC)
8366 && GET_CODE (i) == INSN
8367 && GET_CODE (PATTERN (i)) != USE
8368 && GET_CODE (PATTERN (i)) != CLOBBER
8369 && get_attr_trap (i) == TRAP_YES)
8370 {
8371 if (optimize && !trap_pending)
8372 summarize_insn (PATTERN (i), &shadow, 0);
8373 trap_pending = 1;
8374 }
c4622276 8375 }
8376}
b9b4428b 8377\f
b9b4428b 8378/* Alpha can only issue instruction groups simultaneously if they are
8379 suitibly aligned. This is very processor-specific. */
8380
849674a3 8381enum alphaev4_pipe {
8382 EV4_STOP = 0,
8383 EV4_IB0 = 1,
8384 EV4_IB1 = 2,
8385 EV4_IBX = 4
8386};
8387
b9b4428b 8388enum alphaev5_pipe {
8389 EV5_STOP = 0,
8390 EV5_NONE = 1,
8391 EV5_E01 = 2,
8392 EV5_E0 = 4,
8393 EV5_E1 = 8,
8394 EV5_FAM = 16,
8395 EV5_FA = 32,
8396 EV5_FM = 64
8397};
8398
32bd1bda 8399static enum alphaev4_pipe alphaev4_insn_pipe PARAMS ((rtx));
8400static enum alphaev5_pipe alphaev5_insn_pipe PARAMS ((rtx));
b53f315c 8401static rtx alphaev4_next_group PARAMS ((rtx, int *, int *));
8402static rtx alphaev5_next_group PARAMS ((rtx, int *, int *));
8403static rtx alphaev4_next_nop PARAMS ((int *));
8404static rtx alphaev5_next_nop PARAMS ((int *));
849674a3 8405
8406static void alpha_align_insns
b53f315c 8407 PARAMS ((rtx, unsigned int, rtx (*)(rtx, int *, int *), rtx (*)(int *)));
849674a3 8408
8409static enum alphaev4_pipe
8410alphaev4_insn_pipe (insn)
8411 rtx insn;
8412{
8413 if (recog_memoized (insn) < 0)
8414 return EV4_STOP;
8415 if (get_attr_length (insn) != 4)
8416 return EV4_STOP;
8417
8418 switch (get_attr_type (insn))
8419 {
8420 case TYPE_ILD:
8421 case TYPE_FLD:
8422 return EV4_IBX;
8423
8424 case TYPE_LDSYM:
8425 case TYPE_IADD:
8426 case TYPE_ILOG:
8427 case TYPE_ICMOV:
8428 case TYPE_ICMP:
8429 case TYPE_IST:
8430 case TYPE_FST:
8431 case TYPE_SHIFT:
8432 case TYPE_IMUL:
8433 case TYPE_FBR:
8434 return EV4_IB0;
8435
8436 case TYPE_MISC:
8437 case TYPE_IBR:
8438 case TYPE_JSR:
1050b77e 8439 case TYPE_CALLPAL:
849674a3 8440 case TYPE_FCPYS:
8441 case TYPE_FCMOV:
8442 case TYPE_FADD:
8443 case TYPE_FDIV:
8444 case TYPE_FMUL:
8445 return EV4_IB1;
8446
8447 default:
b53f315c 8448 abort ();
849674a3 8449 }
8450}
8451
b9b4428b 8452static enum alphaev5_pipe
8453alphaev5_insn_pipe (insn)
8454 rtx insn;
8455{
8456 if (recog_memoized (insn) < 0)
8457 return EV5_STOP;
8458 if (get_attr_length (insn) != 4)
8459 return EV5_STOP;
8460
8461 switch (get_attr_type (insn))
8462 {
8463 case TYPE_ILD:
8464 case TYPE_FLD:
8465 case TYPE_LDSYM:
8466 case TYPE_IADD:
8467 case TYPE_ILOG:
8468 case TYPE_ICMOV:
8469 case TYPE_ICMP:
8470 return EV5_E01;
8471
8472 case TYPE_IST:
8473 case TYPE_FST:
8474 case TYPE_SHIFT:
8475 case TYPE_IMUL:
8476 case TYPE_MISC:
8477 case TYPE_MVI:
8478 return EV5_E0;
8479
8480 case TYPE_IBR:
8481 case TYPE_JSR:
1050b77e 8482 case TYPE_CALLPAL:
b9b4428b 8483 return EV5_E1;
8484
8485 case TYPE_FCPYS:
8486 return EV5_FAM;
8487
8488 case TYPE_FBR:
8489 case TYPE_FCMOV:
8490 case TYPE_FADD:
8491 case TYPE_FDIV:
8492 return EV5_FA;
8493
8494 case TYPE_FMUL:
8495 return EV5_FM;
ddca68f8 8496
8497 default:
8498 abort();
b9b4428b 8499 }
b9b4428b 8500}
8501
849674a3 8502/* IN_USE is a mask of the slots currently filled within the insn group.
8503 The mask bits come from alphaev4_pipe above. If EV4_IBX is set, then
8504 the insn in EV4_IB0 can be swapped by the hardware into EV4_IB1.
8505
8506 LEN is, of course, the length of the group in bytes. */
8507
static rtx
alphaev4_next_group (insn, pin_use, plen)
     rtx insn;
     int *pin_use, *plen;
{
  int len, in_use;

  len = in_use = 0;

  /* Notes/labels/uses/clobbers never start a group.  */
  if (! INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == CLOBBER
      || GET_CODE (PATTERN (insn)) == USE)
    goto next_and_done;

  while (1)
    {
      enum alphaev4_pipe pipe;

      pipe = alphaev4_insn_pipe (insn);
      switch (pipe)
	{
	case EV4_STOP:
	  /* Force complex instructions to start new groups.  */
	  if (in_use)
	    goto done;

	  /* If this is a completely unrecognized insn, it's an asm.
	     We don't know how long it is, so record length as -1 to
	     signal a needed realignment.  */
	  if (recog_memoized (insn) < 0)
	    len = -1;
	  else
	    len = get_attr_length (insn);
	  goto next_and_done;

	case EV4_IBX:
	  /* Either slot will do; prefer IB0, remembering the choice is
	     still swappable (EV4_IBX left set).  */
	  if (in_use & EV4_IB0)
	    {
	      if (in_use & EV4_IB1)
		goto done;
	      in_use |= EV4_IB1;
	    }
	  else
	    in_use |= EV4_IB0 | EV4_IBX;
	  break;

	case EV4_IB0:
	  /* If IB0 is taken but its occupant was swappable, push that
	     occupant over to IB1.  */
	  if (in_use & EV4_IB0)
	    {
	      if (!(in_use & EV4_IBX) || (in_use & EV4_IB1))
		goto done;
	      in_use |= EV4_IB1;
	    }
	  in_use |= EV4_IB0;
	  break;

	case EV4_IB1:
	  if (in_use & EV4_IB1)
	    goto done;
	  in_use |= EV4_IB1;
	  break;

	default:
	  abort();
	}
      len += 4;

      /* Haifa doesn't do well scheduling branches.  */
      if (GET_CODE (insn) == JUMP_INSN)
	goto next_and_done;

    next:
      insn = next_nonnote_insn (insn);

      if (!insn || ! INSN_P (insn))
	goto done;

      /* Let Haifa tell us where it thinks insn group boundaries are.  */
      if (GET_MODE (insn) == TImode)
	goto done;

      if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
	goto next;
    }

 next_and_done:
  insn = next_nonnote_insn (insn);

 done:
  *plen = len;
  *pin_use = in_use;
  return insn;
}
8601
8602/* IN_USE is a mask of the slots currently filled within the insn group.
8603 The mask bits come from alphaev5_pipe above. If EV5_E01 is set, then
8604 the insn in EV5_E0 can be swapped by the hardware into EV5_E1.
b9b4428b 8605
8606 LEN is, of course, the length of the group in bytes. */
8607
static rtx
alphaev5_next_group (insn, pin_use, plen)
     rtx insn;
     int *pin_use, *plen;
{
  int len, in_use;

  len = in_use = 0;

  /* Notes/labels/uses/clobbers never start a group.  */
  if (! INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == CLOBBER
      || GET_CODE (PATTERN (insn)) == USE)
    goto next_and_done;

  while (1)
    {
      enum alphaev5_pipe pipe;

      pipe = alphaev5_insn_pipe (insn);
      switch (pipe)
	{
	case EV5_STOP:
	  /* Force complex instructions to start new groups.  */
	  if (in_use)
	    goto done;

	  /* If this is a completely unrecognized insn, it's an asm.
	     We don't know how long it is, so record length as -1 to
	     signal a needed realignment.  */
	  if (recog_memoized (insn) < 0)
	    len = -1;
	  else
	    len = get_attr_length (insn);
	  goto next_and_done;

	/* ??? Most of the places below, we would like to abort, as
	   it would indicate an error either in Haifa, or in the
	   scheduling description.  Unfortunately, Haifa never
	   schedules the last instruction of the BB, so we don't
	   have an accurate TI bit to go off.  */
	case EV5_E01:
	  /* Either integer unit; prefer E0, remembering it may still
	     be swapped to E1 (EV5_E01 left set).  */
	  if (in_use & EV5_E0)
	    {
	      if (in_use & EV5_E1)
		goto done;
	      in_use |= EV5_E1;
	    }
	  else
	    in_use |= EV5_E0 | EV5_E01;
	  break;

	case EV5_E0:
	  /* If E0 is taken by a swappable insn, push it over to E1.  */
	  if (in_use & EV5_E0)
	    {
	      if (!(in_use & EV5_E01) || (in_use & EV5_E1))
		goto done;
	      in_use |= EV5_E1;
	    }
	  in_use |= EV5_E0;
	  break;

	case EV5_E1:
	  if (in_use & EV5_E1)
	    goto done;
	  in_use |= EV5_E1;
	  break;

	case EV5_FAM:
	  /* Either FP unit; prefer FA, else fall back to FM.  */
	  if (in_use & EV5_FA)
	    {
	      if (in_use & EV5_FM)
		goto done;
	      in_use |= EV5_FM;
	    }
	  else
	    in_use |= EV5_FA | EV5_FAM;
	  break;

	case EV5_FA:
	  if (in_use & EV5_FA)
	    goto done;
	  in_use |= EV5_FA;
	  break;

	case EV5_FM:
	  if (in_use & EV5_FM)
	    goto done;
	  in_use |= EV5_FM;
	  break;

	case EV5_NONE:
	  break;

	default:
	  abort();
	}
      len += 4;

      /* Haifa doesn't do well scheduling branches.  */
      /* ??? If this is predicted not-taken, slotting continues, except
	 that no more IBR, FBR, or JSR insns may be slotted.  */
      if (GET_CODE (insn) == JUMP_INSN)
	goto next_and_done;

    next:
      insn = next_nonnote_insn (insn);

      if (!insn || ! INSN_P (insn))
	goto done;

      /* Let Haifa tell us where it thinks insn group boundaries are.  */
      if (GET_MODE (insn) == TImode)
	goto done;

      if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
	goto next;
    }

 next_and_done:
  insn = next_nonnote_insn (insn);

 done:
  *plen = len;
  *pin_use = in_use;
  return insn;
}
8734
849674a3 8735static rtx
8736alphaev4_next_nop (pin_use)
8737 int *pin_use;
8738{
8739 int in_use = *pin_use;
8740 rtx nop;
8741
8742 if (!(in_use & EV4_IB0))
8743 {
8744 in_use |= EV4_IB0;
8745 nop = gen_nop ();
8746 }
8747 else if ((in_use & (EV4_IBX|EV4_IB1)) == EV4_IBX)
8748 {
8749 in_use |= EV4_IB1;
8750 nop = gen_nop ();
8751 }
8752 else if (TARGET_FP && !(in_use & EV4_IB1))
8753 {
8754 in_use |= EV4_IB1;
8755 nop = gen_fnop ();
8756 }
8757 else
8758 nop = gen_unop ();
8759
8760 *pin_use = in_use;
8761 return nop;
8762}
8763
8764static rtx
8765alphaev5_next_nop (pin_use)
8766 int *pin_use;
8767{
8768 int in_use = *pin_use;
8769 rtx nop;
8770
8771 if (!(in_use & EV5_E1))
8772 {
8773 in_use |= EV5_E1;
8774 nop = gen_nop ();
8775 }
8776 else if (TARGET_FP && !(in_use & EV5_FA))
8777 {
8778 in_use |= EV5_FA;
8779 nop = gen_fnop ();
8780 }
8781 else if (TARGET_FP && !(in_use & EV5_FM))
8782 {
8783 in_use |= EV5_FM;
8784 nop = gen_fnop ();
8785 }
8786 else
8787 nop = gen_unop ();
8788
8789 *pin_use = in_use;
8790 return nop;
8791}
8792
8793/* The instruction group alignment main loop. */
8794
b9b4428b 8795static void
b53f315c 8796alpha_align_insns (insns, max_align, next_group, next_nop)
b9b4428b 8797 rtx insns;
b53f315c 8798 unsigned int max_align;
8799 rtx (*next_group) PARAMS ((rtx, int *, int *));
8800 rtx (*next_nop) PARAMS ((int *));
b9b4428b 8801{
8802 /* ALIGN is the known alignment for the insn group. */
b53f315c 8803 unsigned int align;
b9b4428b 8804 /* OFS is the offset of the current insn in the insn group. */
8805 int ofs;
8806 int prev_in_use, in_use, len;
8807 rtx i, next;
8808
8809 /* Let shorten branches care for assigning alignments to code labels. */
8810 shorten_branches (insns);
8811
d815ce59 8812 if (align_functions < 4)
8813 align = 4;
eeca3ba1 8814 else if ((unsigned int) align_functions < max_align)
d815ce59 8815 align = align_functions;
8816 else
8817 align = max_align;
e2c8a34a 8818
b9b4428b 8819 ofs = prev_in_use = 0;
b9b4428b 8820 i = insns;
8821 if (GET_CODE (i) == NOTE)
8822 i = next_nonnote_insn (i);
8823
8824 while (i)
8825 {
b53f315c 8826 next = (*next_group) (i, &in_use, &len);
b9b4428b 8827
8828 /* When we see a label, resync alignment etc. */
8829 if (GET_CODE (i) == CODE_LABEL)
8830 {
b53f315c 8831 unsigned int new_align = 1 << label_to_alignment (i);
8832
b9b4428b 8833 if (new_align >= align)
8834 {
849674a3 8835 align = new_align < max_align ? new_align : max_align;
b9b4428b 8836 ofs = 0;
8837 }
b53f315c 8838
b9b4428b 8839 else if (ofs & (new_align-1))
8840 ofs = (ofs | (new_align-1)) + 1;
8841 if (len != 0)
8842 abort();
8843 }
8844
8845 /* Handle complex instructions special. */
8846 else if (in_use == 0)
8847 {
8848 /* Asms will have length < 0. This is a signal that we have
8849 lost alignment knowledge. Assume, however, that the asm
8850 will not mis-align instructions. */
8851 if (len < 0)
8852 {
8853 ofs = 0;
8854 align = 4;
8855 len = 0;
8856 }
8857 }
8858
8859 /* If the known alignment is smaller than the recognized insn group,
8860 realign the output. */
1f0ce6a6 8861 else if ((int) align < len)
b9b4428b 8862 {
b53f315c 8863 unsigned int new_log_align = len > 8 ? 4 : 3;
943a1b57 8864 rtx prev, where;
b9b4428b 8865
943a1b57 8866 where = prev = prev_nonnote_insn (i);
b9b4428b 8867 if (!where || GET_CODE (where) != CODE_LABEL)
8868 where = i;
8869
943a1b57 8870 /* Can't realign between a call and its gp reload. */
8871 if (! (TARGET_EXPLICIT_RELOCS
8872 && prev && GET_CODE (prev) == CALL_INSN))
8873 {
8874 emit_insn_before (gen_realign (GEN_INT (new_log_align)), where);
8875 align = 1 << new_log_align;
8876 ofs = 0;
8877 }
b9b4428b 8878 }
8879
8880 /* If the group won't fit in the same INT16 as the previous,
8881 we need to add padding to keep the group together. Rather
8882 than simply leaving the insn filling to the assembler, we
8883 can make use of the knowledge of what sorts of instructions
8884 were issued in the previous group to make sure that all of
8885 the added nops are really free. */
1f0ce6a6 8886 else if (ofs + len > (int) align)
b9b4428b 8887 {
8888 int nop_count = (align - ofs) / 4;
8889 rtx where;
8890
943a1b57 8891 /* Insert nops before labels, branches, and calls to truely merge
8892 the execution of the nops with the previous instruction group. */
b9b4428b 8893 where = prev_nonnote_insn (i);
849674a3 8894 if (where)
b9b4428b 8895 {
849674a3 8896 if (GET_CODE (where) == CODE_LABEL)
b9b4428b 8897 {
849674a3 8898 rtx where2 = prev_nonnote_insn (where);
8899 if (where2 && GET_CODE (where2) == JUMP_INSN)
8900 where = where2;
b9b4428b 8901 }
943a1b57 8902 else if (GET_CODE (where) == INSN)
849674a3 8903 where = i;
b9b4428b 8904 }
849674a3 8905 else
8906 where = i;
8907
8908 do
8909 emit_insn_before ((*next_nop)(&prev_in_use), where);
b9b4428b 8910 while (--nop_count);
8911 ofs = 0;
8912 }
8913
8914 ofs = (ofs + len) & (align - 1);
8915 prev_in_use = in_use;
8916 i = next;
8917 }
8918}
b9b4428b 8919\f
35a3065a 8920/* Machine dependent reorg pass. */
18adf4f6 8921
8922void
8923alpha_reorg (insns)
8924 rtx insns;
8925{
b9b4428b 8926 if (alpha_tp != ALPHA_TP_PROG || flag_exceptions)
8927 alpha_handle_trap_shadows (insns);
8928
b9b4428b 8929 /* Due to the number of extra trapb insns, don't bother fixing up
8930 alignment when trap precision is instruction. Moreover, we can
b53f315c 8931 only do our job when sched2 is run. */
b9b4428b 8932 if (optimize && !optimize_size
8933 && alpha_tp != ALPHA_TP_INSN
8934 && flag_schedule_insns_after_reload)
8935 {
849674a3 8936 if (alpha_cpu == PROCESSOR_EV4)
b53f315c 8937 alpha_align_insns (insns, 8, alphaev4_next_group, alphaev4_next_nop);
849674a3 8938 else if (alpha_cpu == PROCESSOR_EV5)
b53f315c 8939 alpha_align_insns (insns, 16, alphaev5_next_group, alphaev5_next_nop);
b9b4428b 8940 }
18adf4f6 8941}
18adf4f6 8942\f
bbfbe351 8943#ifdef OBJECT_FORMAT_ELF
8944
8945/* Switch to the section to which we should output X. The only thing
8946 special we do here is to honor small data. */
8947
8948static void
8949alpha_elf_select_rtx_section (mode, x, align)
8950 enum machine_mode mode;
8951 rtx x;
8952 unsigned HOST_WIDE_INT align;
8953{
8954 if (TARGET_SMALL_DATA && GET_MODE_SIZE (mode) <= g_switch_value)
8955 /* ??? Consider using mergable sdata sections. */
8956 sdata_section ();
8957 else
8958 default_elf_select_rtx_section (mode, x, align);
8959}
8960
8961#endif /* OBJECT_FORMAT_ELF */
8962\f
1467e953 8963#if TARGET_ABI_OPEN_VMS
8df4a58b 8964
0dbd1c74 8965/* Return the VMS argument type corresponding to MODE. */
8df4a58b 8966
0dbd1c74 8967enum avms_arg_type
8968alpha_arg_type (mode)
8969 enum machine_mode mode;
8970{
8971 switch (mode)
8df4a58b 8972 {
0dbd1c74 8973 case SFmode:
8974 return TARGET_FLOAT_VAX ? FF : FS;
8975 case DFmode:
8976 return TARGET_FLOAT_VAX ? FD : FT;
8977 default:
8978 return I64;
8df4a58b 8979 }
0dbd1c74 8980}
8df4a58b 8981
0dbd1c74 8982/* Return an rtx for an integer representing the VMS Argument Information
8983 register value. */
8df4a58b 8984
1dd6c958 8985rtx
0dbd1c74 8986alpha_arg_info_reg_val (cum)
8987 CUMULATIVE_ARGS cum;
8988{
8989 unsigned HOST_WIDE_INT regval = cum.num_args;
8990 int i;
8df4a58b 8991
0dbd1c74 8992 for (i = 0; i < 6; i++)
8993 regval |= ((int) cum.atypes[i]) << (i * 3 + 8);
8df4a58b 8994
0dbd1c74 8995 return GEN_INT (regval);
8996}
8997\f
57e47080 8998/* Protect alpha_links from garbage collection. */
8999
9000static int
9001mark_alpha_links_node (node, data)
9002 splay_tree_node node;
9003 void *data ATTRIBUTE_UNUSED;
9004{
9005 struct alpha_links *links = (struct alpha_links *) node->value;
9006 ggc_mark_rtx (links->linkage);
9007 return 0;
9008}
9009
9010static void
9011mark_alpha_links (ptr)
9012 void *ptr;
9013{
9014 splay_tree tree = *(splay_tree *) ptr;
9015 splay_tree_foreach (tree, mark_alpha_links_node, NULL);
9016}
8df4a58b 9017
9018/* Make (or fake) .linkage entry for function call.
9019
57e47080 9020 IS_LOCAL is 0 if name is used in call, 1 if name is used in definition.
8df4a58b 9021
57e47080 9022 Return an SYMBOL_REF rtx for the linkage. */
9023
9024rtx
8df4a58b 9025alpha_need_linkage (name, is_local)
ef241053 9026 const char *name;
8df4a58b 9027 int is_local;
9028{
57e47080 9029 splay_tree_node node;
9030 struct alpha_links *al;
cf73d31f 9031 struct alpha_funcs *cfaf;
8df4a58b 9032
9033 if (name[0] == '*')
9034 name++;
9035
cf73d31f 9036 if (is_local)
9037 {
9038 alpha_funcs_tree = splay_tree_new
9039 ((splay_tree_compare_fn) splay_tree_compare_pointers,
9040 (splay_tree_delete_key_fn) free,
9041 (splay_tree_delete_key_fn) free);
9042
9043 cfaf = (struct alpha_funcs *) xmalloc (sizeof (struct alpha_funcs));
9044
9045 cfaf->links = 0;
9046 cfaf->num = ++alpha_funcs_num;
9047
9048 splay_tree_insert (alpha_funcs_tree,
9049 (splay_tree_key) current_function_decl,
9050 (splay_tree_value) cfaf);
9051
9052 }
9053
9054 if (alpha_links_tree)
57e47080 9055 {
9056 /* Is this name already defined? */
8df4a58b 9057
cf73d31f 9058 node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
57e47080 9059 if (node)
9060 {
9061 al = (struct alpha_links *) node->value;
9062 if (is_local)
9063 {
9064 /* Defined here but external assumed. */
cf73d31f 9065 if (al->lkind == KIND_EXTERN)
9066 al->lkind = KIND_LOCAL;
57e47080 9067 }
9068 else
9069 {
9070 /* Used here but unused assumed. */
cf73d31f 9071 if (al->lkind == KIND_UNUSED)
9072 al->lkind = KIND_LOCAL;
57e47080 9073 }
9074 return al->linkage;
9075 }
9076 }
9077 else
9078 {
cf73d31f 9079 alpha_links_tree = splay_tree_new
9080 ((splay_tree_compare_fn) strcmp,
9081 (splay_tree_delete_key_fn) free,
9082 (splay_tree_delete_key_fn) free);
9083
9084 ggc_add_root (&alpha_links_tree, 1, 1, mark_alpha_links);
57e47080 9085 }
8df4a58b 9086
57e47080 9087 al = (struct alpha_links *) xmalloc (sizeof (struct alpha_links));
9088 name = xstrdup (name);
8df4a58b 9089
9090 /* Assume external if no definition. */
cf73d31f 9091 al->lkind = (is_local ? KIND_UNUSED : KIND_EXTERN);
8df4a58b 9092
57e47080 9093 /* Ensure we have an IDENTIFIER so assemble_name can mark it used. */
d2899e26 9094 get_identifier (name);
9095
57e47080 9096 /* Construct a SYMBOL_REF for us to call. */
9097 {
9098 size_t name_len = strlen (name);
44acf429 9099 char *linksym = alloca (name_len + 6);
57e47080 9100 linksym[0] = '$';
9101 memcpy (linksym + 1, name, name_len);
9102 memcpy (linksym + 1 + name_len, "..lk", 5);
44acf429 9103 al->linkage = gen_rtx_SYMBOL_REF (Pmode,
9104 ggc_alloc_string (linksym, name_len + 5));
57e47080 9105 }
9106
cf73d31f 9107 splay_tree_insert (alpha_links_tree, (splay_tree_key) name,
57e47080 9108 (splay_tree_value) al);
8df4a58b 9109
57e47080 9110 return al->linkage;
8df4a58b 9111}
9112
cf73d31f 9113rtx
9114alpha_use_linkage (linkage, cfundecl, lflag, rflag)
9115 rtx linkage;
9116 tree cfundecl;
9117 int lflag;
9118 int rflag;
9119{
9120 splay_tree_node cfunnode;
9121 struct alpha_funcs *cfaf;
9122 struct alpha_links *al;
9123 const char *name = XSTR (linkage, 0);
9124
9125 cfaf = (struct alpha_funcs *) 0;
9126 al = (struct alpha_links *) 0;
9127
9128 cfunnode = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) cfundecl);
9129 cfaf = (struct alpha_funcs *) cfunnode->value;
9130
9131 if (cfaf->links)
9132 {
9133 splay_tree_node lnode;
9134
9135 /* Is this name already defined? */
9136
9137 lnode = splay_tree_lookup (cfaf->links, (splay_tree_key) name);
9138 if (lnode)
9139 al = (struct alpha_links *) lnode->value;
9140 }
9141 else
9142 {
9143 cfaf->links = splay_tree_new
9144 ((splay_tree_compare_fn) strcmp,
9145 (splay_tree_delete_key_fn) free,
9146 (splay_tree_delete_key_fn) free);
9147 ggc_add_root (&cfaf->links, 1, 1, mark_alpha_links);
9148 }
9149
9150 if (!al)
9151 {
9152 size_t name_len;
9153 size_t buflen;
9154 char buf [512];
9155 char *linksym;
9156 splay_tree_node node = 0;
9157 struct alpha_links *anl;
9158
9159 if (name[0] == '*')
9160 name++;
9161
9162 name_len = strlen (name);
9163
9164 al = (struct alpha_links *) xmalloc (sizeof (struct alpha_links));
9165 al->num = cfaf->num;
9166
9167 node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
9168 if (node)
9169 {
9170 anl = (struct alpha_links *) node->value;
9171 al->lkind = anl->lkind;
9172 }
9173
9174 sprintf (buf, "$%d..%s..lk", cfaf->num, name);
9175 buflen = strlen (buf);
9176 linksym = alloca (buflen + 1);
9177 memcpy (linksym, buf, buflen + 1);
9178
9179 al->linkage = gen_rtx_SYMBOL_REF
9180 (Pmode, ggc_alloc_string (linksym, buflen + 1));
9181
9182 splay_tree_insert (cfaf->links, (splay_tree_key) name,
9183 (splay_tree_value) al);
9184 }
9185
9186 if (rflag)
9187 al->rkind = KIND_CODEADDR;
9188 else
9189 al->rkind = KIND_LINKAGE;
9190
9191 if (lflag)
9192 return gen_rtx_MEM (Pmode, plus_constant (al->linkage, 8));
9193 else
9194 return al->linkage;
9195}
9196
57e47080 9197static int
9198alpha_write_one_linkage (node, data)
9199 splay_tree_node node;
9200 void *data;
9201{
0d95286f 9202 const char *const name = (const char *) node->key;
cf73d31f 9203 struct alpha_links *link = (struct alpha_links *) node->value;
57e47080 9204 FILE *stream = (FILE *) data;
9205
cf73d31f 9206 fprintf (stream, "$%d..%s..lk:\n", link->num, name);
9207 if (link->rkind == KIND_CODEADDR)
57e47080 9208 {
cf73d31f 9209 if (link->lkind == KIND_LOCAL)
9210 {
9211 /* Local and used */
9212 fprintf (stream, "\t.quad %s..en\n", name);
9213 }
9214 else
9215 {
9216 /* External and used, request code address. */
9217 fprintf (stream, "\t.code_address %s\n", name);
9218 }
57e47080 9219 }
9220 else
9221 {
cf73d31f 9222 if (link->lkind == KIND_LOCAL)
9223 {
9224 /* Local and used, build linkage pair. */
9225 fprintf (stream, "\t.quad %s..en\n", name);
9226 fprintf (stream, "\t.quad %s\n", name);
9227 }
9228 else
9229 {
9230 /* External and used, request linkage pair. */
9231 fprintf (stream, "\t.linkage %s\n", name);
9232 }
57e47080 9233 }
9234
9235 return 0;
9236}
8df4a58b 9237
cf73d31f 9238static void
9239alpha_write_linkage (stream, funname, fundecl)
9240 FILE *stream;
9241 const char *funname;
9242 tree fundecl;
8df4a58b 9243{
cf73d31f 9244 splay_tree_node node;
9245 struct alpha_funcs *func;
9246
9247 link_section ();
9248 fprintf (stream, "\t.align 3\n");
9249 node = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) fundecl);
9250 func = (struct alpha_funcs *) node->value;
9251
9252 fputs ("\t.name ", stream);
9253 assemble_name (stream, funname);
9254 fputs ("..na\n", stream);
9255 ASM_OUTPUT_LABEL (stream, funname);
9256 fprintf (stream, "\t.pdesc ");
9257 assemble_name (stream, funname);
9258 fprintf (stream, "..en,%s\n",
9259 alpha_procedure_type == PT_STACK ? "stack"
9260 : alpha_procedure_type == PT_REGISTER ? "reg" : "null");
9261
9262 if (func->links)
c64a8830 9263 {
cf73d31f 9264 splay_tree_foreach (func->links, alpha_write_one_linkage, stream);
9265 /* splay_tree_delete (func->links); */
c64a8830 9266 }
8df4a58b 9267}
9268
2cb4ac60 9269/* Given a decl, a section name, and whether the decl initializer
9270 has relocs, choose attributes for the section. */
9271
9272#define SECTION_VMS_OVERLAY SECTION_FORGET
c64a8830 9273#define SECTION_VMS_GLOBAL SECTION_MACH_DEP
9274#define SECTION_VMS_INITIALIZE (SECTION_VMS_GLOBAL << 1)
2cb4ac60 9275
9276static unsigned int
9277vms_section_type_flags (decl, name, reloc)
9278 tree decl;
9279 const char *name;
9280 int reloc;
9281{
9282 unsigned int flags = default_section_type_flags (decl, name, reloc);
9283
e3c541f0 9284 if (decl && DECL_ATTRIBUTES (decl)
9285 && lookup_attribute ("overlaid", DECL_ATTRIBUTES (decl)))
2cb4ac60 9286 flags |= SECTION_VMS_OVERLAY;
c64a8830 9287 if (decl && DECL_ATTRIBUTES (decl)
9288 && lookup_attribute ("global", DECL_ATTRIBUTES (decl)))
9289 flags |= SECTION_VMS_GLOBAL;
9290 if (decl && DECL_ATTRIBUTES (decl)
9291 && lookup_attribute ("initialize", DECL_ATTRIBUTES (decl)))
9292 flags |= SECTION_VMS_INITIALIZE;
2cb4ac60 9293
9294 return flags;
9295}
9296
9297/* Switch to an arbitrary section NAME with attributes as specified
9298 by FLAGS. ALIGN specifies any known alignment requirements for
9299 the section; 0 if the default should be used. */
9300
9301static void
29a0ebee 9302vms_asm_named_section (name, flags)
2cb4ac60 9303 const char *name;
9304 unsigned int flags;
2cb4ac60 9305{
c64a8830 9306 fputc ('\n', asm_out_file);
9307 fprintf (asm_out_file, ".section\t%s", name);
2cb4ac60 9308
9309 if (flags & SECTION_VMS_OVERLAY)
c64a8830 9310 fprintf (asm_out_file, ",OVR");
9311 if (flags & SECTION_VMS_GLOBAL)
9312 fprintf (asm_out_file, ",GBL");
9313 if (flags & SECTION_VMS_INITIALIZE)
9314 fprintf (asm_out_file, ",NOMOD");
9315 if (flags & SECTION_DEBUG)
9316 fprintf (asm_out_file, ",NOWRT");
9317
9318 fputc ('\n', asm_out_file);
2cb4ac60 9319}
9320
01d15dc5 9321/* Record an element in the table of global constructors. SYMBOL is
9322 a SYMBOL_REF of the function to be called; PRIORITY is a number
9323 between 0 and MAX_INIT_PRIORITY.
9324
9325 Differs from default_ctors_section_asm_out_constructor in that the
9326 width of the .ctors entry is always 64 bits, rather than the 32 bits
9327 used by a normal pointer. */
9328
static void
vms_asm_out_constructor (symbol, priority)
     rtx symbol;
     int priority ATTRIBUTE_UNUSED;
{
  ctors_section ();
  /* Word-align and emit a full-word (64-bit) entry.  */
  assemble_align (BITS_PER_WORD);
  assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}
9338
/* Likewise for global destructors: emit SYMBOL as a 64-bit entry in the
   .dtors section.  */

static void
vms_asm_out_destructor (symbol, priority)
     rtx symbol;
     int priority ATTRIBUTE_UNUSED;
{
  dtors_section ();
  assemble_align (BITS_PER_WORD);
  assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}
8df4a58b 9348#else
9349
57e47080 9350rtx
8df4a58b 9351alpha_need_linkage (name, is_local)
ef241053 9352 const char *name ATTRIBUTE_UNUSED;
769ea120 9353 int is_local ATTRIBUTE_UNUSED;
8df4a58b 9354{
57e47080 9355 return NULL_RTX;
8df4a58b 9356}
9357
cf73d31f 9358rtx
9359alpha_use_linkage (linkage, cfundecl, lflag, rflag)
9360 rtx linkage ATTRIBUTE_UNUSED;
9361 tree cfundecl ATTRIBUTE_UNUSED;
9362 int lflag ATTRIBUTE_UNUSED;
9363 int rflag ATTRIBUTE_UNUSED;
9364{
9365 return NULL_RTX;
9366}
9367
1467e953 9368#endif /* TARGET_ABI_OPEN_VMS */
9caef960 9369\f
9370#if TARGET_ABI_UNICOSMK
9371
9372static void unicosmk_output_module_name PARAMS ((FILE *));
9373static void unicosmk_output_default_externs PARAMS ((FILE *));
9374static void unicosmk_output_dex PARAMS ((FILE *));
9375static void unicosmk_output_externs PARAMS ((FILE *));
9376static void unicosmk_output_addr_vec PARAMS ((FILE *, rtx));
9377static const char *unicosmk_ssib_name PARAMS ((void));
e51fa923 9378static int unicosmk_special_name PARAMS ((const char *));
9caef960 9379
9380/* Define the offset between two registers, one to be eliminated, and the
9381 other its replacement, at the start of a routine. */
9382
int
unicosmk_initial_elimination_offset (from, to)
     int from;
     int to;
{
  int fixed_size;

  /* Size of the register save area; when non-empty, 48 more bytes are
     reserved (part of the 64-byte DSIB minus the slots handled
     elsewhere — see unicosmk_gen_dsib).  */
  fixed_size = alpha_sa_size();
  if (fixed_size != 0)
    fixed_size += 48;

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    return -fixed_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    return 0;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return (ALPHA_ROUND (current_function_outgoing_args_size)
	    + ALPHA_ROUND (get_frame_size()));
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return (ALPHA_ROUND (fixed_size)
	    + ALPHA_ROUND (get_frame_size()
			   + current_function_outgoing_args_size));
  else
    /* Any other register pair is not an eliminable combination.  */
    abort ();
}
9408
/* Output the module name for .ident and .end directives.  We have to strip
   directories and make sure that the module name starts with a letter
   or '$'.  */
9412
9413static void
9414unicosmk_output_module_name (file)
9415 FILE *file;
9416{
9417 const char *name;
9418
9419 /* Strip directories. */
9420
9421 name = strrchr (main_input_filename, '/');
9422 if (name)
9423 ++name;
9424 else
9425 name = main_input_filename;
9426
9427 /* CAM only accepts module names that start with a letter or '$'. We
9428 prefix the module name with a '$' if necessary. */
9429
9430 if (!ISALPHA (*name))
6e957326 9431 putc ('$', file);
9432 output_clean_symbol_name (file, name);
9caef960 9433}
9434
/* Output text to appear at the beginning of an assembler file.  */
9436
void
unicosmk_asm_file_start (file)
     FILE *file;
{
  int i;

  /* Emit the module name via .ident.  */
  fputs ("\t.ident\t", file);
  unicosmk_output_module_name (file);
  fputs ("\n\n", file);

  /* The Unicos/Mk assembler uses different register names.  Instead of trying
     to support them, we simply use micro definitions.  */

  /* CAM has different register names: rN for the integer register N and fN
     for the floating-point register N.  Instead of trying to use these in
     alpha.md, we define the symbols $N and $fN to refer to the appropriate
     register.  */

  for (i = 0; i < 32; ++i)
    fprintf (file, "$%d <- r%d\n", i, i);

  for (i = 0; i < 32; ++i)
    fprintf (file, "$f%d <- f%d\n", i, i);

  putc ('\n', file);

  /* The .align directive fills unused space with zeroes which does not work
     in code sections.  We define the macro 'gcc@code@align' which uses nops
     instead.  Note that it assumes that code sections always have the
     biggest possible alignment since . refers to the current offset from
     the beginning of the section.  */

  fputs ("\t.macro gcc@code@align n\n", file);
  fputs ("gcc@n@bytes = 1 << n\n", file);
  fputs ("gcc@here = . % gcc@n@bytes\n", file);
  fputs ("\t.if ne, gcc@here, 0\n", file);
  fputs ("\t.repeat (gcc@n@bytes - gcc@here) / 4\n", file);
  fputs ("\tbis r31,r31,r31\n", file);
  fputs ("\t.endr\n", file);
  fputs ("\t.endif\n", file);
  fputs ("\t.endm gcc@code@align\n\n", file);

  /* Output extern declarations which should always be visible.  */
  unicosmk_output_default_externs (file);

  /* Open a dummy section.  We always need to be inside a section for the
     section-switching code to work correctly.
     ??? This should be a module id or something like that.  I still have to
     figure out what the rules for those are.  */
  fputs ("\n\t.psect\t$SG00000,data\n", file);
}
9488
9489/* Output text to appear at the end of an assembler file. This includes all
9490 pending extern declarations and DEX expressions. */
9491
void
unicosmk_asm_file_end (file)
     FILE *file;
{
  /* Close the psect left open by the last section switch.  */
  fputs ("\t.endp\n\n", file);

  /* Output all pending externs.  */

  unicosmk_output_externs (file);

  /* Output dex definitions used for functions whose names conflict with
     register names.  */

  unicosmk_output_dex (file);

  /* Terminate the module with ".end <module-name>".  */
  fputs ("\t.end\t", file);
  unicosmk_output_module_name (file);
  putc ('\n', file);
}
9511
9512/* Output the definition of a common variable. */
9513
9514void
9515unicosmk_output_common (file, name, size, align)
9516 FILE *file;
9517 const char *name;
9518 int size;
9519 int align;
9520{
9521 tree name_tree;
9522 printf ("T3E__: common %s\n", name);
9523
9524 common_section ();
9525 fputs("\t.endp\n\n\t.psect ", file);
9526 assemble_name(file, name);
9527 fprintf(file, ",%d,common\n", floor_log2 (align / BITS_PER_UNIT));
9528 fprintf(file, "\t.byte\t0:%d\n", size);
9529
9530 /* Mark the symbol as defined in this module. */
9531 name_tree = get_identifier (name);
9532 TREE_ASM_WRITTEN (name_tree) = 1;
9533}
9534
/* Machine-dependent section flags: SECTION_PUBLIC marks sections holding
   TREE_PUBLIC symbols, SECTION_MAIN the section containing "main".  */
#define SECTION_PUBLIC SECTION_MACH_DEP
#define SECTION_MAIN (SECTION_PUBLIC << 1)
/* Log2 alignment for the section about to be opened; set by
   unicosmk_section_type_flags, consumed by unicosmk_asm_named_section.  */
static int current_section_align;
9538
/* Compute section flags for DECL/NAME.  As a side effect, records the
   section's alignment in current_section_align for the subsequent call
   to unicosmk_asm_named_section.  */

static unsigned int
unicosmk_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc ATTRIBUTE_UNUSED;
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (!decl)
    return flags;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Use the larger of the target's function boundary and the
	 -falign-functions request.  */
      current_section_align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
      if (align_functions_log > current_section_align)
	current_section_align = align_functions_log;

      if (! strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "main"))
	flags |= SECTION_MAIN;
    }
  else
    current_section_align = floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT);

  if (TREE_PUBLIC (decl))
    flags |= SECTION_PUBLIC;

  return flags;
}
9567
9568/* Generate a section name for decl and associate it with the
9569 declaration. */
9570
52470889 9571static void
9caef960 9572unicosmk_unique_section (decl, reloc)
9573 tree decl;
9574 int reloc ATTRIBUTE_UNUSED;
9575{
9576 const char *name;
9577 int len;
9578
9579 if (!decl)
9580 abort ();
9581
9582 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
7b4a38a6 9583 name = alpha_strip_name_encoding (name);
9caef960 9584 len = strlen (name);
9585
9586 if (TREE_CODE (decl) == FUNCTION_DECL)
9587 {
9588 char *string;
9589
9590 /* It is essential that we prefix the section name here because
9591 otherwise the section names generated for constructors and
9592 destructors confuse collect2. */
9593
9594 string = alloca (len + 6);
9595 sprintf (string, "code@%s", name);
9596 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
9597 }
9598 else if (TREE_PUBLIC (decl))
9599 DECL_SECTION_NAME (decl) = build_string (len, name);
9600 else
9601 {
9602 char *string;
9603
9604 string = alloca (len + 6);
9605 sprintf (string, "data@%s", name);
9606 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
9607 }
9608}
9609
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  */
9613
9614static void
9615unicosmk_asm_named_section (name, flags)
9616 const char *name;
9617 unsigned int flags;
9618{
9619 const char *kind;
9620
9621 /* Close the previous section. */
9622
9623 fputs ("\t.endp\n\n", asm_out_file);
9624
9625 /* Find out what kind of section we are opening. */
9626
9627 if (flags & SECTION_MAIN)
9628 fputs ("\t.start\tmain\n", asm_out_file);
9629
9630 if (flags & SECTION_CODE)
9631 kind = "code";
9632 else if (flags & SECTION_PUBLIC)
9633 kind = "common";
9634 else
9635 kind = "data";
9636
9637 if (current_section_align != 0)
9638 fprintf (asm_out_file, "\t.psect\t%s,%d,%s\n", name,
9639 current_section_align, kind);
9640 else
9641 fprintf (asm_out_file, "\t.psect\t%s,%s\n", name, kind);
9642}
9643
/* Force every function and every public symbol into its own section so
   that the per-decl section machinery above always applies.  */

static void
unicosmk_insert_attributes (decl, attr_ptr)
     tree decl;
     tree *attr_ptr ATTRIBUTE_UNUSED;
{
  if (DECL_P (decl)
      && (TREE_PUBLIC (decl) || TREE_CODE (decl) == FUNCTION_DECL))
    unicosmk_unique_section (decl, 0);
}
9653
/* Output an alignment directive.  We have to use the macro 'gcc@code@align'
   in code sections because .align fills unused space with zeroes.  */
9656
9657void
9658unicosmk_output_align (file, align)
9659 FILE *file;
9660 int align;
9661{
9662 if (inside_function)
9663 fprintf (file, "\tgcc@code@align\t%d\n", align);
9664 else
9665 fprintf (file, "\t.align\t%d\n", align);
9666}
9667
9668/* Add a case vector to the current function's list of deferred case
9669 vectors. Case vectors have to be put into a separate section because CAM
9670 does not allow data definitions in code sections. */
9671
void
unicosmk_defer_case_vector (lab, vec)
     rtx lab;
     rtx vec;
{
  struct machine_function *machine = cfun->machine;

  /* Pair the label with its vector and push the pair onto the
     function's deferred-vector list for later output in a data
     section.  */
  vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
  machine->addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec,
					  machine->addr_list);
}
9683
9684/* Output a case vector. */
9685
static void
unicosmk_output_addr_vec (file, vec)
     FILE *file;
     rtx vec;
{
  /* VEC is an EXPR_LIST pair built by unicosmk_defer_case_vector:
     the case label and the ADDR_VEC body.  */
  rtx lab = XEXP (vec, 0);
  rtx body = XEXP (vec, 1);
  int vlen = XVECLEN (body, 0);
  int idx;

  (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (lab));

  for (idx = 0; idx < vlen; idx++)
    {
      ASM_OUTPUT_ADDR_VEC_ELT
        (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
    }
}
9704
9705/* Output current function's deferred case vectors. */
9706
static void
unicosmk_output_deferred_case_vectors (file)
     FILE *file;
{
  struct machine_function *machine = cfun->machine;
  rtx t;

  if (machine->addr_list == NULL_RTX)
    return;

  /* Case vectors must live in a data section — CAM does not allow data
     definitions in code sections.  */
  data_section ();
  for (t = machine->addr_list; t; t = XEXP (t, 1))
    unicosmk_output_addr_vec (file, XEXP (t, 0));
}
9721
9722/* Set up the dynamic subprogram information block (DSIB) and update the
9723 frame pointer register ($15) for subroutines which have a frame. If the
9724 subroutine doesn't have a frame, simply increment $15. */
9725
static void
unicosmk_gen_dsib (imaskP)
     unsigned long * imaskP;
{
  if (alpha_procedure_type == PT_STACK)
    {
      const char *ssib_name;
      rtx mem;

      /* Allocate 64 bytes for the DSIB.  */

      FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
                                  GEN_INT (-64))));
      emit_insn (gen_blockage ());

      /* Save the return address.  Clearing the bit in *imaskP tells the
	 generic save code this register is already handled.  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 56));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
      (*imaskP) &= ~(1L << REG_RA);

      /* Save the old frame pointer.  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 48));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, hard_frame_pointer_rtx));
      (*imaskP) &= ~(1L << HARD_FRAME_POINTER_REGNUM);

      emit_insn (gen_blockage ());

      /* Store the SSIB pointer, staged through register $5 as a
	 scratch.  */

      ssib_name = ggc_strdup (unicosmk_ssib_name ());
      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 32));
      set_mem_alias_set (mem, alpha_sr_alias_set);

      FRP (emit_move_insn (gen_rtx_REG (DImode, 5),
			   gen_rtx_SYMBOL_REF (Pmode, ssib_name)));
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 5)));

      /* Save the CIW index (passed in by the caller in $25 — NOTE(review):
	 presumably per the CAM calling convention; confirm).  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 24));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 25)));

      emit_insn (gen_blockage ());

      /* Set the new frame pointer.  */

      FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
                                  stack_pointer_rtx, GEN_INT (64))));

    }
  else
    {
      /* Increment the frame pointer register to indicate that we do not
         have a frame.  */

      FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
                                  hard_frame_pointer_rtx, GEN_INT (1))));
    }
}
9790
/* Prefix for SSIB section names.  SSIB_PREFIX_LEN must equal
   strlen (SSIB_PREFIX).  */
#define SSIB_PREFIX "__SSIB_"
#define SSIB_PREFIX_LEN 7
9793
9794/* Generate the name of the SSIB section for the current function. */
9795
/* Build "__SSIB_<function-name>" for the current function, truncated to
   fit CAM's name-length limit.  Returns a pointer to a static buffer
   that is overwritten on each call.  */

static const char *
unicosmk_ssib_name ()
{
  /* This is ok since CAM won't be able to deal with names longer than that
     anyway.  */

  static char name[256];

  rtx x;
  const char *fnname;
  int len;

  /* Extract the function's symbol name from its DECL_RTL, which must be
     a MEM around a SYMBOL_REF.  */
  x = DECL_RTL (cfun->decl);
  if (GET_CODE (x) != MEM)
    abort ();
  x = XEXP (x, 0);
  if (GET_CODE (x) != SYMBOL_REF)
    abort ();
  fnname = alpha_strip_name_encoding (XSTR (x, 0));

  /* Truncate so prefix + name + NUL fit in the 256-byte buffer.  */
  len = strlen (fnname);
  if (len + SSIB_PREFIX_LEN > 255)
    len = 255 - SSIB_PREFIX_LEN;

  strcpy (name, SSIB_PREFIX);
  strncpy (name + SSIB_PREFIX_LEN, fnname, len);
  name[len + SSIB_PREFIX_LEN] = 0;

  return name;
}
9826
9827/* Output the static subroutine information block for the current
9828 function. */
9829
static void
unicosmk_output_ssib (file, fnname)
     FILE *file;
     const char *fnname;
{
  int len;
  int i;
  rtx x;
  rtx ciw;
  struct machine_function *machine = cfun->machine;

  ssib_section ();
  fprintf (file, "\t.endp\n\n\t.psect\t%s%s,data\n", user_label_prefix,
	   unicosmk_ssib_name ());

  /* Some required stuff and the function name length.  */

  len = strlen (fnname);
  fprintf (file, "\t.quad\t^X20008%2.2X28\n", len);

  /* Saved registers
     ??? We don't do that yet.  */

  fputs ("\t.quad\t0\n", file);

  /* Function address.  */

  fputs ("\t.quad\t", file);
  assemble_name (file, fnname);
  putc ('\n', file);

  fputs ("\t.quad\t0\n", file);
  fputs ("\t.quad\t0\n", file);

  /* Function name.
     ??? We do it the same way Cray CC does it but this could be
     simplified.  */

  for( i = 0; i < len; i++ )
    fprintf (file, "\t.byte\t%d\n", (int)(fnname[i]));
  /* Pad the name out to a multiple of 8 bytes.  */
  if( (len % 8) == 0 )
    fputs ("\t.quad\t0\n", file);
  else
    fprintf (file, "\t.bits\t%d : 0\n", (8 - (len % 8))*8);

  /* All call information words used in the function.  */

  for (x = machine->first_ciw; x; x = XEXP (x, 1))
    {
      ciw = XEXP (x, 0);
      fprintf (file, "\t.quad\t");
#if HOST_BITS_PER_WIDE_INT == 32
      fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
	       CONST_DOUBLE_HIGH (ciw), CONST_DOUBLE_LOW (ciw));
#else
      fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (ciw));
#endif
      fprintf (file, "\n");
    }
}
9890
9891/* Add a call information word (CIW) to the list of the current function's
9892 CIWs and return its index.
9893
9894 X is a CONST_INT or CONST_DOUBLE representing the CIW. */
9895
rtx
unicosmk_add_call_info_word (x)
     rtx x;
{
  rtx node;
  struct machine_function *machine = cfun->machine;

  /* Append X to the function's CIW list.  */
  node = gen_rtx_EXPR_LIST (VOIDmode, x, NULL_RTX);
  if (machine->first_ciw == NULL_RTX)
    machine->first_ciw = node;
  else
    XEXP (machine->last_ciw, 1) = node;

  machine->last_ciw = node;
  ++machine->ciw_count;

  /* The returned index accounts for the SSIB header entries that
     precede the CIWs (see unicosmk_output_ssib).  */
  return GEN_INT (machine->ciw_count
		  + strlen (current_function_name)/8 + 5);
}
9915
/* Shared buffer for the section-switching strings built below.  */
static char unicosmk_section_buf[100];

/* Return a directive string that closes the current psect and opens a
   fresh, uniquely numbered code psect.  */
char *
unicosmk_text_section ()
{
  static int count = 0;
  sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@text___%d,code",
	   count++);
  return unicosmk_section_buf;
}

/* Likewise for a uniquely numbered data psect.  */
char *
unicosmk_data_section ()
{
  static int count = 1;
  sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@data___%d,data",
	   count++);
  return unicosmk_section_buf;
}
9935
9936/* The Cray assembler doesn't accept extern declarations for symbols which
9937 are defined in the same file. We have to keep track of all global
9938 symbols which are referenced and/or defined in a source file and output
9939 extern declarations for those which are referenced but not defined at
9940 the end of file. */
9941
9942/* List of identifiers for which an extern declaration might have to be
9943 emitted. */
9944
struct unicosmk_extern_list
{
  struct unicosmk_extern_list *next;	/* Next pending extern.  */
  const char *name;			/* Possibly-encoded symbol name.  */
};

/* Head of the list of pending extern declarations.  */
static struct unicosmk_extern_list *unicosmk_extern_head = 0;
9952
9953/* Output extern declarations which are required for every asm file. */
9954
static void
unicosmk_output_default_externs (file)
     FILE *file;
{
  /* Symbols that every Unicos/Mk assembly file must declare.  */
  static const char *const externs[] =
    { "__T3E_MISMATCH" };

  size_t i;

  for (i = 0; i < ARRAY_SIZE (externs); i++)
    fprintf (file, "\t.extern\t%s\n", externs[i]);
}
9970
/* Output extern declarations for global symbols which have been
   referenced but not defined.  */
9973
static void
unicosmk_output_externs (file)
     FILE *file;
{
  struct unicosmk_extern_list *p;
  const char *real_name;
  int len;
  tree name_tree;

  len = strlen (user_label_prefix);
  for (p = unicosmk_extern_head; p != 0; p = p->next)
    {
      /* We have to strip the encoding and possibly remove user_label_prefix
	 from the identifier in order to handle -fleading-underscore and
	 explicit asm names correctly (cf. gcc.dg/asm-names-1.c).  */
      real_name = alpha_strip_name_encoding (p->name);
      if (len && p->name[0] == '*'
	  && !memcmp (real_name, user_label_prefix, len))
	real_name += len;

      /* TREE_ASM_WRITTEN on the identifier marks symbols defined (or
	 already declared) in this module; skip those.  */
      name_tree = get_identifier (real_name);
      if (! TREE_ASM_WRITTEN (name_tree))
	{
	  TREE_ASM_WRITTEN (name_tree) = 1;
	  fputs ("\t.extern\t", file);
	  assemble_name (file, p->name);
	  putc ('\n', file);
	}
    }
}
10004
10005/* Record an extern. */
10006
10007void
10008unicosmk_add_extern (name)
10009 const char *name;
10010{
10011 struct unicosmk_extern_list *p;
10012
10013 p = (struct unicosmk_extern_list *)
92192583 10014 xmalloc (sizeof (struct unicosmk_extern_list));
9caef960 10015 p->next = unicosmk_extern_head;
10016 p->name = name;
10017 unicosmk_extern_head = p;
10018}
10019
10020/* The Cray assembler generates incorrect code if identifiers which
10021 conflict with register names are used as instruction operands. We have
10022 to replace such identifiers with DEX expressions. */
10023
10024/* Structure to collect identifiers which have been replaced by DEX
10025 expressions. */
10026
struct unicosmk_dex {
  struct unicosmk_dex *next;	/* Next replaced identifier.  */
  const char *name;		/* The identifier's name.  */
};

/* List of identifiers which have been replaced by DEX expressions.  The DEX
   number is determined by the position in the list.  */

static struct unicosmk_dex *unicosmk_dex_list = NULL;

/* The number of elements in the DEX list.  */

static int unicosmk_dex_count = 0;
10040
10041/* Check if NAME must be replaced by a DEX expression. */
10042
static int
unicosmk_special_name (name)
     const char *name;
{
  /* Skip a '*' no-encoding marker and a leading '$'.  */
  if (*name == '*')
    ++name;
  if (*name == '$')
    ++name;

  /* Register names start with 'r', 'R', 'f' or 'F'.  */
  if (*name != 'r' && *name != 'f' && *name != 'R' && *name != 'F')
    return 0;

  /* The rest must spell a register number in the range 0..31.  */
  switch (name[1])
    {
    case '1': case '2':
      /* "1", "2", or "10".."29".  */
      return (name[2] == '\0' || (ISDIGIT (name[2]) && name[3] == '\0'));

    case '3':
      /* "3", "30" or "31".  */
      return (name[2] == '\0'
	      || ((name[2] == '0' || name[2] == '1') && name[3] == '\0'));

    default:
      /* A single digit: "0", "4".."9".  */
      return (ISDIGIT (name[1]) && name[2] == '\0');
    }
}
10069
10070/* Return the DEX number if X must be replaced by a DEX expression and 0
10071 otherwise. */
10072
static int
unicosmk_need_dex (x)
     rtx x;
{
  struct unicosmk_dex *dex;
  const char *name;
  int i;

  if (GET_CODE (x) != SYMBOL_REF)
    return 0;

  name = XSTR (x,0);
  if (! unicosmk_special_name (name))
    return 0;

  /* If the name is already on the list, return its existing DEX number;
     the list is newest-first, so numbers count down from the total.  */
  i = unicosmk_dex_count;
  for (dex = unicosmk_dex_list; dex; dex = dex->next)
    {
      if (! strcmp (name, dex->name))
        return i;
      --i;
    }

  /* Otherwise register the name with the next DEX number.  */
  dex = (struct unicosmk_dex *) xmalloc (sizeof (struct unicosmk_dex));
  dex->name = name;
  dex->next = unicosmk_dex_list;
  unicosmk_dex_list = dex;

  ++unicosmk_dex_count;
  return unicosmk_dex_count;
}
10104
10105/* Output the DEX definitions for this file. */
10106
10107static void
10108unicosmk_output_dex (file)
10109 FILE *file;
10110{
10111 struct unicosmk_dex *dex;
10112 int i;
10113
10114 if (unicosmk_dex_list == NULL)
10115 return;
10116
10117 fprintf (file, "\t.dexstart\n");
10118
10119 i = unicosmk_dex_count;
10120 for (dex = unicosmk_dex_list; dex; dex = dex->next)
10121 {
10122 fprintf (file, "\tDEX (%d) = ", i);
10123 assemble_name (file, dex->name);
10124 putc ('\n', file);
10125 --i;
10126 }
10127
10128 fprintf (file, "\t.dexend\n");
10129}
10130
10131#else
10132
/* No-op stub for targets other than Unicos/Mk.  */
static void
unicosmk_output_deferred_case_vectors (file)
     FILE *file ATTRIBUTE_UNUSED;
{}
10137
/* No-op stub for targets other than Unicos/Mk.  */
static void
unicosmk_gen_dsib (imaskP)
     unsigned long * imaskP ATTRIBUTE_UNUSED;
{}
10142
/* No-op stub for targets other than Unicos/Mk.  */
static void
unicosmk_output_ssib (file, fnname)
     FILE * file ATTRIBUTE_UNUSED;
     const char * fnname ATTRIBUTE_UNUSED;
{}
10148
/* Stub for targets other than Unicos/Mk: no CIW list exists.  */
rtx
unicosmk_add_call_info_word (x)
     rtx x ATTRIBUTE_UNUSED;
{
  return NULL_RTX;
}
10155
/* Stub for targets other than Unicos/Mk: nothing ever needs a DEX.  */
static int
unicosmk_need_dex (x)
     rtx x ATTRIBUTE_UNUSED;
{
  return 0;
}
10162
10163#endif /* TARGET_ABI_UNICOSMK */
1f3233d1 10164
10165#include "gt-alpha.h"
10166