/* Subroutines used for code generation on the DEC Alpha.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
   2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "expr.h"
#include "optabs.h"
#include "reload.h"
#include "obstack.h"
#include "except.h"
#include "function.h"
#include "diagnostic-core.h"
#include "ggc.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "splay-tree.h"
#include "cfglayout.h"
#include "gimple.h"
#include "tree-flow.h"
#include "tree-stdarg.h"
#include "tm-constrs.h"
#include "df.h"
#include "libfuncs.h"
#include "opts.h"

/* Specify which cpu to schedule for.  */
enum processor_type alpha_tune;

/* Which cpu we're generating code for.  */
enum processor_type alpha_cpu;

static const char * const alpha_cpu_name[] =
{
  "ev4", "ev5", "ev6"
};

/* Specify how accurate floating-point traps need to be.  */

enum alpha_trap_precision alpha_tp;

/* Specify the floating-point rounding mode.  */

enum alpha_fp_rounding_mode alpha_fprm;

/* Specify which things cause traps.  */

enum alpha_fp_trap_mode alpha_fptm;

/* Nonzero if inside of a function, because the Alpha asm can't
   handle .files inside of functions.  */

static int inside_function = FALSE;

/* The number of cycles of latency we should assume on memory reads.  */

int alpha_memory_latency = 3;

/* Whether the function needs the GP.  */

static int alpha_function_needs_gp;

/* The alias set for prologue/epilogue register save/restore.  */

static GTY(()) alias_set_type alpha_sr_alias_set;

/* The assembler name of the current function.  */

static const char *alpha_fnname;

/* The next explicit relocation sequence number.  */
extern GTY(()) int alpha_next_sequence_number;
int alpha_next_sequence_number = 1;

/* The literal and gpdisp sequence numbers for this insn, as printed
   by %# and %* respectively.  */
extern GTY(()) int alpha_this_literal_sequence_number;
extern GTY(()) int alpha_this_gpdisp_sequence_number;
int alpha_this_literal_sequence_number;
int alpha_this_gpdisp_sequence_number;

/* Costs of various operations on the different architectures.  */

struct alpha_rtx_cost_data
{
  unsigned char fp_add;
  unsigned char fp_mult;
  unsigned char fp_div_sf;
  unsigned char fp_div_df;
  unsigned char int_mult_si;
  unsigned char int_mult_di;
  unsigned char int_shift;
  unsigned char int_cmov;
  unsigned short int_div;
};

static struct alpha_rtx_cost_data const alpha_rtx_cost_data[PROCESSOR_MAX] =
{
  { /* EV4 */
    COSTS_N_INSNS (6),		/* fp_add */
    COSTS_N_INSNS (6),		/* fp_mult */
    COSTS_N_INSNS (34),		/* fp_div_sf */
    COSTS_N_INSNS (63),		/* fp_div_df */
    COSTS_N_INSNS (23),		/* int_mult_si */
    COSTS_N_INSNS (23),		/* int_mult_di */
    COSTS_N_INSNS (2),		/* int_shift */
    COSTS_N_INSNS (2),		/* int_cmov */
    COSTS_N_INSNS (97),		/* int_div */
  },
  { /* EV5 */
    COSTS_N_INSNS (4),		/* fp_add */
    COSTS_N_INSNS (4),		/* fp_mult */
    COSTS_N_INSNS (15),		/* fp_div_sf */
    COSTS_N_INSNS (22),		/* fp_div_df */
    COSTS_N_INSNS (8),		/* int_mult_si */
    COSTS_N_INSNS (12),		/* int_mult_di */
    COSTS_N_INSNS (1) + 1,	/* int_shift */
    COSTS_N_INSNS (1),		/* int_cmov */
    COSTS_N_INSNS (83),		/* int_div */
  },
  { /* EV6 */
    COSTS_N_INSNS (4),		/* fp_add */
    COSTS_N_INSNS (4),		/* fp_mult */
    COSTS_N_INSNS (12),		/* fp_div_sf */
    COSTS_N_INSNS (15),		/* fp_div_df */
    COSTS_N_INSNS (7),		/* int_mult_si */
    COSTS_N_INSNS (7),		/* int_mult_di */
    COSTS_N_INSNS (1),		/* int_shift */
    COSTS_N_INSNS (2),		/* int_cmov */
    COSTS_N_INSNS (86),		/* int_div */
  },
};

/* Similar but tuned for code size instead of execution latency.  The
   extra +N is fractional cost tuning based on latency.  It's used to
   encourage use of cheaper insns like shift, but only if there's just
   one of them.  */

static struct alpha_rtx_cost_data const alpha_rtx_cost_size =
{
  COSTS_N_INSNS (1),		/* fp_add */
  COSTS_N_INSNS (1),		/* fp_mult */
  COSTS_N_INSNS (1),		/* fp_div_sf */
  COSTS_N_INSNS (1) + 1,	/* fp_div_df */
  COSTS_N_INSNS (1) + 1,	/* int_mult_si */
  COSTS_N_INSNS (1) + 2,	/* int_mult_di */
  COSTS_N_INSNS (1),		/* int_shift */
  COSTS_N_INSNS (1),		/* int_cmov */
  COSTS_N_INSNS (6),		/* int_div */
};

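/* Illustrative annotation (not part of the original source):
   COSTS_N_INSNS (n) expands to n * 4, so the "+ N" adjustments in the
   size table above are fractional, quarter-insn costs; for example
   COSTS_N_INSNS (1) + 1 charges 1.25 insns.  */
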
/* Get the number of args of a function in one of two ways.  */
#if TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK
#define NUM_ARGS crtl->args.info.num_args
#else
#define NUM_ARGS crtl->args.info
#endif

#define REG_PV 27
#define REG_RA 26

/* Declarations of static functions.  */
static struct machine_function *alpha_init_machine_status (void);
static rtx alpha_emit_xfloating_compare (enum rtx_code *, rtx, rtx);

#if TARGET_ABI_OPEN_VMS
static void alpha_write_linkage (FILE *, const char *, tree);
static bool vms_valid_pointer_mode (enum machine_mode);
#endif

static void unicosmk_output_deferred_case_vectors (FILE *);
static void unicosmk_gen_dsib (unsigned long *);
static void unicosmk_output_ssib (FILE *, const char *);
static int unicosmk_need_dex (rtx);
\f
/* Implement TARGET_OPTION_OPTIMIZATION_TABLE.  */
static const struct default_options alpha_option_optimization_table[] =
  {
    { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };

/* Implement TARGET_HANDLE_OPTION.  */

static bool
alpha_handle_option (struct gcc_options *opts, struct gcc_options *opts_set,
		     const struct cl_decoded_option *decoded,
		     location_t loc ATTRIBUTE_UNUSED)
{
  size_t code = decoded->opt_index;
  const char *arg = decoded->arg;
  int value = decoded->value;

  gcc_assert (opts == &global_options);
  gcc_assert (opts_set == &global_options_set);

  switch (code)
    {
    case OPT_mfp_regs:
      if (value == 0)
	target_flags |= MASK_SOFT_FP;
      break;

    case OPT_mieee:
    case OPT_mieee_with_inexact:
      target_flags |= MASK_IEEE_CONFORMANT;
      break;

    case OPT_mtls_size_:
      if (value != 16 && value != 32 && value != 64)
	error ("bad value %qs for -mtls-size switch", arg);
      break;
    }

  return true;
}

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
/* Implement TARGET_MANGLE_TYPE.  */

static const char *
alpha_mangle_type (const_tree type)
{
  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
      && TARGET_LONG_DOUBLE_128)
    return "g";

  /* For all other types, use normal C++ mangling.  */
  return NULL;
}
#endif

/* Parse target option strings.  */

static void
alpha_option_override (void)
{
  static const struct cpu_table {
    const char *const name;
    const enum processor_type processor;
    const int flags;
  } cpu_table[] = {
    { "ev4",     PROCESSOR_EV4, 0 },
    { "ev45",    PROCESSOR_EV4, 0 },
    { "21064",   PROCESSOR_EV4, 0 },
    { "ev5",     PROCESSOR_EV5, 0 },
    { "21164",   PROCESSOR_EV5, 0 },
    { "ev56",    PROCESSOR_EV5, MASK_BWX },
    { "21164a",  PROCESSOR_EV5, MASK_BWX },
    { "pca56",   PROCESSOR_EV5, MASK_BWX|MASK_MAX },
    { "21164PC", PROCESSOR_EV5, MASK_BWX|MASK_MAX },
    { "21164pc", PROCESSOR_EV5, MASK_BWX|MASK_MAX },
    { "ev6",     PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX },
    { "21264",   PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX },
    { "ev67",    PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX|MASK_CIX },
    { "21264a",  PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX|MASK_CIX }
  };

  int const ct_size = ARRAY_SIZE (cpu_table);
  int i;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Unicos/Mk doesn't have shared libraries.  */
  if (TARGET_ABI_UNICOSMK && flag_pic)
    {
      warning (0, "-f%s ignored for Unicos/Mk (not supported)",
	       (flag_pic > 1) ? "PIC" : "pic");
      flag_pic = 0;
    }

  /* On Unicos/Mk, the native compiler consistently generates /d suffixes for
     floating-point instructions.  Make that the default for this target.  */
  if (TARGET_ABI_UNICOSMK)
    alpha_fprm = ALPHA_FPRM_DYN;
  else
    alpha_fprm = ALPHA_FPRM_NORM;

  alpha_tp = ALPHA_TP_PROG;
  alpha_fptm = ALPHA_FPTM_N;

  /* We cannot use su and sui qualifiers for conversion instructions on
     Unicos/Mk.  I'm not sure if this is due to assembler or hardware
     limitations.  Right now, we issue a warning if -mieee is specified
     and then ignore it; eventually, we should either get it right or
     disable the option altogether.  */

  if (TARGET_IEEE)
    {
      if (TARGET_ABI_UNICOSMK)
	warning (0, "-mieee not supported on Unicos/Mk");
      else
	{
	  alpha_tp = ALPHA_TP_INSN;
	  alpha_fptm = ALPHA_FPTM_SU;
	}
    }

  if (TARGET_IEEE_WITH_INEXACT)
    {
      if (TARGET_ABI_UNICOSMK)
	warning (0, "-mieee-with-inexact not supported on Unicos/Mk");
      else
	{
	  alpha_tp = ALPHA_TP_INSN;
	  alpha_fptm = ALPHA_FPTM_SUI;
	}
    }

  if (alpha_tp_string)
    {
      if (! strcmp (alpha_tp_string, "p"))
	alpha_tp = ALPHA_TP_PROG;
      else if (! strcmp (alpha_tp_string, "f"))
	alpha_tp = ALPHA_TP_FUNC;
      else if (! strcmp (alpha_tp_string, "i"))
	alpha_tp = ALPHA_TP_INSN;
      else
	error ("bad value %qs for -mtrap-precision switch", alpha_tp_string);
    }

  if (alpha_fprm_string)
    {
      if (! strcmp (alpha_fprm_string, "n"))
	alpha_fprm = ALPHA_FPRM_NORM;
      else if (! strcmp (alpha_fprm_string, "m"))
	alpha_fprm = ALPHA_FPRM_MINF;
      else if (! strcmp (alpha_fprm_string, "c"))
	alpha_fprm = ALPHA_FPRM_CHOP;
      else if (! strcmp (alpha_fprm_string, "d"))
	alpha_fprm = ALPHA_FPRM_DYN;
      else
	error ("bad value %qs for -mfp-rounding-mode switch",
	       alpha_fprm_string);
    }

  if (alpha_fptm_string)
    {
      if (strcmp (alpha_fptm_string, "n") == 0)
	alpha_fptm = ALPHA_FPTM_N;
      else if (strcmp (alpha_fptm_string, "u") == 0)
	alpha_fptm = ALPHA_FPTM_U;
      else if (strcmp (alpha_fptm_string, "su") == 0)
	alpha_fptm = ALPHA_FPTM_SU;
      else if (strcmp (alpha_fptm_string, "sui") == 0)
	alpha_fptm = ALPHA_FPTM_SUI;
      else
	error ("bad value %qs for -mfp-trap-mode switch", alpha_fptm_string);
    }

  if (alpha_cpu_string)
    {
      for (i = 0; i < ct_size; i++)
	if (! strcmp (alpha_cpu_string, cpu_table [i].name))
	  {
	    alpha_tune = alpha_cpu = cpu_table [i].processor;
	    target_flags &= ~ (MASK_BWX | MASK_MAX | MASK_FIX | MASK_CIX);
	    target_flags |= cpu_table [i].flags;
	    break;
	  }
      if (i == ct_size)
	error ("bad value %qs for -mcpu switch", alpha_cpu_string);
    }

  if (alpha_tune_string)
    {
      for (i = 0; i < ct_size; i++)
	if (! strcmp (alpha_tune_string, cpu_table [i].name))
	  {
	    alpha_tune = cpu_table [i].processor;
	    break;
	  }
      if (i == ct_size)
	error ("bad value %qs for -mtune switch", alpha_tune_string);
    }

  /* Do some sanity checks on the above options.  */

  if (TARGET_ABI_UNICOSMK && alpha_fptm != ALPHA_FPTM_N)
    {
      warning (0, "trap mode not supported on Unicos/Mk");
      alpha_fptm = ALPHA_FPTM_N;
    }

  if ((alpha_fptm == ALPHA_FPTM_SU || alpha_fptm == ALPHA_FPTM_SUI)
      && alpha_tp != ALPHA_TP_INSN && alpha_cpu != PROCESSOR_EV6)
    {
      warning (0, "fp software completion requires -mtrap-precision=i");
      alpha_tp = ALPHA_TP_INSN;
    }

  if (alpha_cpu == PROCESSOR_EV6)
    {
      /* Except for EV6 pass 1 (not released), we always have precise
	 arithmetic traps.  Which means we can do software completion
	 without minding trap shadows.  */
      alpha_tp = ALPHA_TP_PROG;
    }

  if (TARGET_FLOAT_VAX)
    {
      if (alpha_fprm == ALPHA_FPRM_MINF || alpha_fprm == ALPHA_FPRM_DYN)
	{
	  warning (0, "rounding mode not supported for VAX floats");
	  alpha_fprm = ALPHA_FPRM_NORM;
	}
      if (alpha_fptm == ALPHA_FPTM_SUI)
	{
	  warning (0, "trap mode not supported for VAX floats");
	  alpha_fptm = ALPHA_FPTM_SU;
	}
      if (target_flags_explicit & MASK_LONG_DOUBLE_128)
	warning (0, "128-bit long double not supported for VAX floats");
      target_flags &= ~MASK_LONG_DOUBLE_128;
    }

  {
    char *end;
    int lat;

    if (!alpha_mlat_string)
      alpha_mlat_string = "L1";

    if (ISDIGIT ((unsigned char)alpha_mlat_string[0])
	&& (lat = strtol (alpha_mlat_string, &end, 10), *end == '\0'))
      ;
    else if ((alpha_mlat_string[0] == 'L' || alpha_mlat_string[0] == 'l')
	     && ISDIGIT ((unsigned char)alpha_mlat_string[1])
	     && alpha_mlat_string[2] == '\0')
      {
	static int const cache_latency[][4] =
	{
	  { 3, 30, -1 },	/* ev4 -- Bcache is a guess */
	  { 2, 12, 38 },	/* ev5 -- Bcache from PC164 LMbench numbers */
	  { 3, 12, 30 },	/* ev6 -- Bcache from DS20 LMbench.  */
	};

	lat = alpha_mlat_string[1] - '0';
	if (lat <= 0 || lat > 3 || cache_latency[alpha_tune][lat-1] == -1)
	  {
	    warning (0, "L%d cache latency unknown for %s",
		     lat, alpha_cpu_name[alpha_tune]);
	    lat = 3;
	  }
	else
	  lat = cache_latency[alpha_tune][lat-1];
      }
    else if (! strcmp (alpha_mlat_string, "main"))
      {
	/* Most current memories have about 370ns latency.  This is
	   a reasonable guess for a fast cpu.  */
	lat = 150;
      }
    else
      {
	warning (0, "bad value %qs for -mmemory-latency", alpha_mlat_string);
	lat = 3;
      }

    alpha_memory_latency = lat;
  }
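
  /* Illustrative annotation (not part of the original source): the
     accepted -mmemory-latency forms parsed above are a plain cycle
     count ("-mmemory-latency=3"), a cache level ("L1", "L2" or "L3";
     e.g. "L2" while tuning for ev5 yields 12 cycles from the table),
     or "main", which assumes 150 cycles.  */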

  /* Default the definition of "small data" to 8 bytes.  */
  if (!global_options_set.x_g_switch_value)
    g_switch_value = 8;

  /* Infer TARGET_SMALL_DATA from -fpic/-fPIC.  */
  if (flag_pic == 1)
    target_flags |= MASK_SMALL_DATA;
  else if (flag_pic == 2)
    target_flags &= ~MASK_SMALL_DATA;

  /* Align labels and loops for optimal branching.  */
  /* ??? Kludge these by not doing anything if we don't optimize and also if
     we are writing ECOFF symbols to work around a bug in DEC's assembler.  */
  if (optimize > 0 && write_symbols != SDB_DEBUG)
    {
      if (align_loops <= 0)
	align_loops = 16;
      if (align_jumps <= 0)
	align_jumps = 16;
    }
  if (align_functions <= 0)
    align_functions = 16;

  /* Acquire a unique set number for our register saves and restores.  */
  alpha_sr_alias_set = new_alias_set ();

  /* Register variables and functions with the garbage collector.  */

  /* Set up function hooks.  */
  init_machine_status = alpha_init_machine_status;

  /* Tell the compiler when we're using VAX floating point.  */
  if (TARGET_FLOAT_VAX)
    {
      REAL_MODE_FORMAT (SFmode) = &vax_f_format;
      REAL_MODE_FORMAT (DFmode) = &vax_g_format;
      REAL_MODE_FORMAT (TFmode) = NULL;
    }

#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
  if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
    target_flags |= MASK_LONG_DOUBLE_128;
#endif

  /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
     can be optimized to ap = __builtin_next_arg (0).  */
  if (TARGET_ABI_UNICOSMK)
    targetm.expand_builtin_va_start = NULL;
}
\f
/* Returns 1 if VALUE is a mask that contains full bytes of zero or ones.  */

int
zap_mask (HOST_WIDE_INT value)
{
  int i;

  for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
       i++, value >>= 8)
    if ((value & 0xff) != 0 && (value & 0xff) != 0xff)
      return 0;

  return 1;
}

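/* Illustrative annotation (not part of the original source): zap_mask
   accepts exactly those constants realizable as a zapnot byte mask.
   E.g. zap_mask (0xffff0000000000ff) returns 1, since every byte is
   0x00 or 0xff, while zap_mask (0x00000000ffff1200) returns 0 because
   of the mixed 0x12 byte.  */
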
/* Return true if OP is valid for a particular TLS relocation.
   We are already guaranteed that OP is a CONST.  */

int
tls_symbolic_operand_1 (rtx op, int size, int unspec)
{
  op = XEXP (op, 0);

  if (GET_CODE (op) != UNSPEC || XINT (op, 1) != unspec)
    return 0;
  op = XVECEXP (op, 0, 0);

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  switch (SYMBOL_REF_TLS_MODEL (op))
    {
    case TLS_MODEL_LOCAL_DYNAMIC:
      return unspec == UNSPEC_DTPREL && size == alpha_tls_size;
    case TLS_MODEL_INITIAL_EXEC:
      return unspec == UNSPEC_TPREL && size == 64;
    case TLS_MODEL_LOCAL_EXEC:
      return unspec == UNSPEC_TPREL && size == alpha_tls_size;
    default:
      gcc_unreachable ();
    }
}

/* Used by aligned_memory_operand and unaligned_memory_operand to
   resolve what reload is going to do with OP if it's a register.  */

rtx
resolve_reload_operand (rtx op)
{
  if (reload_in_progress)
    {
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (REG_P (tmp)
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];
	  if (op == 0)
	    return 0;
	}
    }
  return op;
}

/* The scalar modes supported differ from the default check-what-c-supports
   version in that sometimes TFmode is available even when long double
   indicates only DFmode.  On unicosmk, we have the situation that HImode
   doesn't map to any C type, but of course we still support that.  */

static bool
alpha_scalar_mode_supported_p (enum machine_mode mode)
{
  switch (mode)
    {
    case QImode:
    case HImode:
    case SImode:
    case DImode:
    case TImode: /* via optabs.c */
      return true;

    case SFmode:
    case DFmode:
      return true;

    case TFmode:
      return TARGET_HAS_XFLOATING_LIBS;

    default:
      return false;
    }
}

/* Alpha implements a couple of integer vector mode operations when
   TARGET_MAX is enabled.  We do not check TARGET_MAX here, however,
   which allows the vectorizer to operate on e.g. move instructions,
   or when expand_vector_operations can do something useful.  */

static bool
alpha_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V8QImode || mode == V4HImode || mode == V2SImode;
}

/* Return 1 if this function can directly return via $26.  */

int
direct_return (void)
{
  return (! TARGET_ABI_OPEN_VMS && ! TARGET_ABI_UNICOSMK
	  && reload_completed
	  && alpha_sa_size () == 0
	  && get_frame_size () == 0
	  && crtl->outgoing_args_size == 0
	  && crtl->args.pretend_args_size == 0);
}

/* Return the ADDR_VEC associated with a tablejump insn.  */

rtx
alpha_tablejump_addr_vec (rtx insn)
{
  rtx tmp;

  tmp = JUMP_LABEL (insn);
  if (!tmp)
    return NULL_RTX;
  tmp = NEXT_INSN (tmp);
  if (!tmp)
    return NULL_RTX;
  if (JUMP_P (tmp)
      && GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)
    return PATTERN (tmp);
  return NULL_RTX;
}

/* Return the label of the predicted edge, or CONST0_RTX if we don't know.  */

rtx
alpha_tablejump_best_label (rtx insn)
{
  rtx jump_table = alpha_tablejump_addr_vec (insn);
  rtx best_label = NULL_RTX;

  /* ??? Once the CFG doesn't keep getting completely rebuilt, look
     there for edge frequency counts from profile data.  */

  if (jump_table)
    {
      int n_labels = XVECLEN (jump_table, 1);
      int best_count = -1;
      int i, j;

      for (i = 0; i < n_labels; i++)
	{
	  int count = 1;

	  for (j = i + 1; j < n_labels; j++)
	    if (XEXP (XVECEXP (jump_table, 1, i), 0)
		== XEXP (XVECEXP (jump_table, 1, j), 0))
	      count++;

	  if (count > best_count)
	    best_count = count, best_label = XVECEXP (jump_table, 1, i);
	}
    }

  return best_label ? best_label : const0_rtx;
}

/* Return the TLS model to use for SYMBOL.  */

static enum tls_model
tls_symbolic_operand_type (rtx symbol)
{
  enum tls_model model;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return TLS_MODEL_NONE;
  model = SYMBOL_REF_TLS_MODEL (symbol);

  /* Local-exec with a 64-bit size is the same code as initial-exec.  */
  if (model == TLS_MODEL_LOCAL_EXEC && alpha_tls_size == 64)
    model = TLS_MODEL_INITIAL_EXEC;

  return model;
}
\f
/* Return true if the function DECL will share the same GP as any
   function in the current unit of translation.  */

static bool
decl_has_samegp (const_tree decl)
{
  /* Functions that are not local can be overridden, and thus may
     not share the same gp.  */
  if (!(*targetm.binds_local_p) (decl))
    return false;

  /* If -msmall-data is in effect, assume that there is only one GP
     for the module, and so any local symbol has this property.  We
     need explicit relocations to be able to enforce this for symbols
     not defined in this unit of translation, however.  */
  if (TARGET_EXPLICIT_RELOCS && TARGET_SMALL_DATA)
    return true;

  /* Functions that are not external are defined in this UoT.  */
  /* ??? Irritatingly, static functions not yet emitted are still
     marked "external".  Apply this to non-static functions only.  */
  return !TREE_PUBLIC (decl) || !DECL_EXTERNAL (decl);
}

/* Return true if EXP should be placed in the small data section.  */

static bool
alpha_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
	 in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
	return true;
    }

  return false;
}

#if TARGET_ABI_OPEN_VMS
static bool
vms_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || mode == DImode);
}

static bool
alpha_linkage_symbol_p (const char *symname)
{
  int symlen = strlen (symname);

  if (symlen > 4)
    return strcmp (&symname [symlen - 4], "..lk") == 0;

  return false;
}

#define LINKAGE_SYMBOL_REF_P(X) \
  ((GET_CODE (X) == SYMBOL_REF \
    && alpha_linkage_symbol_p (XSTR (X, 0))) \
   || (GET_CODE (X) == CONST \
       && GET_CODE (XEXP (X, 0)) == PLUS \
       && GET_CODE (XEXP (XEXP (X, 0), 0)) == SYMBOL_REF \
       && alpha_linkage_symbol_p (XSTR (XEXP (XEXP (X, 0), 0), 0))))
#endif

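/* Illustrative annotation (not part of the original source):
   alpha_linkage_symbol_p ("foo..lk") returns true, so both a plain
   SYMBOL_REF named "foo..lk" and a CONST of (plus (symbol_ref
   "foo..lk") N) satisfy LINKAGE_SYMBOL_REF_P.  */
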
/* legitimate_address_p recognizes an RTL expression that is a valid
   memory address for an instruction.  The MODE argument is the
   machine mode for the MEM expression that wants to use this address.

   For Alpha, we have either a constant address or the sum of a
   register and a constant address, or just a register.  For DImode,
   any of those forms can be surrounded with an AND that clears the
   low-order three bits; this is an "unaligned" access.  */

static bool
alpha_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  /* If this is an ldq_u type address, discard the outer AND.  */
  if (mode == DImode
      && GET_CODE (x) == AND
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) == -8)
    x = XEXP (x, 0);

  /* Discard non-paradoxical subregs.  */
  if (GET_CODE (x) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (x))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
    x = SUBREG_REG (x);

  /* Unadorned general registers are valid.  */
  if (REG_P (x)
      && (strict
	  ? STRICT_REG_OK_FOR_BASE_P (x)
	  : NONSTRICT_REG_OK_FOR_BASE_P (x)))
    return true;

  /* Constant addresses (i.e. +/- 32k) are valid.  */
  if (CONSTANT_ADDRESS_P (x))
    return true;

#if TARGET_ABI_OPEN_VMS
  if (LINKAGE_SYMBOL_REF_P (x))
    return true;
#endif

  /* Register plus a small constant offset is valid.  */
  if (GET_CODE (x) == PLUS)
    {
      rtx ofs = XEXP (x, 1);
      x = XEXP (x, 0);

      /* Discard non-paradoxical subregs.  */
      if (GET_CODE (x) == SUBREG
	  && (GET_MODE_SIZE (GET_MODE (x))
	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	x = SUBREG_REG (x);

      if (REG_P (x))
	{
	  if (! strict
	      && NONSTRICT_REG_OK_FP_BASE_P (x)
	      && CONST_INT_P (ofs))
	    return true;
	  if ((strict
	       ? STRICT_REG_OK_FOR_BASE_P (x)
	       : NONSTRICT_REG_OK_FOR_BASE_P (x))
	      && CONSTANT_ADDRESS_P (ofs))
	    return true;
	}
    }

  /* If we're managing explicit relocations, LO_SUM is valid, as are small
     data symbols.  Avoid explicit relocations of modes larger than word
     mode since i.e. $LC0+8($1) can fold around +/- 32k offset.  */
  else if (TARGET_EXPLICIT_RELOCS
	   && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
    {
      if (small_symbolic_operand (x, Pmode))
	return true;

      if (GET_CODE (x) == LO_SUM)
	{
	  rtx ofs = XEXP (x, 1);
	  x = XEXP (x, 0);

	  /* Discard non-paradoxical subregs.  */
	  if (GET_CODE (x) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (x))
		  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	    x = SUBREG_REG (x);

	  /* Must have a valid base register.  */
	  if (! (REG_P (x)
		 && (strict
		     ? STRICT_REG_OK_FOR_BASE_P (x)
		     : NONSTRICT_REG_OK_FOR_BASE_P (x))))
	    return false;

	  /* The symbol must be local.  */
	  if (local_symbolic_operand (ofs, Pmode)
	      || dtp32_symbolic_operand (ofs, Pmode)
	      || tp32_symbolic_operand (ofs, Pmode))
	    return true;
	}
    }

  return false;
}

/* Build the SYMBOL_REF for __tls_get_addr.  */

static GTY(()) rtx tls_get_addr_libfunc;

static rtx
get_tls_get_addr (void)
{
  if (!tls_get_addr_libfunc)
    tls_get_addr_libfunc = init_one_libfunc ("__tls_get_addr");
  return tls_get_addr_libfunc;
}

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.  */

static rtx
alpha_legitimize_address_1 (rtx x, rtx scratch, enum machine_mode mode)
{
  HOST_WIDE_INT addend;

  /* If the address is (plus reg const_int) and the CONST_INT is not a
     valid offset, compute the high part of the constant and add it to
     the register.  Then our address is (plus temp low-part-const).  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && CONST_INT_P (XEXP (x, 1))
      && ! CONSTANT_ADDRESS_P (XEXP (x, 1)))
    {
      addend = INTVAL (XEXP (x, 1));
      x = XEXP (x, 0);
      goto split_addend;
    }

  /* If the address is (const (plus FOO const_int)), find the low-order
     part of the CONST_INT.  Then load FOO plus any high-order part of the
     CONST_INT into a register.  Our address is (plus reg low-part-const).
     This is done to reduce the number of GOT entries.  */
  if (can_create_pseudo_p ()
      && GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
    {
      addend = INTVAL (XEXP (XEXP (x, 0), 1));
      x = force_reg (Pmode, XEXP (XEXP (x, 0), 0));
      goto split_addend;
    }

  /* If we have a (plus reg const), emit the load as in (2), then add
     the two registers, and finally generate (plus reg low-part-const) as
     our address.  */
  if (can_create_pseudo_p ()
      && GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (XEXP (x, 1), 0), 1)))
    {
      addend = INTVAL (XEXP (XEXP (XEXP (x, 1), 0), 1));
      x = expand_simple_binop (Pmode, PLUS, XEXP (x, 0),
			       XEXP (XEXP (XEXP (x, 1), 0), 0),
			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
      goto split_addend;
    }

  /* If this is a local symbol, split the address into HIGH/LO_SUM parts.
     Avoid modes larger than word mode since i.e. $LC0+8($1) can fold
     around +/- 32k offset.  */
  if (TARGET_EXPLICIT_RELOCS
      && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
      && symbolic_operand (x, Pmode))
    {
      rtx r0, r16, eqv, tga, tp, insn, dest, seq;

      switch (tls_symbolic_operand_type (x))
	{
	case TLS_MODEL_NONE:
	  break;

	case TLS_MODEL_GLOBAL_DYNAMIC:
	  start_sequence ();

	  r0 = gen_rtx_REG (Pmode, 0);
	  r16 = gen_rtx_REG (Pmode, 16);
	  tga = get_tls_get_addr ();
	  dest = gen_reg_rtx (Pmode);
	  seq = GEN_INT (alpha_next_sequence_number++);

	  emit_insn (gen_movdi_er_tlsgd (r16, pic_offset_table_rtx, x, seq));
	  insn = gen_call_value_osf_tlsgd (r0, tga, seq);
	  insn = emit_call_insn (insn);
	  RTL_CONST_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);

	  insn = get_insns ();
	  end_sequence ();

	  emit_libcall_block (insn, dest, r0, x);
	  return dest;

	case TLS_MODEL_LOCAL_DYNAMIC:
	  start_sequence ();

	  r0 = gen_rtx_REG (Pmode, 0);
	  r16 = gen_rtx_REG (Pmode, 16);
	  tga = get_tls_get_addr ();
	  scratch = gen_reg_rtx (Pmode);
	  seq = GEN_INT (alpha_next_sequence_number++);

	  emit_insn (gen_movdi_er_tlsldm (r16, pic_offset_table_rtx, seq));
	  insn = gen_call_value_osf_tlsldm (r0, tga, seq);
	  insn = emit_call_insn (insn);
	  RTL_CONST_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);

	  insn = get_insns ();
	  end_sequence ();

	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLDM_CALL);
	  emit_libcall_block (insn, scratch, r0, eqv);

	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);

	  if (alpha_tls_size == 64)
	    {
	      dest = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, dest, eqv));
	      emit_insn (gen_adddi3 (dest, dest, scratch));
	      return dest;
	    }
	  if (alpha_tls_size == 32)
	    {
	      insn = gen_rtx_HIGH (Pmode, eqv);
	      insn = gen_rtx_PLUS (Pmode, scratch, insn);
	      scratch = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, scratch, insn));
	    }
	  return gen_rtx_LO_SUM (Pmode, scratch, eqv);

	case TLS_MODEL_INITIAL_EXEC:
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);
	  tp = gen_reg_rtx (Pmode);
	  scratch = gen_reg_rtx (Pmode);
	  dest = gen_reg_rtx (Pmode);

	  emit_insn (gen_load_tp (tp));
	  emit_insn (gen_rtx_SET (VOIDmode, scratch, eqv));
	  emit_insn (gen_adddi3 (dest, tp, scratch));
	  return dest;

	case TLS_MODEL_LOCAL_EXEC:
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);
	  tp = gen_reg_rtx (Pmode);

	  emit_insn (gen_load_tp (tp));
	  if (alpha_tls_size == 32)
	    {
	      insn = gen_rtx_HIGH (Pmode, eqv);
	      insn = gen_rtx_PLUS (Pmode, tp, insn);
	      tp = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, tp, insn));
	    }
	  return gen_rtx_LO_SUM (Pmode, tp, eqv);

	default:
	  gcc_unreachable ();
	}

      if (local_symbolic_operand (x, Pmode))
	{
	  if (small_symbolic_operand (x, Pmode))
	    return x;
	  else
	    {
	      if (can_create_pseudo_p ())
		scratch = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, scratch,
				      gen_rtx_HIGH (Pmode, x)));
	      return gen_rtx_LO_SUM (Pmode, scratch, x);
	    }
	}
    }

  return NULL;

 split_addend:
  {
    HOST_WIDE_INT low, high;

    low = ((addend & 0xffff) ^ 0x8000) - 0x8000;
    addend -= low;
    high = ((addend & 0xffffffff) ^ 0x80000000) - 0x80000000;
    addend -= high;

    if (addend)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
			       (!can_create_pseudo_p () ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);
    if (high)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
			       (!can_create_pseudo_p () ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);

    return plus_constant (x, low);
  }
}

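/* Illustrative annotation (not part of the original source): the
   split_addend arithmetic above sign-extends the low 16 bits and the
   next 32 bits so each piece fits an lda/ldah displacement.  For
   addend = 0x18000:

     low  = ((0x8000 ^ 0x8000) - 0x8000)          = -0x8000
     high = ((0x20000 ^ 0x80000000) - 0x80000000) =  0x20000

   so reg + 0x18000 is reassociated as (reg + 0x20000) - 0x8000; the
   0x20000 part can go out as a single ldah (2 << 16) and the low part
   folds into the final displacement, with low + high == 0x18000.  */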

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  Return X or the new, valid address.  */

static rtx
alpha_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			  enum machine_mode mode)
{
  rtx new_x = alpha_legitimize_address_1 (x, NULL_RTX, mode);
  return new_x ? new_x : x;
}

/* Primarily this is required for TLS symbols, but given that our move
   patterns *ought* to be able to handle any symbol at any time, we
   should never be spilling symbolic operands to the constant pool, ever.  */

static bool
alpha_cannot_force_const_mem (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  return code == SYMBOL_REF || code == LABEL_REF || code == CONST;
}

/* We do not allow indirect calls to be optimized into sibling calls, nor
   can we allow a call to a function with a different GP to be optimized
   into a sibcall.  */

static bool
alpha_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  /* Can't do indirect tail calls, since we don't know if the target
     uses the same GP.  */
  if (!decl)
    return false;

  /* Otherwise, we can make a tail call if the target function shares
     the same GP.  */
  return decl_has_samegp (decl);
}

int
some_small_symbolic_operand_int (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  return small_symbolic_operand (x, Pmode) != 0;
}

static int
split_small_symbolic_operand_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  if (small_symbolic_operand (x, Pmode))
    {
      x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
      *px = x;
      return -1;
    }

  return 0;
}

rtx
split_small_symbolic_operand (rtx x)
{
  x = copy_insn (x);
  for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
  return x;
}

/* Indicate that INSN cannot be duplicated.  This is true for any insn
   that we've marked with gpdisp relocs, since those have to stay in
   1-1 correspondence with one another.

   Technically we could copy them if we could set up a mapping from one
   sequence number to another, across the set of insns to be duplicated.
   This seems overly complicated and error-prone since interblock motion
   from sched-ebb could move one of the pair of insns to a different block.

   Also cannot allow jsr insns to be duplicated.  If they throw exceptions,
   then they'll be in a different block from their ldgp.  Which could lead
   the bb reorder code to think that it would be ok to copy just the block
   containing the call and branch to the block containing the ldgp.  */

static bool
alpha_cannot_copy_insn_p (rtx insn)
{
  if (!reload_completed || !TARGET_EXPLICIT_RELOCS)
    return false;
  if (recog_memoized (insn) >= 0)
    return get_attr_cannot_copy (insn);
  else
    return false;
}


/* Try a machine-dependent way of reloading an illegitimate address
   operand.  If we find one, push the reload and return the new rtx.  */

rtx
alpha_legitimize_reload_address (rtx x,
				 enum machine_mode mode ATTRIBUTE_UNUSED,
				 int opnum, int type,
				 int ind_levels ATTRIBUTE_UNUSED)
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && CONST_INT_P (XEXP (XEXP (x, 0), 1))
      && CONST_INT_P (XEXP (x, 1)))
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      return x;
    }

  /* We wish to handle large displacements off a base register by
     splitting the addend across an ldah and the mem insn.  This
     cuts number of extra insns needed from 3 to 1.  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	return NULL_RTX;

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */
      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      return x;
    }

  return NULL_RTX;
}
\f
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
alpha_rtx_costs (rtx x, int code, int outer_code, int *total,
		 bool speed)
{
  enum machine_mode mode = GET_MODE (x);
  bool float_mode_p = FLOAT_MODE_P (mode);
  const struct alpha_rtx_cost_data *cost_data;

  if (!speed)
    cost_data = &alpha_rtx_cost_size;
  else
    cost_data = &alpha_rtx_cost_data[alpha_tune];

  switch (code)
    {
    case CONST_INT:
      /* If this is an 8-bit constant, return zero since it can be used
	 nearly anywhere with no cost.  If it is a valid operand for an
	 ADD or AND, likewise return 0 if we know it will be used in that
	 context.  Otherwise, return 2 since it might be used there later.
	 All other constants take at least two insns.  */
      if (INTVAL (x) >= 0 && INTVAL (x) < 256)
	{
	  *total = 0;
	  return true;
	}
      /* FALLTHRU */

    case CONST_DOUBLE:
      if (x == CONST0_RTX (mode))
	*total = 0;
      else if ((outer_code == PLUS && add_operand (x, VOIDmode))
	       || (outer_code == AND && and_operand (x, VOIDmode)))
	*total = 0;
      else if (add_operand (x, VOIDmode) || and_operand (x, VOIDmode))
	*total = 2;
      else
	*total = COSTS_N_INSNS (2);
      return true;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      if (TARGET_EXPLICIT_RELOCS && small_symbolic_operand (x, VOIDmode))
	*total = COSTS_N_INSNS (outer_code != MEM);
      else if (TARGET_EXPLICIT_RELOCS && local_symbolic_operand (x, VOIDmode))
	*total = COSTS_N_INSNS (1 + (outer_code != MEM));
      else if (tls_symbolic_operand_type (x))
	/* Estimate of cost for call_pal rduniq.  */
	/* ??? How many insns do we emit here?  More than one... */
	*total = COSTS_N_INSNS (15);
      else
	/* Otherwise we do a load from the GOT.  */
	*total = COSTS_N_INSNS (!speed ? 1 : alpha_memory_latency);
      return true;

    case HIGH:
      /* This is effectively an add_operand.  */
      *total = 2;
      return true;

    case PLUS:
    case MINUS:
      if (float_mode_p)
	*total = cost_data->fp_add;
      else if (GET_CODE (XEXP (x, 0)) == MULT
	       && const48_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
	{
	  *total = (rtx_cost (XEXP (XEXP (x, 0), 0),
			      (enum rtx_code) outer_code, speed)
		    + rtx_cost (XEXP (x, 1),
				(enum rtx_code) outer_code, speed)
		    + COSTS_N_INSNS (1));
	  return true;
	}
      return false;

    case MULT:
      if (float_mode_p)
	*total = cost_data->fp_mult;
      else if (mode == DImode)
	*total = cost_data->int_mult_di;
      else
	*total = cost_data->int_mult_si;
      return false;

    case ASHIFT:
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) <= 3)
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}
      /* FALLTHRU */

    case ASHIFTRT:
    case LSHIFTRT:
      *total = cost_data->int_shift;
      return false;

    case IF_THEN_ELSE:
      if (float_mode_p)
	*total = cost_data->fp_add;
      else
	*total = cost_data->int_cmov;
      return false;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      if (!float_mode_p)
	*total = cost_data->int_div;
      else if (mode == SFmode)
	*total = cost_data->fp_div_sf;
      else
	*total = cost_data->fp_div_df;
      return false;

    case MEM:
      *total = COSTS_N_INSNS (!speed ? 1 : alpha_memory_latency);
      return true;

    case NEG:
      if (! float_mode_p)
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}
      /* FALLTHRU */

    case ABS:
      if (! float_mode_p)
	{
	  *total = COSTS_N_INSNS (1) + cost_data->int_cmov;
	  return false;
	}
      /* FALLTHRU */

    case FLOAT:
    case UNSIGNED_FLOAT:
    case FIX:
    case UNSIGNED_FIX:
    case FLOAT_TRUNCATE:
      *total = cost_data->fp_add;
      return false;

    case FLOAT_EXTEND:
      if (MEM_P (XEXP (x, 0)))
	*total = 0;
      else
	*total = cost_data->fp_add;
      return false;

    default:
      return false;
    }
}
\f
/* REF is an alignable memory location.  Place an aligned SImode
   reference into *PALIGNED_MEM and the number of bits to shift into
   *PBITNUM.  */

void
get_aligned_mem (rtx ref, rtx *paligned_mem, rtx *pbitnum)
{
  rtx base;
  HOST_WIDE_INT disp, offset;

  gcc_assert (MEM_P (ref));

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      base = find_replacement (&XEXP (ref, 0));
      gcc_assert (memory_address_p (GET_MODE (ref), base));
    }
  else
    base = XEXP (ref, 0);

  if (GET_CODE (base) == PLUS)
    disp = INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
  else
    disp = 0;

  /* Find the byte offset within an aligned word.  If the memory itself is
     claimed to be aligned, believe it.  Otherwise, aligned_memory_operand
     will have examined the base register and determined it is aligned, and
     thus displacements from it are naturally alignable.  */
  if (MEM_ALIGN (ref) >= 32)
    offset = 0;
  else
    offset = disp & 3;

  /* The location should not cross aligned word boundary.  */
  gcc_assert (offset + GET_MODE_SIZE (GET_MODE (ref))
	      <= GET_MODE_SIZE (SImode));

  /* Access the entire aligned word.  */
  *paligned_mem = widen_memory_access (ref, SImode, -offset);

  /* Convert the byte offset within the word to a bit offset.  */
  if (WORDS_BIG_ENDIAN)
    offset = 32 - (GET_MODE_BITSIZE (GET_MODE (ref)) + offset * 8);
  else
    offset *= 8;
  *pbitnum = GEN_INT (offset);
}

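/* Illustrative annotation (not part of the original source): for a
   little-endian HImode ref at byte displacement 6 from an under-aligned
   base, get_aligned_mem computes offset = 6 & 3 = 2, widens to the
   SImode word at displacement 4, and sets *PBITNUM to 2 * 8 = 16: the
   halfword occupies bits 16..31 of the loaded word.  */
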
/* Similar, but just get the address.  Handle the two reload cases.  */

rtx
get_unaligned_address (rtx ref)
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  gcc_assert (MEM_P (ref));

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      base = find_replacement (&XEXP (ref, 0));

      gcc_assert (memory_address_p (GET_MODE (ref), base));
    }
  else
    base = XEXP (ref, 0);

  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  return plus_constant (base, offset);
}

/* Compute a value X, such that X & 7 == (ADDR + OFS) & 7.
   X is always returned in a register.  */

rtx
get_unaligned_offset (rtx addr, HOST_WIDE_INT ofs)
{
  if (GET_CODE (addr) == PLUS)
    {
      ofs += INTVAL (XEXP (addr, 1));
      addr = XEXP (addr, 0);
    }

  return expand_simple_binop (Pmode, PLUS, addr, GEN_INT (ofs & 7),
			      NULL_RTX, 1, OPTAB_LIB_WIDEN);
}

/* On the Alpha, all (non-symbolic) constants except zero go into
   a floating-point register via memory.  Note that we cannot
   return anything that is not a subset of RCLASS, and that some
   symbolic constants cannot be dropped to memory.  */

enum reg_class
alpha_preferred_reload_class (rtx x, enum reg_class rclass)
{
  /* Zero is present in any register class.  */
  if (x == CONST0_RTX (GET_MODE (x)))
    return rclass;

  /* These sorts of constants we can easily drop to memory.  */
  if (CONST_INT_P (x)
      || GET_CODE (x) == CONST_DOUBLE
      || GET_CODE (x) == CONST_VECTOR)
    {
      if (rclass == FLOAT_REGS)
	return NO_REGS;
      if (rclass == ALL_REGS)
	return GENERAL_REGS;
      return rclass;
    }

  /* All other kinds of constants should not (and in the case of HIGH
     cannot) be dropped to memory -- instead we use a GENERAL_REGS
     secondary reload.  */
  if (CONSTANT_P (x))
    return (rclass == ALL_REGS ? GENERAL_REGS : rclass);

  return rclass;
}

/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch or immediate register.  Return the class
   needed for the immediate register.  */

static reg_class_t
alpha_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
			enum machine_mode mode, secondary_reload_info *sri)
{
  enum reg_class rclass = (enum reg_class) rclass_i;

  /* Loading and storing HImode or QImode values to and from memory
     usually requires a scratch register.  */
  if (!TARGET_BWX && (mode == QImode || mode == HImode || mode == CQImode))
    {
      if (any_memory_operand (x, mode))
	{
	  if (in_p)
	    {
	      if (!aligned_memory_operand (x, mode))
		sri->icode = direct_optab_handler (reload_in_optab, mode);
	    }
	  else
	    sri->icode = direct_optab_handler (reload_out_optab, mode);
	  return NO_REGS;
	}
    }

  /* We also cannot do integral arithmetic into FP regs, as might result
     from register elimination into a DImode fp register.  */
  if (rclass == FLOAT_REGS)
    {
      if (MEM_P (x) && GET_CODE (XEXP (x, 0)) == AND)
	return GENERAL_REGS;
      if (in_p && INTEGRAL_MODE_P (mode)
	  && !MEM_P (x) && !REG_P (x) && !CONST_INT_P (x))
	return GENERAL_REGS;
    }

  return NO_REGS;
}
\f
/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */

static int
alpha_set_memflags_1 (rtx *xp, void *data)
{
  rtx x = *xp, orig = (rtx) data;

  if (!MEM_P (x))
    return 0;

  MEM_VOLATILE_P (x) = MEM_VOLATILE_P (orig);
  MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (orig);
  MEM_SCALAR_P (x) = MEM_SCALAR_P (orig);
  MEM_NOTRAP_P (x) = MEM_NOTRAP_P (orig);
  MEM_READONLY_P (x) = MEM_READONLY_P (orig);

  /* Sadly, we cannot use alias sets because the extra aliasing
     produced by the AND interferes.  Given that two-byte quantities
     are the only thing we would be able to differentiate anyway,
     there does not seem to be any point in convoluting the early
     out of the alias check.  */

  return -1;
}

/* Given SEQ, which is an INSN list, look for any MEMs in either
   a SET_DEST or a SET_SRC and copy the in-struct, unchanging, and
   volatile flags from REF into each of the MEMs found.  If REF is not
   a MEM, don't do anything.  */

void
alpha_set_memflags (rtx seq, rtx ref)
{
  rtx insn;

  if (!MEM_P (ref))
    return;

  /* This is only called from alpha.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (!MEM_VOLATILE_P (ref)
      && !MEM_IN_STRUCT_P (ref)
      && !MEM_SCALAR_P (ref)
      && !MEM_NOTRAP_P (ref)
      && !MEM_READONLY_P (ref))
    return;

  for (insn = seq; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      for_each_rtx (&PATTERN (insn), alpha_set_memflags_1, (void *) ref);
    else
      gcc_unreachable ();
}
\f
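/* Illustrative annotation (not part of the original source): the
   sign-extended-32-bit path in alpha_emit_set_const_1 below builds a
   constant from lda/ldah pieces.  For c = 0x7fff8000: low = -0x8000
   and the first HIGH computes to -0x8000, so the "two ldah" fixup
   applies, giving extra = 0x4000 and high = 0x4000.  The constant is
   then formed in three insns as (0x4000 << 16) + (0x4000 << 16) +
   (-0x8000) = 0x7fff8000.  */
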
static rtx alpha_emit_set_const (rtx, enum machine_mode, HOST_WIDE_INT,
				 int, bool);

/* Internal routine for alpha_emit_set_const to check for N or below insns.
   If NO_OUTPUT is true, then we only check to see if N insns are possible,
   and return pc_rtx if successful.  */

static rtx
alpha_emit_set_const_1 (rtx target, enum machine_mode mode,
			HOST_WIDE_INT c, int n, bool no_output)
{
  HOST_WIDE_INT new_const;
  int i, bits;
  /* Use a pseudo if highly optimizing and still generating RTL.  */
  rtx subtarget
    = (flag_expensive_optimizations && can_create_pseudo_p () ? 0 : target);
  rtx temp, insn;

  /* If this is a sign-extended 32-bit constant, we can do this in at most
     three insns, so do it if we have enough insns left.  We always have
     a sign-extended 32-bit constant when compiling on a narrow machine.  */

  if (HOST_BITS_PER_WIDE_INT != 64
      || c >> 31 == -1 || c >> 31 == 0)
    {
      HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT tmp1 = c - low;
      HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT extra = 0;

      /* If HIGH will be interpreted as negative but the constant is
	 positive, we must adjust it to do two ldah insns.  */

      if ((high & 0x8000) != 0 && c >= 0)
	{
	  extra = 0x4000;
	  tmp1 -= 0x40000000;
	  high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
	}

      if (c == low || (low == 0 && extra == 0))
	{
	  /* We used to use copy_to_suggested_reg (GEN_INT (c), target, mode)
	     but that meant that we can't handle INT_MIN on 32-bit machines
	     (like NT/Alpha), because we recurse indefinitely through
	     emit_move_insn to gen_movdi.  So instead, since we know exactly
	     what we want, create it explicitly.  */

	  if (no_output)
	    return pc_rtx;
	  if (target == NULL)
	    target = gen_reg_rtx (mode);
	  emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c)));
	  return target;
	}
      else if (n >= 2 + (extra != 0))
	{
	  if (no_output)
	    return pc_rtx;
	  if (!can_create_pseudo_p ())
	    {
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (high << 16)));
	      temp = target;
	    }
	  else
	    temp = copy_to_suggested_reg (GEN_INT (high << 16),
					  subtarget, mode);

	  /* As of 2002-02-23, addsi3 is only available when not optimizing.
	     This means that if we go through expand_binop, we'll try to
	     generate extensions, etc, which will require new pseudos, which
	     will fail during some split phases.  The SImode add patterns
	     still exist, but are not named.  So build the insns by hand.  */

	  if (extra != 0)
	    {
	      if (! subtarget)
		subtarget = gen_reg_rtx (mode);
	      insn = gen_rtx_PLUS (mode, temp, GEN_INT (extra << 16));
	      insn = gen_rtx_SET (VOIDmode, subtarget, insn);
	      emit_insn (insn);
	      temp = subtarget;
	    }

	  if (target == NULL)
	    target = gen_reg_rtx (mode);
	  insn = gen_rtx_PLUS (mode, temp, GEN_INT (low));
	  insn = gen_rtx_SET (VOIDmode, target, insn);
	  emit_insn (insn);
	  return target;
	}
    }

  /* If we couldn't do it that way, try some other methods.  But if we have
     no instructions left, don't bother.  Likewise, if this is SImode and
     we can't make pseudos, we can't do anything since the expand_binop
     and expand_unop calls will widen and try to make pseudos.  */

  if (n == 1 || (mode == SImode && !can_create_pseudo_p ()))
    return 0;

  /* Next, see if we can load a related constant and then shift and possibly
     negate it to get the constant we want.  Try this once for each
     increasing number of insns.  */

  for (i = 1; i < n; i++)
1792 {
20a4db98
RH
1793 /* First, see if minus some low bits, we've an easy load of
1794 high bits. */
1795
0a2aaacc
KG
1796 new_const = ((c & 0xffff) ^ 0x8000) - 0x8000;
1797 if (new_const != 0)
72910a0b 1798 {
0a2aaacc 1799 temp = alpha_emit_set_const (subtarget, mode, c - new_const, i, no_output);
72910a0b
RH
1800 if (temp)
1801 {
1802 if (no_output)
1803 return temp;
0a2aaacc 1804 return expand_binop (mode, add_optab, temp, GEN_INT (new_const),
72910a0b
RH
1805 target, 0, OPTAB_WIDEN);
1806 }
1807 }
20a4db98
RH
1808
1809 /* Next try complementing. */
72910a0b
RH
1810 temp = alpha_emit_set_const (subtarget, mode, ~c, i, no_output);
1811 if (temp)
1812 {
1813 if (no_output)
1814 return temp;
1815 return expand_unop (mode, one_cmpl_optab, temp, target, 0);
1816 }
a6f12d7c 1817
fd94addf 1818 /* Next try to form a constant and do a left shift. We can do this
a6f12d7c
RK
1819 if some low-order bits are zero; the exact_log2 call below tells
1820 us that information. The bits we are shifting out could be any
1821 value, but here we'll just try the 0- and sign-extended forms of
1822 the constant. To try to increase the chance of having the same
1823 constant in more than one insn, start at the highest number of
1824 bits to shift, but try all possibilities in case a ZAPNOT will
1825 be useful. */
1826
72910a0b
RH
1827 bits = exact_log2 (c & -c);
1828 if (bits > 0)
a6f12d7c 1829 for (; bits > 0; bits--)
72910a0b 1830 {
0a2aaacc
KG
1831 new_const = c >> bits;
1832 temp = alpha_emit_set_const (subtarget, mode, new_const, i, no_output);
72910a0b
RH
1833 if (!temp && c < 0)
1834 {
0a2aaacc
KG
1835 new_const = (unsigned HOST_WIDE_INT)c >> bits;
1836 temp = alpha_emit_set_const (subtarget, mode, new_const,
72910a0b
RH
1837 i, no_output);
1838 }
1839 if (temp)
1840 {
1841 if (no_output)
1842 return temp;
1843 return expand_binop (mode, ashl_optab, temp, GEN_INT (bits),
1844 target, 0, OPTAB_WIDEN);
1845 }
1846 }
a6f12d7c
RK
1847
1848 /* Now try high-order zero bits. Here we try the shifted-in bits as
57cfde96
RK
1849 all zero and all ones. Be careful to avoid shifting outside the
1850 mode and to avoid shifting outside the host wide int size. */
858e4e8c
RH
1851 /* On narrow hosts, don't shift a 1 into the high bit, since we'll
1852 confuse the recursive call and set all of the high 32 bits. */
a6f12d7c 1853
72910a0b
RH
1854 bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
1855 - floor_log2 (c) - 1 - (HOST_BITS_PER_WIDE_INT < 64));
1856 if (bits > 0)
a6f12d7c 1857 for (; bits > 0; bits--)
72910a0b 1858 {
0a2aaacc
KG
1859 new_const = c << bits;
1860 temp = alpha_emit_set_const (subtarget, mode, new_const, i, no_output);
72910a0b
RH
1861 if (!temp)
1862 {
0a2aaacc
KG
1863 new_const = (c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1);
1864 temp = alpha_emit_set_const (subtarget, mode, new_const,
72910a0b
RH
1865 i, no_output);
1866 }
1867 if (temp)
1868 {
1869 if (no_output)
1870 return temp;
1871 return expand_binop (mode, lshr_optab, temp, GEN_INT (bits),
1872 target, 1, OPTAB_WIDEN);
1873 }
1874 }
a6f12d7c
RK
1875
1876 /* Now try high-order 1 bits. We get that with a sign-extension.
57cfde96 1877 But one bit isn't enough here. Be careful to avoid shifting outside
285a5742 1878 the mode and to avoid shifting outside the host wide int size. */
30102605 1879
72910a0b
RH
1880 bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
1881 - floor_log2 (~ c) - 2);
1882 if (bits > 0)
a6f12d7c 1883 for (; bits > 0; bits--)
72910a0b 1884 {
0a2aaacc
KG
1885 new_const = c << bits;
1886 temp = alpha_emit_set_const (subtarget, mode, new_const, i, no_output);
72910a0b
RH
1887 if (!temp)
1888 {
0a2aaacc
KG
1889 new_const = (c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1);
1890 temp = alpha_emit_set_const (subtarget, mode, new_const,
72910a0b
RH
1891 i, no_output);
1892 }
1893 if (temp)
1894 {
1895 if (no_output)
1896 return temp;
1897 return expand_binop (mode, ashr_optab, temp, GEN_INT (bits),
1898 target, 0, OPTAB_WIDEN);
1899 }
1900 }
a6f12d7c
RK
1901 }
1902
20a4db98
RH
1903#if HOST_BITS_PER_WIDE_INT == 64
1904 /* Finally, see if can load a value into the target that is the same as the
1905 constant except that all bytes that are 0 are changed to be 0xff. If we
1906 can, then we can do a ZAPNOT to obtain the desired constant. */
1907
0a2aaacc 1908 new_const = c;
20a4db98 1909 for (i = 0; i < 64; i += 8)
0a2aaacc
KG
1910 if ((new_const & ((HOST_WIDE_INT) 0xff << i)) == 0)
1911 new_const |= (HOST_WIDE_INT) 0xff << i;
e68c380c 1912
20a4db98
RH
1913 /* We are only called for SImode and DImode. If this is SImode, ensure that
1914 we are sign extended to a full word. */
1915
1916 if (mode == SImode)
0a2aaacc 1917 new_const = ((new_const & 0xffffffff) ^ 0x80000000) - 0x80000000;
20a4db98 1918
0a2aaacc 1919 if (new_const != c)
72910a0b 1920 {
0a2aaacc 1921 temp = alpha_emit_set_const (subtarget, mode, new_const, n - 1, no_output);
72910a0b
RH
1922 if (temp)
1923 {
1924 if (no_output)
1925 return temp;
0a2aaacc 1926 return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new_const),
72910a0b
RH
1927 target, 0, OPTAB_WIDEN);
1928 }
1929 }
20a4db98 1930#endif
e68c380c 1931
a6f12d7c
RK
1932 return 0;
1933}
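
/* Illustrative sketch (not part of the compiler): the lda/ldah path
   above splits a sign-extended 32-bit constant into a signed low
   16-bit part, a signed high 16-bit part, and an optional extra
   0x4000 ldah when HIGH would otherwise wrap negative.  A minimal
   standalone check of that arithmetic, assuming a 64-bit host:  */
#if 0
#include <stdint.h>
#include <assert.h>

static void
check_lda_ldah_decompose (int64_t c)
{
  /* Must be a sign-extended 32-bit value, as tested above.  */
  assert (c >> 31 == 0 || c >> 31 == -1);

  int64_t low = ((c & 0xffff) ^ 0x8000) - 0x8000;	/* signed low 16 */
  int64_t tmp1 = c - low;
  int64_t high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
  int64_t extra = 0;

  if ((high & 0x8000) != 0 && c >= 0)
    {
      extra = 0x4000;
      tmp1 -= 0x40000000;
      high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
    }

  /* ldah high(; ldah extra); lda low reassembles the constant.  */
  assert (high * 65536 + extra * 65536 + low == c);
}
#endif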
/* Try to output insns to set TARGET equal to the constant C if it can be
   done in less than N insns.  Do all computations in MODE.  Returns the
   place where the output has been placed if it can be done and the insns
   have been emitted.  If it would take more than N insns, zero is returned
   and no insns are emitted.  */

static rtx
alpha_emit_set_const (rtx target, enum machine_mode mode,
		      HOST_WIDE_INT c, int n, bool no_output)
{
  enum machine_mode orig_mode = mode;
  rtx orig_target = target;
  rtx result = 0;
  int i;

  /* If we can't make any pseudos, TARGET is an SImode hard register, and
     we can't load this constant in one insn, do this in DImode.  */
  if (!can_create_pseudo_p () && mode == SImode
      && REG_P (target) && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      result = alpha_emit_set_const_1 (target, mode, c, 1, no_output);
      if (result)
	return result;

      target = no_output ? NULL : gen_lowpart (DImode, target);
      mode = DImode;
    }
  else if (mode == V8QImode || mode == V4HImode || mode == V2SImode)
    {
      target = no_output ? NULL : gen_lowpart (DImode, target);
      mode = DImode;
    }

  /* Try 1 insn, then 2, then up to N.  */
  for (i = 1; i <= n; i++)
    {
      result = alpha_emit_set_const_1 (target, mode, c, i, no_output);
      if (result)
	{
	  rtx insn, set;

	  if (no_output)
	    return result;

	  insn = get_last_insn ();
	  set = single_set (insn);
	  if (! CONSTANT_P (SET_SRC (set)))
	    set_unique_reg_note (get_last_insn (), REG_EQUAL, GEN_INT (c));
	  break;
	}
    }

  /* Allow for the case where we changed the mode of TARGET.  */
  if (result)
    {
      if (result == target)
	result = orig_target;
      else if (mode != orig_mode)
	result = gen_lowpart (orig_mode, result);
    }

  return result;
}

/* Having failed to find a 3 insn sequence in alpha_emit_set_const,
   fall back to a straightforward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with alpha_emit_set_const.  */

static rtx
alpha_emit_set_long_const (rtx target, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  HOST_WIDE_INT d1, d2, d3, d4;

  /* Decompose the entire word.  */
#if HOST_BITS_PER_WIDE_INT >= 64
  gcc_assert (c2 == -(c1 < 0));
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  c1 = (c1 - d2) >> 32;
  d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d3;
  d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  gcc_assert (c1 == d4);
#else
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  gcc_assert (c1 == d2);
  c2 += (d2 < 0);
  d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
  c2 -= d3;
  d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  gcc_assert (c2 == d4);
#endif

  /* Construct the high word.  */
  if (d4)
    {
      emit_move_insn (target, GEN_INT (d4));
      if (d3)
	emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d3)));
    }
  else
    emit_move_insn (target, GEN_INT (d3));

  /* Shift it into place.  */
  emit_move_insn (target, gen_rtx_ASHIFT (DImode, target, GEN_INT (32)));

  /* Add in the low bits.  */
  if (d2)
    emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d2)));
  if (d1)
    emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1)));

  return target;
}
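
/* Illustrative sketch (not part of the compiler): the d1..d4
   decomposition above expresses a 64-bit constant as
   (((d4 + d3) << 32) + d2) + d1, with each piece a signed 16- or
   32-bit immediate suited to lda/ldah.  A minimal standalone check,
   assuming a 64-bit host (the arithmetic wraps modulo 2^64 at the
   extremes, exactly as in the code above):  */
#if 0
#include <stdint.h>
#include <assert.h>

static void
check_long_decompose (int64_t c1)
{
  int64_t d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  int64_t t = c1 - d1;
  int64_t d2 = ((t & 0xffffffff) ^ 0x80000000) - 0x80000000;
  t = (t - d2) >> 32;
  int64_t d3 = ((t & 0xffff) ^ 0x8000) - 0x8000;
  t -= d3;
  int64_t d4 = ((t & 0xffffffff) ^ 0x80000000) - 0x80000000;
  assert (t == d4);

  /* High word, shifted into place, plus the low pieces.  */
  assert ((((d4 + d3) << 32) + d2) + d1 == c1);
}
#endif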

/* Given an integral CONST_INT, CONST_DOUBLE, or CONST_VECTOR, return
   the low 64 bits.  */

static void
alpha_extract_integer (rtx x, HOST_WIDE_INT *p0, HOST_WIDE_INT *p1)
{
  HOST_WIDE_INT i0, i1;

  if (GET_CODE (x) == CONST_VECTOR)
    x = simplify_subreg (DImode, x, GET_MODE (x), 0);

  if (CONST_INT_P (x))
    {
      i0 = INTVAL (x);
      i1 = -(i0 < 0);
    }
  else if (HOST_BITS_PER_WIDE_INT >= 64)
    {
      i0 = CONST_DOUBLE_LOW (x);
      i1 = -(i0 < 0);
    }
  else
    {
      i0 = CONST_DOUBLE_LOW (x);
      i1 = CONST_DOUBLE_HIGH (x);
    }

  *p0 = i0;
  *p1 = i1;
}

/* Implement LEGITIMATE_CONSTANT_P.  This is all constants for which we
   are willing to load the value into a register via a move pattern.
   Normally this is all symbolic constants, integral constants that
   take three or fewer instructions, and floating-point zero.  */

bool
alpha_legitimate_constant_p (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT i0, i1;

  switch (GET_CODE (x))
    {
    case LABEL_REF:
    case HIGH:
      return true;

    case CONST:
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	x = XEXP (XEXP (x, 0), 0);
      else
	return true;

      if (GET_CODE (x) != SYMBOL_REF)
	return true;

      /* FALLTHRU */

    case SYMBOL_REF:
      /* TLS symbols are never valid.  */
      return SYMBOL_REF_TLS_MODEL (x) == 0;

    case CONST_DOUBLE:
      if (x == CONST0_RTX (mode))
	return true;
      if (FLOAT_MODE_P (mode))
	return false;
      goto do_integer;

    case CONST_VECTOR:
      if (x == CONST0_RTX (mode))
	return true;
      if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
	return false;
      if (GET_MODE_SIZE (mode) != 8)
	return false;
      goto do_integer;

    case CONST_INT:
    do_integer:
      if (TARGET_BUILD_CONSTANTS)
	return true;
      alpha_extract_integer (x, &i0, &i1);
      if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == (-i0 < 0))
	return alpha_emit_set_const_1 (x, mode, i0, 3, true) != NULL;
      return false;

    default:
      return false;
    }
}

/* Operand 1 is known to be a constant, and should require more than one
   instruction to load.  Emit that multi-part load.  */

bool
alpha_split_const_mov (enum machine_mode mode, rtx *operands)
{
  HOST_WIDE_INT i0, i1;
  rtx temp = NULL_RTX;

  alpha_extract_integer (operands[1], &i0, &i1);

  if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0))
    temp = alpha_emit_set_const (operands[0], mode, i0, 3, false);

  if (!temp && TARGET_BUILD_CONSTANTS)
    temp = alpha_emit_set_long_const (operands[0], i0, i1);

  if (temp)
    {
      if (!rtx_equal_p (operands[0], temp))
	emit_move_insn (operands[0], temp);
      return true;
    }

  return false;
}

/* Expand a move instruction; return true if all work is done.
   We don't handle non-bwx subword loads here.  */

bool
alpha_expand_mov (enum machine_mode mode, rtx *operands)
{
  rtx tmp;

  /* If the output is not a register, the input must be.  */
  if (MEM_P (operands[0])
      && ! reg_or_0_operand (operands[1], mode))
    operands[1] = force_reg (mode, operands[1]);

  /* Allow legitimize_address to perform some simplifications.  */
  if (mode == Pmode && symbolic_operand (operands[1], mode))
    {
      tmp = alpha_legitimize_address_1 (operands[1], operands[0], mode);
      if (tmp)
	{
	  if (tmp == operands[0])
	    return true;
	  operands[1] = tmp;
	  return false;
	}
    }

  /* Early out for non-constants and valid constants.  */
  if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  if (CONST_INT_P (operands[1])
      || GET_CODE (operands[1]) == CONST_DOUBLE
      || GET_CODE (operands[1]) == CONST_VECTOR)
    {
      if (alpha_split_const_mov (mode, operands))
	return true;
    }

  /* Otherwise we've nothing left but to drop the thing to memory.  */
  tmp = force_const_mem (mode, operands[1]);

  if (tmp == NULL_RTX)
    return false;

  if (reload_in_progress)
    {
      emit_move_insn (operands[0], XEXP (tmp, 0));
      operands[1] = replace_equiv_address (tmp, operands[0]);
    }
  else
    operands[1] = validize_mem (tmp);
  return false;
}

/* Expand a non-bwx QImode or HImode move instruction;
   return true if all work is done.  */

bool
alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
{
  rtx seq;

  /* If the output is not a register, the input must be.  */
  if (MEM_P (operands[0]))
    operands[1] = force_reg (mode, operands[1]);

  /* Handle four memory cases, unaligned and aligned for either the input
     or the output.  The only case where we can be called during reload is
     for aligned loads; all other cases require temporaries.  */

  if (any_memory_operand (operands[1], mode))
    {
      if (aligned_memory_operand (operands[1], mode))
	{
	  if (reload_in_progress)
	    {
	      if (mode == QImode)
		seq = gen_reload_inqi_aligned (operands[0], operands[1]);
	      else
		seq = gen_reload_inhi_aligned (operands[0], operands[1]);
	      emit_insn (seq);
	    }
	  else
	    {
	      rtx aligned_mem, bitnum;
	      rtx scratch = gen_reg_rtx (SImode);
	      rtx subtarget;
	      bool copyout;

	      get_aligned_mem (operands[1], &aligned_mem, &bitnum);

	      subtarget = operands[0];
	      if (REG_P (subtarget))
		subtarget = gen_lowpart (DImode, subtarget), copyout = false;
	      else
		subtarget = gen_reg_rtx (DImode), copyout = true;

	      if (mode == QImode)
		seq = gen_aligned_loadqi (subtarget, aligned_mem,
					  bitnum, scratch);
	      else
		seq = gen_aligned_loadhi (subtarget, aligned_mem,
					  bitnum, scratch);
	      emit_insn (seq);

	      if (copyout)
		emit_move_insn (operands[0], gen_lowpart (mode, subtarget));
	    }
	}
      else
	{
	  /* Don't pass these as parameters since that makes the generated
	     code depend on parameter evaluation order which will cause
	     bootstrap failures.  */

	  rtx temp1, temp2, subtarget, ua;
	  bool copyout;

	  temp1 = gen_reg_rtx (DImode);
	  temp2 = gen_reg_rtx (DImode);

	  subtarget = operands[0];
	  if (REG_P (subtarget))
	    subtarget = gen_lowpart (DImode, subtarget), copyout = false;
	  else
	    subtarget = gen_reg_rtx (DImode), copyout = true;

	  ua = get_unaligned_address (operands[1]);
	  if (mode == QImode)
	    seq = gen_unaligned_loadqi (subtarget, ua, temp1, temp2);
	  else
	    seq = gen_unaligned_loadhi (subtarget, ua, temp1, temp2);

	  alpha_set_memflags (seq, operands[1]);
	  emit_insn (seq);

	  if (copyout)
	    emit_move_insn (operands[0], gen_lowpart (mode, subtarget));
	}
      return true;
    }

  if (any_memory_operand (operands[0], mode))
    {
      if (aligned_memory_operand (operands[0], mode))
	{
	  rtx aligned_mem, bitnum;
	  rtx temp1 = gen_reg_rtx (SImode);
	  rtx temp2 = gen_reg_rtx (SImode);

	  get_aligned_mem (operands[0], &aligned_mem, &bitnum);

	  emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
					temp1, temp2));
	}
      else
	{
	  rtx temp1 = gen_reg_rtx (DImode);
	  rtx temp2 = gen_reg_rtx (DImode);
	  rtx temp3 = gen_reg_rtx (DImode);
	  rtx ua = get_unaligned_address (operands[0]);

	  if (mode == QImode)
	    seq = gen_unaligned_storeqi (ua, operands[1], temp1, temp2, temp3);
	  else
	    seq = gen_unaligned_storehi (ua, operands[1], temp1, temp2, temp3);

	  alpha_set_memflags (seq, operands[0]);
	  emit_insn (seq);
	}
      return true;
    }

  return false;
}
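
/* Illustrative sketch (not part of the compiler): without BWX, a byte
   load is an aligned 64-bit load plus an extract, which is what the
   aligned_loadqi pattern used above expands to.  The same idea in
   plain C, assuming a little-endian 64-bit host:  */
#if 0
#include <stdint.h>

static uint8_t
load_byte_nobwx (const uint8_t *p)
{
  uintptr_t a = (uintptr_t) p;
  /* ldq_u: load the aligned quadword containing *p.  */
  uint64_t q = *(const uint64_t *) (a & ~(uintptr_t) 7);
  /* extbl: shift the addressed byte into the low lane.  */
  return (uint8_t) (q >> ((a & 7) * 8));
}
#endif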
/* Implement the movmisalign patterns.  One of the operands is a memory
   that is not naturally aligned.  Emit instructions to load it.  */

void
alpha_expand_movmisalign (enum machine_mode mode, rtx *operands)
{
  /* Honor misaligned loads, for those we promised to do so.  */
  if (MEM_P (operands[1]))
    {
      rtx tmp;

      if (register_operand (operands[0], mode))
	tmp = operands[0];
      else
	tmp = gen_reg_rtx (mode);

      alpha_expand_unaligned_load (tmp, operands[1], 8, 0, 0);
      if (tmp != operands[0])
	emit_move_insn (operands[0], tmp);
    }
  else if (MEM_P (operands[0]))
    {
      if (!reg_or_0_operand (operands[1], mode))
	operands[1] = force_reg (mode, operands[1]);
      alpha_expand_unaligned_store (operands[0], operands[1], 8, 0);
    }
  else
    gcc_unreachable ();
}

/* Generate an unsigned DImode to FP conversion.  This is the same code
   optabs would emit if we didn't have TFmode patterns.

   For SFmode, this is the only construction I've found that can pass
   gcc.c-torture/execute/ieee/rbug.c.  No scenario that uses DFmode
   intermediates will work, because you'll get intermediate rounding
   that ruins the end result.  Some of this could be fixed by turning
   on round-to-positive-infinity, but that requires diddling the fpsr,
   which kills performance.  I tried turning this around and converting
   to a negative number, so that I could turn on /m, but either I did
   it wrong or there's something else, because I wound up with the exact
   same single-bit error.  There is a branch-less form of this same code:

	srl     $16,1,$1
	and     $16,1,$2
	cmplt   $16,0,$3
	or      $1,$2,$2
	cmovge  $16,$16,$2
	itoft   $3,$f10
	itoft   $2,$f11
	cvtqs   $f11,$f11
	adds    $f11,$f11,$f0
	fcmoveq $f10,$f11,$f0

   I'm not using it because it's the same number of instructions as
   this branch-full form, and it has more serialized long latency
   instructions on the critical path.

   For DFmode, we can avoid rounding errors by breaking up the word
   into two pieces, converting them separately, and adding them back:

   LC0: .long 0,0x5f800000

	itoft   $16,$f11
	lda     $2,LC0
	cmplt   $16,0,$1
	cpyse   $f11,$f31,$f10
	cpyse   $f31,$f11,$f11
	s4addq  $1,$2,$1
	lds     $f12,0($1)
	cvtqt   $f10,$f10
	cvtqt   $f11,$f11
	addt    $f12,$f10,$f0
	addt    $f0,$f11,$f0

   This doesn't seem to be a clear-cut win over the optabs form.
   It probably all depends on the distribution of numbers being
   converted -- in the optabs form, all but high-bit-set has a
   much lower minimum execution time.  */

void
alpha_emit_floatuns (rtx operands[2])
{
  rtx neglab, donelab, i0, i1, f0, in, out;
  enum machine_mode mode;

  out = operands[0];
  in = force_reg (DImode, operands[1]);
  mode = GET_MODE (out);
  neglab = gen_label_rtx ();
  donelab = gen_label_rtx ();
  i0 = gen_reg_rtx (DImode);
  i1 = gen_reg_rtx (DImode);
  f0 = gen_reg_rtx (mode);

  emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);

  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
  emit_jump_insn (gen_jump (donelab));
  emit_barrier ();

  emit_label (neglab);

  emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
  emit_insn (gen_anddi3 (i1, in, const1_rtx));
  emit_insn (gen_iordi3 (i0, i0, i1));
  emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));

  emit_label (donelab);
}

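/* Illustrative sketch (not part of the compiler): the neglab path
   above implements the classic halve-and-round trick -- when the high
   bit is set, convert (x >> 1) | (x & 1) as a signed value and double
   the result; or-ing the low bit back in keeps the sticky bit so
   rounding is unaffected.  The same algorithm in plain C:  */
#if 0
#include <stdint.h>

static float
u64_to_float (uint64_t x)
{
  if ((int64_t) x >= 0)
    return (float) (int64_t) x;		/* cvtqs on the fast path */
  /* Halve, keeping the sticky low bit.  */
  int64_t half = (int64_t) ((x >> 1) | (x & 1));
  float f = (float) half;
  return f + f;				/* adds: double the result */
}
#endif
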
/* Generate the comparison for a conditional branch.  */

void
alpha_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
{
  enum rtx_code cmp_code, branch_code;
  enum machine_mode branch_mode = VOIDmode;
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1], op1 = operands[2];
  rtx tem;

  if (cmp_mode == TFmode)
    {
      op0 = alpha_emit_xfloating_compare (&code, op0, op1);
      op1 = const0_rtx;
      cmp_mode = DImode;
    }

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
    case UNORDERED:
      /* We have these compares:  */
      cmp_code = code, branch_code = NE;
      break;

    case NE:
    case ORDERED:
      /* These must be reversed.  */
      cmp_code = reverse_condition (code), branch_code = EQ;
      break;

    case GE:  case GT:  case GEU:  case GTU:
      /* For FP, we swap them, for INT, we reverse them.  */
      if (cmp_mode == DFmode)
	{
	  cmp_code = swap_condition (code);
	  branch_code = NE;
	  tem = op0, op0 = op1, op1 = tem;
	}
      else
	{
	  cmp_code = reverse_condition (code);
	  branch_code = EQ;
	}
      break;

    default:
      gcc_unreachable ();
    }

  if (cmp_mode == DFmode)
    {
      if (flag_unsafe_math_optimizations && cmp_code != UNORDERED)
	{
	  /* When we are not as concerned about non-finite values, and we
	     are comparing against zero, we can branch directly.  */
	  if (op1 == CONST0_RTX (DFmode))
	    cmp_code = UNKNOWN, branch_code = code;
	  else if (op0 == CONST0_RTX (DFmode))
	    {
	      /* Undo the swap we probably did just above.  */
	      tem = op0, op0 = op1, op1 = tem;
	      branch_code = swap_condition (cmp_code);
	      cmp_code = UNKNOWN;
	    }
	}
      else
	{
	  /* ??? We mark the branch mode to be CCmode to prevent the
	     compare and branch from being combined, since the compare
	     insn follows IEEE rules that the branch does not.  */
	  branch_mode = CCmode;
	}
    }
  else
    {
      /* The following optimizations are only for signed compares.  */
      if (code != LEU && code != LTU && code != GEU && code != GTU)
	{
	  /* Whee.  Compare and branch against 0 directly.  */
	  if (op1 == const0_rtx)
	    cmp_code = UNKNOWN, branch_code = code;

	  /* If the constant doesn't fit into an immediate, but can
	     be generated by lda/ldah, we adjust the argument and
	     compare against zero, so we can use beq/bne directly.  */
	  /* ??? Don't do this when comparing against symbols, otherwise
	     we'll reduce (&x == 0x1234) to (&x-0x1234 == 0), which will
	     be declared false out of hand (at least for non-weak).  */
	  else if (CONST_INT_P (op1)
		   && (code == EQ || code == NE)
		   && !(symbolic_operand (op0, VOIDmode)
			|| (REG_P (op0) && REG_POINTER (op0))))
	    {
	      rtx n_op1 = GEN_INT (-INTVAL (op1));

	      if (! satisfies_constraint_I (op1)
		  && (satisfies_constraint_K (n_op1)
		      || satisfies_constraint_L (n_op1)))
		cmp_code = PLUS, branch_code = code, op1 = n_op1;
	    }
	}

      if (!reg_or_0_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (cmp_code != PLUS && !reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* Emit an initial compare instruction, if necessary.  */
  tem = op0;
  if (cmp_code != UNKNOWN)
    {
      tem = gen_reg_rtx (cmp_mode);
      emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1));
    }

  /* Emit the branch instruction.  */
  tem = gen_rtx_SET (VOIDmode, pc_rtx,
		     gen_rtx_IF_THEN_ELSE (VOIDmode,
					   gen_rtx_fmt_ee (branch_code,
							   branch_mode, tem,
							   CONST0_RTX (cmp_mode)),
					   gen_rtx_LABEL_REF (VOIDmode,
							      operands[3]),
					   pc_rtx));
  emit_jump_insn (tem);
}

/* Certain simplifications can be done to make invalid setcc operations
   valid.  Return true if we succeed in emitting the compare, false if
   we cannot handle this combination.  */

bool
alpha_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
{
  enum rtx_code cmp_code;
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = operands[2], op1 = operands[3];
  rtx tmp;

  if (cmp_mode == TFmode)
    {
      op0 = alpha_emit_xfloating_compare (&code, op0, op1);
      op1 = const0_rtx;
      cmp_mode = DImode;
    }

  if (cmp_mode == DFmode && !TARGET_FIX)
    return 0;

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  */

  cmp_code = UNKNOWN;
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
    case UNORDERED:
      /* We have these compares.  */
      if (cmp_mode == DFmode)
	cmp_code = code, code = NE;
      break;

    case NE:
      if (cmp_mode == DImode && op1 == const0_rtx)
	break;
      /* FALLTHRU */

    case ORDERED:
      cmp_code = reverse_condition (code);
      code = EQ;
      break;

    case GE:  case GT:  case GEU:  case GTU:
      /* These normally need swapping, but for integer zero we have
	 special patterns that recognize swapped operands.  */
      if (cmp_mode == DImode && op1 == const0_rtx)
	break;
      code = swap_condition (code);
      if (cmp_mode == DFmode)
	cmp_code = code, code = NE;
      tmp = op0, op0 = op1, op1 = tmp;
      break;

    default:
      gcc_unreachable ();
    }

  if (cmp_mode == DImode)
    {
      if (!register_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (!reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* Emit an initial compare instruction, if necessary.  */
  if (cmp_code != UNKNOWN)
    {
      tmp = gen_reg_rtx (cmp_mode);
      emit_insn (gen_rtx_SET (VOIDmode, tmp,
			      gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1)));

      op0 = cmp_mode != DImode ? gen_lowpart (DImode, tmp) : tmp;
      op1 = const0_rtx;
    }

  /* Emit the setcc instruction.  */
  emit_insn (gen_rtx_SET (VOIDmode, operands[0],
			  gen_rtx_fmt_ee (code, DImode, op0, op1)));
  return true;
}

/* Rewrite a comparison against zero CMP of the form
   (CODE (cc0) (const_int 0)) so it can be written validly in
   a conditional move (if_then_else CMP ...).
   If both of the operands that set cc0 are nonzero we must emit
   an insn to perform the compare (it can't be done within
   the conditional move).  */

rtx
alpha_emit_conditional_move (rtx cmp, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (cmp);
  enum rtx_code cmov_code = NE;
  rtx op0 = XEXP (cmp, 0);
  rtx op1 = XEXP (cmp, 1);
  enum machine_mode cmp_mode
    = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
  enum machine_mode cmov_mode = VOIDmode;
  int local_fast_math = flag_unsafe_math_optimizations;
  rtx tem;

  if (cmp_mode == TFmode)
    {
      op0 = alpha_emit_xfloating_compare (&code, op0, op1);
      op1 = const0_rtx;
      cmp_mode = DImode;
    }

  gcc_assert (cmp_mode == DFmode || cmp_mode == DImode);

  if (FLOAT_MODE_P (cmp_mode) != FLOAT_MODE_P (mode))
    {
      enum rtx_code cmp_code;

      if (! TARGET_FIX)
	return 0;

      /* If we have fp<->int register move instructions, do a cmov by
	 performing the comparison in fp registers, and move the
	 zero/nonzero value to integer registers, where we can then
	 use a normal cmov, or vice-versa.  */

      switch (code)
	{
	case EQ: case LE: case LT: case LEU: case LTU:
	  /* We have these compares.  */
	  cmp_code = code, code = NE;
	  break;

	case NE:
	  /* This must be reversed.  */
	  cmp_code = EQ, code = EQ;
	  break;

	case GE: case GT: case GEU: case GTU:
	  /* These normally need swapping, but for integer zero we have
	     special patterns that recognize swapped operands.  */
	  if (cmp_mode == DImode && op1 == const0_rtx)
	    cmp_code = code, code = NE;
	  else
	    {
	      cmp_code = swap_condition (code);
	      code = NE;
	      tem = op0, op0 = op1, op1 = tem;
	    }
	  break;

	default:
	  gcc_unreachable ();
	}

      tem = gen_reg_rtx (cmp_mode);
      emit_insn (gen_rtx_SET (VOIDmode, tem,
			      gen_rtx_fmt_ee (cmp_code, cmp_mode,
					      op0, op1)));

      cmp_mode = cmp_mode == DImode ? DFmode : DImode;
      op0 = gen_lowpart (cmp_mode, tem);
      op1 = CONST0_RTX (cmp_mode);
      local_fast_math = 1;
    }

  /* We may be able to use a conditional move directly.
     This avoids emitting spurious compares.  */
  if (signed_comparison_operator (cmp, VOIDmode)
      && (cmp_mode == DImode || local_fast_math)
      && (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode)))
    return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);

  /* We can't put the comparison inside the conditional move;
     emit a compare instruction and put that inside the
     conditional move.  Make sure we emit only comparisons we have;
     swap or reverse as necessary.  */

  if (!can_create_pseudo_p ())
    return NULL_RTX;

  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
      /* We have these compares: */
      break;

    case NE:
      /* This must be reversed.  */
      code = reverse_condition (code);
      cmov_code = EQ;
      break;

    case GE:  case GT:  case GEU:  case GTU:
      /* These must be swapped.  */
      if (op1 != CONST0_RTX (cmp_mode))
	{
	  code = swap_condition (code);
	  tem = op0, op0 = op1, op1 = tem;
	}
      break;

    default:
      gcc_unreachable ();
    }

  if (cmp_mode == DImode)
    {
      if (!reg_or_0_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (!reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* ??? We mark the branch mode to be CCmode to prevent the compare
     and cmov from being combined, since the compare insn follows IEEE
     rules that the cmov does not.  */
  if (cmp_mode == DFmode && !local_fast_math)
    cmov_mode = CCmode;

  tem = gen_reg_rtx (cmp_mode);
  emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_mode, op0, op1));
  return gen_rtx_fmt_ee (cmov_code, cmov_mode, tem, CONST0_RTX (cmp_mode));
}

/* Simplify a conditional move of two constants into a setcc with
   arithmetic.  This is done with a splitter since combine would
   just undo the work if done during code generation.  It also catches
   cases we wouldn't have before cse.  */

int
alpha_split_conditional_move (enum rtx_code code, rtx dest, rtx cond,
			      rtx t_rtx, rtx f_rtx)
{
  HOST_WIDE_INT t, f, diff;
  enum machine_mode mode;
  rtx target, subtarget, tmp;

  mode = GET_MODE (dest);
  t = INTVAL (t_rtx);
  f = INTVAL (f_rtx);
  diff = t - f;

  if (((code == NE || code == EQ) && diff < 0)
      || (code == GE || code == GT))
    {
      code = reverse_condition (code);
      diff = t, t = f, f = diff;
      diff = t - f;
    }

  subtarget = target = dest;
  if (mode != DImode)
    {
      target = gen_lowpart (DImode, dest);
      if (can_create_pseudo_p ())
	subtarget = gen_reg_rtx (DImode);
      else
	subtarget = target;
    }
  /* Below, we must be careful to use copy_rtx on target and subtarget
     in intermediate insns, as they may be a subreg rtx, which may not
     be shared.  */

  if (f == 0 && exact_log2 (diff) > 0
      /* On EV6, we've got enough shifters to make non-arithmetic shifts
	 viable over a longer latency cmove.  On EV5, the E0 slot is a
	 scarce resource, and on EV4 shift has the same latency as a
	 cmove.  */
      && (diff <= 8 || alpha_tune == PROCESSOR_EV6))
    {
      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      tmp = gen_rtx_ASHIFT (DImode, copy_rtx (subtarget),
			    GEN_INT (exact_log2 (t)));
      emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
    }
  else if (f == 0 && t == -1)
    {
      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      emit_insn (gen_negdi2 (target, copy_rtx (subtarget)));
    }
  else if (diff == 1 || diff == 4 || diff == 8)
    {
      rtx add_op;

      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      if (diff == 1)
	emit_insn (gen_adddi3 (target, copy_rtx (subtarget), GEN_INT (f)));
      else
	{
	  add_op = GEN_INT (f);
	  if (sext_add_operand (add_op, mode))
	    {
	      tmp = gen_rtx_MULT (DImode, copy_rtx (subtarget),
				  GEN_INT (diff));
	      tmp = gen_rtx_PLUS (DImode, tmp, add_op);
	      emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
	    }
	  else
	    return 0;
	}
    }
  else
    return 0;

  return 1;
}
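
/* Illustrative sketch (not part of the compiler): the three splitter
   shapes above turn (cond ? t : f) over constants into setcc
   arithmetic.  In plain C, for the power-of-two, t == -1, and
   small-difference cases respectively:  */
#if 0
#include <stdint.h>

static int64_t
cmov_pow2 (int cond, int log2_t)	/* f == 0, t == 1 << log2_t */
{
  return (int64_t) (cond != 0) << log2_t;
}

static int64_t
cmov_mask (int cond)			/* f == 0, t == -1 */
{
  return -(int64_t) (cond != 0);
}

static int64_t
cmov_add (int cond, int64_t f, int64_t diff)	/* diff in {1,4,8} */
{
  return (int64_t) (cond != 0) * diff + f;	/* s4addq/s8addq shape */
}
#endif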

/* Look up the function X_floating library function name for the
   given operation.  */

struct GTY(()) xfloating_op
{
  const enum rtx_code code;
  const char *const GTY((skip)) osf_func;
  const char *const GTY((skip)) vms_func;
  rtx libcall;
};

static GTY(()) struct xfloating_op xfloating_ops[] =
{
  { PLUS,		"_OtsAddX", "OTS$ADD_X", 0 },
  { MINUS,		"_OtsSubX", "OTS$SUB_X", 0 },
  { MULT,		"_OtsMulX", "OTS$MUL_X", 0 },
  { DIV,		"_OtsDivX", "OTS$DIV_X", 0 },
  { EQ,			"_OtsEqlX", "OTS$EQL_X", 0 },
  { NE,			"_OtsNeqX", "OTS$NEQ_X", 0 },
  { LT,			"_OtsLssX", "OTS$LSS_X", 0 },
  { LE,			"_OtsLeqX", "OTS$LEQ_X", 0 },
  { GT,			"_OtsGtrX", "OTS$GTR_X", 0 },
  { GE,			"_OtsGeqX", "OTS$GEQ_X", 0 },
  { FIX,		"_OtsCvtXQ", "OTS$CVTXQ", 0 },
  { FLOAT,		"_OtsCvtQX", "OTS$CVTQX", 0 },
  { UNSIGNED_FLOAT,	"_OtsCvtQUX", "OTS$CVTQUX", 0 },
  { FLOAT_EXTEND,	"_OtsConvertFloatTX", "OTS$CVT_FLOAT_T_X", 0 },
  { FLOAT_TRUNCATE,	"_OtsConvertFloatXT", "OTS$CVT_FLOAT_X_T", 0 }
};

static GTY(()) struct xfloating_op vax_cvt_ops[] =
{
  { FLOAT_EXTEND,	"_OtsConvertFloatGX", "OTS$CVT_FLOAT_G_X", 0 },
  { FLOAT_TRUNCATE,	"_OtsConvertFloatXG", "OTS$CVT_FLOAT_X_G", 0 }
};

static rtx
alpha_lookup_xfloating_lib_func (enum rtx_code code)
{
  struct xfloating_op *ops = xfloating_ops;
  long n = ARRAY_SIZE (xfloating_ops);
  long i;

  gcc_assert (TARGET_HAS_XFLOATING_LIBS);

  /* How irritating.  Nothing to key off for the main table.  */
  if (TARGET_FLOAT_VAX && (code == FLOAT_EXTEND || code == FLOAT_TRUNCATE))
    {
      ops = vax_cvt_ops;
      n = ARRAY_SIZE (vax_cvt_ops);
    }

  for (i = 0; i < n; ++i, ++ops)
    if (ops->code == code)
      {
	rtx func = ops->libcall;
	if (!func)
	  {
	    func = init_one_libfunc (TARGET_ABI_OPEN_VMS
				     ? ops->vms_func : ops->osf_func);
	    ops->libcall = func;
	  }
	return func;
      }

  gcc_unreachable ();
}

/* Most X_floating operations take the rounding mode as an argument.
   Compute that here.  */

static int
alpha_compute_xfloating_mode_arg (enum rtx_code code,
				  enum alpha_fp_rounding_mode round)
{
  int mode;

  switch (round)
    {
    case ALPHA_FPRM_NORM:
      mode = 2;
      break;
    case ALPHA_FPRM_MINF:
      mode = 1;
      break;
    case ALPHA_FPRM_CHOP:
      mode = 0;
      break;
    case ALPHA_FPRM_DYN:
      mode = 4;
      break;
    default:
      gcc_unreachable ();

    /* XXX For reference, round to +inf is mode = 3.  */
    }

  if (code == FLOAT_TRUNCATE && alpha_fptm == ALPHA_FPTM_N)
    mode |= 0x10000;

  return mode;
}

/* Emit an X_floating library function call.

   Note that these functions do not follow normal calling conventions:
   TFmode arguments are passed in two integer registers (as opposed to
   indirect); TFmode return values appear in R16+R17.

   FUNC is the function to call.
   TARGET is where the output belongs.
   OPERANDS are the inputs.
   NOPERANDS is the count of inputs.
   EQUIV is the expression equivalent for the function.  */

static void
alpha_emit_xfloating_libcall (rtx func, rtx target, rtx operands[],
			      int noperands, rtx equiv)
{
  rtx usage = NULL_RTX, tmp, reg;
  int regno = 16, i;

  start_sequence ();

  for (i = 0; i < noperands; ++i)
    {
      switch (GET_MODE (operands[i]))
	{
	case TFmode:
	  reg = gen_rtx_REG (TFmode, regno);
	  regno += 2;
	  break;

	case DFmode:
	  reg = gen_rtx_REG (DFmode, regno + 32);
	  regno += 1;
	  break;

	case VOIDmode:
	  gcc_assert (CONST_INT_P (operands[i]));
	  /* FALLTHRU */
	case DImode:
	  reg = gen_rtx_REG (DImode, regno);
	  regno += 1;
	  break;

	default:
	  gcc_unreachable ();
	}

      emit_move_insn (reg, operands[i]);
      usage = alloc_EXPR_LIST (0, gen_rtx_USE (VOIDmode, reg), usage);
    }

  switch (GET_MODE (target))
    {
    case TFmode:
      reg = gen_rtx_REG (TFmode, 16);
      break;
    case DFmode:
      reg = gen_rtx_REG (DFmode, 32);
      break;
    case DImode:
      reg = gen_rtx_REG (DImode, 0);
      break;
    default:
      gcc_unreachable ();
    }

  tmp = gen_rtx_MEM (QImode, func);
  tmp = emit_call_insn (GEN_CALL_VALUE (reg, tmp, const0_rtx,
					const0_rtx, const0_rtx));
  CALL_INSN_FUNCTION_USAGE (tmp) = usage;
  RTL_CONST_CALL_P (tmp) = 1;

  tmp = get_insns ();
  end_sequence ();

  emit_libcall_block (tmp, target, reg, equiv);
}

3091
3092/* Emit an X_floating library function call for arithmetic (+,-,*,/). */
3093
3094void
a5c24926 3095alpha_emit_xfloating_arith (enum rtx_code code, rtx operands[])
5495cc55 3096{
75959f0a 3097 rtx func;
5495cc55 3098 int mode;
c77f46c6 3099 rtx out_operands[3];
5495cc55
RH
3100
3101 func = alpha_lookup_xfloating_lib_func (code);
3102 mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
3103
c77f46c6
AO
3104 out_operands[0] = operands[1];
3105 out_operands[1] = operands[2];
3106 out_operands[2] = GEN_INT (mode);
f676971a 3107 alpha_emit_xfloating_libcall (func, operands[0], out_operands, 3,
5495cc55
RH
3108 gen_rtx_fmt_ee (code, TFmode, operands[1],
3109 operands[2]));
3110}
3111
3112/* Emit an X_floating library function call for a comparison. */
3113
3114static rtx
0da4e73a 3115alpha_emit_xfloating_compare (enum rtx_code *pcode, rtx op0, rtx op1)
5495cc55 3116{
0da4e73a 3117 enum rtx_code cmp_code, res_code;
32891ff6 3118 rtx func, out, operands[2], note;
5495cc55 3119
0da4e73a
RH
3120 /* X_floating library comparison functions return
3121 -1 unordered
3122 0 false
3123 1 true
3124 Convert the compare against the raw return value. */
3125
3126 cmp_code = *pcode;
3127 switch (cmp_code)
3128 {
3129 case UNORDERED:
3130 cmp_code = EQ;
3131 res_code = LT;
3132 break;
3133 case ORDERED:
3134 cmp_code = EQ;
3135 res_code = GE;
3136 break;
3137 case NE:
3138 res_code = NE;
3139 break;
3140 case EQ:
3141 case LT:
3142 case GT:
3143 case LE:
3144 case GE:
3145 res_code = GT;
3146 break;
3147 default:
3148 gcc_unreachable ();
3149 }
3150 *pcode = res_code;
3151
3152 func = alpha_lookup_xfloating_lib_func (cmp_code);
5495cc55
RH
3153
3154 operands[0] = op0;
3155 operands[1] = op1;
3156 out = gen_reg_rtx (DImode);
3157
32891ff6
RH
3158 /* What's actually returned is -1,0,1, not a proper boolean value,
3159 so use an EXPR_LIST as with a generic libcall instead of a
3160 comparison type expression. */
3161 note = gen_rtx_EXPR_LIST (VOIDmode, op1, NULL_RTX);
3162 note = gen_rtx_EXPR_LIST (VOIDmode, op0, note);
3163 note = gen_rtx_EXPR_LIST (VOIDmode, func, note);
3164 alpha_emit_xfloating_libcall (func, out, operands, 2, note);
5495cc55
RH
3165
3166 return out;
3167}
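
/* Illustrative sketch (not part of the compiler): the res_code
   rewrite above amounts to this mapping from the original comparison
   to a test on the -1/0/1 raw library result:  */
#if 0
enum ots_cmp { OTS_UNORDERED, OTS_ORDERED, OTS_NE, OTS_OTHER };

static int
interpret_ots_result (long raw, enum ots_cmp code)
{
  switch (code)
    {
    case OTS_UNORDERED: return raw < 0;	/* _OtsEqlX returned -1 */
    case OTS_ORDERED:   return raw >= 0;	/* _OtsEqlX returned 0 or 1 */
    case OTS_NE:        return raw != 0;	/* _OtsNeqX */
    default:            return raw > 0;	/* EQ/LT/GT/LE/GE */
    }
}
#endif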

/* Emit an X_floating library function call for a conversion.  */

void
alpha_emit_xfloating_cvt (enum rtx_code orig_code, rtx operands[])
{
  int noperands = 1, mode;
  rtx out_operands[2];
  rtx func;
  enum rtx_code code = orig_code;

  if (code == UNSIGNED_FIX)
    code = FIX;

  func = alpha_lookup_xfloating_lib_func (code);

  out_operands[0] = operands[1];

  switch (code)
    {
    case FIX:
      mode = alpha_compute_xfloating_mode_arg (code, ALPHA_FPRM_CHOP);
      out_operands[1] = GEN_INT (mode);
      noperands = 2;
      break;
    case FLOAT_TRUNCATE:
      mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
      out_operands[1] = GEN_INT (mode);
      noperands = 2;
      break;
    default:
      break;
    }

  alpha_emit_xfloating_libcall (func, operands[0], out_operands, noperands,
				gen_rtx_fmt_e (orig_code,
					       GET_MODE (operands[0]),
					       operands[1]));
}

/* Split a TImode or TFmode move from OP[1] to OP[0] into a pair of
   DImode moves from OP[2,3] to OP[0,1].  If FIXUP_OVERLAP is true,
   guarantee that the sequence
     set (OP[0] OP[2])
     set (OP[1] OP[3])
   is valid.  Naturally, output operand ordering is little-endian.
   This is used by *movtf_internal and *movti_internal.  */

void
alpha_split_tmode_pair (rtx operands[4], enum machine_mode mode,
			bool fixup_overlap)
{
  switch (GET_CODE (operands[1]))
    {
    case REG:
      operands[3] = gen_rtx_REG (DImode, REGNO (operands[1]) + 1);
      operands[2] = gen_rtx_REG (DImode, REGNO (operands[1]));
      break;

    case MEM:
      operands[3] = adjust_address (operands[1], DImode, 8);
      operands[2] = adjust_address (operands[1], DImode, 0);
      break;

    case CONST_INT:
    case CONST_DOUBLE:
      gcc_assert (operands[1] == CONST0_RTX (mode));
      operands[2] = operands[3] = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[0]))
    {
    case REG:
      operands[1] = gen_rtx_REG (DImode, REGNO (operands[0]) + 1);
      operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
      break;

    case MEM:
      operands[1] = adjust_address (operands[0], DImode, 8);
      operands[0] = adjust_address (operands[0], DImode, 0);
      break;

    default:
      gcc_unreachable ();
    }

  if (fixup_overlap && reg_overlap_mentioned_p (operands[0], operands[3]))
    {
      rtx tmp;
      tmp = operands[0], operands[0] = operands[1], operands[1] = tmp;
      tmp = operands[2], operands[2] = operands[3], operands[3] = tmp;
    }
}

/* Implement negtf2 or abstf2.  Op0 is destination, op1 is source,
   op2 is a register containing the sign bit, operation is the
   logical operation to be performed.  */

void
alpha_split_tfmode_frobsign (rtx operands[3], rtx (*operation) (rtx, rtx, rtx))
{
  rtx high_bit = operands[2];
  rtx scratch;
  int move;

  alpha_split_tmode_pair (operands, TFmode, false);

  /* Detect three flavors of operand overlap.  */
  move = 1;
  if (rtx_equal_p (operands[0], operands[2]))
    move = 0;
  else if (rtx_equal_p (operands[1], operands[2]))
    {
      if (rtx_equal_p (operands[0], high_bit))
	move = 2;
      else
	move = -1;
    }

  if (move < 0)
    emit_move_insn (operands[0], operands[2]);

  /* ??? If the destination overlaps both source tf and high_bit, then
     assume source tf is dead in its entirety and use the other half
     for a scratch register.  Otherwise "scratch" is just the proper
     destination register.  */
  scratch = operands[move < 2 ? 1 : 3];

  emit_insn ((*operation) (scratch, high_bit, operands[3]));

  if (move > 0)
    {
      emit_move_insn (operands[0], operands[2]);
      if (move > 1)
	emit_move_insn (operands[1], scratch);
    }
}
6c174fc0
RH
3310/* Use ext[wlq][lh] as the Architecture Handbook describes for extracting
3311 unaligned data:
3312
3313 unsigned: signed:
3314 word: ldq_u r1,X(r11) ldq_u r1,X(r11)
3315 ldq_u r2,X+1(r11) ldq_u r2,X+1(r11)
3316 lda r3,X(r11) lda r3,X+2(r11)
3317 extwl r1,r3,r1 extql r1,r3,r1
3318 extwh r2,r3,r2 extqh r2,r3,r2
3319 or r1.r2.r1 or r1,r2,r1
3320 sra r1,48,r1
3321
3322 long: ldq_u r1,X(r11) ldq_u r1,X(r11)
3323 ldq_u r2,X+3(r11) ldq_u r2,X+3(r11)
3324 lda r3,X(r11) lda r3,X(r11)
3325 extll r1,r3,r1 extll r1,r3,r1
3326 extlh r2,r3,r2 extlh r2,r3,r2
3327 or r1.r2.r1 addl r1,r2,r1
3328
3329 quad: ldq_u r1,X(r11)
3330 ldq_u r2,X+7(r11)
3331 lda r3,X(r11)
3332 extql r1,r3,r1
3333 extqh r2,r3,r2
3334 or r1.r2.r1
3335*/
3336
3337void
a5c24926
RH
3338alpha_expand_unaligned_load (rtx tgt, rtx mem, HOST_WIDE_INT size,
3339 HOST_WIDE_INT ofs, int sign)
6c174fc0 3340{
1eb356b9 3341 rtx meml, memh, addr, extl, exth, tmp, mema;
4208b40f 3342 enum machine_mode mode;
6c174fc0 3343
9f7d06d6
RH
3344 if (TARGET_BWX && size == 2)
3345 {
34642493
RH
3346 meml = adjust_address (mem, QImode, ofs);
3347 memh = adjust_address (mem, QImode, ofs+1);
9f7d06d6
RH
3348 if (BYTES_BIG_ENDIAN)
3349 tmp = meml, meml = memh, memh = tmp;
3350 extl = gen_reg_rtx (DImode);
3351 exth = gen_reg_rtx (DImode);
3352 emit_insn (gen_zero_extendqidi2 (extl, meml));
3353 emit_insn (gen_zero_extendqidi2 (exth, memh));
3354 exth = expand_simple_binop (DImode, ASHIFT, exth, GEN_INT (8),
3355 NULL, 1, OPTAB_LIB_WIDEN);
3356 addr = expand_simple_binop (DImode, IOR, extl, exth,
3357 NULL, 1, OPTAB_LIB_WIDEN);
3358
3359 if (sign && GET_MODE (tgt) != HImode)
3360 {
3361 addr = gen_lowpart (HImode, addr);
3362 emit_insn (gen_extend_insn (tgt, addr, GET_MODE (tgt), HImode, 0));
3363 }
3364 else
3365 {
3366 if (GET_MODE (tgt) != DImode)
3367 addr = gen_lowpart (GET_MODE (tgt), addr);
3368 emit_move_insn (tgt, addr);
3369 }
3370 return;
3371 }
3372
6c174fc0
RH
3373 meml = gen_reg_rtx (DImode);
3374 memh = gen_reg_rtx (DImode);
3375 addr = gen_reg_rtx (DImode);
3376 extl = gen_reg_rtx (DImode);
3377 exth = gen_reg_rtx (DImode);
3378
1eb356b9
RH
3379 mema = XEXP (mem, 0);
3380 if (GET_CODE (mema) == LO_SUM)
3381 mema = force_reg (Pmode, mema);
3382
e01acbb1 3383 /* AND addresses cannot be in any alias set, since they may implicitly
f676971a 3384 alias surrounding code. Ideally we'd have some alias set that
e01acbb1
RH
3385 covered all types except those with alignment 8 or higher. */
3386
3387 tmp = change_address (mem, DImode,
f676971a 3388 gen_rtx_AND (DImode,
1eb356b9 3389 plus_constant (mema, ofs),
e01acbb1 3390 GEN_INT (-8)));
ba4828e0 3391 set_mem_alias_set (tmp, 0);
e01acbb1
RH
3392 emit_move_insn (meml, tmp);
3393
3394 tmp = change_address (mem, DImode,
f676971a 3395 gen_rtx_AND (DImode,
1eb356b9 3396 plus_constant (mema, ofs + size - 1),
e01acbb1 3397 GEN_INT (-8)));
ba4828e0 3398 set_mem_alias_set (tmp, 0);
e01acbb1 3399 emit_move_insn (memh, tmp);
6c174fc0 3400
30102605
RH
3401 if (WORDS_BIG_ENDIAN && sign && (size == 2 || size == 4))
3402 {
3403 emit_move_insn (addr, plus_constant (mema, -1));
3404
3405 emit_insn (gen_extqh_be (extl, meml, addr));
3406 emit_insn (gen_extxl_be (exth, memh, GEN_INT (64), addr));
3407
3408 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
3409 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (64 - size*8),
3410 addr, 1, OPTAB_WIDEN);
3411 }
3412 else if (sign && size == 2)
6c174fc0 3413 {
1eb356b9 3414 emit_move_insn (addr, plus_constant (mema, ofs+2));
6c174fc0 3415
30102605
RH
3416 emit_insn (gen_extxl_le (extl, meml, GEN_INT (64), addr));
3417 emit_insn (gen_extqh_le (exth, memh, addr));
6c174fc0 3418
1a7cb241
JW
3419 /* We must use tgt here for the target. Alpha-vms port fails if we use
3420 addr for the target, because addr is marked as a pointer and combine
a50aa827 3421 knows that pointers are always sign-extended 32-bit values. */
1a7cb241 3422 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
f676971a 3423 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48),
4208b40f 3424 addr, 1, OPTAB_WIDEN);
6c174fc0 3425 }
4208b40f 3426 else
6c174fc0 3427 {
30102605 3428 if (WORDS_BIG_ENDIAN)
4208b40f 3429 {
30102605
RH
3430 emit_move_insn (addr, plus_constant (mema, ofs+size-1));
3431 switch ((int) size)
3432 {
3433 case 2:
3434 emit_insn (gen_extwh_be (extl, meml, addr));
3435 mode = HImode;
3436 break;
6c174fc0 3437
30102605
RH
3438 case 4:
3439 emit_insn (gen_extlh_be (extl, meml, addr));
3440 mode = SImode;
3441 break;
6c174fc0 3442
30102605
RH
3443 case 8:
3444 emit_insn (gen_extqh_be (extl, meml, addr));
3445 mode = DImode;
3446 break;
5495cc55 3447
30102605 3448 default:
56daab84 3449 gcc_unreachable ();
30102605
RH
3450 }
3451 emit_insn (gen_extxl_be (exth, memh, GEN_INT (size*8), addr));
3452 }
3453 else
3454 {
3455 emit_move_insn (addr, plus_constant (mema, ofs));
3456 emit_insn (gen_extxl_le (extl, meml, GEN_INT (size*8), addr));
3457 switch ((int) size)
3458 {
3459 case 2:
3460 emit_insn (gen_extwh_le (exth, memh, addr));
3461 mode = HImode;
3462 break;
3463
3464 case 4:
3465 emit_insn (gen_extlh_le (exth, memh, addr));
3466 mode = SImode;
3467 break;
3468
3469 case 8:
3470 emit_insn (gen_extqh_le (exth, memh, addr));
3471 mode = DImode;
3472 break;
3473
3474 default:
56daab84 3475 gcc_unreachable ();
30102605 3476 }
4208b40f
RH
3477 }
3478
3479 addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl),
3480 gen_lowpart (mode, exth), gen_lowpart (mode, tgt),
3481 sign, OPTAB_WIDEN);
6c174fc0
RH
3482 }
3483
4208b40f 3484 if (addr != tgt)
9f7d06d6 3485 emit_move_insn (tgt, gen_lowpart (GET_MODE (tgt), addr));
6c174fc0
RH
3486}
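/* For reference, a minimal sketch of what the expansion above produces
   for the common little-endian SIZE == 4, SIGN == 0 case; register
   names are illustrative only:

	ldq_u	t0,ofs(base)	# meml: quad holding the first byte
	ldq_u	t1,ofs+3(base)	# memh: quad holding the last byte
	lda	t2,ofs(base)	# addr
	extll	t0,t2,t0	# low fragment, shifted into place
	extlh	t1,t2,t1	# high fragment
	bis	t0,t1,tgt	# merge

   When the address happens to be aligned, both ldq_u's read the same
   quadword and both fragments extract the same longword, so the OR is
   harmless.  */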
3487
3488/* Similarly, use ins and msk instructions to perform unaligned stores. */
3489
3490void
a5c24926
RH
3491alpha_expand_unaligned_store (rtx dst, rtx src,
3492 HOST_WIDE_INT size, HOST_WIDE_INT ofs)
6c174fc0 3493{
1eb356b9 3494 rtx dstl, dsth, addr, insl, insh, meml, memh, dsta;
f676971a 3495
9f7d06d6
RH
3496 if (TARGET_BWX && size == 2)
3497 {
3498 if (src != const0_rtx)
3499 {
3500 dstl = gen_lowpart (QImode, src);
3501 dsth = expand_simple_binop (DImode, LSHIFTRT, src, GEN_INT (8),
3502 NULL, 1, OPTAB_LIB_WIDEN);
3503 dsth = gen_lowpart (QImode, dsth);
3504 }
3505 else
3506 dstl = dsth = const0_rtx;
3507
34642493
RH
3508 meml = adjust_address (dst, QImode, ofs);
3509 memh = adjust_address (dst, QImode, ofs+1);
9f7d06d6
RH
3510 if (BYTES_BIG_ENDIAN)
3511 addr = meml, meml = memh, memh = addr;
3512
3513 emit_move_insn (meml, dstl);
3514 emit_move_insn (memh, dsth);
3515 return;
3516 }
3517
6c174fc0
RH
3518 dstl = gen_reg_rtx (DImode);
3519 dsth = gen_reg_rtx (DImode);
3520 insl = gen_reg_rtx (DImode);
3521 insh = gen_reg_rtx (DImode);
3522
1eb356b9
RH
3523 dsta = XEXP (dst, 0);
3524 if (GET_CODE (dsta) == LO_SUM)
3525 dsta = force_reg (Pmode, dsta);
3526
e01acbb1 3527 /* AND addresses cannot be in any alias set, since they may implicitly
f676971a 3528 alias surrounding code. Ideally we'd have some alias set that
e01acbb1
RH
3529 covered all types except those with alignment 8 or higher. */
3530
6c174fc0 3531 meml = change_address (dst, DImode,
f676971a 3532 gen_rtx_AND (DImode,
1eb356b9 3533 plus_constant (dsta, ofs),
38a448ca 3534 GEN_INT (-8)));
ba4828e0 3535 set_mem_alias_set (meml, 0);
e01acbb1 3536
6c174fc0 3537 memh = change_address (dst, DImode,
f676971a 3538 gen_rtx_AND (DImode,
1eb356b9 3539 plus_constant (dsta, ofs + size - 1),
38a448ca 3540 GEN_INT (-8)));
ba4828e0 3541 set_mem_alias_set (memh, 0);
6c174fc0
RH
3542
3543 emit_move_insn (dsth, memh);
3544 emit_move_insn (dstl, meml);
30102605 3545 if (WORDS_BIG_ENDIAN)
6c174fc0 3546 {
30102605
RH
3547 addr = copy_addr_to_reg (plus_constant (dsta, ofs+size-1));
3548
3549 if (src != const0_rtx)
3550 {
3551 switch ((int) size)
3552 {
3553 case 2:
3554 emit_insn (gen_inswl_be (insh, gen_lowpart (HImode,src), addr));
3555 break;
3556 case 4:
3557 emit_insn (gen_insll_be (insh, gen_lowpart (SImode,src), addr));
3558 break;
3559 case 8:
3560 emit_insn (gen_insql_be (insh, gen_lowpart (DImode,src), addr));
3561 break;
3562 }
3563 emit_insn (gen_insxh (insl, gen_lowpart (DImode, src),
3564 GEN_INT (size*8), addr));
3565 }
6c174fc0 3566
c8d8ed65 3567 switch ((int) size)
6c174fc0
RH
3568 {
3569 case 2:
30102605 3570 emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffff), addr));
6c174fc0
RH
3571 break;
3572 case 4:
30102605 3573 {
c4b50f1a 3574 rtx msk = immed_double_const (0xffffffff, 0, DImode);
30102605 3575 emit_insn (gen_mskxl_be (dsth, dsth, msk, addr));
c4b50f1a 3576 break;
30102605 3577 }
c4b50f1a
RH
3578 case 8:
3579 emit_insn (gen_mskxl_be (dsth, dsth, constm1_rtx, addr));
6c174fc0
RH
3580 break;
3581 }
30102605
RH
3582
3583 emit_insn (gen_mskxh (dstl, dstl, GEN_INT (size*8), addr));
6c174fc0 3584 }
30102605
RH
3585 else
3586 {
3587 addr = copy_addr_to_reg (plus_constant (dsta, ofs));
6c174fc0 3588
e2ea71ea 3589 if (src != CONST0_RTX (GET_MODE (src)))
30102605
RH
3590 {
3591 emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
3592 GEN_INT (size*8), addr));
6c174fc0 3593
30102605
RH
3594 switch ((int) size)
3595 {
3596 case 2:
3597 emit_insn (gen_inswl_le (insl, gen_lowpart (HImode, src), addr));
3598 break;
3599 case 4:
3600 emit_insn (gen_insll_le (insl, gen_lowpart (SImode, src), addr));
3601 break;
3602 case 8:
bc4eac6d 3603 emit_insn (gen_insql_le (insl, gen_lowpart (DImode, src), addr));
30102605
RH
3604 break;
3605 }
3606 }
3607
3608 emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));
3609
3610 switch ((int) size)
3611 {
3612 case 2:
3613 emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffff), addr));
3614 break;
3615 case 4:
30102605 3616 {
c4b50f1a 3617 rtx msk = immed_double_const (0xffffffff, 0, DImode);
30102605 3618 emit_insn (gen_mskxl_le (dstl, dstl, msk, addr));
c4b50f1a 3619 break;
30102605 3620 }
c4b50f1a
RH
3621 case 8:
3622 emit_insn (gen_mskxl_le (dstl, dstl, constm1_rtx, addr));
30102605
RH
3623 break;
3624 }
6c174fc0
RH
3625 }
3626
e2ea71ea 3627 if (src != CONST0_RTX (GET_MODE (src)))
6c174fc0 3628 {
4208b40f
RH
3629 dsth = expand_binop (DImode, ior_optab, insh, dsth, dsth, 0, OPTAB_WIDEN);
3630 dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN);
6c174fc0 3631 }
f676971a 3632
30102605
RH
3633 if (WORDS_BIG_ENDIAN)
3634 {
3635 emit_move_insn (meml, dstl);
3636 emit_move_insn (memh, dsth);
3637 }
3638 else
3639 {
3640 /* Must store high before low for degenerate case of aligned. */
3641 emit_move_insn (memh, dsth);
3642 emit_move_insn (meml, dstl);
3643 }
6c174fc0
RH
3644}
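/* A corresponding sketch of the little-endian store expansion above for
   SIZE == 4; register names are illustrative only:

	ldq_u	t1,ofs+3(base)	# dsth: quad holding the last byte
	ldq_u	t0,ofs(base)	# dstl: quad holding the first byte
	lda	t2,ofs(base)	# addr
	inslh	src,t2,t4	# insh: src bytes destined for the high quad
	insll	src,t2,t3	# insl: src bytes destined for the low quad
	msklh	t1,t2,t1	# clear the bytes being replaced
	mskll	t0,t2,t0
	bis	t1,t4,t1
	bis	t0,t3,t0
	stq_u	t1,ofs+3(base)
	stq_u	t0,ofs(base)	# low quad last: see comment above

   In the degenerate aligned case both stq_u's address the same
   quadword; the low quadword holds the fully merged value, so it must
   be stored last.  */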
3645
4208b40f
RH
3646/* The block move code tries to maximize speed by separating loads and
3647 stores at the expense of register pressure: we load all of the data
3648 before we store it back out. Two secondary effects are worth
3649 mentioning: this speeds copying to/from aligned and unaligned
3650 buffers alike, and it makes the code significantly easier to write. */
6c174fc0 3651
4208b40f
RH
3652#define MAX_MOVE_WORDS 8
3653
3654/* Load an integral number of consecutive unaligned quadwords. */
6c174fc0
RH
3655
3656static void
a5c24926
RH
3657alpha_expand_unaligned_load_words (rtx *out_regs, rtx smem,
3658 HOST_WIDE_INT words, HOST_WIDE_INT ofs)
6c174fc0
RH
3659{
3660 rtx const im8 = GEN_INT (-8);
3661 rtx const i64 = GEN_INT (64);
4208b40f 3662 rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1];
1eb356b9 3663 rtx sreg, areg, tmp, smema;
6c174fc0
RH
3664 HOST_WIDE_INT i;
3665
1eb356b9
RH
3666 smema = XEXP (smem, 0);
3667 if (GET_CODE (smema) == LO_SUM)
3668 smema = force_reg (Pmode, smema);
3669
6c174fc0
RH
3670 /* Generate all the tmp registers we need. */
3671 for (i = 0; i < words; ++i)
4208b40f
RH
3672 {
3673 data_regs[i] = out_regs[i];
3674 ext_tmps[i] = gen_reg_rtx (DImode);
3675 }
3676 data_regs[words] = gen_reg_rtx (DImode);
3677
3678 if (ofs != 0)
f4ef873c 3679 smem = adjust_address (smem, GET_MODE (smem), ofs);
f676971a 3680
6c174fc0
RH
3681 /* Load up all of the source data. */
3682 for (i = 0; i < words; ++i)
3683 {
e01acbb1
RH
3684 tmp = change_address (smem, DImode,
3685 gen_rtx_AND (DImode,
1eb356b9 3686 plus_constant (smema, 8*i),
e01acbb1 3687 im8));
ba4828e0 3688 set_mem_alias_set (tmp, 0);
e01acbb1 3689 emit_move_insn (data_regs[i], tmp);
6c174fc0 3690 }
e01acbb1
RH
3691
3692 tmp = change_address (smem, DImode,
3693 gen_rtx_AND (DImode,
1eb356b9 3694 plus_constant (smema, 8*words - 1),
e01acbb1 3695 im8));
ba4828e0 3696 set_mem_alias_set (tmp, 0);
e01acbb1 3697 emit_move_insn (data_regs[words], tmp);
6c174fc0
RH
3698
3699 /* Extract the half-word fragments. Unfortunately DEC decided to make
f676971a 3700 extxh with offset zero a noop instead of zeroing the register, so
6c174fc0
RH
3701 we must take care of that edge condition ourselves with cmov. */
3702
1eb356b9 3703 sreg = copy_addr_to_reg (smema);
f676971a 3704 areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL,
4208b40f 3705 1, OPTAB_WIDEN);
30102605
RH
3706 if (WORDS_BIG_ENDIAN)
3707 emit_move_insn (sreg, plus_constant (sreg, 7));
6c174fc0
RH
3708 for (i = 0; i < words; ++i)
3709 {
30102605
RH
3710 if (WORDS_BIG_ENDIAN)
3711 {
3712 emit_insn (gen_extqh_be (data_regs[i], data_regs[i], sreg));
3713 emit_insn (gen_extxl_be (ext_tmps[i], data_regs[i+1], i64, sreg));
3714 }
3715 else
3716 {
3717 emit_insn (gen_extxl_le (data_regs[i], data_regs[i], i64, sreg));
3718 emit_insn (gen_extqh_le (ext_tmps[i], data_regs[i+1], sreg));
3719 }
38a448ca
RH
3720 emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
3721 gen_rtx_IF_THEN_ELSE (DImode,
4208b40f
RH
3722 gen_rtx_EQ (DImode, areg,
3723 const0_rtx),
38a448ca 3724 const0_rtx, ext_tmps[i])));
6c174fc0
RH
3725 }
3726
3727 /* Merge the half-words into whole words. */
3728 for (i = 0; i < words; ++i)
3729 {
4208b40f
RH
3730 out_regs[i] = expand_binop (DImode, ior_optab, data_regs[i],
3731 ext_tmps[i], data_regs[i], 1, OPTAB_WIDEN);
6c174fc0
RH
3732 }
3733}
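/* E.g., for WORDS == 2 the routine above emits three ldq_u loads (the
   extra quadword covers the tail), one extql/extqh pair per word, and
   one cmoveq per word to zero the high fragment when the source turns
   out to be aligned -- because of the extqh-with-offset-zero noop
   described above, the raw extqh would otherwise OR in the entire next
   quadword.  */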
3734
3735/* Store an integral number of consecutive unaligned quadwords. DATA_REGS
3736 may be NULL to store zeros. */
3737
3738static void
a5c24926
RH
3739alpha_expand_unaligned_store_words (rtx *data_regs, rtx dmem,
3740 HOST_WIDE_INT words, HOST_WIDE_INT ofs)
6c174fc0
RH
3741{
3742 rtx const im8 = GEN_INT (-8);
3743 rtx const i64 = GEN_INT (64);
6c174fc0 3744 rtx ins_tmps[MAX_MOVE_WORDS];
4208b40f 3745 rtx st_tmp_1, st_tmp_2, dreg;
1eb356b9 3746 rtx st_addr_1, st_addr_2, dmema;
6c174fc0
RH
3747 HOST_WIDE_INT i;
3748
1eb356b9
RH
3749 dmema = XEXP (dmem, 0);
3750 if (GET_CODE (dmema) == LO_SUM)
3751 dmema = force_reg (Pmode, dmema);
3752
6c174fc0
RH
3753 /* Generate all the tmp registers we need. */
3754 if (data_regs != NULL)
3755 for (i = 0; i < words; ++i)
3756 ins_tmps[i] = gen_reg_rtx(DImode);
3757 st_tmp_1 = gen_reg_rtx(DImode);
3758 st_tmp_2 = gen_reg_rtx(DImode);
f676971a 3759
4208b40f 3760 if (ofs != 0)
f4ef873c 3761 dmem = adjust_address (dmem, GET_MODE (dmem), ofs);
4208b40f
RH
3762
3763 st_addr_2 = change_address (dmem, DImode,
38a448ca 3764 gen_rtx_AND (DImode,
1eb356b9 3765 plus_constant (dmema, words*8 - 1),
6c174fc0 3766 im8));
ba4828e0 3767 set_mem_alias_set (st_addr_2, 0);
e01acbb1 3768
4208b40f 3769 st_addr_1 = change_address (dmem, DImode,
1eb356b9 3770 gen_rtx_AND (DImode, dmema, im8));
ba4828e0 3771 set_mem_alias_set (st_addr_1, 0);
6c174fc0
RH
3772
3773 /* Load up the destination end bits. */
3774 emit_move_insn (st_tmp_2, st_addr_2);
3775 emit_move_insn (st_tmp_1, st_addr_1);
3776
3777 /* Shift the input data into place. */
1eb356b9 3778 dreg = copy_addr_to_reg (dmema);
30102605
RH
3779 if (WORDS_BIG_ENDIAN)
3780 emit_move_insn (dreg, plus_constant (dreg, 7));
6c174fc0
RH
3781 if (data_regs != NULL)
3782 {
3783 for (i = words-1; i >= 0; --i)
3784 {
30102605
RH
3785 if (WORDS_BIG_ENDIAN)
3786 {
3787 emit_insn (gen_insql_be (ins_tmps[i], data_regs[i], dreg));
3788 emit_insn (gen_insxh (data_regs[i], data_regs[i], i64, dreg));
3789 }
3790 else
3791 {
3792 emit_insn (gen_insxh (ins_tmps[i], data_regs[i], i64, dreg));
3793 emit_insn (gen_insql_le (data_regs[i], data_regs[i], dreg));
3794 }
6c174fc0 3795 }
6c174fc0
RH
3796 for (i = words-1; i > 0; --i)
3797 {
4208b40f
RH
3798 ins_tmps[i-1] = expand_binop (DImode, ior_optab, data_regs[i],
3799 ins_tmps[i-1], ins_tmps[i-1], 1,
3800 OPTAB_WIDEN);
6c174fc0
RH
3801 }
3802 }
3803
3804 /* Split and merge the ends with the destination data. */
30102605
RH
3805 if (WORDS_BIG_ENDIAN)
3806 {
c4b50f1a 3807 emit_insn (gen_mskxl_be (st_tmp_2, st_tmp_2, constm1_rtx, dreg));
30102605
RH
3808 emit_insn (gen_mskxh (st_tmp_1, st_tmp_1, i64, dreg));
3809 }
3810 else
3811 {
3812 emit_insn (gen_mskxh (st_tmp_2, st_tmp_2, i64, dreg));
c4b50f1a 3813 emit_insn (gen_mskxl_le (st_tmp_1, st_tmp_1, constm1_rtx, dreg));
30102605 3814 }
6c174fc0
RH
3815
3816 if (data_regs != NULL)
3817 {
4208b40f
RH
3818 st_tmp_2 = expand_binop (DImode, ior_optab, st_tmp_2, ins_tmps[words-1],
3819 st_tmp_2, 1, OPTAB_WIDEN);
3820 st_tmp_1 = expand_binop (DImode, ior_optab, st_tmp_1, data_regs[0],
3821 st_tmp_1, 1, OPTAB_WIDEN);
6c174fc0
RH
3822 }
3823
3824 /* Store it all. */
30102605
RH
3825 if (WORDS_BIG_ENDIAN)
3826 emit_move_insn (st_addr_1, st_tmp_1);
3827 else
3828 emit_move_insn (st_addr_2, st_tmp_2);
6c174fc0
RH
3829 for (i = words-1; i > 0; --i)
3830 {
e01acbb1
RH
3831 rtx tmp = change_address (dmem, DImode,
3832 gen_rtx_AND (DImode,
30102605
RH
3833 plus_constant(dmema,
3834 WORDS_BIG_ENDIAN ? i*8-1 : i*8),
e01acbb1 3835 im8));
ba4828e0 3836 set_mem_alias_set (tmp, 0);
e01acbb1 3837 emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx);
6c174fc0 3838 }
30102605
RH
3839 if (WORDS_BIG_ENDIAN)
3840 emit_move_insn (st_addr_2, st_tmp_2);
3841 else
3842 emit_move_insn (st_addr_1, st_tmp_1);
6c174fc0
RH
3843}
3844
3845
3846/* Expand string/block move operations.
3847
3848 operands[0] is the pointer to the destination.
3849 operands[1] is the pointer to the source.
3850 operands[2] is the number of bytes to move.
3851 operands[3] is the alignment. */
3852
3853int
a5c24926 3854alpha_expand_block_move (rtx operands[])
6c174fc0
RH
3855{
3856 rtx bytes_rtx = operands[2];
3857 rtx align_rtx = operands[3];
f35cba21 3858 HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
c17f08e1
RH
3859 HOST_WIDE_INT bytes = orig_bytes;
3860 HOST_WIDE_INT src_align = INTVAL (align_rtx) * BITS_PER_UNIT;
3861 HOST_WIDE_INT dst_align = src_align;
bdb429a5
RK
3862 rtx orig_src = operands[1];
3863 rtx orig_dst = operands[0];
3864 rtx data_regs[2 * MAX_MOVE_WORDS + 16];
4208b40f 3865 rtx tmp;
1eb356b9 3866 unsigned int i, words, ofs, nregs = 0;
f676971a 3867
bdb429a5 3868 if (orig_bytes <= 0)
6c174fc0 3869 return 1;
c17f08e1 3870 else if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
6c174fc0
RH
3871 return 0;
3872
4208b40f
RH
3873 /* Look for additional alignment information from recorded register info. */
3874
3875 tmp = XEXP (orig_src, 0);
7d83f4f5 3876 if (REG_P (tmp))
bdb429a5 3877 src_align = MAX (src_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f 3878 else if (GET_CODE (tmp) == PLUS
7d83f4f5
UB
3879 && REG_P (XEXP (tmp, 0))
3880 && CONST_INT_P (XEXP (tmp, 1)))
4208b40f 3881 {
bdb429a5
RK
3882 unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
3883 unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
3884
3885 if (a > src_align)
3886 {
bdb429a5
RK
3887 if (a >= 64 && c % 8 == 0)
3888 src_align = 64;
3889 else if (a >= 32 && c % 4 == 0)
3890 src_align = 32;
3891 else if (a >= 16 && c % 2 == 0)
3892 src_align = 16;
4208b40f
RH
3893 }
3894 }
f676971a 3895
4208b40f 3896 tmp = XEXP (orig_dst, 0);
7d83f4f5 3897 if (REG_P (tmp))
bdb429a5 3898 dst_align = MAX (dst_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f 3899 else if (GET_CODE (tmp) == PLUS
7d83f4f5
UB
3900 && REG_P (XEXP (tmp, 0))
3901 && CONST_INT_P (XEXP (tmp, 1)))
4208b40f 3902 {
bdb429a5
RK
3903 unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
3904 unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
3905
3906 if (a > dst_align)
3907 {
bdb429a5
RK
3908 if (a >= 64 && c % 8 == 0)
3909 dst_align = 64;
3910 else if (a >= 32 && c % 4 == 0)
3911 dst_align = 32;
3912 else if (a >= 16 && c % 2 == 0)
3913 dst_align = 16;
4208b40f
RH
3914 }
3915 }
3916
4208b40f 3917 ofs = 0;
bdb429a5 3918 if (src_align >= 64 && bytes >= 8)
6c174fc0
RH
3919 {
3920 words = bytes / 8;
3921
6c174fc0 3922 for (i = 0; i < words; ++i)
5197bd50 3923 data_regs[nregs + i] = gen_reg_rtx (DImode);
6c174fc0 3924
6c174fc0 3925 for (i = 0; i < words; ++i)
bdb429a5 3926 emit_move_insn (data_regs[nregs + i],
f4ef873c 3927 adjust_address (orig_src, DImode, ofs + i * 8));
6c174fc0 3928
4208b40f 3929 nregs += words;
6c174fc0 3930 bytes -= words * 8;
cd36edbd 3931 ofs += words * 8;
6c174fc0 3932 }
bdb429a5
RK
3933
3934 if (src_align >= 32 && bytes >= 4)
6c174fc0
RH
3935 {
3936 words = bytes / 4;
3937
6c174fc0 3938 for (i = 0; i < words; ++i)
5197bd50 3939 data_regs[nregs + i] = gen_reg_rtx (SImode);
6c174fc0 3940
6c174fc0 3941 for (i = 0; i < words; ++i)
bdb429a5 3942 emit_move_insn (data_regs[nregs + i],
792760b9 3943 adjust_address (orig_src, SImode, ofs + i * 4));
6c174fc0 3944
4208b40f 3945 nregs += words;
6c174fc0 3946 bytes -= words * 4;
cd36edbd 3947 ofs += words * 4;
6c174fc0 3948 }
bdb429a5 3949
c17f08e1 3950 if (bytes >= 8)
6c174fc0
RH
3951 {
3952 words = bytes / 8;
3953
6c174fc0 3954 for (i = 0; i < words+1; ++i)
5197bd50 3955 data_regs[nregs + i] = gen_reg_rtx (DImode);
6c174fc0 3956
c576fce7
RH
3957 alpha_expand_unaligned_load_words (data_regs + nregs, orig_src,
3958 words, ofs);
6c174fc0 3959
4208b40f 3960 nregs += words;
6c174fc0 3961 bytes -= words * 8;
cd36edbd 3962 ofs += words * 8;
6c174fc0 3963 }
bdb429a5 3964
bdb429a5 3965 if (! TARGET_BWX && bytes >= 4)
6c174fc0 3966 {
4208b40f 3967 data_regs[nregs++] = tmp = gen_reg_rtx (SImode);
6c174fc0 3968 alpha_expand_unaligned_load (tmp, orig_src, 4, ofs, 0);
6c174fc0
RH
3969 bytes -= 4;
3970 ofs += 4;
3971 }
bdb429a5 3972
6c174fc0
RH
3973 if (bytes >= 2)
3974 {
bdb429a5 3975 if (src_align >= 16)
6c174fc0
RH
3976 {
3977 do {
4208b40f 3978 data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
f4ef873c 3979 emit_move_insn (tmp, adjust_address (orig_src, HImode, ofs));
6c174fc0
RH
3980 bytes -= 2;
3981 ofs += 2;
3982 } while (bytes >= 2);
3983 }
bdb429a5 3984 else if (! TARGET_BWX)
6c174fc0 3985 {
4208b40f 3986 data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
6c174fc0 3987 alpha_expand_unaligned_load (tmp, orig_src, 2, ofs, 0);
6c174fc0
RH
3988 bytes -= 2;
3989 ofs += 2;
3990 }
3991 }
bdb429a5 3992
6c174fc0
RH
3993 while (bytes > 0)
3994 {
4208b40f 3995 data_regs[nregs++] = tmp = gen_reg_rtx (QImode);
f4ef873c 3996 emit_move_insn (tmp, adjust_address (orig_src, QImode, ofs));
6c174fc0
RH
3997 bytes -= 1;
3998 ofs += 1;
3999 }
bdb429a5 4000
56daab84 4001 gcc_assert (nregs <= ARRAY_SIZE (data_regs));
4208b40f 4002
bdb429a5 4003 /* Now save it back out again. */
4208b40f
RH
4004
4005 i = 0, ofs = 0;
4006
4208b40f 4007 /* Write out the data in whatever chunks reading the source allowed. */
bdb429a5 4008 if (dst_align >= 64)
4208b40f
RH
4009 {
4010 while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4011 {
f4ef873c 4012 emit_move_insn (adjust_address (orig_dst, DImode, ofs),
4208b40f
RH
4013 data_regs[i]);
4014 ofs += 8;
4015 i++;
4016 }
4017 }
bdb429a5
RK
4018
4019 if (dst_align >= 32)
4208b40f
RH
4020 {
4021 /* If the source has remaining DImode regs, write them out in
4022 two pieces. */
4023 while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4024 {
4025 tmp = expand_binop (DImode, lshr_optab, data_regs[i], GEN_INT (32),
4026 NULL_RTX, 1, OPTAB_WIDEN);
4027
f4ef873c 4028 emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4208b40f 4029 gen_lowpart (SImode, data_regs[i]));
f4ef873c 4030 emit_move_insn (adjust_address (orig_dst, SImode, ofs + 4),
4208b40f
RH
4031 gen_lowpart (SImode, tmp));
4032 ofs += 8;
4033 i++;
4034 }
4035
4036 while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4037 {
f4ef873c 4038 emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4208b40f
RH
4039 data_regs[i]);
4040 ofs += 4;
4041 i++;
4042 }
4043 }
bdb429a5 4044
4208b40f
RH
4045 if (i < nregs && GET_MODE (data_regs[i]) == DImode)
4046 {
4047 /* Write out a remaining block of words using unaligned methods. */
4048
bdb429a5
RK
4049 for (words = 1; i + words < nregs; words++)
4050 if (GET_MODE (data_regs[i + words]) != DImode)
4208b40f
RH
4051 break;
4052
4053 if (words == 1)
4054 alpha_expand_unaligned_store (orig_dst, data_regs[i], 8, ofs);
4055 else
bdb429a5
RK
4056 alpha_expand_unaligned_store_words (data_regs + i, orig_dst,
4057 words, ofs);
f676971a 4058
4208b40f
RH
4059 i += words;
4060 ofs += words * 8;
4061 }
4062
4063 /* Due to the above, this won't be aligned. */
4064 /* ??? If we have more than one of these, consider constructing full
4065 words in registers and using alpha_expand_unaligned_store_words. */
4066 while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4067 {
4068 alpha_expand_unaligned_store (orig_dst, data_regs[i], 4, ofs);
4069 ofs += 4;
4070 i++;
4071 }
4072
bdb429a5 4073 if (dst_align >= 16)
4208b40f
RH
4074 while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4075 {
f4ef873c 4076 emit_move_insn (adjust_address (orig_dst, HImode, ofs), data_regs[i]);
4208b40f
RH
4077 i++;
4078 ofs += 2;
4079 }
4080 else
4081 while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4082 {
4083 alpha_expand_unaligned_store (orig_dst, data_regs[i], 2, ofs);
4084 i++;
4085 ofs += 2;
4086 }
bdb429a5 4087
56daab84
NS
4088 /* The remainder must be byte copies. */
4089 while (i < nregs)
4208b40f 4090 {
56daab84 4091 gcc_assert (GET_MODE (data_regs[i]) == QImode);
f4ef873c 4092 emit_move_insn (adjust_address (orig_dst, QImode, ofs), data_regs[i]);
4208b40f
RH
4093 i++;
4094 ofs += 1;
4095 }
bdb429a5 4096
6c174fc0
RH
4097 return 1;
4098}
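/* A worked example of the above: a 16-byte copy whose source is known
   64-bit aligned but whose destination is not reads two quadwords with
   plain DImode moves, then hands both registers to
   alpha_expand_unaligned_store_words, which writes them back with
   ldq_u, insql/insqh, mskql/mskqh and stq_u.  With both sides 64-bit
   aligned, the same copy collapses to two ldq/stq pairs.  */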
4099
4100int
a5c24926 4101alpha_expand_block_clear (rtx operands[])
6c174fc0
RH
4102{
4103 rtx bytes_rtx = operands[1];
57e84f18 4104 rtx align_rtx = operands[3];
bdb429a5 4105 HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
c17f08e1
RH
4106 HOST_WIDE_INT bytes = orig_bytes;
4107 HOST_WIDE_INT align = INTVAL (align_rtx) * BITS_PER_UNIT;
4108 HOST_WIDE_INT alignofs = 0;
bdb429a5 4109 rtx orig_dst = operands[0];
4208b40f 4110 rtx tmp;
c17f08e1 4111 int i, words, ofs = 0;
f676971a 4112
bdb429a5 4113 if (orig_bytes <= 0)
6c174fc0 4114 return 1;
c17f08e1 4115 if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
6c174fc0
RH
4116 return 0;
4117
4208b40f 4118 /* Look for stricter alignment. */
4208b40f 4119 tmp = XEXP (orig_dst, 0);
7d83f4f5 4120 if (REG_P (tmp))
bdb429a5 4121 align = MAX (align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f 4122 else if (GET_CODE (tmp) == PLUS
7d83f4f5
UB
4123 && REG_P (XEXP (tmp, 0))
4124 && CONST_INT_P (XEXP (tmp, 1)))
4208b40f 4125 {
c17f08e1
RH
4126 HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4127 int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
4128
4129 if (a > align)
4130 {
c17f08e1
RH
4131 if (a >= 64)
4132 align = a, alignofs = 8 - c % 8;
4133 else if (a >= 32)
4134 align = a, alignofs = 4 - c % 4;
4135 else if (a >= 16)
4136 align = a, alignofs = 2 - c % 2;
4208b40f
RH
4137 }
4138 }
4139
c17f08e1
RH
4140 /* Handle an unaligned prefix first. */
4141
4142 if (alignofs > 0)
4143 {
4144#if HOST_BITS_PER_WIDE_INT >= 64
4145 /* Given that alignofs is bounded by align, the only time BWX could
4146 generate three stores is for a 7 byte fill. Prefer two individual
4147 stores over a load/mask/store sequence. */
4148 if ((!TARGET_BWX || alignofs == 7)
4149 && align >= 32
4150 && !(alignofs == 4 && bytes >= 4))
4151 {
4152 enum machine_mode mode = (align >= 64 ? DImode : SImode);
4153 int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs;
4154 rtx mem, tmp;
4155 HOST_WIDE_INT mask;
4156
f4ef873c 4157 mem = adjust_address (orig_dst, mode, ofs - inv_alignofs);
ba4828e0 4158 set_mem_alias_set (mem, 0);
c17f08e1
RH
4159
4160 mask = ~(~(HOST_WIDE_INT)0 << (inv_alignofs * 8));
4161 if (bytes < alignofs)
4162 {
4163 mask |= ~(HOST_WIDE_INT)0 << ((inv_alignofs + bytes) * 8);
4164 ofs += bytes;
4165 bytes = 0;
4166 }
4167 else
4168 {
4169 bytes -= alignofs;
4170 ofs += alignofs;
4171 }
4172 alignofs = 0;
4173
4174 tmp = expand_binop (mode, and_optab, mem, GEN_INT (mask),
4175 NULL_RTX, 1, OPTAB_WIDEN);
4176
4177 emit_move_insn (mem, tmp);
4178 }
4179#endif
4180
4181 if (TARGET_BWX && (alignofs & 1) && bytes >= 1)
4182 {
f4ef873c 4183 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
c17f08e1
RH
4184 bytes -= 1;
4185 ofs += 1;
4186 alignofs -= 1;
4187 }
4188 if (TARGET_BWX && align >= 16 && (alignofs & 3) == 2 && bytes >= 2)
4189 {
f4ef873c 4190 emit_move_insn (adjust_address (orig_dst, HImode, ofs), const0_rtx);
c17f08e1
RH
4191 bytes -= 2;
4192 ofs += 2;
4193 alignofs -= 2;
4194 }
4195 if (alignofs == 4 && bytes >= 4)
4196 {
f4ef873c 4197 emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
c17f08e1
RH
4198 bytes -= 4;
4199 ofs += 4;
4200 alignofs = 0;
4201 }
4202
4203 /* If we've not used the extra lead alignment information by now,
4204 we won't be able to. Downgrade align to match what's left over. */
4205 if (alignofs > 0)
4206 {
4207 alignofs = alignofs & -alignofs;
4208 align = MIN (align, alignofs * BITS_PER_UNIT);
4209 }
4210 }
4211
4212 /* Handle a block of contiguous quadwords. */
6c174fc0 4213
bdb429a5 4214 if (align >= 64 && bytes >= 8)
6c174fc0
RH
4215 {
4216 words = bytes / 8;
4217
4218 for (i = 0; i < words; ++i)
1eb356b9 4219 emit_move_insn (adjust_address (orig_dst, DImode, ofs + i * 8),
f4ef873c 4220 const0_rtx);
6c174fc0
RH
4221
4222 bytes -= words * 8;
cd36edbd 4223 ofs += words * 8;
6c174fc0 4224 }
bdb429a5 4225
c17f08e1
RH
4226 /* If the block is large and appropriately aligned, emit a single
4227 store followed by a sequence of stq_u insns. */
4228
4229 if (align >= 32 && bytes > 16)
4230 {
1eb356b9
RH
4231 rtx orig_dsta;
4232
f4ef873c 4233 emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
c17f08e1
RH
4234 bytes -= 4;
4235 ofs += 4;
4236
1eb356b9
RH
4237 orig_dsta = XEXP (orig_dst, 0);
4238 if (GET_CODE (orig_dsta) == LO_SUM)
4239 orig_dsta = force_reg (Pmode, orig_dsta);
4240
c17f08e1
RH
4241 words = bytes / 8;
4242 for (i = 0; i < words; ++i)
4243 {
ba4828e0
RK
4244 rtx mem
4245 = change_address (orig_dst, DImode,
4246 gen_rtx_AND (DImode,
1eb356b9 4247 plus_constant (orig_dsta, ofs + i*8),
ba4828e0
RK
4248 GEN_INT (-8)));
4249 set_mem_alias_set (mem, 0);
c17f08e1
RH
4250 emit_move_insn (mem, const0_rtx);
4251 }
4252
4253 /* Depending on the alignment, the first stq_u may have overlapped
4254 with the initial stl, which means that the last stq_u didn't
4255 write as much as it would appear. Leave those questionable bytes
4256 unaccounted for. */
4257 bytes -= words * 8 - 4;
4258 ofs += words * 8 - 4;
4259 }
4260
4261 /* Handle a smaller block of aligned words. */
4262
4263 if ((align >= 64 && bytes == 4)
4264 || (align == 32 && bytes >= 4))
6c174fc0
RH
4265 {
4266 words = bytes / 4;
4267
4268 for (i = 0; i < words; ++i)
f4ef873c 4269 emit_move_insn (adjust_address (orig_dst, SImode, ofs + i * 4),
bdb429a5 4270 const0_rtx);
6c174fc0
RH
4271
4272 bytes -= words * 4;
cd36edbd 4273 ofs += words * 4;
6c174fc0 4274 }
bdb429a5 4275
c17f08e1
RH
4276 /* An unaligned block uses stq_u stores for as many as possible. */
4277
4278 if (bytes >= 8)
6c174fc0
RH
4279 {
4280 words = bytes / 8;
4281
cd36edbd 4282 alpha_expand_unaligned_store_words (NULL, orig_dst, words, ofs);
6c174fc0
RH
4283
4284 bytes -= words * 8;
cd36edbd 4285 ofs += words * 8;
6c174fc0
RH
4286 }
4287
c17f08e1 4288 /* Next clean up any trailing pieces. */
6c174fc0 4289
c17f08e1
RH
4290#if HOST_BITS_PER_WIDE_INT >= 64
4291 /* Count the number of bits in BYTES for which aligned stores could
4292 be emitted. */
4293 words = 0;
4294 for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1)
4295 if (bytes & i)
4296 words += 1;
4297
4298 /* If we have appropriate alignment (and it wouldn't take too many
4299 instructions otherwise), mask out the bytes we need. */
4300 if (TARGET_BWX ? words > 2 : bytes > 0)
4301 {
4302 if (align >= 64)
4303 {
4304 rtx mem, tmp;
4305 HOST_WIDE_INT mask;
4306
f4ef873c 4307 mem = adjust_address (orig_dst, DImode, ofs);
ba4828e0 4308 set_mem_alias_set (mem, 0);
c17f08e1
RH
4309
4310 mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
4311
4312 tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask),
4313 NULL_RTX, 1, OPTAB_WIDEN);
4314
4315 emit_move_insn (mem, tmp);
4316 return 1;
4317 }
4318 else if (align >= 32 && bytes < 4)
4319 {
4320 rtx mem, tmp;
4321 HOST_WIDE_INT mask;
4322
f4ef873c 4323 mem = adjust_address (orig_dst, SImode, ofs);
ba4828e0 4324 set_mem_alias_set (mem, 0);
c17f08e1
RH
4325
4326 mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
4327
4328 tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask),
4329 NULL_RTX, 1, OPTAB_WIDEN);
4330
4331 emit_move_insn (mem, tmp);
4332 return 1;
4333 }
6c174fc0 4334 }
c17f08e1 4335#endif
bdb429a5 4336
6c174fc0
RH
4337 if (!TARGET_BWX && bytes >= 4)
4338 {
4339 alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs);
4340 bytes -= 4;
4341 ofs += 4;
4342 }
bdb429a5 4343
6c174fc0
RH
4344 if (bytes >= 2)
4345 {
bdb429a5 4346 if (align >= 16)
6c174fc0
RH
4347 {
4348 do {
f4ef873c 4349 emit_move_insn (adjust_address (orig_dst, HImode, ofs),
6c174fc0
RH
4350 const0_rtx);
4351 bytes -= 2;
4352 ofs += 2;
4353 } while (bytes >= 2);
4354 }
bdb429a5 4355 else if (! TARGET_BWX)
6c174fc0
RH
4356 {
4357 alpha_expand_unaligned_store (orig_dst, const0_rtx, 2, ofs);
4358 bytes -= 2;
4359 ofs += 2;
4360 }
4361 }
bdb429a5 4362
6c174fc0
RH
4363 while (bytes > 0)
4364 {
f4ef873c 4365 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
6c174fc0
RH
4366 bytes -= 1;
4367 ofs += 1;
4368 }
4369
4370 return 1;
4371}
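/* Worked example: clearing 24 bytes at a 32-bit-aligned address (with
   no stricter alignment recorded) goes through the "large and
   appropriately aligned" path above: one stl of zero, two stq_u's
   covering the middle, and -- since the coverage of the final stq_u is
   counted conservatively -- two trailing stl's for the last 8 bytes.  */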
6d8fd7bb
RH
4372
4373/* Returns a mask so that zap(x, value) == x & mask. */
4374
4375rtx
a5c24926 4376alpha_expand_zap_mask (HOST_WIDE_INT value)
6d8fd7bb
RH
4377{
4378 rtx result;
4379 int i;
4380
4381 if (HOST_BITS_PER_WIDE_INT >= 64)
4382 {
4383 HOST_WIDE_INT mask = 0;
4384
4385 for (i = 7; i >= 0; --i)
4386 {
4387 mask <<= 8;
4388 if (!((value >> i) & 1))
4389 mask |= 0xff;
4390 }
4391
4392 result = gen_int_mode (mask, DImode);
4393 }
56daab84 4394 else
6d8fd7bb
RH
4395 {
4396 HOST_WIDE_INT mask_lo = 0, mask_hi = 0;
4397
56daab84
NS
4398 gcc_assert (HOST_BITS_PER_WIDE_INT == 32);
4399
6d8fd7bb
RH
4400 for (i = 7; i >= 4; --i)
4401 {
4402 mask_hi <<= 8;
4403 if (!((value >> i) & 1))
4404 mask_hi |= 0xff;
4405 }
4406
4407 for (i = 3; i >= 0; --i)
4408 {
4409 mask_lo <<= 8;
4410 if (!((value >> i) & 1))
4411 mask_lo |= 0xff;
4412 }
4413
4414 result = immed_double_const (mask_lo, mask_hi, DImode);
4415 }
6d8fd7bb
RH
4416
4417 return result;
4418}
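/* Example: alpha_expand_zap_mask (0x0f) returns 0xffffffff00000000.
   ZAP clears byte I of its input whenever bit I of VALUE is set, so
   here the low four bytes are zapped and only the high four survive
   the equivalent AND.  */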
4419
4420void
a5c24926
RH
4421alpha_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
4422 enum machine_mode mode,
4423 rtx op0, rtx op1, rtx op2)
6d8fd7bb
RH
4424{
4425 op0 = gen_lowpart (mode, op0);
4426
4427 if (op1 == const0_rtx)
4428 op1 = CONST0_RTX (mode);
4429 else
4430 op1 = gen_lowpart (mode, op1);
c4b50f1a
RH
4431
4432 if (op2 == const0_rtx)
6d8fd7bb
RH
4433 op2 = CONST0_RTX (mode);
4434 else
4435 op2 = gen_lowpart (mode, op2);
4436
4437 emit_insn ((*gen) (op0, op1, op2));
4438}
0b196b18 4439
b686c48c
RH
4440/* A subroutine of the atomic operation splitters. Jump to LABEL if
4441 COND is true. Mark the jump as unlikely to be taken. */
4442
4443static void
4444emit_unlikely_jump (rtx cond, rtx label)
4445{
4446 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
4447 rtx x;
4448
4449 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
4450 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
bf758008 4451 add_reg_note (x, REG_BR_PROB, very_unlikely);
b686c48c
RH
4452}
4453
4454/* A subroutine of the atomic operation splitters. Emit a load-locked
4455 instruction in MODE. */
4456
4457static void
4458emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
4459{
4460 rtx (*fn) (rtx, rtx) = NULL;
4461 if (mode == SImode)
4462 fn = gen_load_locked_si;
4463 else if (mode == DImode)
4464 fn = gen_load_locked_di;
4465 emit_insn (fn (reg, mem));
4466}
4467
4468/* A subroutine of the atomic operation splitters. Emit a store-conditional
4469 instruction in MODE. */
4470
4471static void
4472emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
4473{
4474 rtx (*fn) (rtx, rtx, rtx) = NULL;
4475 if (mode == SImode)
4476 fn = gen_store_conditional_si;
4477 else if (mode == DImode)
4478 fn = gen_store_conditional_di;
4479 emit_insn (fn (res, mem, val));
4480}
4481
38f31687
RH
4482/* A subroutine of the atomic operation splitters. Emit an insxl
4483 instruction in MODE. */
4484
4485static rtx
4486emit_insxl (enum machine_mode mode, rtx op1, rtx op2)
4487{
4488 rtx ret = gen_reg_rtx (DImode);
4489 rtx (*fn) (rtx, rtx, rtx);
4490
4491 if (WORDS_BIG_ENDIAN)
4492 {
4493 if (mode == QImode)
4494 fn = gen_insbl_be;
4495 else
4496 fn = gen_inswl_be;
4497 }
4498 else
4499 {
4500 if (mode == QImode)
4501 fn = gen_insbl_le;
4502 else
4503 fn = gen_inswl_le;
4504 }
f2477b06
RS
4505 /* The insbl and inswl patterns require a register operand. */
4506 op1 = force_reg (mode, op1);
38f31687
RH
4507 emit_insn (fn (ret, op1, op2));
4508
4509 return ret;
4510}
4511
ea2c620c 4512/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
0b196b18
RH
4513 to perform. MEM is the memory on which to operate. VAL is the second
4514 operand of the binary operator. BEFORE and AFTER are optional locations to
4515 return the value of MEM either before or after the operation. SCRATCH is
4516 a scratch register. */
4517
4518void
4519alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
4520 rtx before, rtx after, rtx scratch)
4521{
4522 enum machine_mode mode = GET_MODE (mem);
b686c48c 4523 rtx label, x, cond = gen_rtx_REG (DImode, REGNO (scratch));
0b196b18
RH
4524
4525 emit_insn (gen_memory_barrier ());
4526
4527 label = gen_label_rtx ();
4528 emit_label (label);
4529 label = gen_rtx_LABEL_REF (DImode, label);
4530
4531 if (before == NULL)
4532 before = scratch;
b686c48c 4533 emit_load_locked (mode, before, mem);
0b196b18
RH
4534
4535 if (code == NOT)
d04dceb5
UB
4536 {
4537 x = gen_rtx_AND (mode, before, val);
4538 emit_insn (gen_rtx_SET (VOIDmode, val, x));
4539
4540 x = gen_rtx_NOT (mode, val);
4541 }
0b196b18
RH
4542 else
4543 x = gen_rtx_fmt_ee (code, mode, before, val);
0b196b18
RH
4544 if (after)
4545 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
f12b785d 4546 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
0b196b18 4547
b686c48c
RH
4548 emit_store_conditional (mode, cond, mem, scratch);
4549
4550 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4551 emit_unlikely_jump (x, label);
4552
4553 emit_insn (gen_memory_barrier ());
4554}
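/* The emitted sequence is a classic load-locked/store-conditional
   retry loop; for an atomic add on a DImode MEM it looks roughly like
   this (register names illustrative only):

	mb
   1:	ldq_l	before,0(mem)
	addq	before,val,scratch
	stq_c	scratch,0(mem)	# scratch := 1 on success, 0 on failure
	beq	scratch,1b	# retry branch, predicted not taken
	mb
*/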
4555
4556/* Expand a compare and swap operation. */
4557
4558void
4559alpha_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
4560 rtx scratch)
4561{
4562 enum machine_mode mode = GET_MODE (mem);
4563 rtx label1, label2, x, cond = gen_lowpart (DImode, scratch);
4564
4565 emit_insn (gen_memory_barrier ());
4566
4567 label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4568 label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4569 emit_label (XEXP (label1, 0));
4570
4571 emit_load_locked (mode, retval, mem);
4572
4573 x = gen_lowpart (DImode, retval);
4574 if (oldval == const0_rtx)
4575 x = gen_rtx_NE (DImode, x, const0_rtx);
0b196b18 4576 else
b686c48c
RH
4577 {
4578 x = gen_rtx_EQ (DImode, x, oldval);
4579 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
4580 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4581 }
4582 emit_unlikely_jump (x, label2);
4583
4584 emit_move_insn (scratch, newval);
4585 emit_store_conditional (mode, cond, mem, scratch);
0b196b18
RH
4586
4587 x = gen_rtx_EQ (DImode, cond, const0_rtx);
b686c48c
RH
4588 emit_unlikely_jump (x, label1);
4589
4590 emit_insn (gen_memory_barrier ());
4591 emit_label (XEXP (label2, 0));
4592}
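/* This has the same ldx_l/stx_c loop shape as alpha_split_atomic_op,
   except that a mismatch with OLDVAL takes the unlikely branch straight
   to the exit label without attempting the store.  */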
4593
38f31687
RH
4594void
4595alpha_expand_compare_and_swap_12 (rtx dst, rtx mem, rtx oldval, rtx newval)
4596{
4597 enum machine_mode mode = GET_MODE (mem);
4598 rtx addr, align, wdst;
4599 rtx (*fn5) (rtx, rtx, rtx, rtx, rtx);
4600
4601 addr = force_reg (DImode, XEXP (mem, 0));
4602 align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
4603 NULL_RTX, 1, OPTAB_DIRECT);
4604
4605 oldval = convert_modes (DImode, mode, oldval, 1);
4606 newval = emit_insxl (mode, newval, addr);
4607
4608 wdst = gen_reg_rtx (DImode);
4609 if (mode == QImode)
4610 fn5 = gen_sync_compare_and_swapqi_1;
4611 else
4612 fn5 = gen_sync_compare_and_swaphi_1;
4613 emit_insn (fn5 (wdst, addr, oldval, newval, align));
4614
4615 emit_move_insn (dst, gen_lowpart (mode, wdst));
4616}
4617
4618void
4619alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
4620 rtx oldval, rtx newval, rtx align,
4621 rtx scratch, rtx cond)
4622{
4623 rtx label1, label2, mem, width, mask, x;
4624
4625 mem = gen_rtx_MEM (DImode, align);
4626 MEM_VOLATILE_P (mem) = 1;
4627
4628 emit_insn (gen_memory_barrier ());
4629 label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4630 label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4631 emit_label (XEXP (label1, 0));
4632
4633 emit_load_locked (DImode, scratch, mem);
4634
4635 width = GEN_INT (GET_MODE_BITSIZE (mode));
4636 mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
4637 if (WORDS_BIG_ENDIAN)
4638 emit_insn (gen_extxl_be (dest, scratch, width, addr));
4639 else
4640 emit_insn (gen_extxl_le (dest, scratch, width, addr));
4641
4642 if (oldval == const0_rtx)
4643 x = gen_rtx_NE (DImode, dest, const0_rtx);
4644 else
4645 {
4646 x = gen_rtx_EQ (DImode, dest, oldval);
4647 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
4648 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4649 }
4650 emit_unlikely_jump (x, label2);
4651
4652 if (WORDS_BIG_ENDIAN)
4653 emit_insn (gen_mskxl_be (scratch, scratch, mask, addr));
4654 else
4655 emit_insn (gen_mskxl_le (scratch, scratch, mask, addr));
4656 emit_insn (gen_iordi3 (scratch, scratch, newval));
4657
4658 emit_store_conditional (DImode, scratch, mem, scratch);
4659
4660 x = gen_rtx_EQ (DImode, scratch, const0_rtx);
4661 emit_unlikely_jump (x, label1);
4662
4663 emit_insn (gen_memory_barrier ());
4664 emit_label (XEXP (label2, 0));
4665}
4666
b686c48c
RH
4667/* Expand an atomic exchange operation. */
4668
4669void
4670alpha_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
4671{
4672 enum machine_mode mode = GET_MODE (mem);
4673 rtx label, x, cond = gen_lowpart (DImode, scratch);
0b196b18 4674
b686c48c
RH
4675 label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4676 emit_label (XEXP (label, 0));
4677
4678 emit_load_locked (mode, retval, mem);
4679 emit_move_insn (scratch, val);
4680 emit_store_conditional (mode, cond, mem, scratch);
4681
4682 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4683 emit_unlikely_jump (x, label);
27738602
RH
4684
4685 emit_insn (gen_memory_barrier ());
0b196b18 4686}
38f31687
RH
4687
4688void
4689alpha_expand_lock_test_and_set_12 (rtx dst, rtx mem, rtx val)
4690{
4691 enum machine_mode mode = GET_MODE (mem);
4692 rtx addr, align, wdst;
4693 rtx (*fn4) (rtx, rtx, rtx, rtx);
4694
4695 /* Force the address into a register. */
4696 addr = force_reg (DImode, XEXP (mem, 0));
4697
4698 /* Align it to a multiple of 8. */
4699 align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
4700 NULL_RTX, 1, OPTAB_DIRECT);
4701
4702 /* Insert val into the correct byte location within the word. */
4703 val = emit_insxl (mode, val, addr);
4704
4705 wdst = gen_reg_rtx (DImode);
4706 if (mode == QImode)
4707 fn4 = gen_sync_lock_test_and_setqi_1;
4708 else
4709 fn4 = gen_sync_lock_test_and_sethi_1;
4710 emit_insn (fn4 (wdst, addr, val, align));
4711
4712 emit_move_insn (dst, gen_lowpart (mode, wdst));
4713}
4714
4715void
4716alpha_split_lock_test_and_set_12 (enum machine_mode mode, rtx dest, rtx addr,
4717 rtx val, rtx align, rtx scratch)
4718{
4719 rtx label, mem, width, mask, x;
4720
4721 mem = gen_rtx_MEM (DImode, align);
4722 MEM_VOLATILE_P (mem) = 1;
4723
38f31687
RH
4724 label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4725 emit_label (XEXP (label, 0));
4726
4727 emit_load_locked (DImode, scratch, mem);
4728
4729 width = GEN_INT (GET_MODE_BITSIZE (mode));
4730 mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
4731 if (WORDS_BIG_ENDIAN)
4732 {
4733 emit_insn (gen_extxl_be (dest, scratch, width, addr));
4734 emit_insn (gen_mskxl_be (scratch, scratch, mask, addr));
4735 }
4736 else
4737 {
4738 emit_insn (gen_extxl_le (dest, scratch, width, addr));
4739 emit_insn (gen_mskxl_le (scratch, scratch, mask, addr));
4740 }
4741 emit_insn (gen_iordi3 (scratch, scratch, val));
4742
4743 emit_store_conditional (DImode, scratch, mem, scratch);
4744
4745 x = gen_rtx_EQ (DImode, scratch, const0_rtx);
4746 emit_unlikely_jump (x, label);
27738602
RH
4747
4748 emit_insn (gen_memory_barrier ());
38f31687 4749}
a6f12d7c
RK
4750\f
4751/* Adjust the cost of a scheduling dependency. Return the new cost of
4752 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
4753
c237e94a 4754static int
a5c24926 4755alpha_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a6f12d7c 4756{
d58770e7 4757 enum attr_type dep_insn_type;
a6f12d7c
RK
4758
4759 /* If the dependence is an anti-dependence, there is no cost. For an
4760 output dependence, there is sometimes a cost, but it doesn't seem
4761 worth handling those few cases. */
a6f12d7c 4762 if (REG_NOTE_KIND (link) != 0)
98791e3a 4763 return cost;
a6f12d7c 4764
26250081
RH
4765 /* If we can't recognize the insns, we can't really do anything. */
4766 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
4767 return cost;
4768
26250081
RH
4769 dep_insn_type = get_attr_type (dep_insn);
4770
bcbbac26 4771 /* Bring in the user-defined memory latency. */
71d9b493
RH
4772 if (dep_insn_type == TYPE_ILD
4773 || dep_insn_type == TYPE_FLD
4774 || dep_insn_type == TYPE_LDSYM)
bcbbac26
RH
4775 cost += alpha_memory_latency-1;
4776
98791e3a 4777 /* Everything else handled in DFA bypasses now. */
74835ed8 4778
a6f12d7c
RK
4779 return cost;
4780}
c237e94a 4781
98791e3a
RH
4782/* The number of instructions that can be issued per cycle. */
4783
c237e94a 4784static int
a5c24926 4785alpha_issue_rate (void)
c237e94a 4786{
8bea7f7c 4787 return (alpha_tune == PROCESSOR_EV4 ? 2 : 4);
c237e94a
ZW
4788}
4789
98791e3a
RH
4790/* How many alternative schedules to try. This should be as wide as the
4791 scheduling freedom in the DFA, but no wider. Making this value too
4792 large results in extra work for the scheduler.
4793
4794 For EV4, loads can be issued to either IB0 or IB1, thus we have 2
4795 alternative schedules. For EV5, we can choose between E0/E1 and
9a9f7594 4796 FA/FM. For EV6, an arithmetic insn can be issued to U0/U1/L0/L1. */
98791e3a
RH
4797
4798static int
a5c24926 4799alpha_multipass_dfa_lookahead (void)
98791e3a 4800{
8bea7f7c 4801 return (alpha_tune == PROCESSOR_EV6 ? 4 : 2);
98791e3a 4802}
9ecc37f0 4803\f
6f9b006d
RH
4804/* Machine-specific function data. */
4805
d1b38208 4806struct GTY(()) machine_function
6f9b006d 4807{
d6b4baa4 4808 /* For unicosmk. */
6f9b006d
RH
4809 /* List of call information words for calls from this function. */
4810 struct rtx_def *first_ciw;
4811 struct rtx_def *last_ciw;
4812 int ciw_count;
4813
4814 /* List of deferred case vectors. */
4815 struct rtx_def *addr_list;
e2500fed 4816
d6b4baa4 4817 /* For OSF. */
6f9b006d 4818 const char *some_ld_name;
229aa352
RH
4819
4820 /* For TARGET_LD_BUGGY_LDGP. */
4821 struct rtx_def *gp_save_rtx;
221cf9ab
OH
4822
4823 /* For VMS condition handlers. */
4824 bool uses_condition_handler;
6f9b006d
RH
4825};
4826
e2500fed 4827/* How to allocate a 'struct machine_function'. */
30102605 4828
e2500fed 4829static struct machine_function *
a5c24926 4830alpha_init_machine_status (void)
30102605 4831{
a9429e29 4832 return ggc_alloc_cleared_machine_function ();
30102605 4833}
30102605 4834
221cf9ab
OH
4835/* Support for frame based VMS condition handlers. */
4836
4837/* A VMS condition handler may be established for a function with a call to
4838 __builtin_establish_vms_condition_handler, and cancelled with a call to
4839 __builtin_revert_vms_condition_handler.
4840
4841 The VMS Condition Handling Facility knows about the existence of a handler
4842 from the procedure descriptor .handler field. As do the VMS native compilers,
4843 we store the user-specified handler's address at a fixed location in the
4844 stack frame and point the procedure descriptor at a common wrapper which
4845 fetches the real handler's address and issues an indirect call.
4846
4847 The indirection wrapper is "__gcc_shell_handler", provided by libgcc.
4848
4849 We force the procedure kind to PT_STACK, and the fixed frame location is
4850 fp+8, just before the register save area. We use the handler_data field in
4851 the procedure descriptor to state the fp offset at which the installed
4852 handler address can be found. */
4853
4854#define VMS_COND_HANDLER_FP_OFFSET 8
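/* A hypothetical use from user code (prototypes abbreviated; the
   argument and return values are really VMS condition-handler
   pointers):

     void *prev = __builtin_establish_vms_condition_handler (handler);
     ...
     __builtin_revert_vms_condition_handler ();
*/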
4855
4856/* Expand code to store the currently installed user VMS condition handler
4857 into TARGET and install HANDLER as the new condition handler. */
4858
4859void
4860alpha_expand_builtin_establish_vms_condition_handler (rtx target, rtx handler)
4861{
4862 rtx handler_slot_address
4863 = plus_constant (hard_frame_pointer_rtx, VMS_COND_HANDLER_FP_OFFSET);
4864
4865 rtx handler_slot
4866 = gen_rtx_MEM (DImode, handler_slot_address);
4867
4868 emit_move_insn (target, handler_slot);
4869 emit_move_insn (handler_slot, handler);
4870
4871 /* Notify the start/prologue/epilogue emitters that the condition handler
4872 slot is needed. In addition to reserving the slot space, this will force
4873 the procedure kind to PT_STACK, ensuring that the hard_frame_pointer_rtx
4874 use above is correct. */
4875 cfun->machine->uses_condition_handler = true;
4876}
4877
4878/* Expand code to store the current VMS condition handler into TARGET and
4879 nullify it. */
4880
4881void
4882alpha_expand_builtin_revert_vms_condition_handler (rtx target)
4883{
4884 /* We implement this by establishing a null condition handler, with the tiny
4885 side effect of setting uses_condition_handler. This is a little bit
4886 pessimistic if no actual builtin_establish call is ever issued, which is
4887 not a real problem and is expected never to happen anyway. */
4888
4889 alpha_expand_builtin_establish_vms_condition_handler (target, const0_rtx);
4890}
4891
9ecc37f0
RH
4892/* Functions to save and restore alpha_return_addr_rtx. */
4893
9ecc37f0
RH
4894/* Start the ball rolling with RETURN_ADDR_RTX. */
4895
4896rtx
a5c24926 4897alpha_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
9ecc37f0 4898{
9ecc37f0
RH
4899 if (count != 0)
4900 return const0_rtx;
4901
b91055dd 4902 return get_hard_reg_initial_val (Pmode, REG_RA);
9ecc37f0
RH
4903}
4904
229aa352 4905/* Return or create a memory slot containing the gp value for the current
ccb83cbc
RH
4906 function. Needed only if TARGET_LD_BUGGY_LDGP. */
4907
4908rtx
a5c24926 4909alpha_gp_save_rtx (void)
ccb83cbc 4910{
229aa352
RH
4911 rtx seq, m = cfun->machine->gp_save_rtx;
4912
4913 if (m == NULL)
4914 {
4915 start_sequence ();
4916
4917 m = assign_stack_local (DImode, UNITS_PER_WORD, BITS_PER_WORD);
4918 m = validize_mem (m);
4919 emit_move_insn (m, pic_offset_table_rtx);
4920
4921 seq = get_insns ();
4922 end_sequence ();
8deb1d31
EB
4923
4924 /* We used to simply emit the sequence after entry_of_function.
4925 However this breaks the CFG if the first instruction in the
4926 first block is not the NOTE_INSN_BASIC_BLOCK, for example a
4927 label. Emit the sequence properly on the edge. We are only
4928 invoked from dw2_build_landing_pads and finish_eh_generation
4929 will call commit_edge_insertions thanks to a kludge. */
4930 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
229aa352
RH
4931
4932 cfun->machine->gp_save_rtx = m;
4933 }
4934
4935 return m;
ccb83cbc
RH
4936}
4937
9ecc37f0 4938static int
a5c24926 4939alpha_ra_ever_killed (void)
9ecc37f0 4940{
6abc6f40
RH
4941 rtx top;
4942
b91055dd 4943 if (!has_hard_reg_initial_val (Pmode, REG_RA))
6fb5fa3c 4944 return (int)df_regs_ever_live_p (REG_RA);
9ecc37f0 4945
6abc6f40
RH
4946 push_topmost_sequence ();
4947 top = get_insns ();
4948 pop_topmost_sequence ();
4949
4950 return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL_RTX);
9ecc37f0
RH
4951}
4952
a6f12d7c 4953\f
be7560ea 4954/* Return the trap mode suffix applicable to the current
285a5742 4955 instruction, or NULL. */
a6f12d7c 4956
be7560ea 4957static const char *
a5c24926 4958get_trap_mode_suffix (void)
a6f12d7c 4959{
be7560ea 4960 enum attr_trap_suffix s = get_attr_trap_suffix (current_output_insn);
a6f12d7c 4961
be7560ea 4962 switch (s)
a6f12d7c 4963 {
be7560ea
RH
4964 case TRAP_SUFFIX_NONE:
4965 return NULL;
6245e3df 4966
be7560ea 4967 case TRAP_SUFFIX_SU:
981a828e 4968 if (alpha_fptm >= ALPHA_FPTM_SU)
be7560ea
RH
4969 return "su";
4970 return NULL;
6245e3df 4971
be7560ea
RH
4972 case TRAP_SUFFIX_SUI:
4973 if (alpha_fptm >= ALPHA_FPTM_SUI)
4974 return "sui";
4975 return NULL;
4976
4977 case TRAP_SUFFIX_V_SV:
e83015a9
RH
4978 switch (alpha_fptm)
4979 {
4980 case ALPHA_FPTM_N:
be7560ea 4981 return NULL;
e83015a9 4982 case ALPHA_FPTM_U:
be7560ea 4983 return "v";
e83015a9
RH
4984 case ALPHA_FPTM_SU:
4985 case ALPHA_FPTM_SUI:
be7560ea 4986 return "sv";
56daab84
NS
4987 default:
4988 gcc_unreachable ();
e83015a9 4989 }
e83015a9 4990
be7560ea 4991 case TRAP_SUFFIX_V_SV_SVI:
0022a940
DMT
4992 switch (alpha_fptm)
4993 {
4994 case ALPHA_FPTM_N:
be7560ea 4995 return NULL;
0022a940 4996 case ALPHA_FPTM_U:
be7560ea 4997 return "v";
0022a940 4998 case ALPHA_FPTM_SU:
be7560ea 4999 return "sv";
0022a940 5000 case ALPHA_FPTM_SUI:
be7560ea 5001 return "svi";
56daab84
NS
5002 default:
5003 gcc_unreachable ();
0022a940
DMT
5004 }
5005 break;
5006
be7560ea 5007 case TRAP_SUFFIX_U_SU_SUI:
6245e3df
RK
5008 switch (alpha_fptm)
5009 {
5010 case ALPHA_FPTM_N:
be7560ea 5011 return NULL;
6245e3df 5012 case ALPHA_FPTM_U:
be7560ea 5013 return "u";
6245e3df 5014 case ALPHA_FPTM_SU:
be7560ea 5015 return "su";
6245e3df 5016 case ALPHA_FPTM_SUI:
be7560ea 5017 return "sui";
56daab84
NS
5018 default:
5019 gcc_unreachable ();
6245e3df
RK
5020 }
5021 break;
56daab84
NS
5022
5023 default:
5024 gcc_unreachable ();
be7560ea 5025 }
56daab84 5026 gcc_unreachable ();
be7560ea 5027}
6245e3df 5028
be7560ea 5029/* Return the rounding mode suffix applicable to the current
285a5742 5030 instruction, or NULL. */
be7560ea
RH
5031
5032static const char *
a5c24926 5033get_round_mode_suffix (void)
be7560ea
RH
5034{
5035 enum attr_round_suffix s = get_attr_round_suffix (current_output_insn);
5036
5037 switch (s)
5038 {
5039 case ROUND_SUFFIX_NONE:
5040 return NULL;
5041 case ROUND_SUFFIX_NORMAL:
5042 switch (alpha_fprm)
6245e3df 5043 {
be7560ea
RH
5044 case ALPHA_FPRM_NORM:
5045 return NULL;
f676971a 5046 case ALPHA_FPRM_MINF:
be7560ea
RH
5047 return "m";
5048 case ALPHA_FPRM_CHOP:
5049 return "c";
5050 case ALPHA_FPRM_DYN:
5051 return "d";
56daab84
NS
5052 default:
5053 gcc_unreachable ();
6245e3df
RK
5054 }
5055 break;
5056
be7560ea
RH
5057 case ROUND_SUFFIX_C:
5058 return "c";
56daab84
NS
5059
5060 default:
5061 gcc_unreachable ();
be7560ea 5062 }
56daab84 5063 gcc_unreachable ();
be7560ea
RH
5064}
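/* For example, compiling with -mfp-rounding-mode=c makes
   get_round_mode_suffix return "c" for ROUND_SUFFIX_NORMAL
   instructions, so an addt is printed as addt/c (chopped, i.e.
   round toward zero).  */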
5065
6f9b006d
RH
5066/* Locate some local-dynamic symbol still in use by this function
5067 so that we can print its name in some movdi_er_tlsldm pattern. */
5068
a5c24926
RH
5069static int
5070get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
5071{
5072 rtx x = *px;
5073
5074 if (GET_CODE (x) == SYMBOL_REF
5075 && SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
5076 {
5077 cfun->machine->some_ld_name = XSTR (x, 0);
5078 return 1;
5079 }
5080
5081 return 0;
5082}
5083
6f9b006d 5084static const char *
a5c24926 5085get_some_local_dynamic_name (void)
6f9b006d
RH
5086{
5087 rtx insn;
5088
5089 if (cfun->machine->some_ld_name)
5090 return cfun->machine->some_ld_name;
5091
5092 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
5093 if (INSN_P (insn)
5094 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
5095 return cfun->machine->some_ld_name;
5096
56daab84 5097 gcc_unreachable ();
6f9b006d
RH
5098}
5099
be7560ea
RH
5100/* Print an operand. Recognize special options, documented below. */
5101
5102void
a5c24926 5103print_operand (FILE *file, rtx x, int code)
be7560ea
RH
5104{
5105 int i;
5106
5107 switch (code)
5108 {
5109 case '~':
5110 /* Print the assembler name of the current function. */
5111 assemble_name (file, alpha_fnname);
5112 break;
5113
6f9b006d
RH
5114 case '&':
5115 assemble_name (file, get_some_local_dynamic_name ());
5116 break;
5117
be7560ea
RH
5118 case '/':
5119 {
5120 const char *trap = get_trap_mode_suffix ();
5121 const char *round = get_round_mode_suffix ();
5122
5123 if (trap || round)
30102605
RH
5124 fprintf (file, (TARGET_AS_SLASH_BEFORE_SUFFIX ? "/%s%s" : "%s%s"),
5125 (trap ? trap : ""), (round ? round : ""));
be7560ea
RH
5126 break;
5127 }
5128
89cfc2c6
RK
5129 case ',':
5130 /* Generates single precision instruction suffix. */
be7560ea 5131 fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file);
89cfc2c6
RK
5132 break;
5133
5134 case '-':
5135 /* Generates double precision instruction suffix. */
be7560ea 5136 fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
89cfc2c6
RK
5137 break;
5138
1eb356b9
RH
5139 case '#':
5140 if (alpha_this_literal_sequence_number == 0)
5141 alpha_this_literal_sequence_number = alpha_next_sequence_number++;
      fprintf (file, "%d", alpha_this_literal_sequence_number);
      break;

    case '*':
      if (alpha_this_gpdisp_sequence_number == 0)
        alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++;
      fprintf (file, "%d", alpha_this_gpdisp_sequence_number);
      break;

    case 'H':
      if (GET_CODE (x) == HIGH)
        output_addr_const (file, XEXP (x, 0));
      else
        output_operand_lossage ("invalid %%H value");
      break;

    case 'J':
      {
        const char *lituse;

        if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD_CALL)
          {
            x = XVECEXP (x, 0, 0);
            lituse = "lituse_tlsgd";
          }
        else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM_CALL)
          {
            x = XVECEXP (x, 0, 0);
            lituse = "lituse_tlsldm";
          }
        else if (CONST_INT_P (x))
          lituse = "lituse_jsr";
        else
          {
            output_operand_lossage ("invalid %%J value");
            break;
          }

        if (x != const0_rtx)
          fprintf (file, "\t\t!%s!%d", lituse, (int) INTVAL (x));
      }
      break;

    case 'j':
      {
        const char *lituse;

#ifdef HAVE_AS_JSRDIRECT_RELOCS
        lituse = "lituse_jsrdirect";
#else
        lituse = "lituse_jsr";
#endif

        gcc_assert (INTVAL (x) != 0);
        fprintf (file, "\t\t!%s!%d", lituse, (int) INTVAL (x));
      }
      break;

    case 'r':
      /* If this operand is the constant zero, write it as "$31".  */
      if (REG_P (x))
        fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (x == CONST0_RTX (GET_MODE (x)))
        fprintf (file, "$31");
      else
        output_operand_lossage ("invalid %%r value");
      break;

    case 'R':
      /* Similar, but for floating-point.  */
      if (REG_P (x))
        fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (x == CONST0_RTX (GET_MODE (x)))
        fprintf (file, "$f31");
      else
        output_operand_lossage ("invalid %%R value");
      break;

    case 'N':
      /* Write the 1's complement of a constant.  */
      if (!CONST_INT_P (x))
        output_operand_lossage ("invalid %%N value");

      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
      break;

    case 'P':
      /* Write 1 << C, for a constant C.  */
      if (!CONST_INT_P (x))
        output_operand_lossage ("invalid %%P value");

      fprintf (file, HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) 1 << INTVAL (x));
      break;

    case 'h':
      /* Write the high-order 16 bits of a constant, sign-extended.  */
      if (!CONST_INT_P (x))
        output_operand_lossage ("invalid %%h value");

      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) >> 16);
      break;

    case 'L':
      /* Write the low-order 16 bits of a constant, sign-extended.  */
      if (!CONST_INT_P (x))
        output_operand_lossage ("invalid %%L value");

      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
               (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000));
      break;

    case 'm':
      /* Write mask for ZAP insn.  */
      if (GET_CODE (x) == CONST_DOUBLE)
        {
          HOST_WIDE_INT mask = 0;
          HOST_WIDE_INT value;

          value = CONST_DOUBLE_LOW (x);
          for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
               i++, value >>= 8)
            if (value & 0xff)
              mask |= (1 << i);

          value = CONST_DOUBLE_HIGH (x);
          for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
               i++, value >>= 8)
            if (value & 0xff)
              mask |= (1 << (i + sizeof (int)));

          fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask & 0xff);
        }

      else if (CONST_INT_P (x))
        {
          HOST_WIDE_INT mask = 0, value = INTVAL (x);

          for (i = 0; i < 8; i++, value >>= 8)
            if (value & 0xff)
              mask |= (1 << i);

          fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask);
        }
      else
        output_operand_lossage ("invalid %%m value");
      break;

    case 'M':
      /* 'b', 'w', 'l', or 'q' as the value of the constant.  */
      if (!CONST_INT_P (x)
          || (INTVAL (x) != 8 && INTVAL (x) != 16
              && INTVAL (x) != 32 && INTVAL (x) != 64))
        output_operand_lossage ("invalid %%M value");

      fprintf (file, "%s",
               (INTVAL (x) == 8 ? "b"
                : INTVAL (x) == 16 ? "w"
                : INTVAL (x) == 32 ? "l"
                : "q"));
      break;

    case 'U':
      /* Similar, except do it from the mask.  */
      if (CONST_INT_P (x))
        {
          HOST_WIDE_INT value = INTVAL (x);

          if (value == 0xff)
            {
              fputc ('b', file);
              break;
            }
          if (value == 0xffff)
            {
              fputc ('w', file);
              break;
            }
          if (value == 0xffffffff)
            {
              fputc ('l', file);
              break;
            }
          if (value == -1)
            {
              fputc ('q', file);
              break;
            }
        }
      else if (HOST_BITS_PER_WIDE_INT == 32
               && GET_CODE (x) == CONST_DOUBLE
               && CONST_DOUBLE_LOW (x) == 0xffffffff
               && CONST_DOUBLE_HIGH (x) == 0)
        {
          fputc ('l', file);
          break;
        }
      output_operand_lossage ("invalid %%U value");
      break;

    case 's':
      /* Write the constant value divided by 8 for little-endian mode or
         (56 - value) / 8 for big-endian mode.  */

      if (!CONST_INT_P (x)
          || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN
                                                     ? 56
                                                     : 64)
          || (INTVAL (x) & 7) != 0)
        output_operand_lossage ("invalid %%s value");

      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
               WORDS_BIG_ENDIAN
               ? (56 - INTVAL (x)) / 8
               : INTVAL (x) / 8);
      break;

    case 'S':
      /* Same, except compute (64 - c) / 8.  */

      if (!CONST_INT_P (x)
          || (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
          || (INTVAL (x) & 7) != 0)
        output_operand_lossage ("invalid %%S value");

      fprintf (file, HOST_WIDE_INT_PRINT_DEC, (64 - INTVAL (x)) / 8);
      break;

    case 't':
      {
        /* On Unicos/Mk systems: use a DEX expression if the symbol
           clashes with a register name.  */
        int dex = unicosmk_need_dex (x);
        if (dex)
          fprintf (file, "DEX(%d)", dex);
        else
          output_addr_const (file, x);
      }
      break;

    case 'C': case 'D': case 'c': case 'd':
      /* Write out comparison name.  */
      {
        enum rtx_code c = GET_CODE (x);

        if (!COMPARISON_P (x))
          output_operand_lossage ("invalid %%C value");

        else if (code == 'D')
          c = reverse_condition (c);
        else if (code == 'c')
          c = swap_condition (c);
        else if (code == 'd')
          c = swap_condition (reverse_condition (c));

        if (c == LEU)
          fprintf (file, "ule");
        else if (c == LTU)
          fprintf (file, "ult");
        else if (c == UNORDERED)
          fprintf (file, "un");
        else
          fprintf (file, "%s", GET_RTX_NAME (c));
      }
      break;

    case 'E':
      /* Write the divide or modulus operator.  */
      switch (GET_CODE (x))
        {
        case DIV:
          fprintf (file, "div%s", GET_MODE (x) == SImode ? "l" : "q");
          break;
        case UDIV:
          fprintf (file, "div%su", GET_MODE (x) == SImode ? "l" : "q");
          break;
        case MOD:
          fprintf (file, "rem%s", GET_MODE (x) == SImode ? "l" : "q");
          break;
        case UMOD:
          fprintf (file, "rem%su", GET_MODE (x) == SImode ? "l" : "q");
          break;
        default:
          output_operand_lossage ("invalid %%E value");
          break;
        }
      break;

    case 'A':
      /* Write "_u" for unaligned access.  */
      if (MEM_P (x) && GET_CODE (XEXP (x, 0)) == AND)
        fprintf (file, "_u");
      break;

    case 0:
      if (REG_P (x))
        fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (MEM_P (x))
        output_address (XEXP (x, 0));
      else if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
        {
          switch (XINT (XEXP (x, 0), 1))
            {
            case UNSPEC_DTPREL:
            case UNSPEC_TPREL:
              output_addr_const (file, XVECEXP (XEXP (x, 0), 0, 0));
              break;
            default:
              output_operand_lossage ("unknown relocation unspec");
              break;
            }
        }
      else
        output_addr_const (file, x);
      break;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
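
/* A minimal host-side sketch (not part of alpha.c, not compiled) of the
   byte-mask derivation the '%m' case above performs for the ZAP insn:
   bit I of the mask is set iff byte I of the 64-bit constant is nonzero.
   The `uint64_t' spelling is an assumption for illustration only.  */
#if 0
#include <stdint.h>

static unsigned
zap_byte_mask_sketch (uint64_t value)
{
  unsigned mask = 0;
  int i;
  for (i = 0; i < 8; i++, value >>= 8)
    if (value & 0xff)
      mask |= 1u << i;
  return mask;   /* e.g. zap_byte_mask_sketch (0x00ff00000000a000) == 0x42 */
}
#endif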

void
print_operand_address (FILE *file, rtx addr)
{
  int basereg = 31;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (addr) == AND)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    {
      offset = INTVAL (XEXP (addr, 1));
      addr = XEXP (addr, 0);
    }

  if (GET_CODE (addr) == LO_SUM)
    {
      const char *reloc16, *reloclo;
      rtx op1 = XEXP (addr, 1);

      if (GET_CODE (op1) == CONST && GET_CODE (XEXP (op1, 0)) == UNSPEC)
        {
          op1 = XEXP (op1, 0);
          switch (XINT (op1, 1))
            {
            case UNSPEC_DTPREL:
              reloc16 = NULL;
              reloclo = (alpha_tls_size == 16 ? "dtprel" : "dtprello");
              break;
            case UNSPEC_TPREL:
              reloc16 = NULL;
              reloclo = (alpha_tls_size == 16 ? "tprel" : "tprello");
              break;
            default:
              output_operand_lossage ("unknown relocation unspec");
              return;
            }

          output_addr_const (file, XVECEXP (op1, 0, 0));
        }
      else
        {
          reloc16 = "gprel";
          reloclo = "gprellow";
          output_addr_const (file, op1);
        }

      if (offset)
        fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);

      addr = XEXP (addr, 0);
      switch (GET_CODE (addr))
        {
        case REG:
          basereg = REGNO (addr);
          break;

        case SUBREG:
          basereg = subreg_regno (addr);
          break;

        default:
          gcc_unreachable ();
        }

      fprintf (file, "($%d)\t\t!%s", basereg,
               (basereg == 29 ? reloc16 : reloclo));
      return;
    }

  switch (GET_CODE (addr))
    {
    case REG:
      basereg = REGNO (addr);
      break;

    case SUBREG:
      basereg = subreg_regno (addr);
      break;

    case CONST_INT:
      offset = INTVAL (addr);
      break;

#if TARGET_ABI_OPEN_VMS
    case SYMBOL_REF:
      fprintf (file, "%s", XSTR (addr, 0));
      return;

    case CONST:
      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (addr, 0), 0)) == SYMBOL_REF);
      fprintf (file, "%s+" HOST_WIDE_INT_PRINT_DEC,
               XSTR (XEXP (XEXP (addr, 0), 0), 0),
               INTVAL (XEXP (XEXP (addr, 0), 1)));
      return;

#endif
    default:
      gcc_unreachable ();
    }

  fprintf (file, HOST_WIDE_INT_PRINT_DEC "($%d)", offset, basereg);
}

/* Emit RTL insns to initialize the variable parts of a trampoline at
   M_TRAMP.  FNDECL is target function's decl.  CHAIN_VALUE is an rtx
   for the static chain value for the function.  */

static void
alpha_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx fnaddr, mem, word1, word2;

  fnaddr = XEXP (DECL_RTL (fndecl), 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  fnaddr = convert_memory_address (Pmode, fnaddr);
  chain_value = convert_memory_address (Pmode, chain_value);
#endif

  if (TARGET_ABI_OPEN_VMS)
    {
      const char *fnname;
      char *trname;

      /* Construct the name of the trampoline entry point.  */
      fnname = XSTR (fnaddr, 0);
      trname = (char *) alloca (strlen (fnname) + 5);
      strcpy (trname, fnname);
      strcat (trname, "..tr");
      fnname = ggc_alloc_string (trname, strlen (trname) + 1);
      word2 = gen_rtx_SYMBOL_REF (Pmode, fnname);

      /* Trampoline (or "bounded") procedure descriptor is constructed from
         the function's procedure descriptor with certain fields zeroed IAW
         the VMS calling standard.  This is stored in the first quadword.  */
      word1 = force_reg (DImode, gen_const_mem (DImode, fnaddr));
      word1 = expand_and (DImode, word1, GEN_INT (0xffff0fff0000fff0), NULL);
    }
  else
    {
      /* These 4 instructions are:
            ldq $1,24($27)
            ldq $27,16($27)
            jmp $31,($27),0
            nop
         We don't bother setting the HINT field of the jump; the nop
         is merely there for padding.  */
      word1 = GEN_INT (0xa77b0010a43b0018);
      word2 = GEN_INT (0x47ff041f6bfb0000);
    }

  /* Store the first two words, as computed above.  */
  mem = adjust_address (m_tramp, DImode, 0);
  emit_move_insn (mem, word1);
  mem = adjust_address (m_tramp, DImode, 8);
  emit_move_insn (mem, word2);

  /* Store function address and static chain value.  */
  mem = adjust_address (m_tramp, Pmode, 16);
  emit_move_insn (mem, fnaddr);
  mem = adjust_address (m_tramp, Pmode, 24);
  emit_move_insn (mem, chain_value);

  if (!TARGET_ABI_OPEN_VMS)
    {
      emit_insn (gen_imb ());
#ifdef ENABLE_EXECUTE_STACK
      emit_library_call (init_one_libfunc ("__enable_execute_stack"),
                         LCT_NORMAL, VOIDmode, 1, XEXP (m_tramp, 0), Pmode);
#endif
    }
}
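
/* For reference, a sketch (assumed struct and field names, not compiled
   into GCC) of the 32-byte OSF/1 trampoline laid out above; the two
   instruction quadwords are the constants word1 and word2.  */
#if 0
struct alpha_osf_trampoline_sketch
{
  unsigned long word1;       /* ldq $1,24($27); ldq $27,16($27) */
  unsigned long word2;       /* jmp $31,($27),0; nop */
  void *fnaddr;              /* offset 16: target entry, loaded into $27 */
  void *chain_value;         /* offset 24: static chain, loaded into $1 */
};
#endif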

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On Alpha the first 6 words of args are normally in registers
   and the rest are pushed.  */

static rtx
alpha_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                    const_tree type, bool named ATTRIBUTE_UNUSED)
{
  int basereg;
  int num_args;

  /* Don't get confused and pass small structures in FP registers.  */
  if (type && AGGREGATE_TYPE_P (type))
    basereg = 16;
  else
    {
#ifdef ENABLE_CHECKING
      /* With alpha_split_complex_arg, we shouldn't see any raw complex
         values here.  */
      gcc_assert (!COMPLEX_MODE_P (mode));
#endif

      /* Set up defaults for FP operands passed in FP registers, and
         integral operands passed in integer registers.  */
      if (TARGET_FPREGS && GET_MODE_CLASS (mode) == MODE_FLOAT)
        basereg = 32 + 16;
      else
        basereg = 16;
    }

  /* ??? Irritatingly, the definition of CUMULATIVE_ARGS is different for
     the two platforms, so we can't avoid conditional compilation.  */
#if TARGET_ABI_OPEN_VMS
  {
    if (mode == VOIDmode)
      return alpha_arg_info_reg_val (*cum);

    num_args = cum->num_args;
    if (num_args >= 6
        || targetm.calls.must_pass_in_stack (mode, type))
      return NULL_RTX;
  }
#elif TARGET_ABI_OSF
  {
    if (*cum >= 6)
      return NULL_RTX;
    num_args = *cum;

    /* VOID is passed as a special flag for "last argument".  */
    if (type == void_type_node)
      basereg = 16;
    else if (targetm.calls.must_pass_in_stack (mode, type))
      return NULL_RTX;
  }
#else
#error Unhandled ABI
#endif

  return gen_rtx_REG (mode, num_args + basereg);
}

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

static void
alpha_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                            const_tree type, bool named ATTRIBUTE_UNUSED)
{
  bool onstack = targetm.calls.must_pass_in_stack (mode, type);
  int increment = onstack ? 6 : ALPHA_ARG_SIZE (mode, type, named);

#if TARGET_ABI_OSF
  *cum += increment;
#else
  if (!onstack && cum->num_args < 6)
    cum->atypes[cum->num_args] = alpha_arg_type (mode);
  cum->num_args += increment;
#endif
}

static int
alpha_arg_partial_bytes (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
                         enum machine_mode mode ATTRIBUTE_UNUSED,
                         tree type ATTRIBUTE_UNUSED,
                         bool named ATTRIBUTE_UNUSED)
{
  int words = 0;

#if TARGET_ABI_OPEN_VMS
  if (cum->num_args < 6
      && 6 < cum->num_args + ALPHA_ARG_SIZE (mode, type, named))
    words = 6 - cum->num_args;
#elif TARGET_ABI_UNICOSMK
  /* Never any split arguments.  */
#elif TARGET_ABI_OSF
  if (*cum < 6 && 6 < *cum + ALPHA_ARG_SIZE (mode, type, named))
    words = 6 - *cum;
#else
#error Unhandled ABI
#endif

  return words * UNITS_PER_WORD;
}
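
/* A host-side sketch (OSF ABI assumed; not GCC code) of the rule
   alpha_arg_partial_bytes implements: an argument that straddles the
   sixth register word is split, and the in-register part is reported
   in bytes.  */
#if 0
static int
osf_partial_bytes_sketch (int words_used, int arg_words)
{
  if (words_used < 6 && 6 < words_used + arg_words)
    return (6 - words_used) * 8;   /* e.g. (5, 2) => 8 bytes in a register */
  return 0;                        /* wholly in registers or wholly on stack */
}
#endif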

/* Return true if TYPE must be returned in memory, instead of in registers.  */

static bool
alpha_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
{
  enum machine_mode mode = VOIDmode;
  int size;

  if (type)
    {
      mode = TYPE_MODE (type);

      /* All aggregates are returned in memory, except on OpenVMS where
         records that fit 64 bits should be returned by immediate value
         as required by section 3.8.7.1 of the OpenVMS Calling Standard.  */
      if (TARGET_ABI_OPEN_VMS
          && TREE_CODE (type) != ARRAY_TYPE
          && (unsigned HOST_WIDE_INT) int_size_in_bytes (type) <= 8)
        return false;

      if (AGGREGATE_TYPE_P (type))
        return true;
    }

  size = GET_MODE_SIZE (mode);
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_FLOAT:
      /* Pass all float vectors in memory, like an aggregate.  */
      return true;

    case MODE_COMPLEX_FLOAT:
      /* We judge complex floats on the size of their element,
         not the size of the whole type.  */
      size = GET_MODE_UNIT_SIZE (mode);
      break;

    case MODE_INT:
    case MODE_FLOAT:
    case MODE_COMPLEX_INT:
    case MODE_VECTOR_INT:
      break;

    default:
      /* ??? We get called on all sorts of random stuff from
         aggregate_value_p.  We must return something, but it's not
         clear what's safe to return.  Pretend it's a struct I
         guess.  */
      return true;
    }

  /* Otherwise types must fit in one register.  */
  return size > UNITS_PER_WORD;
}

/* Return true if TYPE should be passed by invisible reference.  */

static bool
alpha_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                         enum machine_mode mode,
                         const_tree type ATTRIBUTE_UNUSED,
                         bool named ATTRIBUTE_UNUSED)
{
  return mode == TFmode || mode == TCmode;
}

/* Define how to find the value returned by a function.  VALTYPE is the
   data type of the value (as a tree).  If the precise function being
   called is known, FUNC is its FUNCTION_DECL; otherwise, FUNC is 0.
   MODE is set instead of VALTYPE for libcalls.

   On Alpha the value is found in $0 for integer functions and
   $f0 for floating-point functions.  */

rtx
function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
                enum machine_mode mode)
{
  unsigned int regnum, dummy ATTRIBUTE_UNUSED;
  enum mode_class mclass;

  gcc_assert (!valtype || !alpha_return_in_memory (valtype, func));

  if (valtype)
    mode = TYPE_MODE (valtype);

  mclass = GET_MODE_CLASS (mode);
  switch (mclass)
    {
    case MODE_INT:
      /* Do the same thing as PROMOTE_MODE except for libcalls on VMS,
         where we have them returning both SImode and DImode.  */
      if (!(TARGET_ABI_OPEN_VMS && valtype && AGGREGATE_TYPE_P (valtype)))
        PROMOTE_MODE (mode, dummy, valtype);
      /* FALLTHRU */

    case MODE_COMPLEX_INT:
    case MODE_VECTOR_INT:
      regnum = 0;
      break;

    case MODE_FLOAT:
      regnum = 32;
      break;

    case MODE_COMPLEX_FLOAT:
      {
        enum machine_mode cmode = GET_MODE_INNER (mode);

        return gen_rtx_PARALLEL
          (VOIDmode,
           gen_rtvec (2,
                      gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (cmode, 32),
                                         const0_rtx),
                      gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (cmode, 33),
                                         GEN_INT (GET_MODE_SIZE (cmode)))));
      }

    case MODE_RANDOM:
      /* We should only reach here for BLKmode on VMS.  */
      gcc_assert (TARGET_ABI_OPEN_VMS && mode == BLKmode);
      regnum = 0;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_REG (mode, regnum);
}

/* TCmode complex values are passed by invisible reference.  We
   should not split these values.  */

static bool
alpha_split_complex_arg (const_tree type)
{
  return TYPE_MODE (type) != TCmode;
}

static tree
alpha_build_builtin_va_list (void)
{
  tree base, ofs, space, record, type_decl;

  if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
    return ptr_type_node;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (BUILTINS_LOCATION,
                          TYPE_DECL, get_identifier ("__va_list_tag"), record);
  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;

  /* C++?  SET_IS_AGGR_TYPE (record, 1);  */

  /* Dummy field to prevent alignment warnings.  */
  space = build_decl (BUILTINS_LOCATION,
                      FIELD_DECL, NULL_TREE, integer_type_node);
  DECL_FIELD_CONTEXT (space) = record;
  DECL_ARTIFICIAL (space) = 1;
  DECL_IGNORED_P (space) = 1;

  ofs = build_decl (BUILTINS_LOCATION,
                    FIELD_DECL, get_identifier ("__offset"),
                    integer_type_node);
  DECL_FIELD_CONTEXT (ofs) = record;
  DECL_CHAIN (ofs) = space;
  /* ??? This is a hack, __offset is marked volatile to prevent
     DCE that confuses stdarg optimization and results in
     gcc.c-torture/execute/stdarg-1.c failure.  See PR 41089.  */
  TREE_THIS_VOLATILE (ofs) = 1;

  base = build_decl (BUILTINS_LOCATION,
                     FIELD_DECL, get_identifier ("__base"),
                     ptr_type_node);
  DECL_FIELD_CONTEXT (base) = record;
  DECL_CHAIN (base) = ofs;

  TYPE_FIELDS (record) = base;
  layout_type (record);

  va_list_gpr_counter_field = ofs;
  return record;
}
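
/* The record built above corresponds roughly to this C declaration
   (a sketch for exposition; the field order matches TYPE_FIELDS and the
   tag name is the one passed to get_identifier, while `__pad' stands in
   for the unnamed dummy field).  */
#if 0
typedef struct __va_list_tag
{
  char *__base;    /* start of the argument save area */
  int __offset;    /* bytes consumed so far, compared against 6*8 below */
  int __pad;       /* unnamed dummy field in the real record */
} alpha_va_list_sketch;
#endif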

#if TARGET_ABI_OSF
/* Helper function for alpha_stdarg_optimize_hook.  Skip over casts
   and constant additions.  */

static gimple
va_list_skip_additions (tree lhs)
{
  gimple stmt;

  for (;;)
    {
      enum tree_code code;

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (gimple_code (stmt) == GIMPLE_PHI)
        return stmt;

      if (!is_gimple_assign (stmt)
          || gimple_assign_lhs (stmt) != lhs)
        return NULL;

      if (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME)
        return stmt;
      code = gimple_assign_rhs_code (stmt);
      if (!CONVERT_EXPR_CODE_P (code)
          && ((code != PLUS_EXPR && code != POINTER_PLUS_EXPR)
              || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST
              || !host_integerp (gimple_assign_rhs2 (stmt), 1)))
        return stmt;

      lhs = gimple_assign_rhs1 (stmt);
    }
}

/* Check if LHS = RHS statement is
   LHS = *(ap.__base + ap.__offset + cst)
   or
   LHS = *(ap.__base
           + ((ap.__offset + cst <= 47)
              ? ap.__offset + cst - 48 : ap.__offset + cst) + cst2).
   If the former, indicate that GPR registers are needed,
   if the latter, indicate that FPR registers are needed.

   Also look for LHS = (*ptr).field, where ptr is one of the forms
   listed above.

   On alpha, cfun->va_list_gpr_size is used as size of the needed
   regs and cfun->va_list_fpr_size is a bitmask, bit 0 set if GPR
   registers are needed and bit 1 set if FPR registers are needed.
   Return true if va_list references should not be scanned for the
   current statement.  */

static bool
alpha_stdarg_optimize_hook (struct stdarg_info *si, const_gimple stmt)
{
  tree base, offset, rhs;
  int offset_arg = 1;
  gimple base_stmt;

  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
      != GIMPLE_SINGLE_RHS)
    return false;

  rhs = gimple_assign_rhs1 (stmt);
  while (handled_component_p (rhs))
    rhs = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rhs) != MEM_REF
      || TREE_CODE (TREE_OPERAND (rhs, 0)) != SSA_NAME)
    return false;

  stmt = va_list_skip_additions (TREE_OPERAND (rhs, 0));
  if (stmt == NULL
      || !is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != POINTER_PLUS_EXPR)
    return false;

  base = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (base) == SSA_NAME)
    {
      base_stmt = va_list_skip_additions (base);
      if (base_stmt
          && is_gimple_assign (base_stmt)
          && gimple_assign_rhs_code (base_stmt) == COMPONENT_REF)
        base = gimple_assign_rhs1 (base_stmt);
    }

  if (TREE_CODE (base) != COMPONENT_REF
      || TREE_OPERAND (base, 1) != TYPE_FIELDS (va_list_type_node))
    {
      base = gimple_assign_rhs2 (stmt);
      if (TREE_CODE (base) == SSA_NAME)
        {
          base_stmt = va_list_skip_additions (base);
          if (base_stmt
              && is_gimple_assign (base_stmt)
              && gimple_assign_rhs_code (base_stmt) == COMPONENT_REF)
            base = gimple_assign_rhs1 (base_stmt);
        }

      if (TREE_CODE (base) != COMPONENT_REF
          || TREE_OPERAND (base, 1) != TYPE_FIELDS (va_list_type_node))
        return false;

      offset_arg = 0;
    }

  base = get_base_address (base);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
    return false;

  offset = gimple_op (stmt, 1 + offset_arg);
  if (TREE_CODE (offset) == SSA_NAME)
    {
      gimple offset_stmt = va_list_skip_additions (offset);

      if (offset_stmt
          && gimple_code (offset_stmt) == GIMPLE_PHI)
        {
          HOST_WIDE_INT sub;
          gimple arg1_stmt, arg2_stmt;
          tree arg1, arg2;
          enum tree_code code1, code2;

          if (gimple_phi_num_args (offset_stmt) != 2)
            goto escapes;

          arg1_stmt
            = va_list_skip_additions (gimple_phi_arg_def (offset_stmt, 0));
          arg2_stmt
            = va_list_skip_additions (gimple_phi_arg_def (offset_stmt, 1));
          if (arg1_stmt == NULL
              || !is_gimple_assign (arg1_stmt)
              || arg2_stmt == NULL
              || !is_gimple_assign (arg2_stmt))
            goto escapes;

          code1 = gimple_assign_rhs_code (arg1_stmt);
          code2 = gimple_assign_rhs_code (arg2_stmt);
          if (code1 == COMPONENT_REF
              && (code2 == MINUS_EXPR || code2 == PLUS_EXPR))
            /* Do nothing.  */;
          else if (code2 == COMPONENT_REF
                   && (code1 == MINUS_EXPR || code1 == PLUS_EXPR))
            {
              gimple tem = arg1_stmt;
              code2 = code1;
              arg1_stmt = arg2_stmt;
              arg2_stmt = tem;
            }
          else
            goto escapes;

          if (!host_integerp (gimple_assign_rhs2 (arg2_stmt), 0))
            goto escapes;

          sub = tree_low_cst (gimple_assign_rhs2 (arg2_stmt), 0);
          if (code2 == MINUS_EXPR)
            sub = -sub;
          if (sub < -48 || sub > -32)
            goto escapes;

          arg1 = gimple_assign_rhs1 (arg1_stmt);
          arg2 = gimple_assign_rhs1 (arg2_stmt);
          if (TREE_CODE (arg2) == SSA_NAME)
            {
              arg2_stmt = va_list_skip_additions (arg2);
              if (arg2_stmt == NULL
                  || !is_gimple_assign (arg2_stmt)
                  || gimple_assign_rhs_code (arg2_stmt) != COMPONENT_REF)
                goto escapes;
              arg2 = gimple_assign_rhs1 (arg2_stmt);
            }
          if (arg1 != arg2)
            goto escapes;

          if (TREE_CODE (arg1) != COMPONENT_REF
              || TREE_OPERAND (arg1, 1) != va_list_gpr_counter_field
              || get_base_address (arg1) != base)
            goto escapes;

          /* Need floating point regs.  */
          cfun->va_list_fpr_size |= 2;
          return false;
        }
      if (offset_stmt
          && is_gimple_assign (offset_stmt)
          && gimple_assign_rhs_code (offset_stmt) == COMPONENT_REF)
        offset = gimple_assign_rhs1 (offset_stmt);
    }
  if (TREE_CODE (offset) != COMPONENT_REF
      || TREE_OPERAND (offset, 1) != va_list_gpr_counter_field
      || get_base_address (offset) != base)
    goto escapes;
  else
    /* Need general regs.  */
    cfun->va_list_fpr_size |= 1;
  return false;

escapes:
  si->va_list_escapes = true;
  return false;
}
#endif
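
/* A source-level sketch (not compiled here) of code the hook above
   analyzes: after gimplification, each va_arg of an integral type reads
   *(ap.__base + ap.__offset), so for this function only GPRs are marked
   as needed (bit 0 of cfun->va_list_fpr_size).  */
#if 0
long
sum_longs_sketch (int n, ...)
{
  __builtin_va_list ap;
  long s = 0;
  int i;
  __builtin_va_start (ap, n);
  for (i = 0; i < n; i++)
    s += __builtin_va_arg (ap, long);
  __builtin_va_end (ap);
  return s;
}
#endif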

/* Perform any actions needed for a function that is receiving a
   variable number of arguments.  */

static void
alpha_setup_incoming_varargs (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
                              tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS cum = *pcum;

  /* Skip the current argument.  */
  targetm.calls.function_arg_advance (&cum, mode, type, true);

#if TARGET_ABI_UNICOSMK
  /* On Unicos/Mk, the standard subroutine __T3E_MISMATCH stores all register
     arguments on the stack.  Unfortunately, it doesn't always store the first
     one (i.e. the one that arrives in $16 or $f16).  This is not a problem
     with stdargs as we always have at least one named argument there.  */
  if (cum.num_reg_words < 6)
    {
      if (!no_rtl)
        {
          emit_insn (gen_umk_mismatch_args (GEN_INT (cum.num_reg_words)));
          emit_insn (gen_arg_home_umk ());
        }
      *pretend_size = 0;
    }
#elif TARGET_ABI_OPEN_VMS
  /* For VMS, we allocate space for all 6 arg registers plus a count.

     However, if NO registers need to be saved, don't allocate any space.
     This is not only because we won't need the space, but because AP
     includes the current_pretend_args_size and we don't want to mess up
     any ap-relative addresses already made.  */
  if (cum.num_args < 6)
    {
      if (!no_rtl)
        {
          emit_move_insn (gen_rtx_REG (DImode, 1), virtual_incoming_args_rtx);
          emit_insn (gen_arg_home ());
        }
      *pretend_size = 7 * UNITS_PER_WORD;
    }
#else
  /* On OSF/1 and friends, we allocate space for all 12 arg registers, but
     only push those that are remaining.  However, if NO registers need to
     be saved, don't allocate any space.  This is not only because we won't
     need the space, but because AP includes the current_pretend_args_size
     and we don't want to mess up any ap-relative addresses already made.

     If we are not to use the floating-point registers, save the integer
     registers where we would put the floating-point registers.  This is
     not the most efficient way to implement varargs with just one register
     class, but it isn't worth doing anything more efficient in this rare
     case.  */
  if (cum >= 6)
    return;

  if (!no_rtl)
    {
      int count;
      alias_set_type set = get_varargs_alias_set ();
      rtx tmp;

      count = cfun->va_list_gpr_size / UNITS_PER_WORD;
      if (count > 6 - cum)
        count = 6 - cum;

      /* Detect whether integer registers or floating-point registers
         are needed by the detected va_arg statements.  See above for
         how these values are computed.  Note that the "escape" value
         is VA_LIST_MAX_FPR_SIZE, which is 255, which has both of
         these bits set.  */
      gcc_assert ((VA_LIST_MAX_FPR_SIZE & 3) == 3);

      if (cfun->va_list_fpr_size & 1)
        {
          tmp = gen_rtx_MEM (BLKmode,
                             plus_constant (virtual_incoming_args_rtx,
                                            (cum + 6) * UNITS_PER_WORD));
          MEM_NOTRAP_P (tmp) = 1;
          set_mem_alias_set (tmp, set);
          move_block_from_reg (16 + cum, tmp, count);
        }

      if (cfun->va_list_fpr_size & 2)
        {
          tmp = gen_rtx_MEM (BLKmode,
                             plus_constant (virtual_incoming_args_rtx,
                                            cum * UNITS_PER_WORD));
          MEM_NOTRAP_P (tmp) = 1;
          set_mem_alias_set (tmp, set);
          move_block_from_reg (16 + cum + TARGET_FPREGS*32, tmp, count);
        }
    }
  *pretend_size = 12 * UNITS_PER_WORD;
#endif
}

static void
alpha_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT offset;
  tree t, offset_field, base_field;

  if (TREE_CODE (TREE_TYPE (valist)) == ERROR_MARK)
    return;

  if (TARGET_ABI_UNICOSMK)
    std_expand_builtin_va_start (valist, nextarg);

  /* For Unix, TARGET_SETUP_INCOMING_VARARGS moves the starting address base
     up by 48, storing fp arg registers in the first 48 bytes, and the
     integer arg registers in the next 48 bytes.  This is only done,
     however, if any integer registers need to be stored.

     If no integer registers need be stored, then we must subtract 48
     in order to account for the integer arg registers which are counted
     in argsize above, but which are not actually stored on the stack.
     Must further be careful here about structures straddling the last
     integer argument register; that futzes with pretend_args_size,
     which changes the meaning of AP.  */

  if (NUM_ARGS < 6)
    offset = TARGET_ABI_OPEN_VMS ? UNITS_PER_WORD : 6 * UNITS_PER_WORD;
  else
    offset = -6 * UNITS_PER_WORD + crtl->args.pretend_args_size;

  if (TARGET_ABI_OPEN_VMS)
    {
      t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
      t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
                  size_int (offset + NUM_ARGS * UNITS_PER_WORD));
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      base_field = TYPE_FIELDS (TREE_TYPE (valist));
      offset_field = DECL_CHAIN (base_field);

      base_field = build3 (COMPONENT_REF, TREE_TYPE (base_field),
                           valist, base_field, NULL_TREE);
      offset_field = build3 (COMPONENT_REF, TREE_TYPE (offset_field),
                             valist, offset_field, NULL_TREE);

      t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
      t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
                  size_int (offset));
      t = build2 (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      t = build_int_cst (NULL_TREE, NUM_ARGS * UNITS_PER_WORD);
      t = build2 (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
}

static tree
alpha_gimplify_va_arg_1 (tree type, tree base, tree offset,
                         gimple_seq *pre_p)
{
  tree type_size, ptr_type, addend, t, addr;
  gimple_seq internal_post;

  /* If the type could not be passed in registers, skip the block
     reserved for the registers.  */
  if (targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
    {
      t = build_int_cst (TREE_TYPE (offset), 6*8);
      gimplify_assign (offset,
                       build2 (MAX_EXPR, TREE_TYPE (offset), offset, t),
                       pre_p);
    }

  addend = offset;
  ptr_type = build_pointer_type_for_mode (type, ptr_mode, true);

  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree real_part, imag_part, real_temp;

      real_part = alpha_gimplify_va_arg_1 (TREE_TYPE (type), base,
                                           offset, pre_p);

      /* Copy the value into a new temporary, lest the formal temporary
         be reused out from under us.  */
      real_temp = get_initialized_tmp_var (real_part, pre_p, NULL);

      imag_part = alpha_gimplify_va_arg_1 (TREE_TYPE (type), base,
                                           offset, pre_p);

      return build2 (COMPLEX_EXPR, type, real_temp, imag_part);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      tree fpaddend, cond, fourtyeight;

      fourtyeight = build_int_cst (TREE_TYPE (addend), 6*8);
      fpaddend = fold_build2 (MINUS_EXPR, TREE_TYPE (addend),
                              addend, fourtyeight);
      cond = fold_build2 (LT_EXPR, boolean_type_node, addend, fourtyeight);
      addend = fold_build3 (COND_EXPR, TREE_TYPE (addend), cond,
                            fpaddend, addend);
    }

  /* Build the final address and force that value into a temporary.  */
  addr = build2 (POINTER_PLUS_EXPR, ptr_type, fold_convert (ptr_type, base),
                 fold_convert (sizetype, addend));
  internal_post = NULL;
  gimplify_expr (&addr, pre_p, &internal_post, is_gimple_val, fb_rvalue);
  gimple_seq_add_seq (pre_p, internal_post);

  /* Update the offset field.  */
  type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
  if (type_size == NULL || TREE_OVERFLOW (type_size))
    t = size_zero_node;
  else
    {
      t = size_binop (PLUS_EXPR, type_size, size_int (7));
      t = size_binop (TRUNC_DIV_EXPR, t, size_int (8));
      t = size_binop (MULT_EXPR, t, size_int (8));
    }
  t = fold_convert (TREE_TYPE (offset), t);
  gimplify_assign (offset, build2 (PLUS_EXPR, TREE_TYPE (offset), offset, t),
                   pre_p);

  return build_va_arg_indirect_ref (addr);
}
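
/* A plain-C sketch (illustrative only, not GCC code) of the address
   computation gimplified above for a non-complex argument; the field
   names mirror the record built by alpha_build_builtin_va_list.  */
#if 0
static void *
alpha_va_arg_addr_sketch (char *base, long *offset, int is_float, long size)
{
  long addend = *offset;
  if (is_float && addend < 6*8)   /* FP args live 48 bytes below int args.  */
    addend -= 6*8;
  *offset += (size + 7) & ~7;     /* always advance in whole 8-byte words */
  return base + addend;
}
#endif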

static tree
alpha_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
                       gimple_seq *post_p)
{
  tree offset_field, base_field, offset, base, t, r;
  bool indirect;

  if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
    return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);

  base_field = TYPE_FIELDS (va_list_type_node);
  offset_field = DECL_CHAIN (base_field);
  base_field = build3 (COMPONENT_REF, TREE_TYPE (base_field),
                       valist, base_field, NULL_TREE);
  offset_field = build3 (COMPONENT_REF, TREE_TYPE (offset_field),
                         valist, offset_field, NULL_TREE);

  /* Pull the fields of the structure out into temporaries.  Since we never
     modify the base field, we can use a formal temporary.  Sign-extend the
     offset field so that it's the proper width for pointer arithmetic.  */
  base = get_formal_tmp_var (base_field, pre_p);

  t = fold_convert (lang_hooks.types.type_for_size (64, 0), offset_field);
  offset = get_initialized_tmp_var (t, pre_p, NULL);

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type_for_mode (type, ptr_mode, true);

  /* Find the value.  Note that this will be a stable indirection, or
     a composite of stable indirections in the case of complex.  */
  r = alpha_gimplify_va_arg_1 (type, base, offset, pre_p);

  /* Stuff the offset temporary back into its field.  */
  gimplify_assign (unshare_expr (offset_field),
                   fold_convert (TREE_TYPE (offset_field), offset), pre_p);

  if (indirect)
    r = build_va_arg_indirect_ref (r);

  return r;
}

/* Builtins.  */

enum alpha_builtin
{
  ALPHA_BUILTIN_CMPBGE,
  ALPHA_BUILTIN_EXTBL,
  ALPHA_BUILTIN_EXTWL,
  ALPHA_BUILTIN_EXTLL,
  ALPHA_BUILTIN_EXTQL,
  ALPHA_BUILTIN_EXTWH,
  ALPHA_BUILTIN_EXTLH,
  ALPHA_BUILTIN_EXTQH,
  ALPHA_BUILTIN_INSBL,
  ALPHA_BUILTIN_INSWL,
  ALPHA_BUILTIN_INSLL,
  ALPHA_BUILTIN_INSQL,
  ALPHA_BUILTIN_INSWH,
  ALPHA_BUILTIN_INSLH,
  ALPHA_BUILTIN_INSQH,
  ALPHA_BUILTIN_MSKBL,
  ALPHA_BUILTIN_MSKWL,
  ALPHA_BUILTIN_MSKLL,
  ALPHA_BUILTIN_MSKQL,
  ALPHA_BUILTIN_MSKWH,
  ALPHA_BUILTIN_MSKLH,
  ALPHA_BUILTIN_MSKQH,
  ALPHA_BUILTIN_UMULH,
  ALPHA_BUILTIN_ZAP,
  ALPHA_BUILTIN_ZAPNOT,
  ALPHA_BUILTIN_AMASK,
  ALPHA_BUILTIN_IMPLVER,
  ALPHA_BUILTIN_RPCC,
  ALPHA_BUILTIN_THREAD_POINTER,
  ALPHA_BUILTIN_SET_THREAD_POINTER,
  ALPHA_BUILTIN_ESTABLISH_VMS_CONDITION_HANDLER,
  ALPHA_BUILTIN_REVERT_VMS_CONDITION_HANDLER,

  /* TARGET_MAX */
  ALPHA_BUILTIN_MINUB8,
  ALPHA_BUILTIN_MINSB8,
  ALPHA_BUILTIN_MINUW4,
  ALPHA_BUILTIN_MINSW4,
  ALPHA_BUILTIN_MAXUB8,
  ALPHA_BUILTIN_MAXSB8,
  ALPHA_BUILTIN_MAXUW4,
  ALPHA_BUILTIN_MAXSW4,
  ALPHA_BUILTIN_PERR,
  ALPHA_BUILTIN_PKLB,
  ALPHA_BUILTIN_PKWB,
  ALPHA_BUILTIN_UNPKBL,
  ALPHA_BUILTIN_UNPKBW,

  /* TARGET_CIX */
  ALPHA_BUILTIN_CTTZ,
  ALPHA_BUILTIN_CTLZ,
  ALPHA_BUILTIN_CTPOP,

  ALPHA_BUILTIN_max
};

static enum insn_code const code_for_builtin[ALPHA_BUILTIN_max] = {
  CODE_FOR_builtin_cmpbge,
  CODE_FOR_builtin_extbl,
  CODE_FOR_builtin_extwl,
  CODE_FOR_builtin_extll,
  CODE_FOR_builtin_extql,
  CODE_FOR_builtin_extwh,
  CODE_FOR_builtin_extlh,
  CODE_FOR_builtin_extqh,
  CODE_FOR_builtin_insbl,
  CODE_FOR_builtin_inswl,
  CODE_FOR_builtin_insll,
  CODE_FOR_builtin_insql,
  CODE_FOR_builtin_inswh,
  CODE_FOR_builtin_inslh,
  CODE_FOR_builtin_insqh,
  CODE_FOR_builtin_mskbl,
  CODE_FOR_builtin_mskwl,
  CODE_FOR_builtin_mskll,
  CODE_FOR_builtin_mskql,
  CODE_FOR_builtin_mskwh,
  CODE_FOR_builtin_msklh,
  CODE_FOR_builtin_mskqh,
  CODE_FOR_umuldi3_highpart,
  CODE_FOR_builtin_zap,
  CODE_FOR_builtin_zapnot,
  CODE_FOR_builtin_amask,
  CODE_FOR_builtin_implver,
  CODE_FOR_builtin_rpcc,
  CODE_FOR_load_tp,
  CODE_FOR_set_tp,
  CODE_FOR_builtin_establish_vms_condition_handler,
  CODE_FOR_builtin_revert_vms_condition_handler,

  /* TARGET_MAX */
  CODE_FOR_builtin_minub8,
  CODE_FOR_builtin_minsb8,
  CODE_FOR_builtin_minuw4,
  CODE_FOR_builtin_minsw4,
  CODE_FOR_builtin_maxub8,
  CODE_FOR_builtin_maxsb8,
  CODE_FOR_builtin_maxuw4,
  CODE_FOR_builtin_maxsw4,
  CODE_FOR_builtin_perr,
  CODE_FOR_builtin_pklb,
  CODE_FOR_builtin_pkwb,
  CODE_FOR_builtin_unpkbl,
  CODE_FOR_builtin_unpkbw,

  /* TARGET_CIX */
  CODE_FOR_ctzdi2,
  CODE_FOR_clzdi2,
  CODE_FOR_popcountdi2
};

struct alpha_builtin_def
{
  const char *name;
  enum alpha_builtin code;
  unsigned int target_mask;
  bool is_const;
};

static struct alpha_builtin_def const zero_arg_builtins[] = {
  { "__builtin_alpha_implver",	ALPHA_BUILTIN_IMPLVER,	0, true },
  { "__builtin_alpha_rpcc",	ALPHA_BUILTIN_RPCC,	0, false }
};

static struct alpha_builtin_def const one_arg_builtins[] = {
  { "__builtin_alpha_amask",	ALPHA_BUILTIN_AMASK,	0, true },
  { "__builtin_alpha_pklb",	ALPHA_BUILTIN_PKLB,	MASK_MAX, true },
  { "__builtin_alpha_pkwb",	ALPHA_BUILTIN_PKWB,	MASK_MAX, true },
  { "__builtin_alpha_unpkbl",	ALPHA_BUILTIN_UNPKBL,	MASK_MAX, true },
  { "__builtin_alpha_unpkbw",	ALPHA_BUILTIN_UNPKBW,	MASK_MAX, true },
  { "__builtin_alpha_cttz",	ALPHA_BUILTIN_CTTZ,	MASK_CIX, true },
  { "__builtin_alpha_ctlz",	ALPHA_BUILTIN_CTLZ,	MASK_CIX, true },
  { "__builtin_alpha_ctpop",	ALPHA_BUILTIN_CTPOP,	MASK_CIX, true }
};

static struct alpha_builtin_def const two_arg_builtins[] = {
  { "__builtin_alpha_cmpbge",	ALPHA_BUILTIN_CMPBGE,	0, true },
  { "__builtin_alpha_extbl",	ALPHA_BUILTIN_EXTBL,	0, true },
  { "__builtin_alpha_extwl",	ALPHA_BUILTIN_EXTWL,	0, true },
  { "__builtin_alpha_extll",	ALPHA_BUILTIN_EXTLL,	0, true },
  { "__builtin_alpha_extql",	ALPHA_BUILTIN_EXTQL,	0, true },
  { "__builtin_alpha_extwh",	ALPHA_BUILTIN_EXTWH,	0, true },
  { "__builtin_alpha_extlh",	ALPHA_BUILTIN_EXTLH,	0, true },
  { "__builtin_alpha_extqh",	ALPHA_BUILTIN_EXTQH,	0, true },
  { "__builtin_alpha_insbl",	ALPHA_BUILTIN_INSBL,	0, true },
  { "__builtin_alpha_inswl",	ALPHA_BUILTIN_INSWL,	0, true },
  { "__builtin_alpha_insll",	ALPHA_BUILTIN_INSLL,	0, true },
  { "__builtin_alpha_insql",	ALPHA_BUILTIN_INSQL,	0, true },
  { "__builtin_alpha_inswh",	ALPHA_BUILTIN_INSWH,	0, true },
  { "__builtin_alpha_inslh",	ALPHA_BUILTIN_INSLH,	0, true },
  { "__builtin_alpha_insqh",	ALPHA_BUILTIN_INSQH,	0, true },
  { "__builtin_alpha_mskbl",	ALPHA_BUILTIN_MSKBL,	0, true },
  { "__builtin_alpha_mskwl",	ALPHA_BUILTIN_MSKWL,	0, true },
  { "__builtin_alpha_mskll",	ALPHA_BUILTIN_MSKLL,	0, true },
  { "__builtin_alpha_mskql",	ALPHA_BUILTIN_MSKQL,	0, true },
  { "__builtin_alpha_mskwh",	ALPHA_BUILTIN_MSKWH,	0, true },
  { "__builtin_alpha_msklh",	ALPHA_BUILTIN_MSKLH,	0, true },
  { "__builtin_alpha_mskqh",	ALPHA_BUILTIN_MSKQH,	0, true },
  { "__builtin_alpha_umulh",	ALPHA_BUILTIN_UMULH,	0, true },
  { "__builtin_alpha_zap",	ALPHA_BUILTIN_ZAP,	0, true },
  { "__builtin_alpha_zapnot",	ALPHA_BUILTIN_ZAPNOT,	0, true },
  { "__builtin_alpha_minub8",	ALPHA_BUILTIN_MINUB8,	MASK_MAX, true },
  { "__builtin_alpha_minsb8",	ALPHA_BUILTIN_MINSB8,	MASK_MAX, true },
  { "__builtin_alpha_minuw4",	ALPHA_BUILTIN_MINUW4,	MASK_MAX, true },
  { "__builtin_alpha_minsw4",	ALPHA_BUILTIN_MINSW4,	MASK_MAX, true },
  { "__builtin_alpha_maxub8",	ALPHA_BUILTIN_MAXUB8,	MASK_MAX, true },
  { "__builtin_alpha_maxsb8",	ALPHA_BUILTIN_MAXSB8,	MASK_MAX, true },
  { "__builtin_alpha_maxuw4",	ALPHA_BUILTIN_MAXUW4,	MASK_MAX, true },
  { "__builtin_alpha_maxsw4",	ALPHA_BUILTIN_MAXSW4,	MASK_MAX, true },
  { "__builtin_alpha_perr",	ALPHA_BUILTIN_PERR,	MASK_MAX, true }
};

static GTY(()) tree alpha_v8qi_u;
static GTY(()) tree alpha_v8qi_s;
static GTY(()) tree alpha_v4hi_u;
static GTY(()) tree alpha_v4hi_s;

static GTY(()) tree alpha_builtins[(int) ALPHA_BUILTIN_max];

/* Return the alpha builtin for CODE.  */

static tree
alpha_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= ALPHA_BUILTIN_max)
    return error_mark_node;
  return alpha_builtins[code];
}

/* Helper function of alpha_init_builtins.  Add the built-in specified
   by NAME, TYPE, CODE, and ECF.  */

static void
alpha_builtin_function (const char *name, tree ftype,
                        enum alpha_builtin code, unsigned ecf)
{
  tree decl = add_builtin_function (name, ftype, (int) code,
                                    BUILT_IN_MD, NULL, NULL_TREE);

  if (ecf & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (ecf & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;

  alpha_builtins [(int) code] = decl;
}

/* Helper function of alpha_init_builtins.  Add the COUNT built-in
   functions pointed to by P, with function type FTYPE.  */

static void
alpha_add_builtins (const struct alpha_builtin_def *p, size_t count,
                    tree ftype)
{
  size_t i;

  for (i = 0; i < count; ++i, ++p)
    if ((target_flags & p->target_mask) == p->target_mask)
      alpha_builtin_function (p->name, ftype, p->code,
                              (p->is_const ? ECF_CONST : 0) | ECF_NOTHROW);
}

static void
alpha_init_builtins (void)
{
  tree dimode_integer_type_node;
  tree ftype;

  dimode_integer_type_node = lang_hooks.types.type_for_mode (DImode, 0);

  /* Fwrite on VMS is non-standard.  */
#if TARGET_ABI_OPEN_VMS
  implicit_built_in_decls[(int) BUILT_IN_FWRITE] = NULL_TREE;
  implicit_built_in_decls[(int) BUILT_IN_FWRITE_UNLOCKED] = NULL_TREE;
#endif

  ftype = build_function_type (dimode_integer_type_node, void_list_node);
  alpha_add_builtins (zero_arg_builtins, ARRAY_SIZE (zero_arg_builtins),
                      ftype);

  ftype = build_function_type_list (dimode_integer_type_node,
                                    dimode_integer_type_node, NULL_TREE);
  alpha_add_builtins (one_arg_builtins, ARRAY_SIZE (one_arg_builtins),
                      ftype);

  ftype = build_function_type_list (dimode_integer_type_node,
                                    dimode_integer_type_node,
                                    dimode_integer_type_node, NULL_TREE);
  alpha_add_builtins (two_arg_builtins, ARRAY_SIZE (two_arg_builtins),
                      ftype);

  ftype = build_function_type (ptr_type_node, void_list_node);
  alpha_builtin_function ("__builtin_thread_pointer", ftype,
                          ALPHA_BUILTIN_THREAD_POINTER, ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  alpha_builtin_function ("__builtin_set_thread_pointer", ftype,
                          ALPHA_BUILTIN_SET_THREAD_POINTER, ECF_NOTHROW);

  if (TARGET_ABI_OPEN_VMS)
    {
      ftype = build_function_type_list (ptr_type_node, ptr_type_node,
                                        NULL_TREE);
      alpha_builtin_function ("__builtin_establish_vms_condition_handler",
                              ftype,
                              ALPHA_BUILTIN_ESTABLISH_VMS_CONDITION_HANDLER,
                              0);

      ftype = build_function_type_list (ptr_type_node, void_type_node,
                                        NULL_TREE);
      alpha_builtin_function ("__builtin_revert_vms_condition_handler", ftype,
                              ALPHA_BUILTIN_REVERT_VMS_CONDITION_HANDLER, 0);
    }

  alpha_v8qi_u = build_vector_type (unsigned_intQI_type_node, 8);
  alpha_v8qi_s = build_vector_type (intQI_type_node, 8);
  alpha_v4hi_u = build_vector_type (unsigned_intHI_type_node, 4);
  alpha_v4hi_s = build_vector_type (intHI_type_node, 4);
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
alpha_expand_builtin (tree exp, rtx target,
                      rtx subtarget ATTRIBUTE_UNUSED,
                      enum machine_mode mode ATTRIBUTE_UNUSED,
                      int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 2

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg;
  call_expr_arg_iterator iter;
  enum insn_code icode;
  rtx op[MAX_ARGS], pat;
  int arity;
  bool nonvoid;

  if (fcode >= ALPHA_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = code_for_builtin[fcode];
  if (icode == 0)
    internal_error ("bad builtin fcode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  arity = 0;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
        return NULL_RTX;
      if (arity >= MAX_ARGS)
        return NULL_RTX;

      insn_op = &insn_data[icode].operand[arity + nonvoid];

      op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      if (!(*insn_op->predicate) (op[arity], insn_op->mode))
        op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
      arity++;
    }

  if (nonvoid)
    {
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);
    }

  switch (arity)
    {
    case 0:
      pat = GEN_FCN (icode) (target);
      break;
    case 1:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0]);
      else
        pat = GEN_FCN (icode) (op[0]);
      break;
    case 2:
      pat = GEN_FCN (icode) (target, op[0], op[1]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
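
/* A sketch (not part of alpha.c) exercising a few of the builtins
   declared above when compiling for Alpha; with non-constant operands
   each call expands through the insn codes in code_for_builtin.  */
#if 0
static long
alpha_builtin_demo (long a, long b)
{
  long hi = __builtin_alpha_umulh (a, b);      /* high 64 bits of a * b */
  long ge = __builtin_alpha_cmpbge (a, b);     /* per-byte unsigned a >= b */
  long lo = __builtin_alpha_zapnot (a, 0x0f);  /* keep the low four bytes */
  return hi ^ ge ^ lo;
}
#endif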


/* Several bits below assume HWI >= 64 bits.  This should be enforced
   by config.gcc.  */
#if HOST_BITS_PER_WIDE_INT < 64
# error "HOST_WIDE_INT too small"
#endif

/* Fold the builtin for the CMPBGE instruction.  This is a vector comparison
   with an 8-bit output vector.  OPINT contains the integer operands; bit N
   of OP_CONST is set if OPINT[N] is valid.  */

static tree
alpha_fold_builtin_cmpbge (unsigned HOST_WIDE_INT opint[], long op_const)
{
  if (op_const == 3)
    {
      int i, val;
      for (i = 0, val = 0; i < 8; ++i)
        {
          unsigned HOST_WIDE_INT c0 = (opint[0] >> (i * 8)) & 0xff;
          unsigned HOST_WIDE_INT c1 = (opint[1] >> (i * 8)) & 0xff;
          if (c0 >= c1)
            val |= 1 << i;
        }
      return build_int_cst (long_integer_type_node, val);
    }
  else if (op_const == 2 && opint[1] == 0)
    return build_int_cst (long_integer_type_node, 0xff);
  return NULL;
}
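
/* Reference semantics of CMPBGE (a host-side sketch mirroring the fold
   above): bit I of the result is set iff byte I of A is unsigned
   greater-or-equal to byte I of B; hence comparing against a known-zero
   second operand always yields 0xff.  */
#if 0
static unsigned
cmpbge_ref_sketch (unsigned long a, unsigned long b)
{
  unsigned r = 0;
  int i;
  for (i = 0; i < 8; i++)
    if (((a >> (i * 8)) & 0xff) >= ((b >> (i * 8)) & 0xff))
      r |= 1u << i;
  return r;
}
#endif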

/* Fold the builtin for the ZAPNOT instruction.  This is essentially a
   specialized form of an AND operation.  Other byte manipulation instructions
   are defined in terms of this instruction, so this is also used as a
   subroutine for other builtins.

   OP contains the tree operands; OPINT contains the extracted integer values.
   Bit N of OP_CONST is set if OPINT[N] is valid.  OP may be null if only
   OPINT may be considered.  */

static tree
alpha_fold_builtin_zapnot (tree *op, unsigned HOST_WIDE_INT opint[],
                           long op_const)
{
  if (op_const & 2)
    {
      unsigned HOST_WIDE_INT mask = 0;
      int i;

      for (i = 0; i < 8; ++i)
        if ((opint[1] >> i) & 1)
          mask |= (unsigned HOST_WIDE_INT)0xff << (i * 8);

      if (op_const & 1)
        return build_int_cst (long_integer_type_node, opint[0] & mask);

      if (op)
        return fold_build2 (BIT_AND_EXPR, long_integer_type_node, op[0],
                            build_int_cst (long_integer_type_node, mask));
    }
  else if ((op_const & 1) && opint[0] == 0)
    return build_int_cst (long_integer_type_node, 0);
  return NULL;
}
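
/* The byte-selector-to-bitmask expansion used above, as a standalone
   host-side sketch: each set bit in the 8-bit selector keeps the
   corresponding byte, so zapnot (x, 0x0f) is x & 0x00000000ffffffff.  */
#if 0
static unsigned long
zapnot_mask_sketch (unsigned selector)
{
  unsigned long mask = 0;
  int i;
  for (i = 0; i < 8; i++)
    if ((selector >> i) & 1)
      mask |= 0xfful << (i * 8);
  return mask;   /* zapnot_mask_sketch (0x0f) == 0x00000000ffffffff */
}
#endif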
6856
6857/* Fold the builtins for the EXT family of instructions. */
6858
6859static tree
6860alpha_fold_builtin_extxx (tree op[], unsigned HOST_WIDE_INT opint[],
6861 long op_const, unsigned HOST_WIDE_INT bytemask,
6862 bool is_high)
6863{
6864 long zap_const = 2;
6865 tree *zap_op = NULL;
6866
6867 if (op_const & 2)
6868 {
6869 unsigned HOST_WIDE_INT loc;
6870
6871 loc = opint[1] & 7;
6872 if (BYTES_BIG_ENDIAN)
6873 loc ^= 7;
6874 loc *= 8;
6875
6876 if (loc != 0)
6877 {
6878 if (op_const & 1)
6879 {
6880 unsigned HOST_WIDE_INT temp = opint[0];
6881 if (is_high)
6882 temp <<= loc;
6883 else
6884 temp >>= loc;
6885 opint[0] = temp;
6886 zap_const = 3;
6887 }
6888 }
6889 else
6890 zap_op = op;
6891 }
6892
6893 opint[1] = bytemask;
6894 return alpha_fold_builtin_zapnot (zap_op, opint, zap_const);
6895}
6896
6897/* Fold the builtins for the INS family of instructions. */
6898
6899static tree
6900alpha_fold_builtin_insxx (tree op[], unsigned HOST_WIDE_INT opint[],
6901 long op_const, unsigned HOST_WIDE_INT bytemask,
6902 bool is_high)
6903{
6904 if ((op_const & 1) && opint[0] == 0)
6905 return build_int_cst (long_integer_type_node, 0);
6906
6907 if (op_const & 2)
6908 {
6909 unsigned HOST_WIDE_INT temp, loc, byteloc;
6910 tree *zap_op = NULL;
6911
6912 loc = opint[1] & 7;
6913 if (BYTES_BIG_ENDIAN)
6914 loc ^= 7;
6915 bytemask <<= loc;
6916
6917 temp = opint[0];
6918 if (is_high)
6919 {
6920 byteloc = (64 - (loc * 8)) & 0x3f;
6921 if (byteloc == 0)
6922 zap_op = op;
6923 else
6924 temp >>= byteloc;
6925 bytemask >>= 8;
6926 }
6927 else
6928 {
6929 byteloc = loc * 8;
6930 if (byteloc == 0)
6931 zap_op = op;
6932 else
6933 temp <<= byteloc;
6934 }
6935
6936 opint[0] = temp;
6937 opint[1] = bytemask;
6938 return alpha_fold_builtin_zapnot (zap_op, opint, op_const);
6939 }
6940
6941 return NULL;
6942}
6943
6944static tree
6945alpha_fold_builtin_mskxx (tree op[], unsigned HOST_WIDE_INT opint[],
6946 long op_const, unsigned HOST_WIDE_INT bytemask,
6947 bool is_high)
6948{
6949 if (op_const & 2)
6950 {
6951 unsigned HOST_WIDE_INT loc;
6952
6953 loc = opint[1] & 7;
6954 if (BYTES_BIG_ENDIAN)
6955 loc ^= 7;
6956 bytemask <<= loc;
6957
6958 if (is_high)
6959 bytemask >>= 8;
6960
6961 opint[1] = bytemask ^ 0xff;
6962 }
6963
6964 return alpha_fold_builtin_zapnot (op, opint, op_const);
6965}
6966
6967static tree
6968alpha_fold_builtin_umulh (unsigned HOST_WIDE_INT opint[], long op_const)
6969{
6970 switch (op_const)
6971 {
6972 case 3:
6973 {
6974 unsigned HOST_WIDE_INT l;
6975 HOST_WIDE_INT h;
6976
6977 mul_double (opint[0], 0, opint[1], 0, &l, &h);
6978
6979#if HOST_BITS_PER_WIDE_INT > 64
6980# error fixme
6981#endif
6982
6983 return build_int_cst (long_integer_type_node, h);
6984 }
6985
6986 case 1:
6987 opint[1] = opint[0];
6988 /* FALLTHRU */
6989 case 2:
6990 /* Note that (X*1) >> 64 == 0. */
6991 if (opint[1] == 0 || opint[1] == 1)
6992 return build_int_cst (long_integer_type_node, 0);
6993 break;
6994 }
6995 return NULL;
6996}
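/* Illustrative sketch (not from alpha.c): UMULH is the high 64 bits of
   the unsigned 128-bit product, which the mul_double call above computes
   on the host.  With a compiler providing the __uint128_t extension:  */
#include <stdint.h>

static uint64_t
umulh_example (uint64_t a, uint64_t b)
{
  return (uint64_t) (((__uint128_t) a * b) >> 64);
}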
6997
6998static tree
6999alpha_fold_vector_minmax (enum tree_code code, tree op[], tree vtype)
7000{
7001 tree op0 = fold_convert (vtype, op[0]);
7002 tree op1 = fold_convert (vtype, op[1]);
31ff73b5 7003 tree val = fold_build2 (code, vtype, op0, op1);
ec46190f 7004 return fold_build1 (VIEW_CONVERT_EXPR, long_integer_type_node, val);
7005}
7006
7007static tree
7008alpha_fold_builtin_perr (unsigned HOST_WIDE_INT opint[], long op_const)
7009{
7010 unsigned HOST_WIDE_INT temp = 0;
7011 int i;
7012
7013 if (op_const != 3)
7014 return NULL;
7015
7016 for (i = 0; i < 8; ++i)
7017 {
7018 unsigned HOST_WIDE_INT a = (opint[0] >> (i * 8)) & 0xff;
7019 unsigned HOST_WIDE_INT b = (opint[1] >> (i * 8)) & 0xff;
7020 if (a >= b)
7021 temp += a - b;
7022 else
7023 temp += b - a;
7024 }
7025
7026 return build_int_cst (long_integer_type_node, temp);
7027}
7028
7029static tree
7030alpha_fold_builtin_pklb (unsigned HOST_WIDE_INT opint[], long op_const)
7031{
7032 unsigned HOST_WIDE_INT temp;
7033
7034 if (op_const == 0)
7035 return NULL;
7036
7037 temp = opint[0] & 0xff;
7038 temp |= (opint[0] >> 24) & 0xff00;
7039
7040 return build_int_cst (long_integer_type_node, temp);
7041}
7042
7043static tree
7044alpha_fold_builtin_pkwb (unsigned HOST_WIDE_INT opint[], long op_const)
7045{
7046 unsigned HOST_WIDE_INT temp;
7047
7048 if (op_const == 0)
7049 return NULL;
7050
7051 temp = opint[0] & 0xff;
7052 temp |= (opint[0] >> 8) & 0xff00;
7053 temp |= (opint[0] >> 16) & 0xff0000;
7054 temp |= (opint[0] >> 24) & 0xff000000;
7055
7056 return build_int_cst (long_integer_type_node, temp);
7057}
7058
7059static tree
7060alpha_fold_builtin_unpkbl (unsigned HOST_WIDE_INT opint[], long op_const)
7061{
7062 unsigned HOST_WIDE_INT temp;
7063
7064 if (op_const == 0)
7065 return NULL;
7066
7067 temp = opint[0] & 0xff;
7068 temp |= (opint[0] & 0xff00) << 24;
7069
7070 return build_int_cst (long_integer_type_node, temp);
7071}
7072
7073static tree
7074alpha_fold_builtin_unpkbw (unsigned HOST_WIDE_INT opint[], long op_const)
7075{
7076 unsigned HOST_WIDE_INT temp;
7077
7078 if (op_const == 0)
7079 return NULL;
7080
7081 temp = opint[0] & 0xff;
7082 temp |= (opint[0] & 0x0000ff00) << 8;
7083 temp |= (opint[0] & 0x00ff0000) << 16;
7084 temp |= (opint[0] & 0xff000000) << 24;
7085
7086 return build_int_cst (long_integer_type_node, temp);
7087}
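/* Illustrative check (not from alpha.c): UNPKBW spreads the low four
   bytes of X into the low bytes of four words and PKWB packs them back,
   so the two shift cascades above invert each other.  */
#include <assert.h>
#include <stdint.h>

static void
pkwb_unpkbw_example (void)
{
  uint64_t x = 0xddccbbaaULL;
  uint64_t unpacked = (x & 0xff)
		      | ((x & 0x0000ff00) << 8)
		      | ((x & 0x00ff0000) << 16)
		      | ((x & 0xff000000) << 24);	/* 0x00dd00cc00bb00aa */
  uint64_t repacked = (unpacked & 0xff)
		      | ((unpacked >> 8) & 0xff00)
		      | ((unpacked >> 16) & 0xff0000)
		      | ((unpacked >> 24) & 0xff000000);
  assert (repacked == x);
}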
7088
7089static tree
7090alpha_fold_builtin_cttz (unsigned HOST_WIDE_INT opint[], long op_const)
7091{
7092 unsigned HOST_WIDE_INT temp;
7093
7094 if (op_const == 0)
7095 return NULL;
7096
7097 if (opint[0] == 0)
7098 temp = 64;
7099 else
7100 temp = exact_log2 (opint[0] & -opint[0]);
7101
7102 return build_int_cst (long_integer_type_node, temp);
7103}
7104
7105static tree
7106alpha_fold_builtin_ctlz (unsigned HOST_WIDE_INT opint[], long op_const)
7107{
7108 unsigned HOST_WIDE_INT temp;
7109
7110 if (op_const == 0)
7111 return NULL;
7112
7113 if (opint[0] == 0)
7114 temp = 64;
7115 else
7116 temp = 64 - floor_log2 (opint[0]) - 1;
7117
7118 return build_int_cst (long_integer_type_node, temp);
7119}
7120
7121static tree
7122alpha_fold_builtin_ctpop (unsigned HOST_WIDE_INT opint[], long op_const)
7123{
7124 unsigned HOST_WIDE_INT temp, op;
7125
7126 if (op_const == 0)
7127 return NULL;
7128
7129 op = opint[0];
7130 temp = 0;
7131 while (op)
7132 temp++, op &= op - 1;
7133
7134 return build_int_cst (long_integer_type_node, temp);
7135}
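/* Illustrative mirrors (hypothetical helpers, not from alpha.c) of the
   three folders above, written portably: trailing zeros by shifting,
   leading zeros by counting down from 64, population count by clearing
   the lowest set bit (the same trick the ctpop loop uses).  */
#include <stdint.h>

static int
cttz64_example (uint64_t x)
{
  int n = 0;
  if (x == 0)
    return 64;
  while (!(x & 1))
    x >>= 1, ++n;
  return n;
}

static int
ctlz64_example (uint64_t x)
{
  int n = 64;
  while (x)
    x >>= 1, --n;
  return n;
}

static int
ctpop64_example (uint64_t x)
{
  int n = 0;
  while (x)
    ++n, x &= x - 1;
  return n;
}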
7136
7137/* Fold one of our builtin functions. */
7138
7139static tree
7140alpha_fold_builtin (tree fndecl, int n_args, tree *op,
7141 bool ignore ATTRIBUTE_UNUSED)
36013987 7142{
36013987 7143 unsigned HOST_WIDE_INT opint[MAX_ARGS];
58a11859 7144 long op_const = 0;
f311c3b4 7145 int i;
36013987 7146
7147 if (n_args >= MAX_ARGS)
7148 return NULL;
7149
7150 for (i = 0; i < n_args; i++)
36013987 7151 {
f311c3b4 7152 tree arg = op[i];
7153 if (arg == error_mark_node)
7154 return NULL;
36013987 7155
f311c3b4 7156 opint[i] = 0;
7157 if (TREE_CODE (arg) == INTEGER_CST)
7158 {
7159 op_const |= 1L << i;
7160 opint[i] = int_cst_value (arg);
7161 }
7162 }
7163
7164 switch (DECL_FUNCTION_CODE (fndecl))
7165 {
7166 case ALPHA_BUILTIN_CMPBGE:
7167 return alpha_fold_builtin_cmpbge (opint, op_const);
7168
7169 case ALPHA_BUILTIN_EXTBL:
7170 return alpha_fold_builtin_extxx (op, opint, op_const, 0x01, false);
7171 case ALPHA_BUILTIN_EXTWL:
7172 return alpha_fold_builtin_extxx (op, opint, op_const, 0x03, false);
7173 case ALPHA_BUILTIN_EXTLL:
7174 return alpha_fold_builtin_extxx (op, opint, op_const, 0x0f, false);
7175 case ALPHA_BUILTIN_EXTQL:
7176 return alpha_fold_builtin_extxx (op, opint, op_const, 0xff, false);
7177 case ALPHA_BUILTIN_EXTWH:
7178 return alpha_fold_builtin_extxx (op, opint, op_const, 0x03, true);
7179 case ALPHA_BUILTIN_EXTLH:
7180 return alpha_fold_builtin_extxx (op, opint, op_const, 0x0f, true);
7181 case ALPHA_BUILTIN_EXTQH:
7182 return alpha_fold_builtin_extxx (op, opint, op_const, 0xff, true);
7183
7184 case ALPHA_BUILTIN_INSBL:
7185 return alpha_fold_builtin_insxx (op, opint, op_const, 0x01, false);
7186 case ALPHA_BUILTIN_INSWL:
7187 return alpha_fold_builtin_insxx (op, opint, op_const, 0x03, false);
7188 case ALPHA_BUILTIN_INSLL:
7189 return alpha_fold_builtin_insxx (op, opint, op_const, 0x0f, false);
7190 case ALPHA_BUILTIN_INSQL:
7191 return alpha_fold_builtin_insxx (op, opint, op_const, 0xff, false);
7192 case ALPHA_BUILTIN_INSWH:
7193 return alpha_fold_builtin_insxx (op, opint, op_const, 0x03, true);
7194 case ALPHA_BUILTIN_INSLH:
7195 return alpha_fold_builtin_insxx (op, opint, op_const, 0x0f, true);
7196 case ALPHA_BUILTIN_INSQH:
7197 return alpha_fold_builtin_insxx (op, opint, op_const, 0xff, true);
7198
7199 case ALPHA_BUILTIN_MSKBL:
7200 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x01, false);
7201 case ALPHA_BUILTIN_MSKWL:
7202 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x03, false);
7203 case ALPHA_BUILTIN_MSKLL:
7204 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x0f, false);
7205 case ALPHA_BUILTIN_MSKQL:
7206 return alpha_fold_builtin_mskxx (op, opint, op_const, 0xff, false);
7207 case ALPHA_BUILTIN_MSKWH:
7208 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x03, true);
7209 case ALPHA_BUILTIN_MSKLH:
7210 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x0f, true);
7211 case ALPHA_BUILTIN_MSKQH:
7212 return alpha_fold_builtin_mskxx (op, opint, op_const, 0xff, true);
7213
7214 case ALPHA_BUILTIN_UMULH:
7215 return alpha_fold_builtin_umulh (opint, op_const);
7216
7217 case ALPHA_BUILTIN_ZAP:
7218 opint[1] ^= 0xff;
7219 /* FALLTHRU */
7220 case ALPHA_BUILTIN_ZAPNOT:
7221 return alpha_fold_builtin_zapnot (op, opint, op_const);
7222
7223 case ALPHA_BUILTIN_MINUB8:
7224 return alpha_fold_vector_minmax (MIN_EXPR, op, alpha_v8qi_u);
7225 case ALPHA_BUILTIN_MINSB8:
7226 return alpha_fold_vector_minmax (MIN_EXPR, op, alpha_v8qi_s);
7227 case ALPHA_BUILTIN_MINUW4:
7228 return alpha_fold_vector_minmax (MIN_EXPR, op, alpha_v4hi_u);
7229 case ALPHA_BUILTIN_MINSW4:
7230 return alpha_fold_vector_minmax (MIN_EXPR, op, alpha_v4hi_s);
7231 case ALPHA_BUILTIN_MAXUB8:
7232 return alpha_fold_vector_minmax (MAX_EXPR, op, alpha_v8qi_u);
7233 case ALPHA_BUILTIN_MAXSB8:
7234 return alpha_fold_vector_minmax (MAX_EXPR, op, alpha_v8qi_s);
7235 case ALPHA_BUILTIN_MAXUW4:
7236 return alpha_fold_vector_minmax (MAX_EXPR, op, alpha_v4hi_u);
7237 case ALPHA_BUILTIN_MAXSW4:
7238 return alpha_fold_vector_minmax (MAX_EXPR, op, alpha_v4hi_s);
7239
7240 case ALPHA_BUILTIN_PERR:
7241 return alpha_fold_builtin_perr (opint, op_const);
7242 case ALPHA_BUILTIN_PKLB:
7243 return alpha_fold_builtin_pklb (opint, op_const);
7244 case ALPHA_BUILTIN_PKWB:
7245 return alpha_fold_builtin_pkwb (opint, op_const);
7246 case ALPHA_BUILTIN_UNPKBL:
7247 return alpha_fold_builtin_unpkbl (opint, op_const);
7248 case ALPHA_BUILTIN_UNPKBW:
7249 return alpha_fold_builtin_unpkbw (opint, op_const);
7250
7251 case ALPHA_BUILTIN_CTTZ:
7252 return alpha_fold_builtin_cttz (opint, op_const);
7253 case ALPHA_BUILTIN_CTLZ:
7254 return alpha_fold_builtin_ctlz (opint, op_const);
7255 case ALPHA_BUILTIN_CTPOP:
7256 return alpha_fold_builtin_ctpop (opint, op_const);
7257
7258 case ALPHA_BUILTIN_AMASK:
7259 case ALPHA_BUILTIN_IMPLVER:
7260 case ALPHA_BUILTIN_RPCC:
7261 case ALPHA_BUILTIN_THREAD_POINTER:
7262 case ALPHA_BUILTIN_SET_THREAD_POINTER:
7263 /* None of these are foldable at compile-time. */
7264 default:
7265 return NULL;
7266 }
7267}
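/* Illustrative note (not from alpha.c): with all-constant arguments the
   dispatcher above folds the builtin away entirely, e.g.

     long x = __builtin_alpha_extwl (0x1122334455667788L, 2);

   becomes the constant 0x5566 and no EXTWL instruction is emitted.  */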
6d8fd7bb 7268\f
7269/* This page contains routines that are used to determine what the function
7270 prologue and epilogue code will do and write them out. */
7271
7272/* Compute the size of the save area in the stack. */
7273
7274/* These variables are used for communication between the following functions.
7275 They indicate various things about the current function being compiled
7276 that are used to tell what kind of prologue, epilogue and procedure
839a4992 7277 descriptor to generate. */
7278
7279/* Nonzero if we need a stack procedure. */
7280enum alpha_procedure_types {PT_NULL = 0, PT_REGISTER = 1, PT_STACK = 2};
7281static enum alpha_procedure_types alpha_procedure_type;
7282
7283/* Register number (either FP or SP) that is used to unwind the frame. */
9c0e94a5 7284static int vms_unwind_regno;
7285
7286/* Register number used to save FP. We need not have one for RA since
7287 we don't modify it for register procedures. This is only defined
7288 for register frame procedures. */
9c0e94a5 7289static int vms_save_fp_regno;
7290
7291/* Register number used to reference objects off our PV. */
9c0e94a5 7292static int vms_base_regno;
89cfc2c6 7293
acd92049 7294/* Compute register masks for saved registers. */
7295
7296static void
a5c24926 7297alpha_sa_mask (unsigned long *imaskP, unsigned long *fmaskP)
7298{
7299 unsigned long imask = 0;
7300 unsigned long fmask = 0;
1eb356b9 7301 unsigned int i;
89cfc2c6 7302
7303 /* When outputting a thunk, we don't have valid register life info,
7304 but assemble_start_function wants to output .frame and .mask
7305 directives. */
3c072c6b 7306 if (cfun->is_thunk)
acd92049 7307 {
7308 *imaskP = 0;
7309 *fmaskP = 0;
7310 return;
7311 }
89cfc2c6 7312
c2ea1ac6 7313 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
409f52d3 7314 imask |= (1UL << HARD_FRAME_POINTER_REGNUM);
89cfc2c6 7315
7316 /* One for every register we have to save. */
7317 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7318 if (! fixed_regs[i] && ! call_used_regs[i]
6fb5fa3c 7319 && df_regs_ever_live_p (i) && i != REG_RA
7320 && (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
7321 {
7322 if (i < 32)
409f52d3 7323 imask |= (1UL << i);
14691f8d 7324 else
409f52d3 7325 fmask |= (1UL << (i - 32));
7326 }
7327
7328 /* We need to restore these for the handler. */
e3b5732b 7329 if (crtl->calls_eh_return)
7330 {
7331 for (i = 0; ; ++i)
7332 {
7333 unsigned regno = EH_RETURN_DATA_REGNO (i);
7334 if (regno == INVALID_REGNUM)
7335 break;
7336 imask |= 1UL << regno;
7337 }
ed80cd68 7338 }
f676971a 7339
7340 /* If any register spilled, then spill the return address also. */
7341 /* ??? This is required by the Digital stack unwind specification
7342 and isn't needed if we're doing Dwarf2 unwinding. */
7343 if (imask || fmask || alpha_ra_ever_killed ())
409f52d3 7344 imask |= (1UL << REG_RA);
9c0e94a5 7345
7346 *imaskP = imask;
7347 *fmaskP = fmask;
7348}
7349
7350int
a5c24926 7351alpha_sa_size (void)
89cfc2c6 7352{
61334ebe 7353 unsigned long mask[2];
89cfc2c6 7354 int sa_size = 0;
61334ebe 7355 int i, j;
89cfc2c6 7356
7357 alpha_sa_mask (&mask[0], &mask[1]);
7358
7359 if (TARGET_ABI_UNICOSMK)
7360 {
7361 if (mask[0] || mask[1])
7362 sa_size = 14;
7363 }
acd92049 7364 else
acd92049 7365 {
7366 for (j = 0; j < 2; ++j)
7367 for (i = 0; i < 32; ++i)
7368 if ((mask[j] >> i) & 1)
7369 sa_size++;
acd92049 7370 }
89cfc2c6 7371
7372 if (TARGET_ABI_UNICOSMK)
7373 {
7374 /* We might not need to generate a frame if we don't make any calls
7375 (including calls to __T3E_MISMATCH if this is a vararg function),
7376 don't have any local variables which require stack slots, don't
7377 use alloca and have not determined that we need a frame for other
7378 reasons. */
7379
7380 alpha_procedure_type
7381 = (sa_size || get_frame_size() != 0
38173d38 7382 || crtl->outgoing_args_size
e3b5732b 7383 || cfun->stdarg || cfun->calls_alloca
7384 || frame_pointer_needed)
7385 ? PT_STACK : PT_REGISTER;
7386
7387 /* Always reserve space for saving callee-saved registers if we
7388 need a frame as required by the calling convention. */
c2ea1ac6 7389 if (alpha_procedure_type == PT_STACK)
7390 sa_size = 14;
7391 }
7392 else if (TARGET_ABI_OPEN_VMS)
9c0e94a5 7393 {
7394 /* Start with a stack procedure if we make any calls (REG_RA used), or
7395 need a frame pointer, with a register procedure if we otherwise need
7396 at least a slot, and with a null procedure in other cases. */
7397 if ((mask[0] >> REG_RA) & 1 || frame_pointer_needed)
7398 alpha_procedure_type = PT_STACK;
7399 else if (get_frame_size() != 0)
7400 alpha_procedure_type = PT_REGISTER;
7401 else
7402 alpha_procedure_type = PT_NULL;
61334ebe 7403
cb9a8e97 7404 /* Don't reserve space for saving FP & RA yet. Do that later after we've
61334ebe 7405 made the final decision on stack procedure vs register procedure. */
c2ea1ac6 7406 if (alpha_procedure_type == PT_STACK)
cb9a8e97 7407 sa_size -= 2;
7408
7409 /* Decide whether to refer to objects off our PV via FP or PV.
7410 If we need FP for something else or if we receive a nonlocal
7411 goto (which expects PV to contain the value), we must use PV.
7412 Otherwise, start by assuming we can use FP. */
7413
7414 vms_base_regno
7415 = (frame_pointer_needed
e3b5732b 7416 || cfun->has_nonlocal_label
c2ea1ac6 7417 || alpha_procedure_type == PT_STACK
38173d38 7418 || crtl->outgoing_args_size)
c2ea1ac6 7419 ? REG_PV : HARD_FRAME_POINTER_REGNUM;
7420
7421 /* If we want to copy PV into FP, we need to find some register
7422 in which to save FP. */
7423
7424 vms_save_fp_regno = -1;
7425 if (vms_base_regno == HARD_FRAME_POINTER_REGNUM)
7426 for (i = 0; i < 32; i++)
6fb5fa3c 7427 if (! fixed_regs[i] && call_used_regs[i] && ! df_regs_ever_live_p (i))
7428 vms_save_fp_regno = i;
7429
7430 /* A VMS condition handler requires a stack procedure in our
7431 implementation. (not required by the calling standard). */
7432 if ((vms_save_fp_regno == -1 && alpha_procedure_type == PT_REGISTER)
7433 || cfun->machine->uses_condition_handler)
7434 vms_base_regno = REG_PV, alpha_procedure_type = PT_STACK;
7435 else if (alpha_procedure_type == PT_NULL)
7436 vms_base_regno = REG_PV;
7437
7438 /* Stack unwinding should be done via FP unless we use it for PV. */
7439 vms_unwind_regno = (vms_base_regno == REG_PV
7440 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
7441
7442 /* If this is a stack procedure, allow space for saving FP, RA and
7443 a condition handler slot if needed. */
c2ea1ac6 7444 if (alpha_procedure_type == PT_STACK)
221cf9ab 7445 sa_size += 2 + cfun->machine->uses_condition_handler;
7446 }
7447 else
7448 {
7449 /* Our size must be even (multiple of 16 bytes). */
7450 if (sa_size & 1)
7451 sa_size++;
7452 }
7453
7454 return sa_size * 8;
7455}
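/* Illustrative example (hypothetical numbers, not from the source): on
   OSF, if $9, $10 and $26 (RA) must be saved, the mask loop counts three
   slots, the total is padded to an even count for 16-byte alignment, and
   alpha_sa_size returns 4 * 8 = 32 bytes.  */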
7456
7457/* Define the offset between two registers, one to be eliminated,
7458 and the other its replacement, at the start of a routine. */
7459
7460HOST_WIDE_INT
7461alpha_initial_elimination_offset (unsigned int from,
7462 unsigned int to ATTRIBUTE_UNUSED)
7463{
7464 HOST_WIDE_INT ret;
7465
7466 ret = alpha_sa_size ();
38173d38 7467 ret += ALPHA_ROUND (crtl->outgoing_args_size);
35d9c403 7468
7469 switch (from)
7470 {
7471 case FRAME_POINTER_REGNUM:
7472 break;
7473
7474 case ARG_POINTER_REGNUM:
7475 ret += (ALPHA_ROUND (get_frame_size ()
7476 + crtl->args.pretend_args_size)
7477 - crtl->args.pretend_args_size);
7478 break;
7479
7480 default:
7481 gcc_unreachable ();
7482 }
7483
7484 return ret;
7485}
7486
7487#if TARGET_ABI_OPEN_VMS
7488
7489/* Worker function for TARGET_CAN_ELIMINATE. */
7490
7491static bool
7492alpha_vms_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
89cfc2c6 7493{
1d3499d8 7494 /* We need the alpha_procedure_type to decide. Evaluate it now. */
89cfc2c6 7495 alpha_sa_size ();
7496
7497 switch (alpha_procedure_type)
7498 {
7499 case PT_NULL:
7500 /* NULL procedures have no frame of their own and we only
7501 know how to resolve from the current stack pointer. */
7502 return to == STACK_POINTER_REGNUM;
7503
7504 case PT_REGISTER:
7505 case PT_STACK:
7506 /* We always eliminate except to the stack pointer if there is no
7507 usable frame pointer at hand. */
7508 return (to != STACK_POINTER_REGNUM
7509 || vms_unwind_regno != HARD_FRAME_POINTER_REGNUM);
7510 }
7511
7512 gcc_unreachable ();
7513}
7514
7515/* FROM is to be eliminated for TO. Return the offset so that TO+offset
7516 designates the same location as FROM. */
7517
7518HOST_WIDE_INT
7519alpha_vms_initial_elimination_offset (unsigned int from, unsigned int to)
7520{
7521 /* The only possible attempts we ever expect are ARG or FRAME_PTR to
7522 HARD_FRAME or STACK_PTR. We need the alpha_procedure_type to decide
7523 on the proper computations and will need the register save area size
7524 in most cases. */
7525
7526 HOST_WIDE_INT sa_size = alpha_sa_size ();
7527
7528 /* PT_NULL procedures have no frame of their own and we only allow
7529 elimination to the stack pointer. This is the argument pointer and we
7530 resolve the soft frame pointer to that as well. */
7531
7532 if (alpha_procedure_type == PT_NULL)
7533 return 0;
7534
7535 /* For a PT_STACK procedure the frame layout looks as follows
7536
7537 -----> decreasing addresses
7538
7539 < size rounded up to 16 | likewise >
7540 --------------#------------------------------+++--------------+++-------#
7541 incoming args # pretended args | "frame" | regs sa | PV | outgoing args #
7542 --------------#---------------------------------------------------------#
7543 ^ ^ ^ ^
7544 ARG_PTR FRAME_PTR HARD_FRAME_PTR STACK_PTR
7545
7546
7547 PT_REGISTER procedures are similar in that they may have a frame of their
7548 own. They have no regs-sa/pv/outgoing-args area.
7549
7550 We first compute offset to HARD_FRAME_PTR, then add what we need to get
7551 to STACK_PTR if need be. */
7552
7553 {
7554 HOST_WIDE_INT offset;
7555 HOST_WIDE_INT pv_save_size = alpha_procedure_type == PT_STACK ? 8 : 0;
7556
7557 switch (from)
7558 {
7559 case FRAME_POINTER_REGNUM:
7560 offset = ALPHA_ROUND (sa_size + pv_save_size);
7561 break;
7562 case ARG_POINTER_REGNUM:
7563 offset = (ALPHA_ROUND (sa_size + pv_save_size
7564 + get_frame_size ()
7565 + crtl->args.pretend_args_size)
7566 - crtl->args.pretend_args_size);
7567 break;
7568 default:
7569 gcc_unreachable ();
7570 }
7571
7572 if (to == STACK_POINTER_REGNUM)
7573 offset += ALPHA_ROUND (crtl->outgoing_args_size);
7574
7575 return offset;
7576 }
7577}
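/* Worked example (hypothetical numbers): for a PT_STACK procedure with
   sa_size = 32, the 8-byte PV slot, a 64-byte frame and no pretended
   args, the switch above yields
     FRAME_PTR -> HARD_FRAME_PTR: ALPHA_ROUND (32 + 8) = 48
     ARG_PTR   -> HARD_FRAME_PTR: ALPHA_ROUND (32 + 8 + 64) = 112
   and eliminating to STACK_PTR further adds the rounded size of the
   outgoing argument area.  */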
7578
7579#define COMMON_OBJECT "common_object"
7580
7581static tree
7582common_object_handler (tree *node, tree name ATTRIBUTE_UNUSED,
7583 tree args ATTRIBUTE_UNUSED, int flags ATTRIBUTE_UNUSED,
7584 bool *no_add_attrs ATTRIBUTE_UNUSED)
7585{
7586 tree decl = *node;
7587 gcc_assert (DECL_P (decl));
7588
7589 DECL_COMMON (decl) = 1;
7590 return NULL_TREE;
7591}
8289c43b 7592
6bc7bc14 7593static const struct attribute_spec vms_attribute_table[] =
a6f12d7c 7594{
7595 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7596 affects_type_identity } */
7597 { COMMON_OBJECT, 0, 1, true, false, false, common_object_handler, false },
7598 { NULL, 0, 0, false, false, false, NULL, false }
91d231cb 7599};
a6f12d7c 7600
7601void
7602vms_output_aligned_decl_common(FILE *file, tree decl, const char *name,
7603 unsigned HOST_WIDE_INT size,
7604 unsigned int align)
7605{
7606 tree attr = DECL_ATTRIBUTES (decl);
7607 fprintf (file, "%s", COMMON_ASM_OP);
7608 assemble_name (file, name);
7609 fprintf (file, "," HOST_WIDE_INT_PRINT_UNSIGNED, size);
7610 /* ??? Unlike on OSF/1, the alignment factor is not in log units. */
7611 fprintf (file, ",%u", align / BITS_PER_UNIT);
7612 if (attr)
7613 {
7614 attr = lookup_attribute (COMMON_OBJECT, attr);
7615 if (attr)
7616 fprintf (file, ",%s",
7617 IDENTIFIER_POINTER (TREE_VALUE (TREE_VALUE (attr))));
7618 }
7619 fputc ('\n', file);
7620}
7621
7622#undef COMMON_OBJECT
7623
7624#endif
7625
1eb356b9 7626static int
a5c24926 7627find_lo_sum_using_gp (rtx *px, void *data ATTRIBUTE_UNUSED)
1eb356b9 7628{
7629 return GET_CODE (*px) == LO_SUM && XEXP (*px, 0) == pic_offset_table_rtx;
7630}
7631
7632int
a5c24926 7633alpha_find_lo_sum_using_gp (rtx insn)
7634{
7635 return for_each_rtx (&PATTERN (insn), find_lo_sum_using_gp, NULL) > 0;
7636}
7637
9c0e94a5 7638static int
a5c24926 7639alpha_does_function_need_gp (void)
7640{
7641 rtx insn;
a6f12d7c 7642
7643 /* The GP being variable is an OSF abi thing. */
7644 if (! TARGET_ABI_OSF)
9c0e94a5 7645 return 0;
a6f12d7c 7646
b64de1fe 7647 /* We need the gp to load the address of __mcount. */
e3b5732b 7648 if (TARGET_PROFILING_NEEDS_GP && crtl->profile)
9c0e94a5 7649 return 1;
d60a05a1 7650
b64de1fe 7651 /* The code emitted by alpha_output_mi_thunk_osf uses the gp. */
3c072c6b 7652 if (cfun->is_thunk)
acd92049 7653 return 1;
acd92049 7654
7655 /* The nonlocal receiver pattern assumes that the gp is valid for
7656 the nested function. Reasonable because it's almost always set
7657 correctly already. For the cases where that's wrong, make sure
7658 the nested function loads its gp on entry. */
e3b5732b 7659 if (crtl->has_nonlocal_goto)
7660 return 1;
7661
f676971a 7662 /* If we need a GP (we have a LDSYM insn or a CALL_INSN), load it first.
7663 Even if we are a static function, we still need to do this in case
7664 our address is taken and passed to something like qsort. */
a6f12d7c 7665
7666 push_topmost_sequence ();
7667 insn = get_insns ();
7668 pop_topmost_sequence ();
89cfc2c6 7669
9c0e94a5 7670 for (; insn; insn = NEXT_INSN (insn))
14e58be0 7671 if (NONDEBUG_INSN_P (insn)
807bdfb6 7672 && ! JUMP_TABLE_DATA_P (insn)
9c0e94a5 7673 && GET_CODE (PATTERN (insn)) != USE
7674 && GET_CODE (PATTERN (insn)) != CLOBBER
7675 && get_attr_usegp (insn))
7676 return 1;
a6f12d7c 7677
9c0e94a5 7678 return 0;
7679}
7680
ec6840c1 7681\f
7682/* Helper function to set RTX_FRAME_RELATED_P on instructions, including
7683 sequences. */
7684
7685static rtx
a5c24926 7686set_frame_related_p (void)
6abc6f40 7687{
7688 rtx seq = get_insns ();
7689 rtx insn;
7690
7691 end_sequence ();
7692
7693 if (!seq)
7694 return NULL_RTX;
7695
7696 if (INSN_P (seq))
6abc6f40 7697 {
2f937369
DM
7698 insn = seq;
7699 while (insn != NULL_RTX)
7700 {
7701 RTX_FRAME_RELATED_P (insn) = 1;
7702 insn = NEXT_INSN (insn);
7703 }
7704 seq = emit_insn (seq);
7705 }
7706 else
7707 {
7708 seq = emit_insn (seq);
7709 RTX_FRAME_RELATED_P (seq) = 1;
6abc6f40 7710 }
2f937369 7711 return seq;
7712}
7713
7714#define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
7715
45f413e4 7716/* Generates a store with the proper unwind info attached. VALUE is
0e40b5f2 7717 stored at BASE_REG+BASE_OFS. If FRAME_BIAS is nonzero, then BASE_REG
7718 contains SP+FRAME_BIAS, and that is the unwind info that should be
7719 generated. If FRAME_REG != VALUE, then VALUE is being stored on
7720 behalf of FRAME_REG, and FRAME_REG should be present in the unwind. */
7721
7722static void
7723emit_frame_store_1 (rtx value, rtx base_reg, HOST_WIDE_INT frame_bias,
7724 HOST_WIDE_INT base_ofs, rtx frame_reg)
7725{
7726 rtx addr, mem, insn;
7727
7728 addr = plus_constant (base_reg, base_ofs);
7729 mem = gen_rtx_MEM (DImode, addr);
7730 set_mem_alias_set (mem, alpha_sr_alias_set);
7731
7732 insn = emit_move_insn (mem, value);
7733 RTX_FRAME_RELATED_P (insn) = 1;
7734
7735 if (frame_bias || value != frame_reg)
7736 {
7737 if (frame_bias)
7738 {
7739 addr = plus_constant (stack_pointer_rtx, frame_bias + base_ofs);
7740 mem = gen_rtx_MEM (DImode, addr);
7741 }
7742
7743 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
7744 gen_rtx_SET (VOIDmode, mem, frame_reg));
7745 }
7746}
7747
7748static void
7749emit_frame_store (unsigned int regno, rtx base_reg,
7750 HOST_WIDE_INT frame_bias, HOST_WIDE_INT base_ofs)
7751{
7752 rtx reg = gen_rtx_REG (DImode, regno);
7753 emit_frame_store_1 (reg, base_reg, frame_bias, base_ofs, reg);
7754}
7755
7756/* Compute the frame size. SIZE is the size of the "naked" frame
7757 and SA_SIZE is the size of the register save area. */
7758
7759static HOST_WIDE_INT
7760compute_frame_size (HOST_WIDE_INT size, HOST_WIDE_INT sa_size)
7761{
7762 if (TARGET_ABI_OPEN_VMS)
7763 return ALPHA_ROUND (sa_size
7764 + (alpha_procedure_type == PT_STACK ? 8 : 0)
7765 + size
7766 + crtl->args.pretend_args_size);
7767 else if (TARGET_ABI_UNICOSMK)
7768 /* We have to allocate space for the DSIB if we generate a frame. */
7769 return ALPHA_ROUND (sa_size
7770 + (alpha_procedure_type == PT_STACK ? 48 : 0))
7771 + ALPHA_ROUND (size
7772 + crtl->outgoing_args_size);
7773 else
7774 return ALPHA_ROUND (crtl->outgoing_args_size)
7775 + sa_size
7776 + ALPHA_ROUND (size
7777 + crtl->args.pretend_args_size);
7778}
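/* Worked example (hypothetical numbers, OSF case): with 24 bytes of
   outgoing args, sa_size = 32 and a 100-byte frame with no pretended
   args, the size is
     ALPHA_ROUND (24) + 32 + ALPHA_ROUND (100) = 32 + 32 + 112 = 176,
   keeping the stack 16-byte aligned.  */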
7779
7780/* Write function prologue. */
7781
7782/* On vms we have two kinds of functions:
7783
7784 - stack frame (PROC_STACK)
7785 these are 'normal' functions with local vars and which are
7786 calling other functions
7787 - register frame (PROC_REGISTER)
7788 keeps all data in registers, needs no stack
7789
7790 We must pass this to the assembler so it can generate the
7791 proper pdsc (procedure descriptor).
7792 This is done with the '.pdesc' command.
7793
7794 On not-vms, we don't really differentiate between the two, as we can
7795 simply allocate stack without saving registers. */
7796
7797void
a5c24926 7798alpha_expand_prologue (void)
89cfc2c6 7799{
9c0e94a5 7800 /* Registers to save. */
7801 unsigned long imask = 0;
7802 unsigned long fmask = 0;
7803 /* Stack space needed for pushing registers clobbered by us. */
7804 HOST_WIDE_INT sa_size;
7805 /* Complete stack size needed. */
7806 HOST_WIDE_INT frame_size;
7807 /* Probed stack size; it additionally includes the size of
7808 the "reserve region" if any. */
7809 HOST_WIDE_INT probed_size;
89cfc2c6 7810 /* Offset from base reg to register save area. */
9c0e94a5 7811 HOST_WIDE_INT reg_offset;
45f413e4 7812 rtx sa_reg;
7813 int i;
7814
7815 sa_size = alpha_sa_size ();
d3c12306 7816 frame_size = compute_frame_size (get_frame_size (), sa_size);
89cfc2c6 7817
7818 if (flag_stack_usage)
7819 current_function_static_stack_size = frame_size;
89cfc2c6 7820
be7b80f4 7821 if (TARGET_ABI_OPEN_VMS)
221cf9ab 7822 reg_offset = 8 + 8 * cfun->machine->uses_condition_handler;
9c0e94a5 7823 else
38173d38 7824 reg_offset = ALPHA_ROUND (crtl->outgoing_args_size);
89cfc2c6 7825
9c0e94a5 7826 alpha_sa_mask (&imask, &fmask);
89cfc2c6 7827
941cc05a 7828 /* Emit an insn to reload GP, if needed. */
be7b80f4 7829 if (TARGET_ABI_OSF)
7830 {
7831 alpha_function_needs_gp = alpha_does_function_need_gp ();
7832 if (alpha_function_needs_gp)
7833 emit_insn (gen_prologue_ldgp ());
7834 }
7835
7836 /* TARGET_PROFILING_NEEDS_GP actually implies that we need to insert
7837 the call to mcount ourselves, rather than having the linker do it
7838 magically in response to -pg. Since _mcount has special linkage,
7839 don't represent the call as a call. */
e3b5732b 7840 if (TARGET_PROFILING_NEEDS_GP && crtl->profile)
4f1c5cce 7841 emit_insn (gen_prologue_mcount ());
7842
7843 if (TARGET_ABI_UNICOSMK)
7844 unicosmk_gen_dsib (&imask);
7845
7846 /* Adjust the stack by the frame size. If the frame size is > 4096
7847 bytes, we need to be sure we probe somewhere in the first and last
7848 4096 bytes (we can probably get away without the latter test) and
7849 every 8192 bytes in between. If the frame size is > 32768, we
7850 do this in a loop. Otherwise, we generate the explicit probe
f676971a 7851 instructions.
7852
7853 Note that we are only allowed to adjust sp once in the prologue. */
7854
10937190
EB
7855 probed_size = frame_size;
7856 if (flag_stack_check)
7857 probed_size += STACK_CHECK_PROTECT;
7858
7859 if (probed_size <= 32768)
89cfc2c6 7860 {
10937190 7861 if (probed_size > 4096)
89cfc2c6 7862 {
11eef578 7863 int probed;
89cfc2c6 7864
10937190 7865 for (probed = 4096; probed < probed_size; probed += 8192)
7866 emit_insn (gen_probe_stack (GEN_INT (TARGET_ABI_UNICOSMK
7867 ? -probed + 64
7868 : -probed)));
89cfc2c6 7869
7870 /* We only have to do this probe if we aren't saving registers or
7871 if we are probing beyond the frame because of -fstack-check. */
7872 if ((sa_size == 0 && probed_size > probed - 4096)
7873 || flag_stack_check)
7874 emit_insn (gen_probe_stack (GEN_INT (-probed_size)));
7875 }
7876
7877 if (frame_size != 0)
8207e7c6 7878 FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
7879 GEN_INT (TARGET_ABI_UNICOSMK
7880 ? -frame_size + 64
7881 : -frame_size))));
7882 }
7883 else
7884 {
9c0e94a5 7885 /* Here we generate code to set R22 to SP + 4096 and set R23 to the
7886 number of 8192 byte blocks to probe. We then probe each block
7887 in the loop and then set SP to the proper location. If the
7888 amount remaining is > 4096, we have to do one more probe if we
7889 are not saving any registers or if we are probing beyond the
7890 frame because of -fstack-check. */
89cfc2c6 7891
7892 HOST_WIDE_INT blocks = (probed_size + 4096) / 8192;
7893 HOST_WIDE_INT leftover = probed_size + 4096 - blocks * 8192;
7894 rtx ptr = gen_rtx_REG (DImode, 22);
7895 rtx count = gen_rtx_REG (DImode, 23);
37679e06 7896 rtx seq;
89cfc2c6 7897
9c0e94a5 7898 emit_move_insn (count, GEN_INT (blocks));
7899 emit_insn (gen_adddi3 (ptr, stack_pointer_rtx,
7900 GEN_INT (TARGET_ABI_UNICOSMK ? 4096 - 64 : 4096)));
89cfc2c6 7901
7902 /* Because of the difficulty in emitting a new basic block this
7903 late in the compilation, generate the loop as a single insn. */
7904 emit_insn (gen_prologue_stack_probe_loop (count, ptr));
89cfc2c6 7905
10937190 7906 if ((leftover > 4096 && sa_size == 0) || flag_stack_check)
7907 {
7908 rtx last = gen_rtx_MEM (DImode, plus_constant (ptr, -leftover));
7909 MEM_VOLATILE_P (last) = 1;
7910 emit_move_insn (last, const0_rtx);
7911 }
89cfc2c6 7912
10937190 7913 if (TARGET_ABI_WINDOWS_NT || flag_stack_check)
7914 {
7915 /* For NT stack unwind (done by 'reverse execution'), it's
7916 not OK to take the result of a loop, even though the value
7917 is already in ptr, so we reload it via a single operation
f676971a 7918 and subtract it from sp.
37679e06 7919
7920 Same if -fstack-check is specified, because the probed stack
7921 size is not equal to the frame size.
7922
37679e06 7923 Yes, that's correct -- we have to reload the whole constant
5c9948f4 7924 into a temporary via ldah+lda then subtract from sp. */
7925
7926 HOST_WIDE_INT lo, hi;
7927 lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
7928 hi = frame_size - lo;
6abc6f40 7929
37679e06 7930 emit_move_insn (ptr, GEN_INT (hi));
5c9948f4 7931 emit_insn (gen_adddi3 (ptr, ptr, GEN_INT (lo)));
7932 seq = emit_insn (gen_subdi3 (stack_pointer_rtx, stack_pointer_rtx,
7933 ptr));
7934 }
7935 else
7936 {
7937 seq = emit_insn (gen_adddi3 (stack_pointer_rtx, ptr,
7938 GEN_INT (-leftover)));
f9d7e5cd 7939 }
7940
7941 /* This alternative is special, because the DWARF code cannot
7942 possibly intuit through the loop above. So we invent this
7943 note it looks at instead. */
7944 RTX_FRAME_RELATED_P (seq) = 1;
7945 add_reg_note (seq, REG_FRAME_RELATED_EXPR,
7946 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7947 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7948 GEN_INT (TARGET_ABI_UNICOSMK
7949 ? -frame_size + 64
7950 : -frame_size))));
7951 }
7952
30102605 7953 if (!TARGET_ABI_UNICOSMK)
89cfc2c6 7954 {
7955 HOST_WIDE_INT sa_bias = 0;
7956
7957 /* Cope with very large offsets to the register save area. */
7958 sa_reg = stack_pointer_rtx;
7959 if (reg_offset + sa_size > 0x8000)
7960 {
7961 int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
45f413e4 7962 rtx sa_bias_rtx;
89cfc2c6 7963
30102605 7964 if (low + sa_size <= 0x8000)
45f413e4 7965 sa_bias = reg_offset - low, reg_offset = low;
f676971a 7966 else
45f413e4 7967 sa_bias = reg_offset, reg_offset = 0;
89cfc2c6 7968
30102605 7969 sa_reg = gen_rtx_REG (DImode, 24);
7970 sa_bias_rtx = GEN_INT (sa_bias);
7971
7972 if (add_operand (sa_bias_rtx, DImode))
7973 emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx, sa_bias_rtx));
7974 else
7975 {
7976 emit_move_insn (sa_reg, sa_bias_rtx);
7977 emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx, sa_reg));
7978 }
30102605 7979 }
f676971a 7980
30102605 7981 /* Save regs in stack order. Beginning with VMS PV. */
c2ea1ac6 7982 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
45f413e4 7983 emit_frame_store (REG_PV, stack_pointer_rtx, 0, 0);
89cfc2c6 7984
30102605 7985 /* Save register RA next. */
409f52d3 7986 if (imask & (1UL << REG_RA))
30102605 7987 {
45f413e4 7988 emit_frame_store (REG_RA, sa_reg, sa_bias, reg_offset);
409f52d3 7989 imask &= ~(1UL << REG_RA);
7990 reg_offset += 8;
7991 }
89cfc2c6 7992
30102605 7993 /* Now save any other registers required to be saved. */
ed80cd68 7994 for (i = 0; i < 31; i++)
409f52d3 7995 if (imask & (1UL << i))
30102605 7996 {
45f413e4 7997 emit_frame_store (i, sa_reg, sa_bias, reg_offset);
7998 reg_offset += 8;
7999 }
89cfc2c6 8000
ed80cd68 8001 for (i = 0; i < 31; i++)
409f52d3 8002 if (fmask & (1UL << i))
30102605 8003 {
45f413e4 8004 emit_frame_store (i+32, sa_reg, sa_bias, reg_offset);
8005 reg_offset += 8;
8006 }
8007 }
c2ea1ac6 8008 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
8009 {
8010 /* The standard frame on the T3E includes space for saving registers.
8011 We just have to use it. We don't have to save the return address and
8012 the old frame pointer here - they are saved in the DSIB. */
8013
8014 reg_offset = -56;
8015 for (i = 9; i < 15; i++)
409f52d3 8016 if (imask & (1UL << i))
30102605 8017 {
45f413e4 8018 emit_frame_store (i, hard_frame_pointer_rtx, 0, reg_offset);
30102605
RH
8019 reg_offset -= 8;
8020 }
8021 for (i = 2; i < 10; i++)
409f52d3 8022 if (fmask & (1UL << i))
30102605 8023 {
45f413e4 8024 emit_frame_store (i+32, hard_frame_pointer_rtx, 0, reg_offset);
30102605
RH
8025 reg_offset -= 8;
8026 }
8027 }
89cfc2c6 8028
be7b80f4 8029 if (TARGET_ABI_OPEN_VMS)
89cfc2c6 8030 {
15cb981a 8031 /* Register frame procedures save the fp. */
c2ea1ac6 8032 if (alpha_procedure_type == PT_REGISTER)
15cb981a
RH
8033 {
8034 rtx insn = emit_move_insn (gen_rtx_REG (DImode, vms_save_fp_regno),
8035 hard_frame_pointer_rtx);
8036 add_reg_note (insn, REG_CFA_REGISTER, NULL);
8037 RTX_FRAME_RELATED_P (insn) = 1;
8038 }
89cfc2c6 8039
c2ea1ac6 8040 if (alpha_procedure_type != PT_NULL && vms_base_regno != REG_PV)
54aaa4ea
RH
8041 emit_insn (gen_force_movdi (gen_rtx_REG (DImode, vms_base_regno),
8042 gen_rtx_REG (DImode, REG_PV)));
89cfc2c6 8043
c2ea1ac6
DR
8044 if (alpha_procedure_type != PT_NULL
8045 && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
8207e7c6 8046 FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
89cfc2c6 8047
9c0e94a5 8048 /* If we have to allocate space for outgoing args, do it now. */
38173d38 8049 if (crtl->outgoing_args_size != 0)
c1238896
OH
8050 {
8051 rtx seq
f676971a 8052 = emit_move_insn (stack_pointer_rtx,
c1238896
OH
8053 plus_constant
8054 (hard_frame_pointer_rtx,
8055 - (ALPHA_ROUND
38173d38 8056 (crtl->outgoing_args_size))));
f676971a 8057
c1238896
OH
8058 /* Only set FRAME_RELATED_P on the stack adjustment we just emitted
8059 if ! frame_pointer_needed. Setting the bit will change the CFA
8060 computation rule to use sp again, which would be wrong if we had
8061 frame_pointer_needed, as this means sp might move unpredictably
8062 later on.
8063
8064 Also, note that
8065 frame_pointer_needed
8066 => vms_unwind_regno == HARD_FRAME_POINTER_REGNUM
8067 and
38173d38 8068 crtl->outgoing_args_size != 0
8069 => alpha_procedure_type != PT_NULL,
8070
8071 so when we are not setting the bit here, we are guaranteed to
093354e0 8072 have emitted an FRP frame pointer update just before. */
8073 RTX_FRAME_RELATED_P (seq) = ! frame_pointer_needed;
8074 }
9c0e94a5 8075 }
30102605 8076 else if (!TARGET_ABI_UNICOSMK)
8077 {
8078 /* If we need a frame pointer, set it from the stack pointer. */
8079 if (frame_pointer_needed)
8080 {
8081 if (TARGET_CAN_FAULT_IN_PROLOGUE)
6abc6f40 8082 FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
89cfc2c6 8083 else
8084 /* This must always be the last instruction in the
8085 prologue, thus we emit a special move + clobber. */
8086 FRP (emit_insn (gen_init_fp (hard_frame_pointer_rtx,
8087 stack_pointer_rtx, sa_reg)));
89cfc2c6 8088 }
8089 }
8090
8091 /* The ABIs for VMS and OSF/1 say that while we can schedule insns into
8092 the prologue, for exception handling reasons, we cannot do this for
8093 any insn that might fault. We could prevent this for mems with a
8094 (clobber:BLK (scratch)), but this doesn't work for fp insns. So we
8095 have to prevent all such scheduling with a blockage.
89cfc2c6 8096
f676971a 8097 Linux, on the other hand, never bothered to implement OSF/1's
8098 exception handling, and so doesn't care about such things. Anyone
8099 planning to use dwarf2 frame-unwind info can also omit the blockage. */
89cfc2c6 8100
8101 if (! TARGET_CAN_FAULT_IN_PROLOGUE)
8102 emit_insn (gen_blockage ());
8103}
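/* Worked example (hypothetical numbers): for probed_size = 20000 the
   small-frame path above emits probes at -4096 and -12288 and, when no
   registers are being saved, a final probe at -20000, since 20000 lies
   beyond the last 4096-byte window the loop already touched.  */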
8104
3e487b21 8105/* Count the number of .file directives, so that .loc is up to date. */
93a27b7b 8106int num_source_filenames = 0;
3e487b21 8107
acd92049 8108/* Output the textual info surrounding the prologue. */
89cfc2c6 8109
9c0e94a5 8110void
8111alpha_start_function (FILE *file, const char *fnname,
8112 tree decl ATTRIBUTE_UNUSED)
9ecc37f0 8113{
8114 unsigned long imask = 0;
8115 unsigned long fmask = 0;
8116 /* Stack space needed for pushing registers clobbered by us. */
8117 HOST_WIDE_INT sa_size;
8118 /* Complete stack size needed. */
3ee10665 8119 unsigned HOST_WIDE_INT frame_size;
8120 /* The maximum debuggable frame size (512 Kbytes using Tru64 as). */
8121 unsigned HOST_WIDE_INT max_frame_size = TARGET_ABI_OSF && !TARGET_GAS
8122 ? 524288
8123 : 1UL << 31;
8124 /* Offset from base reg to register save area. */
8125 HOST_WIDE_INT reg_offset;
acd92049 8126 char *entry_label = (char *) alloca (strlen (fnname) + 6);
fe2786f5 8127 char *tramp_label = (char *) alloca (strlen (fnname) + 6);
9c0e94a5 8128 int i;
9ecc37f0 8129
8130 /* Don't emit an extern directive for functions defined in the same file. */
8131 if (TARGET_ABI_UNICOSMK)
8132 {
8133 tree name_tree;
8134 name_tree = get_identifier (fnname);
8135 TREE_ASM_WRITTEN (name_tree) = 1;
8136 }
8137
8138#if TARGET_ABI_OPEN_VMS
8139 if (vms_debug_main
8140 && strncmp (vms_debug_main, fnname, strlen (vms_debug_main)) == 0)
8141 {
8142 targetm.asm_out.globalize_label (asm_out_file, VMS_DEBUG_MAIN_POINTER);
8143 ASM_OUTPUT_DEF (asm_out_file, VMS_DEBUG_MAIN_POINTER, fnname);
8144 switch_to_section (text_section);
8145 vms_debug_main = NULL;
8146 }
8147#endif
8148
941cc05a 8149 alpha_fnname = fnname;
9c0e94a5 8150 sa_size = alpha_sa_size ();
d3c12306 8151 frame_size = compute_frame_size (get_frame_size (), sa_size);
9ecc37f0 8152
be7b80f4 8153 if (TARGET_ABI_OPEN_VMS)
221cf9ab 8154 reg_offset = 8 + 8 * cfun->machine->uses_condition_handler;
9c0e94a5 8155 else
38173d38 8156 reg_offset = ALPHA_ROUND (crtl->outgoing_args_size);
9ecc37f0 8157
9c0e94a5 8158 alpha_sa_mask (&imask, &fmask);
a6f12d7c 8159
d60a05a1 8160 /* Ecoff can handle multiple .file directives, so put out file and lineno.
8161 We have to do that before the .ent directive as we cannot switch
8162 files within procedures with native ecoff because line numbers are
8163 linked to procedure descriptors.
8164 Outputting the lineno helps debugging of one line functions as they
8165 would otherwise get no line number at all. Please note that we would
ddd5a7c1 8166 like to put out last_linenum from final.c, but it is not accessible. */
8167
8168 if (write_symbols == SDB_DEBUG)
8169 {
30102605 8170#ifdef ASM_OUTPUT_SOURCE_FILENAME
8171 ASM_OUTPUT_SOURCE_FILENAME (file,
8172 DECL_SOURCE_FILE (current_function_decl));
30102605 8173#endif
3e487b21 8174#ifdef SDB_OUTPUT_SOURCE_LINE
48f6bfac 8175 if (debug_info_level != DINFO_LEVEL_TERSE)
8176 SDB_OUTPUT_SOURCE_LINE (file,
8177 DECL_SOURCE_LINE (current_function_decl));
30102605 8178#endif
8179 }
8180
9c0e94a5 8181 /* Issue function start and label. */
8182 if (TARGET_ABI_OPEN_VMS
8183 || (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive))
33d01c33 8184 {
9c0e94a5 8185 fputs ("\t.ent ", file);
acd92049 8186 assemble_name (file, fnname);
9c0e94a5 8187 putc ('\n', file);
8188
8189 /* If the function needs GP, we'll write the "..ng" label there.
8190 Otherwise, do it here. */
8191 if (TARGET_ABI_OSF
8192 && ! alpha_function_needs_gp
3c072c6b 8193 && ! cfun->is_thunk)
8194 {
8195 putc ('$', file);
8196 assemble_name (file, fnname);
8197 fputs ("..ng:\n", file);
8198 }
33d01c33 8199 }
8200 /* Nested functions on VMS that are potentially called via trampoline
8201 get a special transfer entry point that loads the called functions
8202 procedure descriptor and static chain. */
8203 if (TARGET_ABI_OPEN_VMS
8204 && !TREE_PUBLIC (decl)
8205 && DECL_CONTEXT (decl)
8206 && !TYPE_P (DECL_CONTEXT (decl)))
8207 {
8208 strcpy (tramp_label, fnname);
8209 strcat (tramp_label, "..tr");
8210 ASM_OUTPUT_LABEL (file, tramp_label);
8211 fprintf (file, "\tldq $1,24($27)\n");
8212 fprintf (file, "\tldq $27,16($27)\n");
8213 }
48f6bfac 8214
acd92049 8215 strcpy (entry_label, fnname);
be7b80f4 8216 if (TARGET_ABI_OPEN_VMS)
9c0e94a5 8217 strcat (entry_label, "..en");
8218
8219 /* For public functions, the label must be globalized by appending an
8220 additional colon. */
8221 if (TARGET_ABI_UNICOSMK && TREE_PUBLIC (decl))
8222 strcat (entry_label, ":");
8223
8224 ASM_OUTPUT_LABEL (file, entry_label);
8225 inside_function = TRUE;
48f6bfac 8226
be7b80f4 8227 if (TARGET_ABI_OPEN_VMS)
9c0e94a5 8228 fprintf (file, "\t.base $%d\n", vms_base_regno);
a6f12d7c 8229
30102605 8230 if (!TARGET_ABI_OPEN_VMS && !TARGET_ABI_UNICOSMK && TARGET_IEEE_CONFORMANT
9c0e94a5 8231 && !flag_inhibit_size_directive)
9973f4a2 8232 {
8233 /* Set flags in procedure descriptor to request IEEE-conformant
8234 math-library routines. The value we set it to is PDSC_EXC_IEEE
285a5742 8235 (/usr/include/pdsc.h). */
9c0e94a5 8236 fputs ("\t.eflag 48\n", file);
9973f4a2 8237 }
a6f12d7c 8238
9c0e94a5 8239 /* Set up offsets to alpha virtual arg/local debugging pointer. */
38173d38 8240 alpha_auto_offset = -frame_size + crtl->args.pretend_args_size;
9c0e94a5 8241 alpha_arg_offset = -frame_size + 48;
c97e3db7 8242
8243 /* Describe our frame. If the frame size is larger than an integer,
8244 print it as zero to avoid an assembler error. We won't be
8245 properly describing such a frame, but that's the best we can do. */
8246 if (TARGET_ABI_UNICOSMK)
8247 ;
8248 else if (TARGET_ABI_OPEN_VMS)
8249 fprintf (file, "\t.frame $%d," HOST_WIDE_INT_PRINT_DEC ",$26,"
8250 HOST_WIDE_INT_PRINT_DEC "\n",
8251 vms_unwind_regno,
8252 frame_size >= (1UL << 31) ? 0 : frame_size,
8253 reg_offset);
9c0e94a5 8254 else if (!flag_inhibit_size_directive)
4a0a75dd
KG
8255 fprintf (file, "\t.frame $%d," HOST_WIDE_INT_PRINT_DEC ",$26,%d\n",
8256 (frame_pointer_needed
8257 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM),
b598cb38 8258 frame_size >= max_frame_size ? 0 : frame_size,
38173d38 8259 crtl->args.pretend_args_size);
0d24ff5d 8260
9c0e94a5 8261 /* Describe which registers were spilled. */
8262 if (TARGET_ABI_UNICOSMK)
8263 ;
8264 else if (TARGET_ABI_OPEN_VMS)
0d24ff5d 8265 {
9c0e94a5 8266 if (imask)
30102605 8267 /* ??? Does VMS care if mask contains ra? The old code didn't
9c0e94a5 8268 set it, so I don't here. */
409f52d3 8269 fprintf (file, "\t.mask 0x%lx,0\n", imask & ~(1UL << REG_RA));
9c0e94a5 8270 if (fmask)
3c303f52 8271 fprintf (file, "\t.fmask 0x%lx,0\n", fmask);
c2ea1ac6 8272 if (alpha_procedure_type == PT_REGISTER)
8273 fprintf (file, "\t.fp_save $%d\n", vms_save_fp_regno);
8274 }
8275 else if (!flag_inhibit_size_directive)
8276 {
8277 if (imask)
0d24ff5d 8278 {
4a0a75dd 8279 fprintf (file, "\t.mask 0x%lx," HOST_WIDE_INT_PRINT_DEC "\n", imask,
b598cb38 8280 frame_size >= max_frame_size ? 0 : reg_offset - frame_size);
8281
8282 for (i = 0; i < 32; ++i)
409f52d3 8283 if (imask & (1UL << i))
9c0e94a5 8284 reg_offset += 8;
0d24ff5d 8285 }
8286
8287 if (fmask)
4a0a75dd 8288 fprintf (file, "\t.fmask 0x%lx," HOST_WIDE_INT_PRINT_DEC "\n", fmask,
b598cb38 8289 frame_size >= max_frame_size ? 0 : reg_offset - frame_size);
8290 }
8291
be7b80f4 8292#if TARGET_ABI_OPEN_VMS
8293 /* If a user condition handler has been installed at some point, emit
8294 the procedure descriptor bits to point the Condition Handling Facility
8295 at the indirection wrapper, and state the fp offset at which the user
8296 handler may be found. */
8297 if (cfun->machine->uses_condition_handler)
8298 {
8299 fprintf (file, "\t.handler __gcc_shell_handler\n");
8300 fprintf (file, "\t.handler_data %d\n", VMS_COND_HANDLER_FP_OFFSET);
8301 }
8302
d48bc59a 8303 /* Ifdef'ed because link_section is only available then. */
d6b5193b 8304 switch_to_section (readonly_data_section);
9c0e94a5 8305 fprintf (file, "\t.align 3\n");
acd92049 8306 assemble_name (file, fnname); fputs ("..na:\n", file);
9c0e94a5 8307 fputs ("\t.ascii \"", file);
acd92049 8308 assemble_name (file, fnname);
9c0e94a5 8309 fputs ("\\0\"\n", file);
acd92049 8310 alpha_need_linkage (fnname, 1);
d6b5193b 8311 switch_to_section (text_section);
8312#endif
8313}
a6f12d7c 8314
9c0e94a5 8315/* Emit the .prologue note at the scheduled end of the prologue. */
0f33506c 8316
b4c25db2 8317static void
a5c24926 8318alpha_output_function_end_prologue (FILE *file)
9c0e94a5 8319{
8320 if (TARGET_ABI_UNICOSMK)
8321 ;
8322 else if (TARGET_ABI_OPEN_VMS)
9c0e94a5 8323 fputs ("\t.prologue\n", file);
be7b80f4 8324 else if (TARGET_ABI_WINDOWS_NT)
9c0e94a5
RH
8325 fputs ("\t.prologue 0\n", file);
8326 else if (!flag_inhibit_size_directive)
14691f8d 8327 fprintf (file, "\t.prologue %d\n",
3c072c6b 8328 alpha_function_needs_gp || cfun->is_thunk);
a6f12d7c
RK
8329}
8330
8331/* Write function epilogue. */
8332
8333void
a5c24926 8334alpha_expand_epilogue (void)
a6f12d7c 8335{
9c0e94a5
RH
8336 /* Registers to save. */
8337 unsigned long imask = 0;
8338 unsigned long fmask = 0;
8339 /* Stack space needed for pushing registers clobbered by us. */
8340 HOST_WIDE_INT sa_size;
8341 /* Complete stack size needed. */
8342 HOST_WIDE_INT frame_size;
8343 /* Offset from base reg to register save area. */
8344 HOST_WIDE_INT reg_offset;
8345 int fp_is_frame_pointer, fp_offset;
8346 rtx sa_reg, sa_reg_exp = NULL;
15cb981a 8347 rtx sp_adj1, sp_adj2, mem, reg, insn;
01439aee 8348 rtx eh_ofs;
15cb981a 8349 rtx cfa_restores = NULL_RTX;
a6f12d7c
RK
8350 int i;
8351
9c0e94a5 8352 sa_size = alpha_sa_size ();
d3c12306 8353 frame_size = compute_frame_size (get_frame_size (), sa_size);
a6f12d7c 8354
be7b80f4 8355 if (TARGET_ABI_OPEN_VMS)
c2ea1ac6
DR
8356 {
8357 if (alpha_procedure_type == PT_STACK)
221cf9ab 8358 reg_offset = 8 + 8 * cfun->machine->uses_condition_handler;
c2ea1ac6
DR
8359 else
8360 reg_offset = 0;
8361 }
9c0e94a5 8362 else
38173d38 8363 reg_offset = ALPHA_ROUND (crtl->outgoing_args_size);
9c0e94a5
RH
8364
8365 alpha_sa_mask (&imask, &fmask);
8366
c2ea1ac6
DR
8367 fp_is_frame_pointer
8368 = ((TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
8369 || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed));
8370 fp_offset = 0;
8371 sa_reg = stack_pointer_rtx;
9c0e94a5 8372
e3b5732b 8373 if (crtl->calls_eh_return)
8374 eh_ofs = EH_RETURN_STACKADJ_RTX;
8375 else
8376 eh_ofs = NULL_RTX;
8377
30102605 8378 if (!TARGET_ABI_UNICOSMK && sa_size)
8379 {
8380 /* If we have a frame pointer, restore SP from it. */
be7b80f4 8381 if ((TARGET_ABI_OPEN_VMS
9c0e94a5 8382 && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
be7b80f4 8383 || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed))
15cb981a 8384 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
0d24ff5d 8385
9c0e94a5 8386 /* Cope with very large offsets to the register save area. */
9c0e94a5 8387 if (reg_offset + sa_size > 0x8000)
a6f12d7c 8388 {
8389 int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
8390 HOST_WIDE_INT bias;
8391
8392 if (low + sa_size <= 0x8000)
8393 bias = reg_offset - low, reg_offset = low;
f676971a 8394 else
8395 bias = reg_offset, reg_offset = 0;
8396
8397 sa_reg = gen_rtx_REG (DImode, 22);
8398 sa_reg_exp = plus_constant (stack_pointer_rtx, bias);
8399
15cb981a 8400 emit_move_insn (sa_reg, sa_reg_exp);
a6f12d7c 8401 }
f676971a 8402
285a5742 8403 /* Restore registers in order, excepting a true frame pointer. */
a6f12d7c 8404
4573b4de 8405 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
01439aee 8406 if (! eh_ofs)
ba4828e0 8407 set_mem_alias_set (mem, alpha_sr_alias_set);
8408 reg = gen_rtx_REG (DImode, REG_RA);
8409 emit_move_insn (reg, mem);
8410 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
4573b4de 8411
9c0e94a5 8412 reg_offset += 8;
409f52d3 8413 imask &= ~(1UL << REG_RA);
0f33506c 8414
ed80cd68 8415 for (i = 0; i < 31; ++i)
409f52d3 8416 if (imask & (1UL << i))
a6f12d7c 8417 {
9c0e94a5 8418 if (i == HARD_FRAME_POINTER_REGNUM && fp_is_frame_pointer)
8419 fp_offset = reg_offset;
8420 else
9c0e94a5 8421 {
3873d24b 8422 mem = gen_rtx_MEM (DImode, plus_constant(sa_reg, reg_offset));
ba4828e0 8423 set_mem_alias_set (mem, alpha_sr_alias_set);
8424 reg = gen_rtx_REG (DImode, i);
8425 emit_move_insn (reg, mem);
8426 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
8427 cfa_restores);
9c0e94a5 8428 }
8429 reg_offset += 8;
8430 }
8431
ed80cd68 8432 for (i = 0; i < 31; ++i)
409f52d3 8433 if (fmask & (1UL << i))
a6f12d7c 8434 {
3873d24b 8435 mem = gen_rtx_MEM (DFmode, plus_constant(sa_reg, reg_offset));
ba4828e0 8436 set_mem_alias_set (mem, alpha_sr_alias_set);
8437 reg = gen_rtx_REG (DFmode, i+32);
8438 emit_move_insn (reg, mem);
8439 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
8440 reg_offset += 8;
8441 }
9c0e94a5 8442 }
c2ea1ac6 8443 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
8444 {
8445 /* Restore callee-saved general-purpose registers. */
8446
8447 reg_offset = -56;
8448
8449 for (i = 9; i < 15; i++)
409f52d3 8450 if (imask & (1UL << i))
8451 {
8452 mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
8453 reg_offset));
8454 set_mem_alias_set (mem, alpha_sr_alias_set);
8455 reg = gen_rtx_REG (DImode, i);
8456 emit_move_insn (reg, mem);
8457 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
8458 reg_offset -= 8;
8459 }
8460
8461 for (i = 2; i < 10; i++)
409f52d3 8462 if (fmask & (1UL << i))
8463 {
8464 mem = gen_rtx_MEM (DFmode, plus_constant(hard_frame_pointer_rtx,
8465 reg_offset));
8466 set_mem_alias_set (mem, alpha_sr_alias_set);
8467 reg = gen_rtx_REG (DFmode, i+32);
8468 emit_move_insn (reg, mem);
8469 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
8470 reg_offset -= 8;
8471 }
8472
8473 /* Restore the return address from the DSIB. */
15cb981a 8474 mem = gen_rtx_MEM (DImode, plus_constant (hard_frame_pointer_rtx, -8));
30102605 8475 set_mem_alias_set (mem, alpha_sr_alias_set);
8476 reg = gen_rtx_REG (DImode, REG_RA);
8477 emit_move_insn (reg, mem);
8478 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
30102605 8479 }
a6f12d7c 8480
01439aee 8481 if (frame_size || eh_ofs)
9c0e94a5 8482 {
8483 sp_adj1 = stack_pointer_rtx;
8484
01439aee 8485 if (eh_ofs)
8486 {
8487 sp_adj1 = gen_rtx_REG (DImode, 23);
8488 emit_move_insn (sp_adj1,
01439aee 8489 gen_rtx_PLUS (Pmode, stack_pointer_rtx, eh_ofs));
71038426
RH
8490 }
8491
9c0e94a5
RH
8492 /* If the stack size is large, begin computation into a temporary
8493 register so as not to interfere with a potential fp restore,
8494 which must be consecutive with an SP restore. */
30102605 8495 if (frame_size < 32768
e3b5732b 8496 && ! (TARGET_ABI_UNICOSMK && cfun->calls_alloca))
71038426 8497 sp_adj2 = GEN_INT (frame_size);
8498 else if (TARGET_ABI_UNICOSMK)
8499 {
8500 sp_adj1 = gen_rtx_REG (DImode, 23);
15cb981a 8501 emit_move_insn (sp_adj1, hard_frame_pointer_rtx);
8502 sp_adj2 = const0_rtx;
8503 }
8504 else if (frame_size < 0x40007fffL)
8505 {
8506 int low = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
8507
71038426 8508 sp_adj2 = plus_constant (sp_adj1, frame_size - low);
8509 if (sa_reg_exp && rtx_equal_p (sa_reg_exp, sp_adj2))
8510 sp_adj1 = sa_reg;
8511 else
8512 {
8513 sp_adj1 = gen_rtx_REG (DImode, 23);
15cb981a 8514 emit_move_insn (sp_adj1, sp_adj2);
8515 }
8516 sp_adj2 = GEN_INT (low);
8517 }
d60a05a1 8518 else
9c0e94a5 8519 {
71038426 8520 rtx tmp = gen_rtx_REG (DImode, 23);
15cb981a 8521 sp_adj2 = alpha_emit_set_const (tmp, DImode, frame_size, 3, false);
71038426 8522 if (!sp_adj2)
8523 {
8524 /* We can't drop new things to memory this late, afaik,
8525 so build it up by pieces. */
8526 sp_adj2 = alpha_emit_set_long_const (tmp, frame_size,
8527 -(frame_size < 0));
56daab84 8528 gcc_assert (sp_adj2);
9c0e94a5 8529 }
9c0e94a5 8530 }
a6f12d7c 8531
8532 /* From now on, things must be in order. So emit blockages. */
8533
8534 /* Restore the frame pointer. */
8535 if (TARGET_ABI_UNICOSMK)
8536 {
8537 emit_insn (gen_blockage ());
8538 mem = gen_rtx_MEM (DImode,
8539 plus_constant (hard_frame_pointer_rtx, -16));
8540 set_mem_alias_set (mem, alpha_sr_alias_set);
8541 emit_move_insn (hard_frame_pointer_rtx, mem);
8542 cfa_restores = alloc_reg_note (REG_CFA_RESTORE,
8543 hard_frame_pointer_rtx, cfa_restores);
8544 }
8545 else if (fp_is_frame_pointer)
8546 {
8547 emit_insn (gen_blockage ());
8207e7c6 8548 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, fp_offset));
ba4828e0 8549 set_mem_alias_set (mem, alpha_sr_alias_set);
8550 emit_move_insn (hard_frame_pointer_rtx, mem);
8551 cfa_restores = alloc_reg_note (REG_CFA_RESTORE,
8552 hard_frame_pointer_rtx, cfa_restores);
9c0e94a5 8553 }
be7b80f4 8554 else if (TARGET_ABI_OPEN_VMS)
9c0e94a5
RH
8555 {
8556 emit_insn (gen_blockage ());
15cb981a
RH
8557 emit_move_insn (hard_frame_pointer_rtx,
8558 gen_rtx_REG (DImode, vms_save_fp_regno));
8559 cfa_restores = alloc_reg_note (REG_CFA_RESTORE,
8560 hard_frame_pointer_rtx, cfa_restores);
9c0e94a5
RH
8561 }
8562
8563 /* Restore the stack pointer. */
8564 emit_insn (gen_blockage ());
30102605 8565 if (sp_adj2 == const0_rtx)
15cb981a 8566 insn = emit_move_insn (stack_pointer_rtx, sp_adj1);
30102605 8567 else
15cb981a
RH
8568 insn = emit_move_insn (stack_pointer_rtx,
8569 gen_rtx_PLUS (DImode, sp_adj1, sp_adj2));
8570 REG_NOTES (insn) = cfa_restores;
8571 add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
8572 RTX_FRAME_RELATED_P (insn) = 1;
9c0e94a5 8573 }
f676971a 8574 else
9c0e94a5 8575 {
15cb981a
RH
8576 gcc_assert (cfa_restores == NULL);
8577
c2ea1ac6 8578 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_REGISTER)
9c0e94a5
RH
8579 {
8580 emit_insn (gen_blockage ());
15cb981a
RH
8581 insn = emit_move_insn (hard_frame_pointer_rtx,
8582 gen_rtx_REG (DImode, vms_save_fp_regno));
8583 add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
8584 RTX_FRAME_RELATED_P (insn) = 1;
9c0e94a5 8585 }
c2ea1ac6 8586 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type != PT_STACK)
30102605
RH
8587 {
8588 /* Decrement the frame pointer if the function does not have a
8589 frame. */
30102605 8590 emit_insn (gen_blockage ());
15cb981a
RH
8591 emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
8592 hard_frame_pointer_rtx, constm1_rtx));
30102605 8593 }
a6f12d7c 8594 }
9c0e94a5 8595}
\f
/* Output the rest of the textual info surrounding the epilogue.  */

void
alpha_end_function (FILE *file, const char *fnname, tree decl ATTRIBUTE_UNUSED)
{
  rtx insn;

  /* We output a nop after noreturn calls at the very end of the function to
     ensure that the return address always remains in the caller's code range,
     as not doing so might confuse unwinding engines.  */
  insn = get_last_insn ();
  if (!INSN_P (insn))
    insn = prev_active_insn (insn);
  if (insn && CALL_P (insn))
    output_asm_insn (get_insn_template (CODE_FOR_nop, NULL), NULL);

#if TARGET_ABI_OPEN_VMS
  alpha_write_linkage (file, fnname, decl);
#endif

  /* End the function.  */
  if (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive)
    {
      fputs ("\t.end ", file);
      assemble_name (file, fnname);
      putc ('\n', file);
    }
  inside_function = FALSE;

  /* Output jump tables and the static subroutine information block.  */
  if (TARGET_ABI_UNICOSMK)
    {
      unicosmk_output_ssib (file, fnname);
      unicosmk_output_deferred_case_vectors (file);
    }
}

#if TARGET_ABI_OPEN_VMS
void
avms_asm_output_external (FILE *file, tree decl ATTRIBUTE_UNUSED,
			  const char *name)
{
#ifdef DO_CRTL_NAMES
  DO_CRTL_NAMES;
#endif
}
#endif

#if TARGET_ABI_OSF
/* Emit a tail call to FUNCTION after adjusting THIS by DELTA.

   In order to avoid the hordes of differences between generated code
   with and without TARGET_EXPLICIT_RELOCS, and to avoid duplicating
   lots of code loading up large constants, generate rtl and emit it
   instead of going straight to text.

   Not sure why this idea hasn't been explored before...  */

static void
alpha_output_mi_thunk_osf (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			   HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			   tree function)
{
  HOST_WIDE_INT hi, lo;
  rtx this_rtx, insn, funexp;

  /* We always require a valid GP.  */
  emit_insn (gen_prologue_ldgp ());
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $16.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 17);
  else
    this_rtx = gen_rtx_REG (Pmode, 16);

  /* Add DELTA.  When possible we use ldah+lda.  Otherwise load the
     entire constant for the add.  */
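  /* Illustrative note (not in the original source): the decomposition
     below splits DELTA into sign-extended 16-bit halves so that, e.g.,
     delta == 0x18000 gives lo == -0x8000 and hi == 0x20000.  Since
     hi + lo == delta, the adjustment becomes roughly
     "ldah $16,2($16)" followed by "lda $16,-32768($16)".  */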
  lo = ((delta & 0xffff) ^ 0x8000) - 0x8000;
  hi = (((delta - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (hi + lo == delta)
    {
      if (hi)
	emit_insn (gen_adddi3 (this_rtx, this_rtx, GEN_INT (hi)));
      if (lo)
	emit_insn (gen_adddi3 (this_rtx, this_rtx, GEN_INT (lo)));
    }
  else
    {
      rtx tmp = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 0),
					   delta, -(delta < 0));
      emit_insn (gen_adddi3 (this_rtx, this_rtx, tmp));
    }

  /* Add a delta stored in the vtable at VCALL_OFFSET.  */
  if (vcall_offset)
    {
      rtx tmp, tmp2;

      tmp = gen_rtx_REG (Pmode, 0);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      lo = ((vcall_offset & 0xffff) ^ 0x8000) - 0x8000;
      hi = (((vcall_offset - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
      if (hi + lo == vcall_offset)
	{
	  if (hi)
	    emit_insn (gen_adddi3 (tmp, tmp, GEN_INT (hi)));
	}
      else
	{
	  tmp2 = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 1),
					    vcall_offset, -(vcall_offset < 0));
	  emit_insn (gen_adddi3 (tmp, tmp, tmp2));
	  lo = 0;
	}
      if (lo)
	tmp2 = gen_rtx_PLUS (Pmode, tmp, GEN_INT (lo));
      else
	tmp2 = tmp;
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp2));

      emit_insn (gen_adddi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (! TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_alloc ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();
}
#endif /* TARGET_ABI_OSF */
\f
/* Debugging support.  */

#include "gstab.h"

/* Count the number of sdb related labels that are generated (to find
   block start and end boundaries).  */

int sdb_label_count = 0;

/* Name of the file containing the current function.  */

static const char *current_function_file = "";

/* Offsets to alpha virtual arg/local debugging pointers.  */

long alpha_arg_offset;
long alpha_auto_offset;
\f
/* Emit a new filename to a stream.  */

void
alpha_output_filename (FILE *stream, const char *name)
{
  static int first_time = TRUE;

  if (first_time)
    {
      first_time = FALSE;
      ++num_source_filenames;
      current_function_file = name;
      fprintf (stream, "\t.file\t%d ", num_source_filenames);
      output_quoted_string (stream, name);
      fprintf (stream, "\n");
      if (!TARGET_GAS && write_symbols == DBX_DEBUG)
	fprintf (stream, "\t#@stabs\n");
    }

  else if (write_symbols == DBX_DEBUG)
    /* dbxout.c will emit an appropriate .stabs directive.  */
    return;

  else if (name != current_function_file
	   && strcmp (name, current_function_file) != 0)
    {
      if (inside_function && ! TARGET_GAS)
	fprintf (stream, "\t#.file\t%d ", num_source_filenames);
      else
	{
	  ++num_source_filenames;
	  current_function_file = name;
	  fprintf (stream, "\t.file\t%d ", num_source_filenames);
	}

      output_quoted_string (stream, name);
      fprintf (stream, "\n");
    }
}
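
/* For illustration (not in the original source): the code above emits
   directives of the form

	.file	1 "foo.c"

   where the number is the current value of num_source_filenames and the
   quoted string is the (hypothetical) source file name.  */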
\f
/* Structure to show the current status of registers and memory.  */

struct shadow_summary
{
  struct {
    unsigned int i   : 31;	/* Mask of int regs */
    unsigned int fp  : 31;	/* Mask of fp regs */
    unsigned int mem : 1;	/* mem == imem | fpmem */
  } used, defd;
};

/* Summarize the effects of expression X on the machine.  Update SUM, a
   pointer to the summary structure.  SET is nonzero if the insn is setting
   the object, otherwise zero.  */

static void
summarize_insn (rtx x, struct shadow_summary *sum, int set)
{
  const char *format_ptr;
  int i, j;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
      /* ??? Note that this case would be incorrect if the Alpha had a
	 ZERO_EXTRACT in SET_DEST.  */
    case SET:
      summarize_insn (SET_SRC (x), sum, 0);
      summarize_insn (SET_DEST (x), sum, 1);
      break;

    case CLOBBER:
      summarize_insn (XEXP (x, 0), sum, 1);
      break;

    case USE:
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	summarize_insn (ASM_OPERANDS_INPUT (x, i), sum, 0);
      break;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	summarize_insn (XVECEXP (x, 0, i), sum, 0);
      break;

    case SUBREG:
      summarize_insn (SUBREG_REG (x), sum, 0);
      break;

    case REG:
      {
	int regno = REGNO (x);
	unsigned long mask = ((unsigned long) 1) << (regno % 32);

	if (regno == 31 || regno == 63)
	  break;

	if (set)
	  {
	    if (regno < 32)
	      sum->defd.i |= mask;
	    else
	      sum->defd.fp |= mask;
	  }
	else
	  {
	    if (regno < 32)
	      sum->used.i |= mask;
	    else
	      sum->used.fp |= mask;
	  }
      }
      break;

    case MEM:
      if (set)
	sum->defd.mem = 1;
      else
	sum->used.mem = 1;

      /* Find the regs used in memory address computation: */
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    case CONST_INT:   case CONST_DOUBLE:
    case SYMBOL_REF:  case LABEL_REF:  case CONST:
    case SCRATCH:     case ASM_INPUT:
      break;

      /* Handle common unary and binary ops for efficiency.  */
    case COMPARE:  case PLUS:    case MINUS:   case MULT:      case DIV:
    case MOD:      case UDIV:    case UMOD:    case AND:       case IOR:
    case XOR:      case ASHIFT:  case ROTATE:  case ASHIFTRT:  case LSHIFTRT:
    case ROTATERT: case SMIN:    case SMAX:    case UMIN:      case UMAX:
    case NE:       case EQ:      case GE:      case GT:        case LE:
    case LT:       case GEU:     case GTU:     case LEU:       case LTU:
      summarize_insn (XEXP (x, 0), sum, 0);
      summarize_insn (XEXP (x, 1), sum, 0);
      break;

    case NEG:  case NOT:  case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:  case FLOAT_EXTEND:  case FLOAT_TRUNCATE:  case FLOAT:
    case FIX:  case UNSIGNED_FLOAT:  case UNSIGNED_FIX:  case ABS:
    case SQRT:  case FFS:
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    default:
      format_ptr = GET_RTX_FORMAT (GET_CODE (x));
      for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
	switch (format_ptr[i])
	  {
	  case 'e':
	    summarize_insn (XEXP (x, i), sum, 0);
	    break;

	  case 'E':
	    for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      summarize_insn (XVECEXP (x, i, j), sum, 0);
	    break;

	  case 'i':
	    break;

	  default:
	    gcc_unreachable ();
	  }
    }
}

/* Ensure a sufficient number of `trapb' insns are in the code when
   the user requests code with a trap precision of functions or
   instructions.

   In naive mode, when the user requests a trap-precision of
   "instruction", a trapb is needed after every instruction that may
   generate a trap.  This ensures that the code is resumption safe but
   it is also slow.

   When optimizations are turned on, we delay issuing a trapb as long
   as possible.  In this context, a trap shadow is the sequence of
   instructions that starts with a (potentially) trap generating
   instruction and extends to the next trapb or call_pal instruction
   (but GCC never generates call_pal by itself).  We can delay (and
   therefore sometimes omit) a trapb subject to the following
   conditions:

   (a) On entry to the trap shadow, if any Alpha register or memory
   location contains a value that is used as an operand value by some
   instruction in the trap shadow (live on entry), then no instruction
   in the trap shadow may modify the register or memory location.

   (b) Within the trap shadow, the computation of the base register
   for a memory load or store instruction may not involve using the
   result of an instruction that might generate an UNPREDICTABLE
   result.

   (c) Within the trap shadow, no register may be used more than once
   as a destination register.  (This is to make life easier for the
   trap-handler.)

   (d) The trap shadow may not include any branch instructions.  */
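
/* Illustrative sketch (not in the original source): with
   -mtrap-precision=i and optimization, a sequence such as

	addt $f0,$f1,$f2	# may trap: opens the shadow
	mult $f3,$f4,$f5	# ok: fresh destination register
	addt $f2,$f3,$f2	# $f2 reused as destination: violates (c)

   forces the pass below to close the shadow, i.e. emit a trapb before
   the third instruction.  */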

static void
alpha_handle_trap_shadows (void)
{
  struct shadow_summary shadow;
  int trap_pending, exception_nesting;
  rtx i, n;

  trap_pending = 0;
  exception_nesting = 0;
  shadow.used.i = 0;
  shadow.used.fp = 0;
  shadow.used.mem = 0;
  shadow.defd = shadow.used;

  for (i = get_insns (); i ; i = NEXT_INSN (i))
    {
      if (NOTE_P (i))
	{
	  switch (NOTE_KIND (i))
	    {
	    case NOTE_INSN_EH_REGION_BEG:
	      exception_nesting++;
	      if (trap_pending)
		goto close_shadow;
	      break;

	    case NOTE_INSN_EH_REGION_END:
	      exception_nesting--;
	      if (trap_pending)
		goto close_shadow;
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	      if (trap_pending && alpha_tp >= ALPHA_TP_FUNC)
		goto close_shadow;
	      break;
	    }
	}
      else if (trap_pending)
	{
	  if (alpha_tp == ALPHA_TP_FUNC)
	    {
	      if (JUMP_P (i)
		  && GET_CODE (PATTERN (i)) == RETURN)
		goto close_shadow;
	    }
	  else if (alpha_tp == ALPHA_TP_INSN)
	    {
	      if (optimize > 0)
		{
		  struct shadow_summary sum;

		  sum.used.i = 0;
		  sum.used.fp = 0;
		  sum.used.mem = 0;
		  sum.defd = sum.used;

		  switch (GET_CODE (i))
		    {
		    case INSN:
		      /* Annoyingly, get_attr_trap will die on these.  */
		      if (GET_CODE (PATTERN (i)) == USE
			  || GET_CODE (PATTERN (i)) == CLOBBER)
			break;

		      summarize_insn (PATTERN (i), &sum, 0);

		      if ((sum.defd.i & shadow.defd.i)
			  || (sum.defd.fp & shadow.defd.fp))
			{
			  /* (c) would be violated */
			  goto close_shadow;
			}

		      /* Combine shadow with summary of current insn: */
		      shadow.used.i |= sum.used.i;
		      shadow.used.fp |= sum.used.fp;
		      shadow.used.mem |= sum.used.mem;
		      shadow.defd.i |= sum.defd.i;
		      shadow.defd.fp |= sum.defd.fp;
		      shadow.defd.mem |= sum.defd.mem;

		      if ((sum.defd.i & shadow.used.i)
			  || (sum.defd.fp & shadow.used.fp)
			  || (sum.defd.mem & shadow.used.mem))
			{
			  /* (a) would be violated (also takes care of (b))  */
			  gcc_assert (get_attr_trap (i) != TRAP_YES
				      || (!(sum.defd.i & sum.used.i)
					  && !(sum.defd.fp & sum.used.fp)));

			  goto close_shadow;
			}
		      break;

		    case JUMP_INSN:
		    case CALL_INSN:
		    case CODE_LABEL:
		      goto close_shadow;

		    default:
		      gcc_unreachable ();
		    }
		}
	      else
		{
		close_shadow:
		  n = emit_insn_before (gen_trapb (), i);
		  PUT_MODE (n, TImode);
		  PUT_MODE (i, TImode);
		  trap_pending = 0;
		  shadow.used.i = 0;
		  shadow.used.fp = 0;
		  shadow.used.mem = 0;
		  shadow.defd = shadow.used;
		}
	    }
	}

      if ((exception_nesting > 0 || alpha_tp >= ALPHA_TP_FUNC)
	  && NONJUMP_INSN_P (i)
	  && GET_CODE (PATTERN (i)) != USE
	  && GET_CODE (PATTERN (i)) != CLOBBER
	  && get_attr_trap (i) == TRAP_YES)
	{
	  if (optimize && !trap_pending)
	    summarize_insn (PATTERN (i), &shadow, 0);
	  trap_pending = 1;
	}
    }
}
\f
/* Alpha can only issue instruction groups simultaneously if they are
   suitably aligned.  This is very processor-specific.  */
/* There are a number of entries in alphaev4_insn_pipe and alphaev5_insn_pipe
   that are marked "fake".  These instructions do not exist on that target,
   but it is possible to see these insns with deranged combinations of
   command-line options, such as "-mtune=ev4 -mmax".  Instead of aborting,
   choose a result at random.  */

enum alphaev4_pipe {
  EV4_STOP = 0,
  EV4_IB0 = 1,
  EV4_IB1 = 2,
  EV4_IBX = 4
};

enum alphaev5_pipe {
  EV5_STOP = 0,
  EV5_NONE = 1,
  EV5_E01 = 2,
  EV5_E0 = 4,
  EV5_E1 = 8,
  EV5_FAM = 16,
  EV5_FA = 32,
  EV5_FM = 64
};

static enum alphaev4_pipe
alphaev4_insn_pipe (rtx insn)
{
  if (recog_memoized (insn) < 0)
    return EV4_STOP;
  if (get_attr_length (insn) != 4)
    return EV4_STOP;

  switch (get_attr_type (insn))
    {
    case TYPE_ILD:
    case TYPE_LDSYM:
    case TYPE_FLD:
    case TYPE_LD_L:
      return EV4_IBX;

    case TYPE_IADD:
    case TYPE_ILOG:
    case TYPE_ICMOV:
    case TYPE_ICMP:
    case TYPE_FST:
    case TYPE_SHIFT:
    case TYPE_IMUL:
    case TYPE_FBR:
    case TYPE_MVI:		/* fake */
      return EV4_IB0;

    case TYPE_IST:
    case TYPE_MISC:
    case TYPE_IBR:
    case TYPE_JSR:
    case TYPE_CALLPAL:
    case TYPE_FCPYS:
    case TYPE_FCMOV:
    case TYPE_FADD:
    case TYPE_FDIV:
    case TYPE_FMUL:
    case TYPE_ST_C:
    case TYPE_MB:
    case TYPE_FSQRT:		/* fake */
    case TYPE_FTOI:		/* fake */
    case TYPE_ITOF:		/* fake */
      return EV4_IB1;

    default:
      gcc_unreachable ();
    }
}

static enum alphaev5_pipe
alphaev5_insn_pipe (rtx insn)
{
  if (recog_memoized (insn) < 0)
    return EV5_STOP;
  if (get_attr_length (insn) != 4)
    return EV5_STOP;

  switch (get_attr_type (insn))
    {
    case TYPE_ILD:
    case TYPE_FLD:
    case TYPE_LDSYM:
    case TYPE_IADD:
    case TYPE_ILOG:
    case TYPE_ICMOV:
    case TYPE_ICMP:
      return EV5_E01;

    case TYPE_IST:
    case TYPE_FST:
    case TYPE_SHIFT:
    case TYPE_IMUL:
    case TYPE_MISC:
    case TYPE_MVI:
    case TYPE_LD_L:
    case TYPE_ST_C:
    case TYPE_MB:
    case TYPE_FTOI:		/* fake */
    case TYPE_ITOF:		/* fake */
      return EV5_E0;

    case TYPE_IBR:
    case TYPE_JSR:
    case TYPE_CALLPAL:
      return EV5_E1;

    case TYPE_FCPYS:
      return EV5_FAM;

    case TYPE_FBR:
    case TYPE_FCMOV:
    case TYPE_FADD:
    case TYPE_FDIV:
    case TYPE_FSQRT:		/* fake */
      return EV5_FA;

    case TYPE_FMUL:
      return EV5_FM;

    default:
      gcc_unreachable ();
    }
}

/* IN_USE is a mask of the slots currently filled within the insn group.
   The mask bits come from alphaev4_pipe above.  If EV4_IBX is set, then
   the insn in EV4_IB0 can be swapped by the hardware into EV4_IB1.

   LEN is, of course, the length of the group in bytes.  */

static rtx
alphaev4_next_group (rtx insn, int *pin_use, int *plen)
{
  int len, in_use;

  len = in_use = 0;

  if (! INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == CLOBBER
      || GET_CODE (PATTERN (insn)) == USE)
    goto next_and_done;

  while (1)
    {
      enum alphaev4_pipe pipe;

      pipe = alphaev4_insn_pipe (insn);
      switch (pipe)
	{
	case EV4_STOP:
	  /* Force complex instructions to start new groups.  */
	  if (in_use)
	    goto done;

	  /* If this is a completely unrecognized insn, it's an asm.
	     We don't know how long it is, so record length as -1 to
	     signal a needed realignment.  */
	  if (recog_memoized (insn) < 0)
	    len = -1;
	  else
	    len = get_attr_length (insn);
	  goto next_and_done;

	case EV4_IBX:
	  if (in_use & EV4_IB0)
	    {
	      if (in_use & EV4_IB1)
		goto done;
	      in_use |= EV4_IB1;
	    }
	  else
	    in_use |= EV4_IB0 | EV4_IBX;
	  break;

	case EV4_IB0:
	  if (in_use & EV4_IB0)
	    {
	      if (!(in_use & EV4_IBX) || (in_use & EV4_IB1))
		goto done;
	      in_use |= EV4_IB1;
	    }
	  in_use |= EV4_IB0;
	  break;

	case EV4_IB1:
	  if (in_use & EV4_IB1)
	    goto done;
	  in_use |= EV4_IB1;
	  break;

	default:
	  gcc_unreachable ();
	}
      len += 4;

      /* Haifa doesn't do well scheduling branches.  */
      if (JUMP_P (insn))
	goto next_and_done;

    next:
      insn = next_nonnote_insn (insn);

      if (!insn || ! INSN_P (insn))
	goto done;

      /* Let Haifa tell us where it thinks insn group boundaries are.  */
      if (GET_MODE (insn) == TImode)
	goto done;

      if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
	goto next;
    }

 next_and_done:
  insn = next_nonnote_insn (insn);

 done:
  *plen = len;
  *pin_use = in_use;
  return insn;
}

/* IN_USE is a mask of the slots currently filled within the insn group.
   The mask bits come from alphaev5_pipe above.  If EV5_E01 is set, then
   the insn in EV5_E0 can be swapped by the hardware into EV5_E1.

   LEN is, of course, the length of the group in bytes.  */

static rtx
alphaev5_next_group (rtx insn, int *pin_use, int *plen)
{
  int len, in_use;

  len = in_use = 0;

  if (! INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == CLOBBER
      || GET_CODE (PATTERN (insn)) == USE)
    goto next_and_done;

  while (1)
    {
      enum alphaev5_pipe pipe;

      pipe = alphaev5_insn_pipe (insn);
      switch (pipe)
	{
	case EV5_STOP:
	  /* Force complex instructions to start new groups.  */
	  if (in_use)
	    goto done;

	  /* If this is a completely unrecognized insn, it's an asm.
	     We don't know how long it is, so record length as -1 to
	     signal a needed realignment.  */
	  if (recog_memoized (insn) < 0)
	    len = -1;
	  else
	    len = get_attr_length (insn);
	  goto next_and_done;

	  /* ??? Most of the places below, we would like to assert never
	     happen, as it would indicate an error either in Haifa, or
	     in the scheduling description.  Unfortunately, Haifa never
	     schedules the last instruction of the BB, so we don't have
	     an accurate TI bit to go off.  */
	case EV5_E01:
	  if (in_use & EV5_E0)
	    {
	      if (in_use & EV5_E1)
		goto done;
	      in_use |= EV5_E1;
	    }
	  else
	    in_use |= EV5_E0 | EV5_E01;
	  break;

	case EV5_E0:
	  if (in_use & EV5_E0)
	    {
	      if (!(in_use & EV5_E01) || (in_use & EV5_E1))
		goto done;
	      in_use |= EV5_E1;
	    }
	  in_use |= EV5_E0;
	  break;

	case EV5_E1:
	  if (in_use & EV5_E1)
	    goto done;
	  in_use |= EV5_E1;
	  break;

	case EV5_FAM:
	  if (in_use & EV5_FA)
	    {
	      if (in_use & EV5_FM)
		goto done;
	      in_use |= EV5_FM;
	    }
	  else
	    in_use |= EV5_FA | EV5_FAM;
	  break;

	case EV5_FA:
	  if (in_use & EV5_FA)
	    goto done;
	  in_use |= EV5_FA;
	  break;

	case EV5_FM:
	  if (in_use & EV5_FM)
	    goto done;
	  in_use |= EV5_FM;
	  break;

	case EV5_NONE:
	  break;

	default:
	  gcc_unreachable ();
	}
      len += 4;

      /* Haifa doesn't do well scheduling branches.  */
      /* ??? If this is predicted not-taken, slotting continues, except
	 that no more IBR, FBR, or JSR insns may be slotted.  */
      if (JUMP_P (insn))
	goto next_and_done;

    next:
      insn = next_nonnote_insn (insn);

      if (!insn || ! INSN_P (insn))
	goto done;

      /* Let Haifa tell us where it thinks insn group boundaries are.  */
      if (GET_MODE (insn) == TImode)
	goto done;

      if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
	goto next;
    }

 next_and_done:
  insn = next_nonnote_insn (insn);

 done:
  *plen = len;
  *pin_use = in_use;
  return insn;
}

static rtx
alphaev4_next_nop (int *pin_use)
{
  int in_use = *pin_use;
  rtx nop;

  if (!(in_use & EV4_IB0))
    {
      in_use |= EV4_IB0;
      nop = gen_nop ();
    }
  else if ((in_use & (EV4_IBX|EV4_IB1)) == EV4_IBX)
    {
      in_use |= EV4_IB1;
      nop = gen_nop ();
    }
  else if (TARGET_FP && !(in_use & EV4_IB1))
    {
      in_use |= EV4_IB1;
      nop = gen_fnop ();
    }
  else
    nop = gen_unop ();

  *pin_use = in_use;
  return nop;
}

static rtx
alphaev5_next_nop (int *pin_use)
{
  int in_use = *pin_use;
  rtx nop;

  if (!(in_use & EV5_E1))
    {
      in_use |= EV5_E1;
      nop = gen_nop ();
    }
  else if (TARGET_FP && !(in_use & EV5_FA))
    {
      in_use |= EV5_FA;
      nop = gen_fnop ();
    }
  else if (TARGET_FP && !(in_use & EV5_FM))
    {
      in_use |= EV5_FM;
      nop = gen_fnop ();
    }
  else
    nop = gen_unop ();

  *pin_use = in_use;
  return nop;
}

/* The instruction group alignment main loop.  */

static void
alpha_align_insns (unsigned int max_align,
		   rtx (*next_group) (rtx, int *, int *),
		   rtx (*next_nop) (int *))
{
  /* ALIGN is the known alignment for the insn group.  */
  unsigned int align;
  /* OFS is the offset of the current insn in the insn group.  */
  int ofs;
  int prev_in_use, in_use, len, ldgp;
  rtx i, next;

  /* Let shorten branches care for assigning alignments to code labels.  */
  shorten_branches (get_insns ());

  if (align_functions < 4)
    align = 4;
  else if ((unsigned int) align_functions < max_align)
    align = align_functions;
  else
    align = max_align;

  ofs = prev_in_use = 0;
  i = get_insns ();
  if (NOTE_P (i))
    i = next_nonnote_insn (i);

  ldgp = alpha_function_needs_gp ? 8 : 0;

  while (i)
    {
      next = (*next_group) (i, &in_use, &len);

      /* When we see a label, resync alignment etc.  */
      if (LABEL_P (i))
	{
	  unsigned int new_align = 1 << label_to_alignment (i);

	  if (new_align >= align)
	    {
	      align = new_align < max_align ? new_align : max_align;
	      ofs = 0;
	    }

	  else if (ofs & (new_align-1))
	    ofs = (ofs | (new_align-1)) + 1;
	  gcc_assert (!len);
	}

      /* Handle complex instructions specially.  */
      else if (in_use == 0)
	{
	  /* Asms will have length < 0.  This is a signal that we have
	     lost alignment knowledge.  Assume, however, that the asm
	     will not mis-align instructions.  */
	  if (len < 0)
	    {
	      ofs = 0;
	      align = 4;
	      len = 0;
	    }
	}

      /* If the known alignment is smaller than the recognized insn group,
	 realign the output.  */
      else if ((int) align < len)
	{
	  unsigned int new_log_align = len > 8 ? 4 : 3;
	  rtx prev, where;

	  where = prev = prev_nonnote_insn (i);
	  if (!where || !LABEL_P (where))
	    where = i;

	  /* Can't realign between a call and its gp reload.  */
	  if (! (TARGET_EXPLICIT_RELOCS
		 && prev && CALL_P (prev)))
	    {
	      emit_insn_before (gen_realign (GEN_INT (new_log_align)), where);
	      align = 1 << new_log_align;
	      ofs = 0;
	    }
	}

      /* We may not insert padding inside the initial ldgp sequence.  */
      else if (ldgp > 0)
	ldgp -= len;

      /* If the group won't fit in the same INT16 as the previous,
	 we need to add padding to keep the group together.  Rather
	 than simply leaving the insn filling to the assembler, we
	 can make use of the knowledge of what sorts of instructions
	 were issued in the previous group to make sure that all of
	 the added nops are really free.  */
      else if (ofs + len > (int) align)
	{
	  int nop_count = (align - ofs) / 4;
	  rtx where;

	  /* Insert nops before labels, branches, and calls to truly merge
	     the execution of the nops with the previous instruction group.  */
	  where = prev_nonnote_insn (i);
	  if (where)
	    {
	      if (LABEL_P (where))
		{
		  rtx where2 = prev_nonnote_insn (where);
		  if (where2 && JUMP_P (where2))
		    where = where2;
		}
	      else if (NONJUMP_INSN_P (where))
		where = i;
	    }
	  else
	    where = i;

	  do
	    emit_insn_before ((*next_nop)(&prev_in_use), where);
	  while (--nop_count);
	  ofs = 0;
	}

      ofs = (ofs + len) & (align - 1);
      prev_in_use = in_use;
      i = next;
    }
}
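
/* Worked example (not in the original source): on EV5 with a 16-byte
   ALIGN, if the previous group ended at ofs == 8 and the next group has
   len == 12, the group would cross the 16-byte boundary, so
   (align - ofs) / 4 == 2 nops are emitted, chosen by alphaev5_next_nop
   to land in issue slots the previous group left unused.  */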

/* Insert an unop between a noreturn function call and GP load.  */

static void
alpha_pad_noreturn (void)
{
  rtx insn, next;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (! (CALL_P (insn)
	     && find_reg_note (insn, REG_NORETURN, NULL_RTX)))
	continue;

      /* Make sure we do not split a call and its corresponding
	 CALL_ARG_LOCATION note.  */
      if (CALL_P (insn))
	{
	  next = NEXT_INSN (insn);
	  if (next && NOTE_P (next)
	      && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
	    insn = next;
	}

      next = next_active_insn (insn);

      if (next)
	{
	  rtx pat = PATTERN (next);

	  if (GET_CODE (pat) == SET
	      && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
	      && XINT (SET_SRC (pat), 1) == UNSPECV_LDGP1)
	    emit_insn_after (gen_unop (), insn);
	}
    }
}
\f
/* Machine dependent reorg pass.  */

static void
alpha_reorg (void)
{
  /* Workaround for a linker error that triggers when an
     exception handler immediately follows a noreturn function.

     The instruction stream from an object file:

      54:   00 40 5b 6b     jsr     ra,(t12),58 <__func+0x58>
      58:   00 00 ba 27     ldah    gp,0(ra)
      5c:   00 00 bd 23     lda     gp,0(gp)
      60:   00 00 7d a7     ldq     t12,0(gp)
      64:   00 40 5b 6b     jsr     ra,(t12),68 <__func+0x68>

     was converted in the final link pass to:

   fdb24:   a0 03 40 d3     bsr     ra,fe9a8 <_called_func+0x8>
   fdb28:   00 00 fe 2f     unop
   fdb2c:   00 00 fe 2f     unop
   fdb30:   30 82 7d a7     ldq     t12,-32208(gp)
   fdb34:   00 40 5b 6b     jsr     ra,(t12),fdb38 <__func+0x68>

     GP load instructions were wrongly cleared by the linker relaxation
     pass.  This workaround prevents removal of GP loads by inserting
     an unop instruction between a noreturn function call and
     exception handler prologue.  */

  if (current_function_has_exception_handlers ())
    alpha_pad_noreturn ();

  if (alpha_tp != ALPHA_TP_PROG || flag_exceptions)
    alpha_handle_trap_shadows ();

  /* Due to the number of extra trapb insns, don't bother fixing up
     alignment when trap precision is instruction.  Moreover, we can
     only do our job when sched2 is run.  */
  if (optimize && !optimize_size
      && alpha_tp != ALPHA_TP_INSN
      && flag_schedule_insns_after_reload)
    {
      if (alpha_tune == PROCESSOR_EV4)
	alpha_align_insns (8, alphaev4_next_group, alphaev4_next_nop);
      else if (alpha_tune == PROCESSOR_EV5)
	alpha_align_insns (16, alphaev5_next_group, alphaev5_next_nop);
    }
}
\f
#if !TARGET_ABI_UNICOSMK

#ifdef HAVE_STAMP_H
#include <stamp.h>
#endif

static void
alpha_file_start (void)
{
#ifdef OBJECT_FORMAT_ELF
  /* If emitting dwarf2 debug information, we cannot generate a .file
     directive to start the file, as it will conflict with dwarf2out
     file numbers.  So it's only useful when emitting mdebug output.  */
  targetm.asm_file_start_file_directive = (write_symbols == DBX_DEBUG);
#endif

  default_file_start ();
#ifdef MS_STAMP
  fprintf (asm_out_file, "\t.verstamp %d %d\n", MS_STAMP, LS_STAMP);
#endif

  fputs ("\t.set noreorder\n", asm_out_file);
  fputs ("\t.set volatile\n", asm_out_file);
  if (!TARGET_ABI_OPEN_VMS)
    fputs ("\t.set noat\n", asm_out_file);
  if (TARGET_EXPLICIT_RELOCS)
    fputs ("\t.set nomacro\n", asm_out_file);
  if (TARGET_SUPPORT_ARCH | TARGET_BWX | TARGET_MAX | TARGET_FIX | TARGET_CIX)
    {
      const char *arch;

      if (alpha_cpu == PROCESSOR_EV6 || TARGET_FIX || TARGET_CIX)
	arch = "ev6";
      else if (TARGET_MAX)
	arch = "pca56";
      else if (TARGET_BWX)
	arch = "ev56";
      else if (alpha_cpu == PROCESSOR_EV5)
	arch = "ev5";
      else
	arch = "ev4";

      fprintf (asm_out_file, "\t.arch %s\n", arch);
    }
}
#endif

#ifdef OBJECT_FORMAT_ELF
/* Since we don't have a .dynbss section, we should not allow global
   relocations in the .rodata section.  */

static int
alpha_elf_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 2;
}

/* Return a section for X.  The only special thing we do here is to
   honor small data.  */

static section *
alpha_elf_select_rtx_section (enum machine_mode mode, rtx x,
			      unsigned HOST_WIDE_INT align)
{
  if (TARGET_SMALL_DATA && GET_MODE_SIZE (mode) <= g_switch_value)
    /* ??? Consider using mergeable sdata sections.  */
    return sdata_section;
  else
    return default_elf_select_rtx_section (mode, x, align);
}

static unsigned int
alpha_elf_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = 0;

  if (strcmp (name, ".sdata") == 0
      || strncmp (name, ".sdata.", 7) == 0
      || strncmp (name, ".gnu.linkonce.s.", 16) == 0
      || strcmp (name, ".sbss") == 0
      || strncmp (name, ".sbss.", 6) == 0
      || strncmp (name, ".gnu.linkonce.sb.", 17) == 0)
    flags = SECTION_SMALL;

  flags |= default_section_type_flags (decl, name, reloc);
  return flags;
}
#endif /* OBJECT_FORMAT_ELF */
\f
/* Structure to collect function names for final output in link section.  */
/* Note that items marked with GTY can't be ifdef'ed out.  */

enum links_kind {KIND_UNUSED, KIND_LOCAL, KIND_EXTERN};
enum reloc_kind {KIND_LINKAGE, KIND_CODEADDR};

struct GTY(()) alpha_links
{
  int num;
  const char *target;
  rtx linkage;
  enum links_kind lkind;
  enum reloc_kind rkind;
};

struct GTY(()) alpha_funcs
{
  int num;
  splay_tree GTY ((param1_is (char *), param2_is (struct alpha_links *)))
    links;
};

static GTY ((param1_is (char *), param2_is (struct alpha_links *)))
  splay_tree alpha_links_tree;
static GTY ((param1_is (tree), param2_is (struct alpha_funcs *)))
  splay_tree alpha_funcs_tree;

static GTY(()) int alpha_funcs_num;

#if TARGET_ABI_OPEN_VMS

/* Return the VMS argument type corresponding to MODE.  */

enum avms_arg_type
alpha_arg_type (enum machine_mode mode)
{
  switch (mode)
    {
    case SFmode:
      return TARGET_FLOAT_VAX ? FF : FS;
    case DFmode:
      return TARGET_FLOAT_VAX ? FD : FT;
    default:
      return I64;
    }
}

/* Return an rtx for an integer representing the VMS Argument Information
   register value.  */

rtx
alpha_arg_info_reg_val (CUMULATIVE_ARGS cum)
{
  unsigned HOST_WIDE_INT regval = cum.num_args;
  int i;

  for (i = 0; i < 6; i++)
    regval |= ((int) cum.atypes[i]) << (i * 3 + 8);

  return GEN_INT (regval);
}
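
/* Illustrative layout note (not in the original source): per the loop
   above, bits 0-7 of the AI register value hold the argument count, and
   each of the first six argument type codes occupies a 3-bit field
   starting at bit 8.  A two-argument call whose types encode to t0 and
   t1 therefore yields num_args | (t0 << 8) | (t1 << 11); the actual
   codes come from the avms_arg_type enumeration.  */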
\f
/* Register the need for a (fake) .linkage entry for calls to function NAME.
   IS_LOCAL is 1 if this is for a definition, 0 if this is for a real call.
   Return a SYMBOL_REF suited to the call instruction.  */

rtx
alpha_need_linkage (const char *name, int is_local)
{
  splay_tree_node node;
  struct alpha_links *al;
  const char *target;
  tree id;

  if (name[0] == '*')
    name++;

  if (is_local)
    {
      struct alpha_funcs *cfaf;

      if (!alpha_funcs_tree)
	alpha_funcs_tree = splay_tree_new_ggc
	  (splay_tree_compare_pointers,
	   ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
	   ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);

      cfaf = ggc_alloc_alpha_funcs ();

      cfaf->links = 0;
      cfaf->num = ++alpha_funcs_num;

      splay_tree_insert (alpha_funcs_tree,
			 (splay_tree_key) current_function_decl,
			 (splay_tree_value) cfaf);
    }

  if (alpha_links_tree)
    {
      /* Is this name already defined?  */

      node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
      if (node)
	{
	  al = (struct alpha_links *) node->value;
	  if (is_local)
	    {
	      /* Defined here but external assumed.  */
	      if (al->lkind == KIND_EXTERN)
		al->lkind = KIND_LOCAL;
	    }
	  else
	    {
	      /* Used here but unused assumed.  */
	      if (al->lkind == KIND_UNUSED)
		al->lkind = KIND_LOCAL;
	    }
	  return al->linkage;
	}
    }
  else
    alpha_links_tree = splay_tree_new_ggc
      ((splay_tree_compare_fn) strcmp,
       ggc_alloc_splay_tree_str_alpha_links_splay_tree_s,
       ggc_alloc_splay_tree_str_alpha_links_splay_tree_node_s);

  al = ggc_alloc_alpha_links ();
  name = ggc_strdup (name);

  /* Assume external if no definition.  */
  al->lkind = (is_local ? KIND_UNUSED : KIND_EXTERN);

  /* Ensure we have an IDENTIFIER so assemble_name can mark it used
     and find the ultimate alias target like assemble_name.  */
  id = get_identifier (name);
  target = NULL;
  while (IDENTIFIER_TRANSPARENT_ALIAS (id))
    {
      id = TREE_CHAIN (id);
      target = IDENTIFIER_POINTER (id);
    }

  al->target = target ? target : name;
  al->linkage = gen_rtx_SYMBOL_REF (Pmode, name);

  splay_tree_insert (alpha_links_tree, (splay_tree_key) name,
		     (splay_tree_value) al);

  return al->linkage;
}

/* Return a SYMBOL_REF representing the reference to the .linkage entry
   of function FUNC built for calls made from CFUNDECL.  LFLAG is 1 if
   this is the reference to the linkage pointer value, 0 if this is the
   reference to the function entry value.  RFLAG is 1 if this is a reduced
   reference (code address only), 0 if this is a full reference.  */

rtx
alpha_use_linkage (rtx func, tree cfundecl, int lflag, int rflag)
{
  splay_tree_node cfunnode;
  struct alpha_funcs *cfaf;
  struct alpha_links *al;
  const char *name = XSTR (func, 0);

  cfaf = (struct alpha_funcs *) 0;
  al = (struct alpha_links *) 0;

  cfunnode = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) cfundecl);
  cfaf = (struct alpha_funcs *) cfunnode->value;

  if (cfaf->links)
    {
      splay_tree_node lnode;

      /* Is this name already defined?  */

      lnode = splay_tree_lookup (cfaf->links, (splay_tree_key) name);
      if (lnode)
	al = (struct alpha_links *) lnode->value;
    }
  else
    cfaf->links = splay_tree_new_ggc
      ((splay_tree_compare_fn) strcmp,
       ggc_alloc_splay_tree_str_alpha_links_splay_tree_s,
       ggc_alloc_splay_tree_str_alpha_links_splay_tree_node_s);

  if (!al)
    {
      size_t name_len;
      size_t buflen;
      char *linksym;
      splay_tree_node node = 0;
      struct alpha_links *anl;

      if (name[0] == '*')
	name++;

      name_len = strlen (name);
      linksym = (char *) alloca (name_len + 50);

      al = ggc_alloc_alpha_links ();
      al->num = cfaf->num;
      al->target = NULL;

      node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
      if (node)
	{
	  anl = (struct alpha_links *) node->value;
	  al->lkind = anl->lkind;
	  name = anl->target;
	}

      sprintf (linksym, "$%d..%s..lk", cfaf->num, name);
      buflen = strlen (linksym);

      al->linkage = gen_rtx_SYMBOL_REF
	(Pmode, ggc_alloc_string (linksym, buflen + 1));

      splay_tree_insert (cfaf->links, (splay_tree_key) name,
			 (splay_tree_value) al);
    }

  if (rflag)
    al->rkind = KIND_CODEADDR;
  else
    al->rkind = KIND_LINKAGE;

  if (lflag)
    return gen_rtx_MEM (Pmode, plus_constant (al->linkage, 8));
  else
    return al->linkage;
}
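
/* For illustration (not in the original source): for the second function
   defined in a unit (cfaf->num == 2) referencing "foo", the code above
   creates the linkage symbol "$2..foo..lk"; with LFLAG set the returned
   MEM is 8 bytes past that symbol, i.e. the second quadword of the
   linkage pair written by alpha_write_one_linkage below.  */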

static int
alpha_write_one_linkage (splay_tree_node node, void *data)
{
  const char *const name = (const char *) node->key;
  struct alpha_links *link = (struct alpha_links *) node->value;
  FILE *stream = (FILE *) data;

  fprintf (stream, "$%d..%s..lk:\n", link->num, name);
  if (link->rkind == KIND_CODEADDR)
    {
      if (link->lkind == KIND_LOCAL)
	{
	  /* Local and used */
	  fprintf (stream, "\t.quad %s..en\n", name);
	}
      else
	{
	  /* External and used, request code address.  */
	  fprintf (stream, "\t.code_address %s\n", name);
	}
    }
  else
    {
      if (link->lkind == KIND_LOCAL)
	{
	  /* Local and used, build linkage pair.  */
	  fprintf (stream, "\t.quad %s..en\n", name);
	  fprintf (stream, "\t.quad %s\n", name);
	}
      else
	{
	  /* External and used, request linkage pair.  */
	  fprintf (stream, "\t.linkage %s\n", name);
	}
    }

  return 0;
}

static void
alpha_write_linkage (FILE *stream, const char *funname, tree fundecl)
{
  splay_tree_node node;
  struct alpha_funcs *func;

  fprintf (stream, "\t.link\n");
  fprintf (stream, "\t.align 3\n");
  in_section = NULL;

  node = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) fundecl);
  func = (struct alpha_funcs *) node->value;

  fputs ("\t.name ", stream);
  assemble_name (stream, funname);
  fputs ("..na\n", stream);
  ASM_OUTPUT_LABEL (stream, funname);
  fprintf (stream, "\t.pdesc ");
  assemble_name (stream, funname);
  fprintf (stream, "..en,%s\n",
	   alpha_procedure_type == PT_STACK ? "stack"
	   : alpha_procedure_type == PT_REGISTER ? "reg" : "null");

  if (func->links)
    {
      splay_tree_foreach (func->links, alpha_write_one_linkage, stream);
      /* splay_tree_delete (func->links); */
    }
}

/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.  */

static void
vms_asm_named_section (const char *name, unsigned int flags,
		       tree decl ATTRIBUTE_UNUSED)
{
  fputc ('\n', asm_out_file);
  fprintf (asm_out_file, ".section\t%s", name);

  if (flags & SECTION_DEBUG)
    fprintf (asm_out_file, ",NOWRT");

  fputc ('\n', asm_out_file);
}

/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.

   Differs from default_ctors_section_asm_out_constructor in that the
   width of the .ctors entry is always 64 bits, rather than the 32 bits
   used by a normal pointer.  */

static void
vms_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  switch_to_section (ctors_section);
  assemble_align (BITS_PER_WORD);
  assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}

static void
vms_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
{
  switch_to_section (dtors_section);
  assemble_align (BITS_PER_WORD);
  assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}
#else

rtx
alpha_need_linkage (const char *name ATTRIBUTE_UNUSED,
		    int is_local ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

rtx
alpha_use_linkage (rtx func ATTRIBUTE_UNUSED,
		   tree cfundecl ATTRIBUTE_UNUSED,
		   int lflag ATTRIBUTE_UNUSED,
		   int rflag ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

#endif /* TARGET_ABI_OPEN_VMS */
\f
#if TARGET_ABI_UNICOSMK

/* This evaluates to true if we do not know how to pass TYPE solely in
   registers.  This is the case for all arguments that do not fit in two
   registers.  */

static bool
unicosmk_must_pass_in_stack (enum machine_mode mode, const_tree type)
{
  if (type == NULL)
    return false;

  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;
  if (TREE_ADDRESSABLE (type))
    return true;

  return ALPHA_ARG_SIZE (mode, type, 0) > 2;
}

/* Define the offset between two registers, one to be eliminated, and the
   other its replacement, at the start of a routine.  */

int
unicosmk_initial_elimination_offset (int from, int to)
{
  int fixed_size;

  fixed_size = alpha_sa_size ();
  if (fixed_size != 0)
    fixed_size += 48;

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    return -fixed_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    return 0;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return (ALPHA_ROUND (crtl->outgoing_args_size)
	    + ALPHA_ROUND (get_frame_size ()));
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return (ALPHA_ROUND (fixed_size)
	    + ALPHA_ROUND (get_frame_size ()
			   + crtl->outgoing_args_size));
  else
    gcc_unreachable ();
}

/* Output the module name for .ident and .end directives.  We have to strip
   directories and make sure that the module name starts with a letter
   or '$'.  */

static void
unicosmk_output_module_name (FILE *file)
{
  const char *name = lbasename (main_input_filename);
  unsigned len = strlen (name);
  char *clean_name = alloca (len + 2);
  char *ptr = clean_name;

  /* CAM only accepts module names that start with a letter or '$'.  We
     prefix the module name with a '$' if necessary.  */

  if (!ISALPHA (*name))
    *ptr++ = '$';
  memcpy (ptr, name, len + 1);
  clean_symbol_name (clean_name);
  fputs (clean_name, file);
}

/* Output the definition of a common variable.  */

void
unicosmk_output_common (FILE *file, const char *name, int size, int align)
{
  tree name_tree;
  printf ("T3E__: common %s\n", name);

  in_section = NULL;
  fputs ("\t.endp\n\n\t.psect ", file);
  assemble_name (file, name);
  fprintf (file, ",%d,common\n", floor_log2 (align / BITS_PER_UNIT));
  fprintf (file, "\t.byte\t0:%d\n", size);

  /* Mark the symbol as defined in this module.  */
  name_tree = get_identifier (name);
  TREE_ASM_WRITTEN (name_tree) = 1;
}

#define SECTION_PUBLIC SECTION_MACH_DEP
#define SECTION_MAIN (SECTION_PUBLIC << 1)
static int current_section_align;

/* A get_unnamed_section callback for switching to the text section.  */

static void
unicosmk_output_text_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  static int count = 0;
  fprintf (asm_out_file, "\t.endp\n\n\t.psect\tgcc@text___%d,code\n", count++);
}

/* A get_unnamed_section callback for switching to the data section.  */

static void
unicosmk_output_data_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  static int count = 1;
  fprintf (asm_out_file, "\t.endp\n\n\t.psect\tgcc@data___%d,data\n", count++);
}

/* Implement TARGET_ASM_INIT_SECTIONS.

   The Cray assembler is really weird with respect to sections.  It has only
   named sections and you can't reopen a section once it has been closed.
   This means that we have to generate unique names whenever we want to
   reenter the text or the data section.  */

static void
unicosmk_init_sections (void)
{
  text_section = get_unnamed_section (SECTION_CODE,
				      unicosmk_output_text_section_asm_op,
				      NULL);
  data_section = get_unnamed_section (SECTION_WRITE,
				      unicosmk_output_data_section_asm_op,
				      NULL);
  readonly_data_section = data_section;
}

static unsigned int
unicosmk_section_type_flags (tree decl, const char *name,
			     int reloc ATTRIBUTE_UNUSED)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (!decl)
    return flags;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      current_section_align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
      if (align_functions_log > current_section_align)
	current_section_align = align_functions_log;

      if (! strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "main"))
	flags |= SECTION_MAIN;
    }
  else
    current_section_align = floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT);

  if (TREE_PUBLIC (decl))
    flags |= SECTION_PUBLIC;

  return flags;
}

10344/* Generate a section name for decl and associate it with the
10345 declaration. */
10346
ae46c4e0 10347static void
a5c24926 10348unicosmk_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
30102605
RH
10349{
10350 const char *name;
10351 int len;
10352
56daab84 10353 gcc_assert (decl);
30102605
RH
10354
10355 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
d055668e 10356 name = default_strip_name_encoding (name);
30102605
RH
10357 len = strlen (name);
10358
10359 if (TREE_CODE (decl) == FUNCTION_DECL)
10360 {
10361 char *string;
10362
f676971a
EC
10363 /* It is essential that we prefix the section name here because
10364 otherwise the section names generated for constructors and
30102605
RH
10365 destructors confuse collect2. */
10366
10367 string = alloca (len + 6);
10368 sprintf (string, "code@%s", name);
10369 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
10370 }
10371 else if (TREE_PUBLIC (decl))
10372 DECL_SECTION_NAME (decl) = build_string (len, name);
10373 else
10374 {
10375 char *string;
10376
10377 string = alloca (len + 6);
10378 sprintf (string, "data@%s", name);
10379 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
10380 }
10381}

/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.  */

static void
unicosmk_asm_named_section (const char *name, unsigned int flags,
			    tree decl ATTRIBUTE_UNUSED)
{
  const char *kind;

  /* Close the previous section.  */

  fputs ("\t.endp\n\n", asm_out_file);

  /* Find out what kind of section we are opening.  */

  if (flags & SECTION_MAIN)
    fputs ("\t.start\tmain\n", asm_out_file);

  if (flags & SECTION_CODE)
    kind = "code";
  else if (flags & SECTION_PUBLIC)
    kind = "common";
  else
    kind = "data";

  if (current_section_align != 0)
    fprintf (asm_out_file, "\t.psect\t%s,%d,%s\n", name,
	     current_section_align, kind);
  else
    fprintf (asm_out_file, "\t.psect\t%s,%s\n", name, kind);
}
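
/* For illustration (not in the original source), switching to the code
   section of a function "foo" with a recorded section alignment of 4
   emits:

	.endp

	.psect	code@foo,4,code  */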

static void
unicosmk_insert_attributes (tree decl, tree *attr_ptr ATTRIBUTE_UNUSED)
{
  if (DECL_P (decl)
      && (TREE_PUBLIC (decl) || TREE_CODE (decl) == FUNCTION_DECL))
    unicosmk_unique_section (decl, 0);
}

/* Output an alignment directive.  We have to use the macro 'gcc@code@align'
   in code sections because .align fills unused space with zeroes.  */

void
unicosmk_output_align (FILE *file, int align)
{
  if (inside_function)
    fprintf (file, "\tgcc@code@align\t%d\n", align);
  else
    fprintf (file, "\t.align\t%d\n", align);
}
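
/* For illustration (not in the original source): an alignment request of
   4 inside a function is emitted as "gcc@code@align 4" (see the macro
   definition emitted by unicosmk_file_start below), outside of one as a
   plain ".align 4".  */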

/* Add a case vector to the current function's list of deferred case
   vectors.  Case vectors have to be put into a separate section because CAM
   does not allow data definitions in code sections.  */

void
unicosmk_defer_case_vector (rtx lab, rtx vec)
{
  struct machine_function *machine = cfun->machine;

  vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
  machine->addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec,
					  machine->addr_list);
}

/* Output a case vector.  */

static void
unicosmk_output_addr_vec (FILE *file, rtx vec)
{
  rtx lab = XEXP (vec, 0);
  rtx body = XEXP (vec, 1);
  int vlen = XVECLEN (body, 0);
  int idx;

  (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (lab));

  for (idx = 0; idx < vlen; idx++)
    {
      ASM_OUTPUT_ADDR_VEC_ELT
	(file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
    }
}

/* Output current function's deferred case vectors.  */

static void
unicosmk_output_deferred_case_vectors (FILE *file)
{
  struct machine_function *machine = cfun->machine;
  rtx t;

  if (machine->addr_list == NULL_RTX)
    return;

  switch_to_section (data_section);
  for (t = machine->addr_list; t; t = XEXP (t, 1))
    unicosmk_output_addr_vec (file, XEXP (t, 0));
}
10484
a5c24926
RH
10485/* Generate the name of the SSIB section for the current function. */
10486
10487#define SSIB_PREFIX "__SSIB_"
10488#define SSIB_PREFIX_LEN 7
10489
10490static const char *
10491unicosmk_ssib_name (void)
10492{
f676971a 10493 /* This is ok since CAM won't be able to deal with names longer than that
a5c24926
RH
10494 anyway. */
10495
10496 static char name[256];
10497
10498 rtx x;
10499 const char *fnname;
10500 int len;
10501
10502 x = DECL_RTL (cfun->decl);
7d83f4f5 10503 gcc_assert (MEM_P (x));
a5c24926 10504 x = XEXP (x, 0);
56daab84 10505 gcc_assert (GET_CODE (x) == SYMBOL_REF);
a5c24926
RH
10506 fnname = XSTR (x, 0);
10507
10508 len = strlen (fnname);
10509 if (len + SSIB_PREFIX_LEN > 255)
10510 len = 255 - SSIB_PREFIX_LEN;
10511
10512 strcpy (name, SSIB_PREFIX);
10513 strncpy (name + SSIB_PREFIX_LEN, fnname, len);
10514 name[len + SSIB_PREFIX_LEN] = 0;
10515
10516 return name;
10517}
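
/* For illustration (not in the original source): for a function "foo"
   this returns "__SSIB_foo"; function names longer than 248 characters
   are truncated so that the result still fits in 255 characters.  */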
10518
f676971a
EC
10519/* Set up the dynamic subprogram information block (DSIB) and update the
10520 frame pointer register ($15) for subroutines which have a frame. If the
30102605
RH
10521 subroutine doesn't have a frame, simply increment $15. */
10522
10523static void
a5c24926 10524unicosmk_gen_dsib (unsigned long *imaskP)
30102605 10525{
c2ea1ac6 10526 if (alpha_procedure_type == PT_STACK)
30102605
RH
10527 {
10528 const char *ssib_name;
10529 rtx mem;
10530
10531 /* Allocate 64 bytes for the DSIB. */
10532
10533 FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
10534 GEN_INT (-64))));
10535 emit_insn (gen_blockage ());
10536
10537 /* Save the return address. */
10538
10539 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 56));
10540 set_mem_alias_set (mem, alpha_sr_alias_set);
10541 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
409f52d3 10542 (*imaskP) &= ~(1UL << REG_RA);
30102605
RH
10543
10544 /* Save the old frame pointer. */
10545
10546 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 48));
10547 set_mem_alias_set (mem, alpha_sr_alias_set);
10548 FRP (emit_move_insn (mem, hard_frame_pointer_rtx));
409f52d3 10549 (*imaskP) &= ~(1UL << HARD_FRAME_POINTER_REGNUM);
30102605
RH
10550
10551 emit_insn (gen_blockage ());
10552
10553 /* Store the SSIB pointer. */
10554
10555 ssib_name = ggc_strdup (unicosmk_ssib_name ());
10556 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 32));
10557 set_mem_alias_set (mem, alpha_sr_alias_set);
10558
10559 FRP (emit_move_insn (gen_rtx_REG (DImode, 5),
10560 gen_rtx_SYMBOL_REF (Pmode, ssib_name)));
10561 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 5)));
10562
10563 /* Save the CIW index. */
10564
10565 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 24));
10566 set_mem_alias_set (mem, alpha_sr_alias_set);
10567 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 25)));
10568
10569 emit_insn (gen_blockage ());
10570
10571 /* Set the new frame pointer. */
30102605
RH
10572 FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
10573 stack_pointer_rtx, GEN_INT (64))));
30102605
RH
10574 }
10575 else
10576 {
10577 /* Increment the frame pointer register to indicate that we do not
10578 have a frame. */
15cb981a
RH
10579 emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
10580 hard_frame_pointer_rtx, const1_rtx));
30102605
RH
10581 }
10582}
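
/* Illustrative DSIB layout created by the code above, with offsets
   relative to the decremented stack pointer (slots not written here are
   presumably reserved for the run-time system; this sketch is not part
   of the original source):

	$sp + 56	return address ($26)
	$sp + 48	old frame pointer ($15)
	$sp + 32	pointer to the SSIB
	$sp + 24	CIW index ($25)

   The new frame pointer $15 = $sp + 64 equals the stack pointer value
   on entry, i.e. it points just above the DSIB.  */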

/* Output the static subroutine information block for the current
   function.  */

static void
unicosmk_output_ssib (FILE *file, const char *fnname)
{
  int len;
  int i;
  rtx x;
  rtx ciw;
  struct machine_function *machine = cfun->machine;

  in_section = NULL;
  fprintf (file, "\t.endp\n\n\t.psect\t%s%s,data\n", user_label_prefix,
	   unicosmk_ssib_name ());

  /* Some required stuff and the function name length.  */

  len = strlen (fnname);
  fprintf (file, "\t.quad\t^X20008%2.2X28\n", len);

  /* Saved registers
     ??? We don't do that yet.  */

  fputs ("\t.quad\t0\n", file);

  /* Function address.  */

  fputs ("\t.quad\t", file);
  assemble_name (file, fnname);
  putc ('\n', file);

  fputs ("\t.quad\t0\n", file);
  fputs ("\t.quad\t0\n", file);

  /* Function name.
     ??? We do it the same way Cray CC does it but this could be
     simplified.  */

  for (i = 0; i < len; i++)
    fprintf (file, "\t.byte\t%d\n", (int) fnname[i]);
  if ((len % 8) == 0)
    fputs ("\t.quad\t0\n", file);
  else
    fprintf (file, "\t.bits\t%d : 0\n", (8 - (len % 8)) * 8);

  /* All call information words used in the function.  */

  for (x = machine->first_ciw; x; x = XEXP (x, 1))
    {
      ciw = XEXP (x, 0);
#if HOST_BITS_PER_WIDE_INT == 32
      fprintf (file, "\t.quad\t" HOST_WIDE_INT_PRINT_DOUBLE_HEX "\n",
	       CONST_DOUBLE_HIGH (ciw), CONST_DOUBLE_LOW (ciw));
#else
      fprintf (file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n", INTVAL (ciw));
#endif
    }
}
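
/* For illustration (not in the original source, and assuming an empty
   user_label_prefix): the SSIB emitted for a function "foo" (name
   length 3) starts out as

	.psect	__SSIB_foo,data
	.quad	^X200080328
	.quad	0
	.quad	foo
	.quad	0
	.quad	0
	.byte	102
	.byte	111
	.byte	111
	.bits	40 : 0

   i.e. a header quad encoding the name length, a placeholder for the
   saved registers, the function address, two reserved quads, and the
   function name padded to a quad word, followed by one .quad per call
   information word.  */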

/* Add a call information word (CIW) to the list of the current function's
   CIWs and return its index.

   X is a CONST_INT or CONST_DOUBLE representing the CIW.  */

rtx
unicosmk_add_call_info_word (rtx x)
{
  rtx node;
  struct machine_function *machine = cfun->machine;

  node = gen_rtx_EXPR_LIST (VOIDmode, x, NULL_RTX);
  if (machine->first_ciw == NULL_RTX)
    machine->first_ciw = node;
  else
    XEXP (machine->last_ciw, 1) = node;

  machine->last_ciw = node;
  ++machine->ciw_count;

  return GEN_INT (machine->ciw_count
		  + strlen (current_function_name ())/8 + 5);
}
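
/* For illustration (not in the original source): the returned value is
   the quad-word index of the CIW within the SSIB, which consists of five
   fixed quads plus the padded function name before the CIW list.  For
   the first CIW of a function "foo" this yields 1 + 3/8 + 5 = 6.  */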

/* The Cray assembler doesn't accept extern declarations for symbols which
   are defined in the same file.  We have to keep track of all global
   symbols which are referenced and/or defined in a source file and output
   extern declarations for those which are referenced but not defined at
   the end of file.  */

/* List of identifiers for which an extern declaration might have to be
   emitted.  */
/* FIXME: needs to use GC, so it can be saved and restored for PCH.  */

struct unicosmk_extern_list
{
  struct unicosmk_extern_list *next;
  const char *name;
};

static struct unicosmk_extern_list *unicosmk_extern_head = 0;

/* Output extern declarations which are required for every asm file.  */

static void
unicosmk_output_default_externs (FILE *file)
{
  static const char *const externs[] =
    { "__T3E_MISMATCH" };

  int i;
  int n;

  n = ARRAY_SIZE (externs);

  for (i = 0; i < n; i++)
    fprintf (file, "\t.extern\t%s\n", externs[i]);
}

/* Output extern declarations for global symbols which have been
   referenced but not defined.  */

static void
unicosmk_output_externs (FILE *file)
{
  struct unicosmk_extern_list *p;
  const char *real_name;
  int len;
  tree name_tree;

  len = strlen (user_label_prefix);
  for (p = unicosmk_extern_head; p != 0; p = p->next)
    {
      /* We have to strip the encoding and possibly remove user_label_prefix
	 from the identifier in order to handle -fleading-underscore and
	 explicit asm names correctly (cf. gcc.dg/asm-names-1.c).  */
      real_name = default_strip_name_encoding (p->name);
      if (len && p->name[0] == '*'
	  && !memcmp (real_name, user_label_prefix, len))
	real_name += len;

      name_tree = get_identifier (real_name);
      if (! TREE_ASM_WRITTEN (name_tree))
	{
	  TREE_ASM_WRITTEN (name_tree) = 1;
	  fputs ("\t.extern\t", file);
	  assemble_name (file, p->name);
	  putc ('\n', file);
	}
    }
}

/* Record an extern.  */

void
unicosmk_add_extern (const char *name)
{
  struct unicosmk_extern_list *p;

  p = (struct unicosmk_extern_list *)
       xmalloc (sizeof (struct unicosmk_extern_list));
  p->next = unicosmk_extern_head;
  p->name = name;
  unicosmk_extern_head = p;
}

/* The Cray assembler generates incorrect code if identifiers which
   conflict with register names are used as instruction operands.  We have
   to replace such identifiers with DEX expressions.  */

/* Structure to collect identifiers which have been replaced by DEX
   expressions.  */
/* FIXME: needs to use GC, so it can be saved and restored for PCH.  */

struct unicosmk_dex {
  struct unicosmk_dex *next;
  const char *name;
};

/* List of identifiers which have been replaced by DEX expressions.  The DEX
   number is determined by the position in the list.  */

static struct unicosmk_dex *unicosmk_dex_list = NULL;

/* The number of elements in the DEX list.  */

static int unicosmk_dex_count = 0;

/* Check if NAME must be replaced by a DEX expression.  */

static int
unicosmk_special_name (const char *name)
{
  if (name[0] == '*')
    ++name;

  if (name[0] == '$')
    ++name;

  if (name[0] != 'r' && name[0] != 'f' && name[0] != 'R' && name[0] != 'F')
    return 0;

  switch (name[1])
    {
    case '1': case '2':
      return (name[2] == '\0' || (ISDIGIT (name[2]) && name[3] == '\0'));

    case '3':
      return (name[2] == '\0'
	      || ((name[2] == '0' || name[2] == '1') && name[3] == '\0'));

    default:
      return (ISDIGIT (name[1]) && name[2] == '\0');
    }
}
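
/* For illustration (not in the original source): this matches exactly
   the register names r0-r31 and f0-f31, with an upper-case first letter
   also accepted and an optional '*' and/or '$' prefix stripped first.
   Thus "f10", "$r31" and "R5" are special, while "r32" and "foo" are
   not.  */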

/* Return the DEX number if X must be replaced by a DEX expression and 0
   otherwise.  */

static int
unicosmk_need_dex (rtx x)
{
  struct unicosmk_dex *dex;
  const char *name;
  int i;

  if (GET_CODE (x) != SYMBOL_REF)
    return 0;

  name = XSTR (x, 0);
  if (! unicosmk_special_name (name))
    return 0;

  i = unicosmk_dex_count;
  for (dex = unicosmk_dex_list; dex; dex = dex->next)
    {
      if (! strcmp (name, dex->name))
	return i;
      --i;
    }

  dex = (struct unicosmk_dex *) xmalloc (sizeof (struct unicosmk_dex));
  dex->name = name;
  dex->next = unicosmk_dex_list;
  unicosmk_dex_list = dex;

  ++unicosmk_dex_count;
  return unicosmk_dex_count;
}

/* Output the DEX definitions for this file.  */

static void
unicosmk_output_dex (FILE *file)
{
  struct unicosmk_dex *dex;
  int i;

  if (unicosmk_dex_list == NULL)
    return;

  fprintf (file, "\t.dexstart\n");

  i = unicosmk_dex_count;
  for (dex = unicosmk_dex_list; dex; dex = dex->next)
    {
      fprintf (file, "\tDEX (%d) = ", i);
      assemble_name (file, dex->name);
      putc ('\n', file);
      --i;
    }

  fprintf (file, "\t.dexend\n");
}
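
/* For illustration (not in the original source): if "r3" and then "f10"
   were recorded by unicosmk_need_dex, the emitted block is

	.dexstart
	DEX (2) = f10
	DEX (1) = r3
	.dexend

   so each symbol's DEX number matches its position in the list, counted
   from the oldest entry, which is exactly what unicosmk_need_dex
   returned for it.  */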

/* Output text to appear at the beginning of an assembler file.  */

static void
unicosmk_file_start (void)
{
  int i;

  fputs ("\t.ident\t", asm_out_file);
  unicosmk_output_module_name (asm_out_file);
  fputs ("\n\n", asm_out_file);

  /* The Unicos/Mk assembler (CAM) uses different register names: rN for
     the integer register N and fN for the floating-point register N.
     Instead of trying to use these in alpha.md, we define the symbols $N
     and $fN via micro definitions to refer to the appropriate
     register.  */

  for (i = 0; i < 32; ++i)
    fprintf (asm_out_file, "$%d <- r%d\n", i, i);

  for (i = 0; i < 32; ++i)
    fprintf (asm_out_file, "$f%d <- f%d\n", i, i);

  putc ('\n', asm_out_file);

  /* The .align directive fills unused space with zeroes, which does not
     work in code sections.  We define the macro 'gcc@code@align' which uses
     nops instead.  Note that it assumes that code sections always have the
     biggest possible alignment since . refers to the current offset from
     the beginning of the section.  */

  fputs ("\t.macro gcc@code@align n\n", asm_out_file);
  fputs ("gcc@n@bytes = 1 << n\n", asm_out_file);
  fputs ("gcc@here = . % gcc@n@bytes\n", asm_out_file);
  fputs ("\t.if ne, gcc@here, 0\n", asm_out_file);
  fputs ("\t.repeat (gcc@n@bytes - gcc@here) / 4\n", asm_out_file);
  fputs ("\tbis r31,r31,r31\n", asm_out_file);
  fputs ("\t.endr\n", asm_out_file);
  fputs ("\t.endif\n", asm_out_file);
  fputs ("\t.endm gcc@code@align\n\n", asm_out_file);

  /* Output extern declarations which should always be visible.  */
  unicosmk_output_default_externs (asm_out_file);

  /* Open a dummy section.  We always need to be inside a section for the
     section-switching code to work correctly.
     ??? This should be a module id or something like that.  I still have to
     figure out what the rules for those are.  */
  fputs ("\n\t.psect\t$SG00000,data\n", asm_out_file);
}
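
/* For illustration (not in the original source): with the macro above in
   place, "gcc@code@align 4" pads the current code section to a 16-byte
   boundary by repeating the nop "bis r31,r31,r31" once per remaining
   4-byte instruction slot, instead of the zero bytes that a plain
   .align would emit.  */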

/* Output text to appear at the end of an assembler file.  This includes all
   pending extern declarations and DEX expressions.  */

static void
unicosmk_file_end (void)
{
  fputs ("\t.endp\n\n", asm_out_file);

  /* Output all pending externs.  */

  unicosmk_output_externs (asm_out_file);

  /* Output dex definitions used for functions whose names conflict with
     register names.  */

  unicosmk_output_dex (asm_out_file);

  fputs ("\t.end\t", asm_out_file);
  unicosmk_output_module_name (asm_out_file);
  putc ('\n', asm_out_file);
}

#else

static void
unicosmk_output_deferred_case_vectors (FILE *file ATTRIBUTE_UNUSED)
{}

static void
unicosmk_gen_dsib (unsigned long *imaskP ATTRIBUTE_UNUSED)
{}

static void
unicosmk_output_ssib (FILE *file ATTRIBUTE_UNUSED,
		      const char *fnname ATTRIBUTE_UNUSED)
{}

rtx
unicosmk_add_call_info_word (rtx x ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

static int
unicosmk_need_dex (rtx x ATTRIBUTE_UNUSED)
{
  return 0;
}

#endif /* TARGET_ABI_UNICOSMK */

static void
alpha_init_libfuncs (void)
{
  if (TARGET_ABI_UNICOSMK)
    {
      /* Prevent gcc from generating calls to __divsi3.  */
      set_optab_libfunc (sdiv_optab, SImode, 0);
      set_optab_libfunc (udiv_optab, SImode, 0);

      /* Use the functions provided by the system library
	 for DImode integer division.  */
      set_optab_libfunc (sdiv_optab, DImode, "$sldiv");
      set_optab_libfunc (udiv_optab, DImode, "$uldiv");
    }
  else if (TARGET_ABI_OPEN_VMS)
    {
      /* Use the VMS runtime library functions for division and
	 remainder.  */
      set_optab_libfunc (sdiv_optab, SImode, "OTS$DIV_I");
      set_optab_libfunc (sdiv_optab, DImode, "OTS$DIV_L");
      set_optab_libfunc (udiv_optab, SImode, "OTS$DIV_UI");
      set_optab_libfunc (udiv_optab, DImode, "OTS$DIV_UL");
      set_optab_libfunc (smod_optab, SImode, "OTS$REM_I");
      set_optab_libfunc (smod_optab, DImode, "OTS$REM_L");
      set_optab_libfunc (umod_optab, SImode, "OTS$REM_UI");
      set_optab_libfunc (umod_optab, DImode, "OTS$REM_UL");
      abort_libfunc = init_one_libfunc ("decc$abort");
      memcmp_libfunc = init_one_libfunc ("decc$memcmp");
#ifdef MEM_LIBFUNCS_INIT
      MEM_LIBFUNCS_INIT;
#endif
    }
}
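
/* For illustration (not in the original source): with these overrides, a
   64-bit signed division that cannot be expanded inline becomes a call
   to $sldiv on Unicos/Mk and to OTS$DIV_L on VMS, instead of the default
   libgcc routine __divdi3.  */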

/* On the Alpha, we use this to disable the floating-point registers
   when they don't exist.  */

static void
alpha_conditional_register_usage (void)
{
  int i;
  if (! TARGET_FPREGS)
    for (i = 32; i < 63; i++)
      fixed_regs[i] = call_used_regs[i] = 1;
}
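
/* Explanatory note (not in the original source): hard registers 32-62
   are the floating-point registers $f0-$f30.  Register 63 ($f31) always
   reads as zero and is expected to be fixed unconditionally, which is
   why the loop above stops short of it.  */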

/* Initialize the GCC target structure.  */
#if TARGET_ABI_OPEN_VMS
# undef TARGET_ATTRIBUTE_TABLE
# define TARGET_ATTRIBUTE_TABLE vms_attribute_table
# undef TARGET_CAN_ELIMINATE
# define TARGET_CAN_ELIMINATE alpha_vms_can_eliminate
#endif

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P alpha_in_small_data_p

#if TARGET_ABI_UNICOSMK
# undef TARGET_INSERT_ATTRIBUTES
# define TARGET_INSERT_ATTRIBUTES unicosmk_insert_attributes
# undef TARGET_SECTION_TYPE_FLAGS
# define TARGET_SECTION_TYPE_FLAGS unicosmk_section_type_flags
# undef TARGET_ASM_UNIQUE_SECTION
# define TARGET_ASM_UNIQUE_SECTION unicosmk_unique_section
# undef TARGET_ASM_FUNCTION_RODATA_SECTION
# define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
# undef TARGET_ASM_GLOBALIZE_LABEL
# define TARGET_ASM_GLOBALIZE_LABEL hook_void_FILEptr_constcharptr
# undef TARGET_MUST_PASS_IN_STACK
# define TARGET_MUST_PASS_IN_STACK unicosmk_must_pass_in_stack
#endif

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"

/* Default unaligned ops are provided for ELF systems.  To get unaligned
   data for non-ELF systems, we have to turn off auto alignment.  */
#if !defined (OBJECT_FORMAT_ELF) || TARGET_ABI_OPEN_VMS
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.align 0\n\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.align 0\n\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.align 0\n\t.quad\t"
#endif

#ifdef OBJECT_FORMAT_ELF
#undef TARGET_ASM_RELOC_RW_MASK
#define TARGET_ASM_RELOC_RW_MASK alpha_elf_reloc_rw_mask
#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION alpha_elf_select_rtx_section
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS alpha_elf_section_type_flags
#endif

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE alpha_output_function_end_prologue

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS alpha_init_libfuncs

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS alpha_legitimize_address

#if TARGET_ABI_UNICOSMK
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START unicosmk_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END unicosmk_file_end
#else
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START alpha_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#endif

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST alpha_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE alpha_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
  alpha_multipass_dfa_lookahead

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL alpha_builtin_decl
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS alpha_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN alpha_expand_builtin
#undef TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN alpha_fold_builtin

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL alpha_function_ok_for_sibcall
#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P alpha_cannot_copy_insn_p
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM alpha_cannot_force_const_mem

#if TARGET_ABI_OSF
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK alpha_output_mi_thunk_osf
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
#undef TARGET_STDARG_OPTIMIZE_HOOK
#define TARGET_STDARG_OPTIMIZE_HOOK alpha_stdarg_optimize_hook
#endif

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS alpha_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG alpha_reorg

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY alpha_return_in_memory
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE alpha_pass_by_reference
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS alpha_setup_incoming_varargs
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG alpha_split_complex_arg
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR alpha_gimplify_va_arg
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES alpha_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG alpha_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE alpha_function_arg_advance
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT alpha_trampoline_init

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD alpha_secondary_reload

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P alpha_scalar_mode_supported_p
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P alpha_vector_mode_supported_p

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST alpha_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START alpha_va_start

/* The Alpha architecture does not require sequential consistency.  See
   http://www.cs.umd.edu/~pugh/java/memoryModel/AlphaReordering.html
   for an example of how it can be violated in practice.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT | TARGET_CPU_DEFAULT | TARGET_DEFAULT_EXPLICIT_RELOCS)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION alpha_handle_option

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE alpha_option_override

#undef TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE alpha_option_optimization_table

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE alpha_mangle_type
#endif

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P alpha_legitimate_address_p

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE alpha_conditional_register_usage

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-alpha.h"